repo stringlengths 6 65 | file_url stringlengths 81 311 | file_path stringlengths 6 227 | content stringlengths 0 32.8k | language stringclasses 1 value | license stringclasses 7 values | commit_sha stringlengths 40 40 | retrieved_at stringdate 2026-01-04 15:31:58 2026-01-04 20:25:31 | truncated bool 2 classes |
|---|---|---|---|---|---|---|---|---|
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/util/draw_thread.rs | cli/util/draw_thread.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::io::IsTerminal;
use std::sync::Arc;
use std::time::Duration;
use console_static_text::ConsoleStaticText;
use deno_core::parking_lot::Mutex;
use deno_core::unsync::spawn_blocking;
use deno_runtime::ops::tty::ConsoleSize;
use once_cell::sync::Lazy;
use super::console::new_console_static_text;
use crate::util::console::console_size;
/// Renders text that will be displayed stacked in a
/// static place on the console.
pub trait DrawThreadRenderer: Send + Sync + std::fmt::Debug {
  /// Produces the text to display for the given console size.
  /// Called repeatedly from the draw thread on each repaint tick.
  fn render(&self, data: &ConsoleSize) -> String;
}

/// Draw thread guard. Keep this alive for the duration
/// that you wish the entry to be drawn for. Once it is
/// dropped, then the entry will be removed from the draw
/// thread.
#[derive(Debug)]
pub struct DrawThreadGuard(u16); // id of the entry this guard owns

impl Drop for DrawThreadGuard {
  fn drop(&mut self) {
    // unregister the entry (and stop the draw thread if it was the last one)
    DrawThread::finish_entry(self.0)
  }
}

/// An entry registered with the draw thread: a unique id paired with
/// the renderer that produces its text.
#[derive(Debug, Clone)]
struct InternalEntry {
  id: u16,
  renderer: Arc<dyn DrawThreadRenderer>,
}

#[derive(Debug)]
struct InternalState {
  // this ensures only one actual draw thread is running
  drawer_id: usize,
  // number of outstanding hide() calls; rendering is suppressed while > 0
  hide_count: usize,
  has_draw_thread: bool,
  // id handed to the next added entry (wraps around at u16::MAX)
  next_entry_id: u16,
  entries: Vec<InternalEntry>,
  static_text: ConsoleStaticText,
}

impl InternalState {
  /// A spawned draw thread should exit when it has been superseded by a
  /// newer drawer (drawer_id bumped) or there is nothing left to render.
  pub fn should_exit_draw_thread(&self, drawer_id: usize) -> bool {
    self.drawer_id != drawer_id || self.entries.is_empty()
  }
}

// Global state shared by all handles. stderr is process-global,
// so this state is too.
static INTERNAL_STATE: Lazy<Arc<Mutex<InternalState>>> = Lazy::new(|| {
  Arc::new(Mutex::new(InternalState {
    drawer_id: 0,
    hide_count: 0,
    has_draw_thread: false,
    entries: Vec::new(),
    next_entry_id: 0,
    static_text: new_console_static_text(),
  }))
});

// Whether stderr is a terminal that reports a non-zero size.
// Computed once and cached for the life of the process.
static IS_TTY_WITH_CONSOLE_SIZE: Lazy<bool> = Lazy::new(|| {
  std::io::stderr().is_terminal()
    && console_size()
      .map(|s| s.cols > 0 && s.rows > 0)
      .unwrap_or(false)
});
/// The draw thread is responsible for rendering multiple active
/// `DrawThreadRenderer`s to stderr. It is global because the
/// concept of stderr in the process is also a global concept.
#[derive(Clone, Debug)]
pub struct DrawThread;

impl DrawThread {
  /// Is using a draw thread supported.
  pub fn is_supported() -> bool {
    // don't put the log level in the lazy because the
    // log level may change as the application runs
    log::log_enabled!(log::Level::Info) && *IS_TTY_WITH_CONSOLE_SIZE
  }

  /// Adds a renderer to the draw thread.
  ///
  /// The renderer keeps being drawn until the returned guard is dropped.
  pub fn add_entry(renderer: Arc<dyn DrawThreadRenderer>) -> DrawThreadGuard {
    let internal_state = &*INTERNAL_STATE;
    let mut internal_state = internal_state.lock();
    let id = internal_state.next_entry_id;
    internal_state.entries.push(InternalEntry { id, renderer });
    // wrap the id counter instead of overflowing
    // NOTE(review): after 65536 additions an id could collide with a
    // still-registered entry; presumably entries never live that long
    if internal_state.next_entry_id == u16::MAX {
      internal_state.next_entry_id = 0;
    } else {
      internal_state.next_entry_id += 1;
    }
    Self::maybe_start_draw_thread(&mut internal_state);
    DrawThreadGuard(id)
  }

  /// Hides the draw thread.
  ///
  /// Calls are counted; rendering stays suppressed until a matching
  /// number of `show()` calls is made.
  pub fn hide() {
    let internal_state = &*INTERNAL_STATE;
    let mut internal_state = internal_state.lock();
    let is_showing =
      internal_state.has_draw_thread && internal_state.hide_count == 0;
    internal_state.hide_count += 1;
    if is_showing {
      // Clear it on the current thread in order to stop it from
      // showing immediately. Also, don't stop the draw thread here
      // because the calling code might be called from outside a
      // tokio runtime and when it goes to start the thread on the
      // thread pool it might panic.
      internal_state.static_text.eprint_clear();
    }
  }

  /// Shows the draw thread if it was previously hidden.
  pub fn show() {
    let internal_state = &*INTERNAL_STATE;
    let mut internal_state = internal_state.lock();
    if internal_state.hide_count > 0 {
      internal_state.hide_count -= 1;
    }
  }

  /// Removes the entry with the given id. Called from
  /// `DrawThreadGuard::drop`.
  fn finish_entry(entry_id: u16) {
    let internal_state = &*INTERNAL_STATE;
    let mut internal_state = internal_state.lock();
    if let Some(index) =
      internal_state.entries.iter().position(|e| e.id == entry_id)
    {
      internal_state.entries.remove(index);
      // stop drawing once the last entry is gone
      if internal_state.entries.is_empty() {
        Self::clear_and_stop_draw_thread(&mut internal_state);
      }
    }
  }

  /// Clears any rendered text and signals the running draw thread
  /// (if any) to exit by bumping the drawer id.
  fn clear_and_stop_draw_thread(internal_state: &mut InternalState) {
    if internal_state.has_draw_thread {
      internal_state.static_text.eprint_clear();
      // bump the drawer id to exit the draw thread
      internal_state.drawer_id += 1;
      internal_state.has_draw_thread = false;
    }
  }

  /// Spawns the background render loop if one is not already running,
  /// there is something to draw, and drawing is supported.
  fn maybe_start_draw_thread(internal_state: &mut InternalState) {
    if internal_state.has_draw_thread
      || internal_state.entries.is_empty()
      || !DrawThread::is_supported()
    {
      return;
    }
    // a new drawer id invalidates any previously spawned draw thread
    internal_state.drawer_id += 1;
    internal_state.has_draw_thread = true;
    let drawer_id = internal_state.drawer_id;
    spawn_blocking(move || {
      let mut previous_size = console_size();
      loop {
        let mut delay_ms = 120;
        {
          // Get the entries to render.
          let maybe_entries = {
            let internal_state = &*INTERNAL_STATE;
            let internal_state = internal_state.lock();
            if internal_state.should_exit_draw_thread(drawer_id) {
              break;
            }
            let should_display = internal_state.hide_count == 0;
            should_display.then(|| internal_state.entries.clone())
          };
          if let Some(entries) = maybe_entries {
            // this should always be set, but have the code handle
            // it not being for some reason
            let size = console_size();
            // Call into the renderers outside the lock to prevent a potential
            // deadlock between our internal state lock and the renderers
            // internal state lock.
            //
            // Example deadlock if this code didn't do this:
            // 1. Other thread - Renderer - acquired internal lock to update state
            // 2. This thread - Acquired internal state
            // 3. Other thread - Renderer - drops DrawThreadGuard
            // 4. This thread - Calls renderer.render within internal lock,
            //    which attempts to acquire the other thread's Render's internal
            //    lock causing a deadlock
            let mut text = String::new();
            if size != previous_size {
              // means the user is actively resizing the console...
              // wait a little bit until they stop resizing
              previous_size = size;
              delay_ms = 200;
            } else if let Some(size) = size {
              // join each entry's non-empty output with newlines
              let mut should_new_line_next = false;
              for entry in entries {
                let new_text = entry.renderer.render(&size);
                if should_new_line_next && !new_text.is_empty() {
                  text.push('\n');
                }
                should_new_line_next = !new_text.is_empty();
                text.push_str(&new_text);
              }
              // now reacquire the lock, ensure we should still be drawing, then
              // output the text
              {
                let internal_state = &*INTERNAL_STATE;
                let mut internal_state = internal_state.lock();
                if internal_state.should_exit_draw_thread(drawer_id) {
                  break;
                }
                internal_state.static_text.eprint_with_size(
                  &text,
                  console_static_text::ConsoleSize {
                    cols: Some(size.cols as u16),
                    rows: Some(size.rows as u16),
                  },
                );
              }
            }
          }
        }
        std::thread::sleep(Duration::from_millis(delay_ms));
      }
    });
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/util/text_encoding.rs | cli/util/text_encoding.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::collections::HashMap;
use std::ops::Range;
use base64::Engine;
use base64::prelude::BASE64_STANDARD;
use deno_core::ModuleSourceCode;
use deno_core::error::AnyError;
use deno_error::JsErrorBox;
use text_lines::LineAndColumnIndex;
use text_size::TextSize;
/// The prefix that marks an inline base64-encoded source map comment.
static SOURCE_MAP_PREFIX: &[u8] =
  b"//# sourceMappingURL=data:application/json;base64,";

/// Extracts and base64-decodes the inline source map from `code`, if the
/// last non-blank line of `code` is a source map comment.
pub fn source_map_from_code(code: &[u8]) -> Option<Vec<u8>> {
  let range = find_source_map_range(code)?;
  // drop the prefix, keeping only the base64 payload
  let encoded = &code[range][SOURCE_MAP_PREFIX.len()..];
  BASE64_STANDARD.decode(encoded).ok()
}
/// Truncate the source code before the source map.
///
/// If no trailing source map comment is found, the code is returned
/// unchanged (same enum variant, no copy).
pub fn code_without_source_map(code: ModuleSourceCode) -> ModuleSourceCode {
  use deno_core::ModuleCodeBytes;
  match code {
    ModuleSourceCode::String(mut code) => {
      if let Some(range) = find_source_map_range(code.as_bytes()) {
        // truncate in place; no reallocation needed
        code.truncate(range.start);
      }
      ModuleSourceCode::String(code)
    }
    ModuleSourceCode::Bytes(code) => {
      if let Some(range) = find_source_map_range(code.as_bytes()) {
        let source_map_index = range.start;
        ModuleSourceCode::Bytes(match code {
          ModuleCodeBytes::Static(bytes) => {
            // static bytes can simply be re-sliced without copying
            ModuleCodeBytes::Static(&bytes[..source_map_index])
          }
          ModuleCodeBytes::Boxed(bytes) => {
            // todo(dsherret): should be possible without cloning
            ModuleCodeBytes::Boxed(
              bytes[..source_map_index].to_vec().into_boxed_slice(),
            )
          }
          ModuleCodeBytes::Arc(bytes) => ModuleCodeBytes::Boxed(
            bytes[..source_map_index].to_vec().into_boxed_slice(),
          ),
        })
      } else {
        ModuleSourceCode::Bytes(code)
      }
    }
  }
}
/// Finds the byte range of the trailing source map comment: the last
/// non-blank line of `code`, when that line starts with
/// `SOURCE_MAP_PREFIX`. Returns `None` otherwise.
fn find_source_map_range(code: &[u8]) -> Option<Range<usize>> {
  // Scans backwards for the last line containing non-whitespace and
  // returns its range with trailing spaces/tabs trimmed. Returns `None`
  // when the input has no non-whitespace at all, or when no newline
  // precedes that line (i.e. it is the very first line of the file).
  fn last_non_blank_line_range(code: &[u8]) -> Option<Range<usize>> {
    let mut hit_non_whitespace = false;
    let mut range_end = code.len();
    for i in (0..code.len()).rev() {
      match code[i] {
        b' ' | b'\t' => {
          // trim trailing blanks only until real content has been seen
          if !hit_non_whitespace {
            range_end = i;
          }
        }
        b'\n' | b'\r' => {
          if hit_non_whitespace {
            // line start is just past this newline byte
            return Some(i + 1..range_end);
          }
          // a blank line; the candidate line now ends here
          range_end = i;
        }
        _ => {
          hit_non_whitespace = true;
        }
      }
    }
    None
  }

  let range = last_non_blank_line_range(code)?;
  if code[range.start..range.end].starts_with(SOURCE_MAP_PREFIX) {
    Some(range)
  } else {
    None
  }
}
/// The UTF-8 byte range (relative to the start of its line) of a single
/// non-ASCII character.
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct Utf16Char {
  pub start: TextSize,
  pub end: TextSize,
}

impl Utf16Char {
  /// Length of the character in UTF-8 bytes.
  pub fn len(&self) -> TextSize {
    self.end - self.start
  }

  /// Length of the character in UTF-16 code units: 2 for 4-byte UTF-8
  /// characters (encoded as surrogate pairs in UTF-16), otherwise 1.
  pub fn len_utf16(&self) -> usize {
    if self.len() == TextSize::from(4) {
      2
    } else {
      1
    }
  }
}

/// Maps between UTF-8 and UTF-16 offsets/positions within a text.
#[derive(Debug, Clone, Default, Eq, PartialEq)]
pub struct Utf16Map {
  // UTF-8 offset of each line start, plus a final entry for the text end
  utf8_offsets: Vec<TextSize>,
  // for each line index, the non-ASCII characters on that line
  // (all-ASCII lines have no entry)
  utf16_lines: HashMap<u32, Vec<Utf16Char>>,
  // UTF-16 offset of each line start, plus a final entry for the text end
  utf16_offsets: Vec<TextSize>,
}
impl Utf16Map {
  /// Builds the map by walking `text` once, recording per-line UTF-8 and
  /// UTF-16 start offsets and, per line, the ranges of non-ASCII
  /// characters (the only characters where the two encodings diverge).
  pub fn new(text: &str) -> Utf16Map {
    let mut utf16_lines = HashMap::new();
    let mut utf16_chars = Vec::new();
    let mut utf8_offsets = vec![0.into()];
    let mut utf16_offsets = vec![0.into()];
    let mut curr_row = 0.into();
    let mut curr_col = 0.into();
    let mut curr_offset_u16 = 0.into();
    let mut line = 0;
    for c in text.chars() {
      let c_len = TextSize::of(c);
      curr_row += c_len;
      curr_offset_u16 += TextSize::from(c.len_utf16() as u32);
      if c == '\n' {
        utf8_offsets.push(curr_row);
        utf16_offsets.push(curr_offset_u16);
        // only record lines that actually contained non-ASCII chars
        if !utf16_chars.is_empty() {
          utf16_lines.insert(line, utf16_chars);
          utf16_chars = Vec::new();
        }
        curr_col = 0.into();
        line += 1;
        continue;
      }
      if !c.is_ascii() {
        utf16_chars.push(Utf16Char {
          start: curr_col,
          end: curr_col + c_len,
        });
      }
      curr_col += c_len;
    }
    // utf8_offsets and utf16_offsets length is equal to (# of lines + 1)
    utf8_offsets.push(curr_row);
    utf16_offsets.push(curr_offset_u16);
    if !utf16_chars.is_empty() {
      utf16_lines.insert(line, utf16_chars);
    }
    Utf16Map {
      utf8_offsets,
      utf16_lines,
      utf16_offsets,
    }
  }

  /// Total length of the text in UTF-16 code units.
  pub fn text_content_length_utf16(&self) -> TextSize {
    *self.utf16_offsets.last().unwrap()
  }

  /// Number of recorded UTF-8 line-start offsets (# of lines + 1).
  pub fn utf8_offsets_len(&self) -> usize {
    self.utf8_offsets.len()
  }

  /// Length of the given line in UTF-16 code units (measured between
  /// consecutive line-start offsets, so it includes the newline).
  pub fn line_length_utf16(&self, line: u32) -> TextSize {
    self.utf16_offsets[(line + 1) as usize] - self.utf16_offsets[line as usize]
  }

  /// Converts a UTF-16 column on `line` to a UTF-8 column.
  pub fn utf16_to_utf8_col(&self, line: u32, mut col: u32) -> TextSize {
    if let Some(utf16_chars) = self.utf16_lines.get(&line) {
      for c in utf16_chars {
        // widen the column by the UTF-8/UTF-16 size difference of each
        // non-ASCII char that precedes it
        if col > u32::from(c.start) {
          col += u32::from(c.len()) - c.len_utf16() as u32;
        } else {
          break;
        }
      }
    }
    col.into()
  }

  /// Return a u8 offset based on a u16 position.
  pub fn offset(&self, line: u32, col: u32) -> Result<TextSize, AnyError> {
    let col = self.utf16_to_utf8_col(line, col);
    if let Some(line_offset) = self.utf8_offsets.get(line as usize) {
      Ok(line_offset + col)
    } else {
      Err(JsErrorBox::new("OutOfRange", "The position is out of range.").into())
    }
  }

  /// Returns the UTF-16 offset of a UTF-16 line/column position.
  pub fn offset_utf16(
    &self,
    line: u32,
    col: u32,
  ) -> Result<TextSize, AnyError> {
    if let Some(line_offset) = self.utf16_offsets.get(line as usize) {
      Ok(line_offset + TextSize::from(col))
    } else {
      Err(JsErrorBox::new("OutOfRange", "The position is out of range.").into())
    }
  }

  /// Returns a u16 line and column based on a u16 offset, which
  /// TypeScript offsets are returned as u16.
  pub fn position_utf16(&self, offset: TextSize) -> LineAndColumnIndex {
    // partition_point's capped search range (see its docs) keeps the
    // line index within the real lines even for the end-of-text offset
    let line = partition_point(&self.utf16_offsets, |&it| it <= offset) - 1;
    let line_start_offset = self.utf16_offsets[line];
    let col = offset - line_start_offset;
    LineAndColumnIndex {
      line_index: line,
      column_index: col.into(),
    }
  }

  /// Convert a UTF-16 byte offset to UTF-8 byte offset
  pub fn utf16_to_utf8_offset(
    &self,
    utf16_offset: TextSize,
  ) -> Option<TextSize> {
    if utf16_offset > self.text_content_length_utf16() {
      return None;
    }
    let pos = self.position_utf16(utf16_offset);
    let line_start_utf8 = self.utf8_offsets[pos.line_index];
    let col_utf8 =
      self.utf16_to_utf8_col(pos.line_index as u32, pos.column_index as u32);
    Some(line_start_utf8 + col_utf8)
  }

  /// Convert a UTF-8 byte offset to UTF-16 byte offset
  pub fn utf8_to_utf16_offset(
    &self,
    utf8_offset: TextSize,
  ) -> Option<TextSize> {
    if utf8_offset > *self.utf8_offsets.last()? {
      return None;
    }
    let line = partition_point(&self.utf8_offsets, |&it| it <= utf8_offset) - 1;
    let line_start_utf8 = self.utf8_offsets[line];
    let col_utf8 = utf8_offset - line_start_utf8;
    let col_utf16 = self.utf8_to_utf16_col(line as u32, col_utf8);
    Some(self.utf16_offsets[line] + TextSize::from(col_utf16))
  }

  /// Converts a UTF-8 column on `line` to a UTF-16 column
  /// (inverse of `utf16_to_utf8_col`).
  fn utf8_to_utf16_col(&self, line: u32, col: TextSize) -> u32 {
    let mut utf16_col = u32::from(col);
    if let Some(utf16_chars) = self.utf16_lines.get(&line) {
      for c in utf16_chars {
        if col > c.start {
          utf16_col -= u32::from(c.len()) - c.len_utf16() as u32;
        } else {
          break;
        }
      }
    }
    utf16_col
  }
}
/// Returns the index of the first element in `slice` for which
/// `predicate` returns `false`, searching the range `0..slice.len() - 1`.
///
/// Note: unlike `slice::partition_point` in std, the last element is not
/// part of the search range, so the result is capped at
/// `slice.len() - 1` even when every element satisfies the predicate.
///
/// # Panics
///
/// Panics if `slice` is empty.
fn partition_point<T, P>(slice: &[T], mut predicate: P) -> usize
where
  P: FnMut(&T) -> bool,
{
  let mut left = 0;
  let mut right = slice.len() - 1;
  while left != right {
    let mid = left + (right - left) / 2;
    // Loop invariant: 0 <= left <= mid < right <= slice.len() - 1, so
    // `mid` is always in bounds and the checked indexing below never
    // panics. Using safe indexing (instead of the previous
    // `get_unchecked`) also avoids undefined behavior if this were ever
    // called with an empty slice, where `slice.len() - 1` would wrap
    // around in release builds; now that case panics cleanly instead.
    if predicate(&slice[mid]) {
      left = mid + 1;
    } else {
      right = mid;
    }
  }
  left
}
#[cfg(test)]
mod tests {
  use std::sync::Arc;

  use deno_core::ModuleCodeBytes;
  use deno_core::ModuleCodeString;

  use super::*;

  #[test]
  fn test_source_map_from_code() {
    let to_string =
      |bytes: Vec<u8>| -> String { String::from_utf8(bytes.to_vec()).unwrap() };
    assert_eq!(
      source_map_from_code(
        b"test\n//# sourceMappingURL=data:application/json;base64,dGVzdGluZ3Rlc3Rpbmc="
      ).map(to_string),
      Some("testingtesting".to_string())
    );
    // trailing blank lines after the comment are ignored
    assert_eq!(
      source_map_from_code(
        b"test\n//# sourceMappingURL=data:application/json;base64,dGVzdGluZ3Rlc3Rpbmc=\n \n"
      ).map(to_string),
      Some("testingtesting".to_string())
    );
    // a comment that is not the last non-blank line is not a source map
    assert_eq!(
      source_map_from_code(
        b"test\n//# sourceMappingURL=data:application/json;base64,dGVzdGluZ3Rlc3Rpbmc=\n test\n"
      ).map(to_string),
      None
    );
    // invalid base64 payload ("{") decodes to None
    assert_eq!(
      source_map_from_code(
        b"\"use strict\";
throw new Error(\"Hello world!\");
//# sourceMappingURL=data:application/json;base64,{"
      ),
      None
    );
  }

  #[test]
  fn test_source_without_source_map() {
    run_test("", "");
    run_test("\n", "\n");
    run_test("\r\n", "\r\n");
    run_test("a", "a");
    run_test("a\n", "a\n");
    run_test("a\r\n", "a\r\n");
    run_test("a\r\nb", "a\r\nb");
    run_test("a\nb\n", "a\nb\n");
    run_test("a\r\nb\r\n", "a\r\nb\r\n");
    run_test(
      "test\n//# sourceMappingURL=data:application/json;base64,test",
      "test\n",
    );
    run_test(
      "test\r\n//# sourceMappingURL=data:application/json;base64,test",
      "test\r\n",
    );
    run_test(
      "\n//# sourceMappingURL=data:application/json;base64,test",
      "\n",
    );
    run_test(
      "test\n//# sourceMappingURL=data:application/json;base64,test\n\n",
      "test\n",
    );
    run_test(
      "test\n//# sourceMappingURL=data:application/json;base64,test\n \n ",
      "test\n",
    );

    // exercises every ModuleSourceCode / ModuleCodeBytes variant
    fn run_test(input: &'static str, output: &'static str) {
      let forms = [
        ModuleSourceCode::String(ModuleCodeString::from_static(input)),
        ModuleSourceCode::String({
          let text: Arc<str> = input.into();
          text.into()
        }),
        ModuleSourceCode::String({
          let text: String = input.into();
          text.into()
        }),
        ModuleSourceCode::Bytes(ModuleCodeBytes::Static(input.as_bytes())),
        ModuleSourceCode::Bytes(ModuleCodeBytes::Boxed(
          input.as_bytes().to_vec().into_boxed_slice(),
        )),
        ModuleSourceCode::Bytes(ModuleCodeBytes::Arc(
          input.as_bytes().to_vec().into(),
        )),
      ];
      for form in forms {
        let result = code_without_source_map(form);
        let bytes = result.as_bytes();
        assert_eq!(bytes, output.as_bytes());
      }
    }
  }

  #[test]
  fn test_line_index() {
    // (utf16 offset, (line, column)) pairs
    let cases = [
      (0, (0, 0)),
      (1, (0, 1)),
      (5, (0, 5)),
      (6, (1, 0)),
      (7, (1, 1)),
      (8, (1, 2)),
      (10, (1, 4)),
      (11, (1, 5)),
      (12, (1, 6)),
    ];
    let text = "hello\nworld";
    let index = Utf16Map::new(text);
    for (input, expected) in cases {
      assert_eq!(
        index.position_utf16(input.into()),
        LineAndColumnIndex {
          line_index: expected.0,
          column_index: expected.1
        }
      );
    }

    let cases = [
      (0, (0, 0)),
      (1, (1, 0)),
      (2, (1, 1)),
      (6, (1, 5)),
      (7, (2, 0)),
    ];
    let text = "\nhello\nworld";
    let index = Utf16Map::new(text);
    for (input, expected) in cases {
      assert_eq!(
        index.position_utf16(input.into()),
        LineAndColumnIndex {
          line_index: expected.0,
          column_index: expected.1
        }
      );
    }
  }

  #[test]
  fn test_char_len() {
    assert_eq!('メ'.len_utf8(), 3);
    assert_eq!('メ'.len_utf16(), 1);
    assert_eq!('编'.len_utf8(), 3);
    assert_eq!('编'.len_utf16(), 1);
    assert_eq!('🦕'.len_utf8(), 4);
    assert_eq!('🦕'.len_utf16(), 2);
  }

  #[test]
  fn test_empty_index() {
    // ASCII-only text records no utf16 lines
    let col_index = Utf16Map::new(
      "
const C: char = 'x';
",
    );
    assert_eq!(col_index.utf16_lines.len(), 0);
  }

  #[test]
  fn test_single_char() {
    let col_index = Utf16Map::new(
      "
const C: char = 'メ';
",
    );
    assert_eq!(col_index.utf16_lines.len(), 1);
    assert_eq!(col_index.utf16_lines[&1].len(), 1);
    assert_eq!(
      col_index.utf16_lines[&1][0],
      Utf16Char {
        start: 17.into(),
        end: 20.into()
      }
    );
    // UTF-16 to UTF-8, no changes
    assert_eq!(col_index.utf16_to_utf8_col(1, 15), TextSize::from(15));
    // UTF-16 to UTF-8
    assert_eq!(col_index.utf16_to_utf8_col(1, 19), TextSize::from(21));
    let col_index = Utf16Map::new("a𐐏b");
    assert_eq!(col_index.utf16_to_utf8_col(0, 3), TextSize::from(5));
  }

  #[test]
  fn test_string() {
    let col_index = Utf16Map::new(
      "
const C: char = \"メ メ\";
",
    );
    assert_eq!(col_index.utf16_lines.len(), 1);
    assert_eq!(col_index.utf16_lines[&1].len(), 2);
    assert_eq!(
      col_index.utf16_lines[&1][0],
      Utf16Char {
        start: 17.into(),
        end: 20.into()
      }
    );
    assert_eq!(
      col_index.utf16_lines[&1][1],
      Utf16Char {
        start: 21.into(),
        end: 24.into()
      }
    );
    // UTF-16 to UTF-8
    assert_eq!(col_index.utf16_to_utf8_col(1, 15), TextSize::from(15));
    // メ UTF-8: 0xE3 0x83 0xA1, UTF-16: 0x30E1
    assert_eq!(col_index.utf16_to_utf8_col(1, 17), TextSize::from(17)); // first メ at 17..20
    assert_eq!(col_index.utf16_to_utf8_col(1, 18), TextSize::from(20)); // space
    assert_eq!(col_index.utf16_to_utf8_col(1, 19), TextSize::from(21)); // second メ at 21..24
    assert_eq!(col_index.utf16_to_utf8_col(2, 15), TextSize::from(15));
  }

  #[test]
  fn test_offset_out_of_range() {
    let text = "hello";
    let map = Utf16Map::new(text);
    assert_eq!(map.utf8_to_utf16_offset(TextSize::from(10)), None);
    assert_eq!(map.utf16_to_utf8_offset(TextSize::from(10)), None);
  }

  #[test]
  fn test_offset_basic_ascii() {
    let text = "hello\nworld";
    let map = Utf16Map::new(text);
    let utf8_offset = TextSize::from(7);
    let utf16_offset = map.utf8_to_utf16_offset(utf8_offset).unwrap();
    assert_eq!(utf16_offset, TextSize::from(7));
    let result = map.utf16_to_utf8_offset(utf16_offset).unwrap();
    assert_eq!(result, utf8_offset);
  }

  #[test]
  fn test_offset_emoji() {
    let text = "hi 👋\nbye";
    let map = Utf16Map::new(text);
    let utf8_offset = TextSize::from(3);
    let utf16_offset = map.utf8_to_utf16_offset(utf8_offset).unwrap();
    assert_eq!(utf16_offset, TextSize::from(3));
    // 👋 is 4 UTF-8 bytes but only 2 UTF-16 code units
    let utf8_offset_after = TextSize::from(7);
    let utf16_offset_after =
      map.utf8_to_utf16_offset(utf8_offset_after).unwrap();
    assert_eq!(utf16_offset_after, TextSize::from(5));
    // round-trip every character boundary
    for (utf8_offset, _) in text.char_indices() {
      let utf8_offset = TextSize::from(utf8_offset as u32);
      let utf16_offset = map.utf8_to_utf16_offset(utf8_offset).unwrap();
      let reverse_ut8_offset = map.utf16_to_utf8_offset(utf16_offset).unwrap();
      assert_eq!(reverse_ut8_offset, utf8_offset);
    }
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/util/display.rs | cli/util/display.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::io::Write;
use deno_core::error::AnyError;
use deno_core::serde_json;
/// A function that converts a float to a string that represents a human
/// readable version of that number, e.g. `12288.0` -> `"12KB"`.
pub fn human_size(size: f64) -> String {
  let negative = if size.is_sign_positive() { "" } else { "-" };
  let size = size.abs();
  let units = ["B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"];
  // values below one byte are displayed as-is
  if size < 1_f64 {
    return format!("{}{}{}", negative, size, "B");
  }
  let delimiter = 1024_f64;
  // largest power of 1024 that keeps the value >= 1, clamped to YB
  let exponent = std::cmp::min(
    (size.ln() / delimiter.ln()).floor() as i32,
    (units.len() - 1) as i32,
  );
  // round to two decimals, then round-trip through f64 so trailing
  // zeros are dropped when displayed (e.g. "12.00" -> "12")
  let pretty_bytes = format!("{:.2}", size / delimiter.powi(exponent))
    .parse::<f64>()
    .unwrap();
  let unit = units[exponent as usize];
  format!("{negative}{pretty_bytes}{unit}")
}
const BYTES_TO_KIB: u64 = 2u64.pow(10);
const BYTES_TO_MIB: u64 = 2u64.pow(20);

/// Gets the size used for downloading data. The total bytes is used to
/// determine the units to use.
pub fn human_download_size(byte_count: u64, total_bytes: u64) -> String {
  // renders the count in the chosen unit with two fixed decimal places
  fn render(byte_count: u64, divisor: u64, suffix: &str) -> String {
    let whole = byte_count / divisor;
    let hundredths = (byte_count % divisor) * 100 / divisor;
    format!("{whole}.{hundredths:0>2}{suffix}")
  }

  // the unit is chosen from the total, not the current count
  if total_bytes < BYTES_TO_MIB {
    render(byte_count, BYTES_TO_KIB, "KiB")
  } else {
    render(byte_count, BYTES_TO_MIB, "MiB")
  }
}
/// A function that converts a millisecond elapsed time to a string that
/// represents a human readable version of that time.
pub fn human_elapsed(elapsed: u128) -> String {
  human_elapsed_with_ms_limit(elapsed, 1_000)
}

/// Like [`human_elapsed`], but the threshold below which the value is
/// shown as raw milliseconds is configurable.
pub fn human_elapsed_with_ms_limit(elapsed: u128, ms_limit: u128) -> String {
  if elapsed < ms_limit {
    // below the limit: raw milliseconds
    format!("{elapsed}ms")
  } else if elapsed < 60_000 {
    // under a minute: whole seconds
    format!("{}s", elapsed / 1_000)
  } else {
    // a minute or more: minutes plus leftover seconds
    let total_seconds = elapsed / 1_000;
    format!("{}m{}s", total_seconds / 60, total_seconds % 60)
  }
}
/// Writes `bytes` to stdout, treating a broken pipe (the reader closed
/// its end) as success instead of an error.
pub fn write_to_stdout_ignore_sigpipe(
  bytes: &[u8],
) -> Result<(), std::io::Error> {
  use std::io::ErrorKind;
  std::io::stdout().write_all(bytes).or_else(|err| {
    // a broken pipe means nobody is reading anymore; swallow it
    if err.kind() == ErrorKind::BrokenPipe {
      Ok(())
    } else {
      Err(err)
    }
  })
}
/// Pretty-prints `value` as JSON to stdout, followed by a trailing
/// newline.
pub fn write_json_to_stdout<T>(value: &T) -> Result<(), AnyError>
where
  T: ?Sized + serde::ser::Serialize,
{
  // buffer so the serialized output is flushed in large chunks
  let mut out = std::io::BufWriter::new(std::io::stdout());
  serde_json::to_writer_pretty(&mut out, value)?;
  writeln!(&mut out)?;
  Ok(())
}
#[cfg(test)]
mod tests {
  use super::*;

  #[test]
  fn test_human_size() {
    assert_eq!(human_size(1_f64), "1B");
    assert_eq!(human_size((12 * 1024) as f64), "12KB");
    assert_eq!(human_size((24_i64 * 1024 * 1024) as f64), "24MB");
    assert_eq!(human_size((24_i64 * 1024 * 1024 * 1024) as f64), "24GB");
    assert_eq!(
      human_size((24_i64 * 1024 * 1024 * 1024 * 1024) as f64),
      "24TB"
    );
    // sub-byte and negative values are passed through verbatim
    assert_eq!(human_size(0_f64), "0B");
    assert_eq!(human_size(-10_f64), "-10B");
  }

  #[test]
  fn test_human_download_size() {
    // values round toward zero (truncating integer math)
    assert_eq!(
      human_download_size(BYTES_TO_KIB / 100 - 1, BYTES_TO_KIB),
      "0.00KiB"
    );
    assert_eq!(
      human_download_size(BYTES_TO_KIB / 100 + 1, BYTES_TO_KIB),
      "0.01KiB"
    );
    assert_eq!(
      human_download_size(BYTES_TO_KIB / 5, BYTES_TO_KIB),
      "0.19KiB"
    );
    // totals below 1 MiB render in KiB, even large counts
    assert_eq!(
      human_download_size(BYTES_TO_MIB - 1, BYTES_TO_MIB - 1),
      "1023.99KiB"
    );
    assert_eq!(human_download_size(BYTES_TO_MIB, BYTES_TO_MIB), "1.00MiB");
    assert_eq!(
      human_download_size(BYTES_TO_MIB * 9 - 1523, BYTES_TO_MIB),
      "8.99MiB"
    );
  }

  #[test]
  fn test_human_elapsed() {
    assert_eq!(human_elapsed(1), "1ms");
    assert_eq!(human_elapsed(256), "256ms");
    // seconds are truncated, not rounded
    assert_eq!(human_elapsed(1000), "1s");
    assert_eq!(human_elapsed(1001), "1s");
    assert_eq!(human_elapsed(1020), "1s");
    assert_eq!(human_elapsed(70 * 1000), "1m10s");
    assert_eq!(human_elapsed(86 * 1000 + 100), "1m26s");
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/util/windows.rs | cli/util/windows.rs | // Copyright 2018-2025 the Deno authors. MIT license.
/// Ensures that stdin, stdout, and stderr are open and have valid HANDLEs
/// associated with them. There are many places where a `std::fs::File` is
/// constructed from a stdio handle; if the handle is null this causes a panic.
///
/// No-op on non-Windows targets (the whole body is `#[cfg(windows)]`).
pub fn ensure_stdio_open() {
  #[cfg(windows)]
  // SAFETY: winapi calls
  unsafe {
    use std::mem::size_of;

    use winapi::shared::minwindef::DWORD;
    use winapi::shared::minwindef::FALSE;
    use winapi::shared::minwindef::TRUE;
    use winapi::shared::ntdef::NULL;
    use winapi::shared::winerror::ERROR_INVALID_HANDLE;
    use winapi::um::errhandlingapi::GetLastError;
    use winapi::um::fileapi::CreateFileA;
    use winapi::um::fileapi::OPEN_EXISTING;
    use winapi::um::handleapi::GetHandleInformation;
    use winapi::um::handleapi::INVALID_HANDLE_VALUE;
    use winapi::um::minwinbase::SECURITY_ATTRIBUTES;
    use winapi::um::processenv::GetStdHandle;
    use winapi::um::processenv::SetStdHandle;
    use winapi::um::winbase::STD_ERROR_HANDLE;
    use winapi::um::winbase::STD_INPUT_HANDLE;
    use winapi::um::winbase::STD_OUTPUT_HANDLE;
    use winapi::um::winnt::FILE_ATTRIBUTE_NORMAL;
    use winapi::um::winnt::FILE_GENERIC_READ;
    use winapi::um::winnt::FILE_GENERIC_WRITE;
    use winapi::um::winnt::FILE_READ_ATTRIBUTES;
    use winapi::um::winnt::FILE_SHARE_READ;
    use winapi::um::winnt::FILE_SHARE_WRITE;

    for std_handle in [STD_INPUT_HANDLE, STD_OUTPUT_HANDLE, STD_ERROR_HANDLE] {
      // Check whether stdio handle is open.
      let is_valid = match GetStdHandle(std_handle) {
        NULL | INVALID_HANDLE_VALUE => false,
        handle => {
          // The stdio handle is open; check whether its handle is valid.
          let mut flags: DWORD = 0;
          match GetHandleInformation(handle, &mut flags) {
            TRUE => true,
            // the handle exists but is stale/invalid; treat it as missing
            FALSE if GetLastError() == ERROR_INVALID_HANDLE => false,
            FALSE => {
              panic!("GetHandleInformation failed (error {})", GetLastError());
            }
            _ => unreachable!(),
          }
        }
      };
      if !is_valid {
        // Open NUL device.
        // stdin gets a readable handle; stdout/stderr get writable ones.
        let desired_access = match std_handle {
          STD_INPUT_HANDLE => FILE_GENERIC_READ,
          _ => FILE_GENERIC_WRITE | FILE_READ_ATTRIBUTES,
        };
        // bInheritHandle: TRUE so the handle is inherited by children
        let security_attributes = SECURITY_ATTRIBUTES {
          nLength: size_of::<SECURITY_ATTRIBUTES>() as DWORD,
          lpSecurityDescriptor: NULL,
          bInheritHandle: TRUE,
        };
        let file_handle = CreateFileA(
          b"\\\\?\\NUL\0" as *const _ as *mut _,
          desired_access,
          FILE_SHARE_READ | FILE_SHARE_WRITE,
          &security_attributes as *const _ as *mut _,
          OPEN_EXISTING,
          FILE_ATTRIBUTE_NORMAL,
          NULL,
        );
        match file_handle {
          NULL => unreachable!(),
          INVALID_HANDLE_VALUE => {
            panic!("Could not open NUL device (error {})", GetLastError());
          }
          _ => {}
        }
        // Assign the opened NUL handle to the missing stdio handle.
        let success = SetStdHandle(std_handle, file_handle);
        match success {
          TRUE => {}
          FALSE => panic!("SetStdHandle failed (error {})", GetLastError()),
          _ => unreachable!(),
        }
      }
    }
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/util/mod.rs | cli/util/mod.rs | // Copyright 2018-2025 the Deno authors. MIT license.
// Note: Only add code in this folder that has no application specific logic
pub mod archive;
pub mod console;
pub mod display;
pub mod draw_thread;
pub mod extract;
pub mod file_watcher;
pub mod fs;
pub mod path;
pub mod progress_bar;
pub mod retry;
pub mod sync;
pub mod text_encoding;
pub mod unix;
pub mod v8;
pub mod watch_env_tracker;
pub mod windows;
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/util/extract.rs | cli/util/extract.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::collections::BTreeSet;
use std::fmt::Write as _;
use std::sync::Arc;
use deno_ast::MediaType;
use deno_ast::SourceRangedForSpanned as _;
use deno_ast::swc::ast;
use deno_ast::swc::atoms::Atom;
use deno_ast::swc::common::DUMMY_SP;
use deno_ast::swc::common::comments::CommentKind;
use deno_ast::swc::ecma_visit::Visit;
use deno_ast::swc::ecma_visit::VisitMut;
use deno_ast::swc::ecma_visit::VisitWith as _;
use deno_ast::swc::ecma_visit::visit_mut_pass;
use deno_ast::swc::utils as swc_utils;
use deno_cache_dir::file_fetcher::File;
use deno_core::ModuleSpecifier;
use deno_core::error::AnyError;
use regex::Regex;
use crate::file_fetcher::TextDecodedFile;
use crate::util::path::mapped_specifier_for_tsc;
/// Extracts doc tests from a given file, transforms them into pseudo test
/// files by wrapping the content of the doc tests in a `Deno.test` call, and
/// returns a list of the pseudo test files.
///
/// The difference from [`extract_snippet_files`] is that this function wraps
/// extracted code snippets in a `Deno.test` call. Snippets that already
/// contain a `Deno.test` call are left unwrapped (see `extract_inner`).
pub fn extract_doc_tests(file: File) -> Result<Vec<File>, AnyError> {
  extract_inner(file, WrapKind::DenoTest)
}

/// Extracts code snippets from a given file and returns a list of the extracted
/// files.
///
/// The difference from [`extract_doc_tests`] is that this function does *not*
/// wrap extracted code snippets in a `Deno.test` call.
pub fn extract_snippet_files(file: File) -> Result<Vec<File>, AnyError> {
  extract_inner(file, WrapKind::NoWrap)
}

/// How an extracted snippet should be wrapped when generating its
/// pseudo file.
#[derive(Clone, Copy)]
enum WrapKind {
  // wrap the snippet body in a `Deno.test(...)` call
  DenoTest,
  // emit the snippet as-is
  NoWrap,
}

/// An extracted snippet plus whether its source already contained a
/// `Deno.test` call (in which case `extract_inner` never re-wraps it).
struct TestOrSnippet {
  file: File,
  has_deno_test: bool,
}
/// Shared implementation for [`extract_doc_tests`] and
/// [`extract_snippet_files`].
fn extract_inner(
  file: File,
  wrap_kind: WrapKind,
) -> Result<Vec<File>, AnyError> {
  let file = TextDecodedFile::decode(file)?;

  // Collect the file's exports; passed to `generate_pseudo_file` below
  // (presumably so generated files can reference them — confirm there).
  // Falls back to an empty collector if the file fails to parse.
  let exports = match deno_ast::parse_program(deno_ast::ParseParams {
    specifier: file.specifier.clone(),
    text: file.source.clone(),
    media_type: file.media_type,
    capture_tokens: false,
    scope_analysis: false,
    maybe_syntax: None,
  }) {
    Ok(parsed) => {
      let mut c = ExportCollector::default();
      c.visit_program(parsed.program().as_ref());
      c
    }
    Err(_) => ExportCollector::default(),
  };

  // Unknown media type -> scan markdown-style fenced blocks;
  // otherwise scan doc comments within the parsed source.
  let extracted_files = if file.media_type == MediaType::Unknown {
    extract_files_from_fenced_blocks(
      &file.specifier,
      &file.source,
      file.media_type,
    )?
  } else {
    extract_files_from_source_comments(
      &file.specifier,
      file.source.clone(),
      file.media_type,
    )?
  };

  extracted_files
    .into_iter()
    .map(|extracted| {
      // snippets that already call `Deno.test` are never wrapped again
      let wrap_kind = if extracted.has_deno_test {
        WrapKind::NoWrap
      } else {
        wrap_kind
      };
      generate_pseudo_file(extracted.file, &file.specifier, &exports, wrap_kind)
    })
    .collect::<Result<_, _>>()
}
/// Extracts snippets from markdown-style fenced code blocks (```` ``` ````)
/// in `source`.
fn extract_files_from_fenced_blocks(
  specifier: &ModuleSpecifier,
  source: &str,
  media_type: MediaType,
) -> Result<Vec<TestOrSnippet>, AnyError> {
  // The pattern matches code blocks as well as anything in HTML comment syntax,
  // but it stores the latter without any capturing groups. This way, a simple
  // check can be done to see if a block is inside a comment (and skip typechecking)
  // or not by checking for the presence of capturing groups in the matches.
  let blocks_regex =
    lazy_regex::regex!(r"(?s)<!--.*?-->|```([^\r\n]*)\r?\n([\S\s]*?)```");
  let lines_regex = lazy_regex::regex!(r"(((#!+).*)|(?:# ?)?(.*))");
  extract_files_from_regex_blocks(
    specifier,
    source,
    media_type,
    /* file line index */ 0,
    blocks_regex,
    lines_regex,
  )
}
/// Extracts snippets from fenced code blocks found inside JSDoc-style
/// block comments (`/** ... */`) of a parsed module.
fn extract_files_from_source_comments(
  specifier: &ModuleSpecifier,
  source: Arc<str>,
  media_type: MediaType,
) -> Result<Vec<TestOrSnippet>, AnyError> {
  let parsed_source = deno_ast::parse_module(deno_ast::ParseParams {
    specifier: specifier.clone(),
    text: source,
    media_type,
    capture_tokens: false,
    maybe_syntax: None,
    scope_analysis: false,
  })?;
  let comments = parsed_source.comments().get_vec();
  let blocks_regex = lazy_regex::regex!(r"```([^\r\n]*)\r?\n([\S\s]*?)```");
  let lines_regex =
    lazy_regex::regex!(r"(?:\* ?)((#!+).*)|(?:\* ?)(?:\# ?)?(.*)");
  let files = comments
    .iter()
    .filter(|comment| {
      // only doc comments: block comments whose text starts with `*`
      if comment.kind != CommentKind::Block || !comment.text.starts_with('*') {
        return false;
      }
      true
    })
    .flat_map(|comment| {
      extract_files_from_regex_blocks(
        specifier,
        &comment.text,
        media_type,
        // line index of the comment within the enclosing file
        parsed_source.text_info_lazy().line_index(comment.start()),
        blocks_regex,
        lines_regex,
      )
    })
    .flatten()
    .collect();
  Ok(files)
}
/// Scans `source` with `blocks_regex` and converts each matched fenced code
/// block into a [`TestOrSnippet`] whose synthetic specifier encodes the
/// block's 1-based line range as `<specifier>$<start>-<end>`.
///
/// Skipped blocks: matches without capture group 1 (HTML comments — see the
/// note in `extract_files_from_fenced_blocks`), blocks tagged `ignore`, and
/// blocks whose resolved media type is unknown.
fn extract_files_from_regex_blocks(
  specifier: &ModuleSpecifier,
  source: &str,
  media_type: MediaType,
  file_line_index: usize,
  blocks_regex: &Regex,
  lines_regex: &Regex,
) -> Result<Vec<TestOrSnippet>, AnyError> {
  // Detects snippets that already declare their own tests.
  let tests_regex = lazy_regex::regex!(r"(?m)^\s*Deno\.test\(");
  let files = blocks_regex
    .captures_iter(source)
    .filter_map(|block| {
      // Group 1 does not participate for HTML comment matches; skip those.
      block.get(1)?;
      let maybe_attributes: Option<Vec<_>> = block
        .get(1)
        .map(|attributes| attributes.as_str().split(' ').collect());
      let file_media_type = if let Some(attributes) = maybe_attributes {
        if attributes.contains(&"ignore") {
          return None;
        }
        // The first attribute is the fenced block's language tag.
        match attributes.first() {
          Some(&"js") | Some(&"javascript") => MediaType::JavaScript,
          Some(&"mjs") => MediaType::Mjs,
          Some(&"cjs") => MediaType::Cjs,
          Some(&"jsx") => MediaType::Jsx,
          Some(&"ts") | Some(&"typescript") => MediaType::TypeScript,
          Some(&"mts") => MediaType::Mts,
          Some(&"cts") => MediaType::Cts,
          Some(&"tsx") => MediaType::Tsx,
          _ => MediaType::Unknown,
        }
      } else {
        media_type
      };
      if file_media_type == MediaType::Unknown {
        return None;
      }
      // 0-based line offset of the block within `source`.
      let line_offset = source[0..block.get(0).unwrap().start()]
        .chars()
        .filter(|c| *c == '\n')
        .count();
      let line_count = block.get(0).unwrap().as_str().split('\n').count();
      let body = block.get(2).unwrap();
      let text = body.as_str();
      // TODO(caspervonb) generate an inline source map
      let mut file_source = String::new();
      for line in lines_regex.captures_iter(text) {
        let text = line.get(1).or_else(|| line.get(3)).unwrap();
        writeln!(file_source, "{}", text.as_str()).unwrap();
      }
      // Encode the line range into the generated specifier so test output
      // can point back at the original location.
      let file_specifier = ModuleSpecifier::parse(&format!(
        "{}${}-{}",
        specifier,
        file_line_index + line_offset + 1,
        file_line_index + line_offset + line_count + 1,
      ))
      .unwrap();
      let file_specifier =
        mapped_specifier_for_tsc(&file_specifier, file_media_type)
          .map(|s| ModuleSpecifier::parse(&s).unwrap())
          .unwrap_or(file_specifier);
      let has_deno_test = tests_regex.is_match(&file_source);
      let file = File {
        url: file_specifier,
        mtime: None,
        maybe_headers: None,
        source: file_source.into_bytes().into(),
        loaded_from: deno_cache_dir::file_fetcher::LoadedFrom::Local,
      };
      Some(TestOrSnippet {
        file,
        has_deno_test,
      })
    })
    .collect();
  Ok(files)
}
/// Accumulates the exported symbols of a module while visiting its AST.
#[derive(Default)]
struct ExportCollector {
  // BTreeSet keeps iteration (and therefore generated import) order
  // deterministic.
  named_exports: BTreeSet<Atom>,
  // The identifier of the default export, if it has one.
  default_export: Option<Atom>,
}
impl ExportCollector {
  /// Builds the import specifiers needed to bring the collected exports into
  /// a pseudo file's scope, skipping any symbol present in
  /// `symbols_to_exclude` (symbols the snippet already declares or imports).
  fn to_import_specifiers(
    &self,
    symbols_to_exclude: &rustc_hash::FxHashSet<Atom>,
  ) -> Vec<ast::ImportSpecifier> {
    let mut import_specifiers = vec![];
    if let Some(default_export) = &self.default_export {
      // If the default export conflicts with a named export, a named one
      // takes precedence.
      if !symbols_to_exclude.contains(default_export)
        && !self.named_exports.contains(default_export)
      {
        import_specifiers.push(ast::ImportSpecifier::Default(
          ast::ImportDefaultSpecifier {
            span: DUMMY_SP,
            local: ast::Ident {
              span: DUMMY_SP,
              ctxt: Default::default(),
              sym: default_export.clone(),
              optional: false,
            },
          },
        ));
      }
    }
    for named_export in &self.named_exports {
      if symbols_to_exclude.contains(named_export) {
        continue;
      }
      import_specifiers.push(ast::ImportSpecifier::Named(
        ast::ImportNamedSpecifier {
          span: DUMMY_SP,
          local: ast::Ident {
            span: DUMMY_SP,
            ctxt: Default::default(),
            sym: named_export.clone(),
            optional: false,
          },
          imported: None,
          is_type_only: false,
        },
      ));
    }
    import_specifiers
  }
}
impl Visit for ExportCollector {
  fn visit_ts_module_decl(&mut self, ts_module_decl: &ast::TsModuleDecl) {
    // Ambient (`declare`) modules are skipped entirely.
    if ts_module_decl.declare {
      return;
    }
    ts_module_decl.visit_children_with(self);
  }
  // Records identifiers bound by `export <decl>` forms.
  fn visit_export_decl(&mut self, export_decl: &ast::ExportDecl) {
    match &export_decl.decl {
      ast::Decl::Class(class) => {
        self.named_exports.insert(class.ident.sym.clone());
      }
      ast::Decl::Fn(func) => {
        self.named_exports.insert(func.ident.sym.clone());
      }
      ast::Decl::Var(var) => {
        // A single `export const/let/var` may bind several names through
        // destructuring patterns.
        for var_decl in &var.decls {
          let atoms = extract_sym_from_pat(&var_decl.name);
          self.named_exports.extend(atoms);
        }
      }
      ast::Decl::TsEnum(ts_enum) => {
        self.named_exports.insert(ts_enum.id.sym.clone());
      }
      ast::Decl::TsModule(ts_module) => {
        // Ambient modules export nothing usable here.
        if ts_module.declare {
          return;
        }
        match &ts_module.id {
          ast::TsModuleName::Ident(ident) => {
            self.named_exports.insert(ident.sym.clone());
          }
          ast::TsModuleName::Str(s) => {
            self
              .named_exports
              .insert(s.value.to_atom_lossy().into_owned());
          }
        }
      }
      ast::Decl::TsTypeAlias(ts_type_alias) => {
        self.named_exports.insert(ts_type_alias.id.sym.clone());
      }
      ast::Decl::TsInterface(ts_interface) => {
        self.named_exports.insert(ts_interface.id.sym.clone());
      }
      // `using` declarations are not collected.
      ast::Decl::Using(_) => {}
    }
  }
  // Records the name of `export default <named decl>` when the declaration
  // has an identifier; anonymous defaults are not collected.
  fn visit_export_default_decl(
    &mut self,
    export_default_decl: &ast::ExportDefaultDecl,
  ) {
    match &export_default_decl.decl {
      ast::DefaultDecl::Class(class) => {
        if let Some(ident) = &class.ident {
          self.default_export = Some(ident.sym.clone());
        }
      }
      ast::DefaultDecl::Fn(func) => {
        if let Some(ident) = &func.ident {
          self.default_export = Some(ident.sym.clone());
        }
      }
      ast::DefaultDecl::TsInterfaceDecl(iface_decl) => {
        self.default_export = Some(iface_decl.id.sym.clone());
      }
    }
  }
  // Records `export default <ident>;`; non-identifier default expressions
  // are not collected.
  fn visit_export_default_expr(
    &mut self,
    export_default_expr: &ast::ExportDefaultExpr,
  ) {
    if let ast::Expr::Ident(ident) = &*export_default_expr.expr {
      self.default_export = Some(ident.sym.clone());
    }
  }
  // Records `export { orig as exported }` under the exported name (or the
  // original name when there is no alias).
  fn visit_export_named_specifier(
    &mut self,
    export_named_specifier: &ast::ExportNamedSpecifier,
  ) {
    fn get_atom(export_name: &ast::ModuleExportName) -> Atom {
      match export_name {
        ast::ModuleExportName::Ident(ident) => ident.sym.clone(),
        ast::ModuleExportName::Str(s) => s.value.to_atom_lossy().into_owned(),
      }
    }
    match &export_named_specifier.exported {
      Some(exported) => {
        self.named_exports.insert(get_atom(exported));
      }
      None => {
        self
          .named_exports
          .insert(get_atom(&export_named_specifier.orig));
      }
    }
  }
  fn visit_named_export(&mut self, named_export: &ast::NamedExport) {
    // ExportCollector does not handle re-exports
    if named_export.src.is_some() {
      return;
    }
    named_export.visit_children_with(self);
  }
}
/// Collects every identifier bound by the pattern `pat` — including
/// identifiers introduced through array/object destructuring, rest elements,
/// and default-value (assign) patterns — into a flat list of atoms.
fn extract_sym_from_pat(pat: &ast::Pat) -> Vec<Atom> {
  // Depth-first walk that appends each bound identifier to `out`.
  fn walk(pat: &ast::Pat, out: &mut Vec<Atom>) {
    match pat {
      ast::Pat::Ident(binding_ident) => out.push(binding_ident.sym.clone()),
      ast::Pat::Array(array_pat) => {
        // `flatten` skips elided elements like `[, x]`.
        array_pat
          .elems
          .iter()
          .flatten()
          .for_each(|elem| walk(elem, out));
      }
      ast::Pat::Rest(rest_pat) => walk(&rest_pat.arg, out),
      ast::Pat::Object(object_pat) => {
        for prop in &object_pat.props {
          match prop {
            ast::ObjectPatProp::Assign(assign) => {
              out.push(assign.key.sym.clone());
            }
            ast::ObjectPatProp::KeyValue(key_value) => {
              walk(&key_value.value, out);
            }
            ast::ObjectPatProp::Rest(rest) => walk(&rest.arg, out),
          }
        }
      }
      ast::Pat::Assign(assign_pat) => walk(&assign_pat.left, out),
      // Invalid and expression patterns bind no identifiers.
      ast::Pat::Invalid(_) | ast::Pat::Expr(_) => {}
    }
  }
  let mut syms = Vec::new();
  walk(pat, &mut syms);
  syms
}
/// Generates a "pseudo" file from a given file by applying the following
/// transformations:
///
/// 1. Injects `import` statements for exported items from the base file
/// 2. If `wrap_kind` is [`WrapKind::DenoTest`], wraps the content of the file
/// in a `Deno.test` call.
///
/// For example, given a file that looks like:
///
/// ```ts
/// import { assertEquals } from "@std/assert/equals";
///
/// assertEquals(increment(1), 2);
/// ```
///
/// and the base file (from which the above snippet was extracted):
///
/// ```ts
/// export function increment(n: number): number {
/// return n + 1;
/// }
///
/// export const SOME_CONST = "HELLO";
/// ```
///
/// The generated pseudo test file would look like (if `wrap_kind` is [`WrapKind::DenoTest`]):
///
/// ```ts
/// import { assertEquals } from "@std/assert/equals";
/// import { increment, SOME_CONST } from "./base.ts";
///
/// Deno.test("./base.ts$1-3.ts", async () => {
/// assertEquals(increment(1), 2);
/// });
/// ```
///
/// # Edge case 1 - duplicate identifier
///
/// If a given file imports, say, `doSomething` from an external module while
/// the base file exports `doSomething` as well, the generated pseudo test file
/// would end up having two duplicate imports for `doSomething`, causing the
/// duplicate identifier error.
///
/// To avoid this issue, when a given file imports `doSomething`, this takes
/// precedence over the automatic import injection for the base file's
/// `doSomething`. So the generated pseudo test file would look like:
///
/// ```ts
/// import { assertEquals } from "@std/assert/equals";
/// import { doSomething } from "./some_external_module.ts";
///
/// Deno.test("./base.ts$1-3.ts", async () => {
/// assertEquals(doSomething(1), 2);
/// });
/// ```
///
/// # Edge case 2 - exports can't be put inside `Deno.test` blocks
///
/// All exports like `export const foo = 42` must be at the top level of the
/// module, making it impossible to wrap exports in `Deno.test` blocks. For
/// example, when the following code snippet is provided:
///
/// ```ts
/// const logger = createLogger("my-awesome-module");
///
/// export function sum(a: number, b: number): number {
/// logger.debug("sum called");
/// return a + b;
/// }
/// ```
///
/// If we applied the naive transformation to this, the generated pseudo test
/// file would look like:
///
/// ```ts
/// Deno.test("./base.ts$1-7.ts", async () => {
/// const logger = createLogger("my-awesome-module");
///
/// export function sum(a: number, b: number): number {
/// logger.debug("sum called");
/// return a + b;
/// }
/// });
/// ```
///
/// But obviously this violates the rule because `export function sum` is not
/// at the top level of the module.
///
/// To address this issue, the `export` keyword is removed so that the item can
/// stay in the `Deno.test` block's scope:
///
/// ```ts
/// Deno.test("./base.ts$1-7.ts", async () => {
/// const logger = createLogger("my-awesome-module");
///
/// function sum(a: number, b: number): number {
/// logger.debug("sum called");
/// return a + b;
/// }
/// });
/// ```
fn generate_pseudo_file(
  file: File,
  base_file_specifier: &ModuleSpecifier,
  exports: &ExportCollector,
  wrap_kind: WrapKind,
) -> Result<File, AnyError> {
  let file = TextDecodedFile::decode(file)?;
  // `scope_analysis: true` so that `top_level_context()` below is meaningful.
  let parsed = deno_ast::parse_program(deno_ast::ParseParams {
    specifier: file.specifier.clone(),
    text: file.source,
    media_type: file.media_type,
    capture_tokens: false,
    scope_analysis: true,
    maybe_syntax: None,
  })?;
  // Identifiers already declared at the snippet's top level; the injected
  // auto-import must not re-introduce these (see "Edge case 1" above).
  let top_level_atoms = swc_utils::collect_decls_with_ctxt::<Atom, _>(
    &parsed.program_ref(),
    parsed.top_level_context(),
  );
  let transformed =
    parsed
      .program_ref()
      .to_owned()
      .apply(&mut visit_mut_pass(Transform {
        specifier: &file.specifier,
        base_file_specifier,
        exports_from_base: exports,
        atoms_to_be_excluded_from_import: top_level_atoms,
        wrap_kind,
      }));
  // Re-print the transformed AST, preserving the original comments.
  let source = deno_ast::swc::codegen::to_code_with_comments(
    Some(&parsed.comments().as_single_threaded()),
    &transformed,
  );
  log::debug!("{}:\n{}", file.specifier, source);
  Ok(File {
    url: file.specifier,
    mtime: None,
    maybe_headers: None,
    source: source.into_bytes().into(),
    loaded_from: deno_cache_dir::file_fetcher::LoadedFrom::Local,
  })
}
/// AST pass that rewrites an extracted snippet into its final pseudo-file
/// form (see `generate_pseudo_file`).
struct Transform<'a> {
  /// Specifier of the generated pseudo file (used as the `Deno.test` name).
  specifier: &'a ModuleSpecifier,
  /// Specifier of the file the snippet was extracted from.
  base_file_specifier: &'a ModuleSpecifier,
  /// Exports of the base file, candidates for the injected auto-import.
  exports_from_base: &'a ExportCollector,
  /// Identifiers already bound at the snippet's top level; excluded from the
  /// auto-import to avoid duplicate declarations.
  atoms_to_be_excluded_from_import: rustc_hash::FxHashSet<Atom>,
  wrap_kind: WrapKind,
}
impl VisitMut for Transform<'_> {
fn visit_mut_program(&mut self, node: &mut ast::Program) {
let new_module_items = match node {
ast::Program::Module(module) => {
let mut module_decls = vec![];
let mut stmts = vec![];
for item in &module.body {
match item {
ast::ModuleItem::ModuleDecl(decl) => match self.wrap_kind {
WrapKind::NoWrap => {
module_decls.push(decl.clone());
}
// We remove `export` keywords so that they can be put inside
// `Deno.test` block scope.
WrapKind::DenoTest => match decl {
ast::ModuleDecl::ExportDecl(export_decl) => {
stmts.push(ast::Stmt::Decl(export_decl.decl.clone()));
}
ast::ModuleDecl::ExportDefaultDecl(export_default_decl) => {
let stmt = match &export_default_decl.decl {
ast::DefaultDecl::Class(class) => {
let expr = ast::Expr::Class(class.clone());
ast::Stmt::Expr(ast::ExprStmt {
span: DUMMY_SP,
expr: Box::new(expr),
})
}
ast::DefaultDecl::Fn(func) => {
let expr = ast::Expr::Fn(func.clone());
ast::Stmt::Expr(ast::ExprStmt {
span: DUMMY_SP,
expr: Box::new(expr),
})
}
ast::DefaultDecl::TsInterfaceDecl(ts_interface_decl) => {
ast::Stmt::Decl(ast::Decl::TsInterface(
ts_interface_decl.clone(),
))
}
};
stmts.push(stmt);
}
ast::ModuleDecl::ExportDefaultExpr(export_default_expr) => {
stmts.push(ast::Stmt::Expr(ast::ExprStmt {
span: DUMMY_SP,
expr: export_default_expr.expr.clone(),
}));
}
_ => {
module_decls.push(decl.clone());
}
},
},
ast::ModuleItem::Stmt(stmt) => {
stmts.push(stmt.clone());
}
}
}
let mut transformed_items = vec![];
transformed_items
.extend(module_decls.into_iter().map(ast::ModuleItem::ModuleDecl));
let import_specifiers = self
.exports_from_base
.to_import_specifiers(&self.atoms_to_be_excluded_from_import);
if !import_specifiers.is_empty() {
transformed_items.push(ast::ModuleItem::ModuleDecl(
ast::ModuleDecl::Import(ast::ImportDecl {
span: DUMMY_SP,
specifiers: import_specifiers,
src: Box::new(ast::Str {
span: DUMMY_SP,
value: self.base_file_specifier.to_string().into(),
raw: None,
}),
type_only: false,
with: None,
phase: ast::ImportPhase::Evaluation,
}),
));
}
match self.wrap_kind {
WrapKind::DenoTest => {
transformed_items.push(ast::ModuleItem::Stmt(wrap_in_deno_test(
stmts,
self.specifier.to_string().into(),
)));
}
WrapKind::NoWrap => {
transformed_items
.extend(stmts.into_iter().map(ast::ModuleItem::Stmt));
}
}
transformed_items
}
ast::Program::Script(script) => {
let mut transformed_items = vec![];
let import_specifiers = self
.exports_from_base
.to_import_specifiers(&self.atoms_to_be_excluded_from_import);
if !import_specifiers.is_empty() {
transformed_items.push(ast::ModuleItem::ModuleDecl(
ast::ModuleDecl::Import(ast::ImportDecl {
span: DUMMY_SP,
specifiers: import_specifiers,
src: Box::new(ast::Str {
span: DUMMY_SP,
value: self.base_file_specifier.to_string().into(),
raw: None,
}),
type_only: false,
with: None,
phase: ast::ImportPhase::Evaluation,
}),
));
}
match self.wrap_kind {
WrapKind::DenoTest => {
transformed_items.push(ast::ModuleItem::Stmt(wrap_in_deno_test(
script.body.clone(),
self.specifier.to_string().into(),
)));
}
WrapKind::NoWrap => {
transformed_items.extend(
script.body.clone().into_iter().map(ast::ModuleItem::Stmt),
);
}
}
transformed_items
}
};
*node = ast::Program::Module(ast::Module {
span: DUMMY_SP,
body: new_module_items,
shebang: None,
});
}
}
/// Builds the statement `Deno.test("<test_name>", async () => { <stmts> });`.
fn wrap_in_deno_test(stmts: Vec<ast::Stmt>, test_name: Atom) -> ast::Stmt {
  ast::Stmt::Expr(ast::ExprStmt {
    span: DUMMY_SP,
    expr: Box::new(ast::Expr::Call(ast::CallExpr {
      span: DUMMY_SP,
      // `Deno.test`
      callee: ast::Callee::Expr(Box::new(ast::Expr::Member(ast::MemberExpr {
        span: DUMMY_SP,
        obj: Box::new(ast::Expr::Ident(ast::Ident {
          span: DUMMY_SP,
          sym: "Deno".into(),
          optional: false,
          ..Default::default()
        })),
        prop: ast::MemberProp::Ident(ast::IdentName {
          span: DUMMY_SP,
          sym: "test".into(),
        }),
      }))),
      args: vec![
        // First argument: the test name string literal.
        ast::ExprOrSpread {
          spread: None,
          expr: Box::new(ast::Expr::Lit(ast::Lit::Str(ast::Str {
            span: DUMMY_SP,
            value: test_name.into(),
            raw: None,
          }))),
        },
        // Second argument: `async () => { <stmts> }`.
        ast::ExprOrSpread {
          spread: None,
          expr: Box::new(ast::Expr::Arrow(ast::ArrowExpr {
            span: DUMMY_SP,
            params: vec![],
            body: Box::new(ast::BlockStmtOrExpr::BlockStmt(ast::BlockStmt {
              span: DUMMY_SP,
              stmts,
              ..Default::default()
            })),
            is_async: true,
            is_generator: false,
            type_params: None,
            return_type: None,
            ..Default::default()
          })),
        },
      ],
      type_args: None,
      ..Default::default()
    })),
  })
}
#[cfg(test)]
mod tests {
use deno_ast::swc::atoms::Atom;
use pretty_assertions::assert_eq;
use super::*;
use crate::file_fetcher::TextDecodedFile;
#[test]
fn test_extract_doc_tests() {
struct Input {
source: &'static str,
specifier: &'static str,
}
struct Expected {
source: &'static str,
specifier: &'static str,
media_type: MediaType,
}
struct Test {
input: Input,
expected: Vec<Expected>,
}
let tests = [
Test {
input: Input {
source: r#""#,
specifier: "file:///main.ts",
},
expected: vec![],
},
Test {
input: Input {
source: r#"
/**
* ```ts
* import { assertEquals } from "@std/assert/equal";
*
* assertEquals(add(1, 2), 3);
* ```
*/
export function add(a: number, b: number): number {
return a + b;
}
"#,
specifier: "file:///main.ts",
},
expected: vec![Expected {
source: r#"import { assertEquals } from "@std/assert/equal";
import { add } from "file:///main.ts";
Deno.test("file:///main.ts$3-8.ts", async ()=>{
assertEquals(add(1, 2), 3);
});
"#,
specifier: "file:///main.ts$3-8.ts",
media_type: MediaType::TypeScript,
}],
},
Test {
input: Input {
source: r#"
/**
* ```ts
* foo();
* ```
*/
export function foo() {}
export default class Bar {}
"#,
specifier: "file:///main.ts",
},
expected: vec![Expected {
source: r#"import Bar, { foo } from "file:///main.ts";
Deno.test("file:///main.ts$3-6.ts", async ()=>{
foo();
});
"#,
specifier: "file:///main.ts$3-6.ts",
media_type: MediaType::TypeScript,
}],
},
Test {
input: Input {
source: r#"
/**
* ```ts
* const input = { a: 42 } satisfies Args;
* foo(input);
* ```
*/
export function foo(args: Args) {}
export type Args = { a: number };
"#,
specifier: "file:///main.ts",
},
expected: vec![Expected {
source: r#"import { Args, foo } from "file:///main.ts";
Deno.test("file:///main.ts$3-7.ts", async ()=>{
const input = {
a: 42
} satisfies Args;
foo(input);
});
"#,
specifier: "file:///main.ts$3-7.ts",
media_type: MediaType::TypeScript,
}],
},
Test {
input: Input {
source: r#"
/**
* This is a module-level doc.
*
* ```ts
* foo();
* ```
*
* @module doc
*/
"#,
specifier: "file:///main.ts",
},
expected: vec![Expected {
source: r#"Deno.test("file:///main.ts$5-8.ts", async ()=>{
foo();
});
"#,
specifier: "file:///main.ts$5-8.ts",
media_type: MediaType::TypeScript,
}],
},
Test {
input: Input {
source: r#"
/**
* This is a module-level doc.
*
* ```js
* const cls = new MyClass();
* ```
*
* @module doc
*/
/**
* ```ts
* foo();
* ```
*/
export function foo() {}
export default class MyClass {}
export * from "./other.ts";
"#,
specifier: "file:///main.ts",
},
expected: vec![
Expected {
source: r#"import MyClass, { foo } from "file:///main.ts";
Deno.test("file:///main.ts$5-8.js", async ()=>{
const cls = new MyClass();
});
"#,
specifier: "file:///main.ts$5-8.js",
media_type: MediaType::JavaScript,
},
Expected {
source: r#"import MyClass, { foo } from "file:///main.ts";
Deno.test("file:///main.ts$13-16.ts", async ()=>{
foo();
});
"#,
specifier: "file:///main.ts$13-16.ts",
media_type: MediaType::TypeScript,
},
],
},
Test {
input: Input {
source: r#"
/**
* ```ts
* foo();
* ```
*/
export function foo() {}
export const ONE = 1;
const TWO = 2;
export default TWO;
"#,
specifier: "file:///main.ts",
},
expected: vec![Expected {
source: r#"import TWO, { ONE, foo } from "file:///main.ts";
Deno.test("file:///main.ts$3-6.ts", async ()=>{
foo();
});
"#,
specifier: "file:///main.ts$3-6.ts",
media_type: MediaType::TypeScript,
}],
},
// Avoid duplicate imports
Test {
input: Input {
source: r#"
/**
* ```ts
* import { DUPLICATE1 } from "./other1.ts";
* import * as DUPLICATE2 from "./other2.js";
* import { foo as DUPLICATE3 } from "./other3.tsx";
*
* foo();
* ```
*/
export function foo() {}
export const DUPLICATE1 = "dup1";
const DUPLICATE2 = "dup2";
export default DUPLICATE2;
const DUPLICATE3 = "dup3";
export { DUPLICATE3 };
"#,
specifier: "file:///main.ts",
},
expected: vec![Expected {
source: r#"import { DUPLICATE1 } from "./other1.ts";
import * as DUPLICATE2 from "./other2.js";
import { foo as DUPLICATE3 } from "./other3.tsx";
import { foo } from "file:///main.ts";
Deno.test("file:///main.ts$3-10.ts", async ()=>{
foo();
});
"#,
specifier: "file:///main.ts$3-10.ts",
media_type: MediaType::TypeScript,
}],
},
// duplication of imported identifier and local identifier is fine
Test {
input: Input {
source: r#"
/**
* ```ts
* const foo = createFoo();
* foo();
* ```
*/
export function createFoo() {
return () => "created foo";
}
export const foo = () => "foo";
"#,
specifier: "file:///main.ts",
},
expected: vec![Expected {
source: r#"import { createFoo } from "file:///main.ts";
Deno.test("file:///main.ts$3-7.ts", async ()=>{
const foo = createFoo();
foo();
});
"#,
specifier: "file:///main.ts$3-7.ts",
media_type: MediaType::TypeScript,
}],
},
// https://github.com/denoland/deno/issues/25718
// A case where the example code has an exported item which references
// a variable from one upper scope.
// Naive application of `Deno.test` wrap would cause a reference error
// because the variable would go inside the `Deno.test` block while the
// exported item would be moved to the top level. To suppress the auto
// move of the exported item to the top level, the `export` keyword is
// removed so that the item stays in the same scope as the variable.
Test {
input: Input {
source: r#"
/**
* ```ts
* import { getLogger } from "@std/log";
*
* const logger = getLogger("my-awesome-module");
*
* export function foo() {
* logger.debug("hello");
* }
* ```
*
* @module
*/
"#,
specifier: "file:///main.ts",
},
expected: vec![Expected {
source: r#"import { getLogger } from "@std/log";
Deno.test("file:///main.ts$3-12.ts", async ()=>{
const logger = getLogger("my-awesome-module");
function foo() {
logger.debug("hello");
}
});
"#,
specifier: "file:///main.ts$3-12.ts",
media_type: MediaType::TypeScript,
}],
},
Test {
input: Input {
source: r#"
# Header
This is a *markdown*.
```js
import { assertEquals } from "@std/assert/equal";
import { add } from "jsr:@deno/non-existent";
assertEquals(add(1, 2), 3);
```
"#,
specifier: "file:///README.md",
},
expected: vec![Expected {
source: r#"import { assertEquals } from "@std/assert/equal";
import { add } from "jsr:@deno/non-existent";
Deno.test("file:///README.md$6-12.js", async ()=>{
assertEquals(add(1, 2), 3);
});
"#,
specifier: "file:///README.md$6-12.js",
media_type: MediaType::JavaScript,
}],
},
// https://github.com/denoland/deno/issues/26009
Test {
input: Input {
source: r#"
/**
* ```ts
* console.log(Foo)
* ```
*/
export class Foo {}
export default Foo
"#,
specifier: "file:///main.ts",
},
expected: vec![Expected {
source: r#"import { Foo } from "file:///main.ts";
Deno.test("file:///main.ts$3-6.ts", async ()=>{
console.log(Foo);
});
"#,
specifier: "file:///main.ts$3-6.ts",
media_type: MediaType::TypeScript,
}],
},
// https://github.com/denoland/deno/issues/26728
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | true |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/util/console.rs | cli/util/console.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::io;
use std::sync::Arc;
use console_static_text::ConsoleStaticText;
use crossterm::ExecutableCommand;
use crossterm::cursor;
use crossterm::event::KeyCode;
use crossterm::event::KeyEvent;
use crossterm::event::KeyEventKind;
use crossterm::event::KeyModifiers;
use crossterm::terminal;
use deno_core::parking_lot::Mutex;
use deno_runtime::ops::tty::ConsoleSize;
use super::draw_thread::DrawThread;
/// Gets the console size.
///
/// Returns `None` when the size can't be determined (e.g. stderr is not
/// attached to a terminal).
pub fn console_size() -> Option<ConsoleSize> {
  // Queried on stderr, the handle the terminal UI writes to.
  let stderr = &deno_runtime::deno_io::STDERR_HANDLE;
  deno_runtime::ops::tty::console_size(stderr).ok()
}
/// Creates a [`ConsoleStaticText`] that re-queries the console size on every
/// render, clamping each dimension to fit in a `u16`.
pub fn new_console_static_text() -> ConsoleStaticText {
  ConsoleStaticText::new(move || {
    let size = console_size();
    // The runtime reports u32 dimensions; console_static_text wants u16.
    let to_u16 = |value: u32| value.min(u16::MAX as u32) as u16;
    console_static_text::ConsoleSize {
      cols: size.map(|size| size.cols).map(to_u16),
      rows: size.map(|size| size.rows).map(to_u16),
    }
  })
}
/// RAII guard that puts the terminal into raw mode and restores cooked mode
/// when dropped.
pub struct RawMode {
  // Whether dropping this guard should still disable raw mode.
  needs_disable: bool,
}
impl RawMode {
  /// Enables raw mode, returning a guard that disables it again on drop.
  pub fn enable() -> io::Result<Self> {
    terminal::enable_raw_mode()?;
    Ok(Self { needs_disable: true })
  }
  /// Explicitly disables raw mode, surfacing any error (unlike `Drop`,
  /// which must swallow it).
  pub fn disable(mut self) -> io::Result<()> {
    self.needs_disable = false;
    terminal::disable_raw_mode()
  }
}
impl Drop for RawMode {
  fn drop(&mut self) {
    if !self.needs_disable {
      return;
    }
    // Best effort — errors can't be reported from drop.
    let _ = terminal::disable_raw_mode();
  }
}
/// RAII guard that hides the terminal cursor (on stderr) and shows it again
/// when dropped.
pub struct HideCursorGuard {
  // Whether dropping this guard should still re-show the cursor.
  needs_disable: bool,
}
impl HideCursorGuard {
  /// Hides the cursor, returning a guard that restores it on drop.
  pub fn hide() -> io::Result<Self> {
    io::stderr().execute(cursor::Hide)?;
    Ok(Self { needs_disable: true })
  }
  /// Explicitly shows the cursor again, surfacing any error (unlike `Drop`,
  /// which must swallow it).
  pub fn show(mut self) -> io::Result<()> {
    self.needs_disable = false;
    io::stderr().execute(cursor::Show)?;
    Ok(())
  }
}
impl Drop for HideCursorGuard {
  fn drop(&mut self) {
    if !self.needs_disable {
      return;
    }
    // Best effort — errors can't be reported from drop.
    let _ = io::stderr().execute(cursor::Show);
  }
}
/// Options for [`confirm`].
#[derive(Debug)]
pub struct ConfirmOptions {
  /// The question shown to the user.
  pub message: String,
  /// The value used when the user presses enter without picking y/n
  /// (also controls which letter is capitalized in the `[Y/n]` hint).
  pub default: bool,
}
/// Prompts and confirms if a tty.
///
/// Returns `None` when not a tty (the draw thread is unsupported), when
/// entering raw mode or hiding the cursor fails, or when the user cancels
/// the prompt (ctrl+c / q).
pub fn confirm(options: ConfirmOptions) -> Option<bool> {
  #[derive(Debug)]
  struct PromptRenderer {
    options: ConfirmOptions,
    selection: Arc<Mutex<String>>,
  }
  impl super::draw_thread::DrawThreadRenderer for PromptRenderer {
    fn render(&self, _data: &ConsoleSize) -> String {
      // The default choice is shown capitalized, e.g. "[Y/n]".
      let is_yes_default = self.options.default;
      let selection = self.selection.lock();
      format!(
        "{} [{}/{}] {}",
        self.options.message,
        if is_yes_default { "Y" } else { "y" },
        if is_yes_default { "n" } else { "N" },
        *selection,
      )
    }
  }
  if !DrawThread::is_supported() {
    return None;
  }
  let _raw_mode = RawMode::enable().ok()?;
  let _hide_cursor_guard = HideCursorGuard::hide().ok()?;
  // The user's current (not yet confirmed) selection, shared with the renderer.
  let selection = Arc::new(Mutex::new(String::new()));
  let default = options.default;
  // uses a renderer and the draw thread in order to allow
  // displaying other stuff on the draw thread while the prompt
  // is showing
  let renderer = PromptRenderer {
    options,
    selection: selection.clone(),
  };
  // Keeps the prompt entry alive on the draw thread until this fn returns.
  let _state = DrawThread::add_entry(Arc::new(renderer));
  let mut selected = default;
  loop {
    let event = crossterm::event::read().ok()?;
    #[allow(clippy::single_match)]
    match event {
      crossterm::event::Event::Key(KeyEvent {
        kind: KeyEventKind::Press,
        code,
        modifiers,
        ..
      }) => match (code, modifiers) {
        // Cancel the prompt entirely.
        (KeyCode::Char('c'), KeyModifiers::CONTROL)
        | (KeyCode::Char('q'), KeyModifiers::NONE) => break,
        (KeyCode::Char('y'), KeyModifiers::NONE | KeyModifiers::SHIFT) => {
          selected = true;
          *selection.lock() = "Y".to_string();
        }
        (KeyCode::Char('n'), KeyModifiers::NONE | KeyModifiers::SHIFT) => {
          selected = false;
          *selection.lock() = "N".to_string();
        }
        // Backspace resets back to the default choice.
        (KeyCode::Backspace, _) => {
          selected = default;
          *selection.lock() = "".to_string();
        }
        // l is common for enter in vim keybindings
        (KeyCode::Enter, _) | (KeyCode::Char('l'), KeyModifiers::NONE) => {
          return Some(selected);
        }
        _ => {}
      },
      _ => {}
    }
  }
  None
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/util/retry.rs | cli/util/retry.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::future::Future;
use std::time::Duration;
/// Retries the fallible async operation `f` with increasing waits (100ms,
/// 250ms, 500ms) while `should_retry` returns `true` for the error; the
/// operation runs at most `WAITS.len() + 1` times.
///
/// When all retries are exhausted, the *first* error encountered is returned
/// rather than the last one.
pub fn retry<
  F: FnMut() -> Fut,
  T,
  E,
  Fut: Future<Output = Result<T, E>>,
  ShouldRetry: FnMut(&E) -> bool,
>(
  mut f: F,
  mut should_retry: ShouldRetry,
) -> impl Future<Output = Result<T, E>> {
  // Backoff schedule between attempts.
  const WAITS: [Duration; 3] = [
    Duration::from_millis(100),
    Duration::from_millis(250),
    Duration::from_millis(500),
  ];
  let mut waits = WAITS.into_iter();
  async move {
    // Holds the first failed result so it can be reported once the retries
    // are exhausted.
    let mut first_result = None;
    loop {
      let result = f().await;
      match result {
        Ok(r) => return Ok(r),
        // Non-retryable errors bail out immediately.
        Err(e) if !should_retry(&e) => return Err(e),
        _ => {}
      }
      if first_result.is_none() {
        first_result = Some(result);
      }
      let Some(wait) = waits.next() else {
        // Out of retries: return the first error, not the last.
        return first_result.unwrap();
      };
      tokio::time::sleep(wait).await;
    }
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/util/v8/convert.rs | cli/util/v8/convert.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use deno_core::FromV8;
use deno_core::ToV8;
use deno_core::v8;
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
/// A wrapper type for `Option<T>` that (de)serializes `None` as `null`
#[repr(transparent)]
pub struct OptionNull<T>(pub Option<T>);
// Conversions to and from the plain `Option` are free: `repr(transparent)`
// guarantees an identical layout.
impl<T> From<Option<T>> for OptionNull<T> {
  fn from(option: Option<T>) -> Self {
    Self(option)
  }
}
impl<T> From<OptionNull<T>> for Option<T> {
  fn from(value: OptionNull<T>) -> Self {
    value.0
  }
}
impl<'a, T> ToV8<'a> for OptionNull<T>
where
  T: ToV8<'a>,
{
  type Error = T::Error;
  // `None` serializes to JS `null` (not `undefined`); `Some` delegates to `T`.
  fn to_v8(
    self,
    scope: &mut v8::PinScope<'a, '_>,
  ) -> Result<v8::Local<'a, v8::Value>, Self::Error> {
    match self.0 {
      Some(value) => value.to_v8(scope),
      None => Ok(v8::null(scope).into()),
    }
  }
}
impl<'a, T> FromV8<'a> for OptionNull<T>
where
  T: FromV8<'a>,
{
  type Error = T::Error;
  // JS `null` maps to `None`; anything else (including `undefined`) is handed
  // to the inner type's conversion.
  fn from_v8(
    scope: &mut v8::PinScope<'a, '_>,
    value: v8::Local<'a, v8::Value>,
  ) -> Result<Self, Self::Error> {
    if value.is_null() {
      return Ok(OptionNull(None));
    }
    T::from_v8(scope, value).map(|inner| OptionNull(Some(inner)))
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/util/progress_bar/renderer.rs | cli/util/progress_bar/renderer.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::fmt::Write;
use std::sync::atomic::AtomicUsize;
use std::sync::atomic::Ordering;
use std::time::Duration;
use deno_terminal::colors;
use super::ProgressMessagePrompt;
use crate::util::display::human_download_size;
/// A single in-progress item handed to a progress bar renderer.
#[derive(Clone)]
pub struct ProgressDataDisplayEntry {
  pub prompt: ProgressMessagePrompt,
  pub message: String,
  /// Current progress; `total_size == 0` suppresses the byte counter in
  /// the renderers.
  pub position: u64,
  pub total_size: u64,
}
/// Snapshot of the overall progress state passed to a [`ProgressBarRenderer`].
#[derive(Clone)]
pub struct ProgressData {
  pub terminal_width: u32,
  /// Entries currently displayed; some renderers use only the first one.
  pub display_entries: Vec<ProgressDataDisplayEntry>,
  /// Entries not yet completed, out of `total_entries` (renderers show
  /// `total_entries - pending_entries` as the "done" count).
  pub pending_entries: usize,
  /// Completion fraction; multiplied by the bar width when rendering.
  pub percent_done: f64,
  pub total_entries: usize,
  pub duration: Duration,
}
/// Renders a [`ProgressData`] snapshot into the text drawn on the console.
pub trait ProgressBarRenderer: Send + Sync + std::fmt::Debug {
  fn render(&self, data: ProgressData) -> String;
}
/// Indicatif style progress bar.
#[derive(Debug)]
pub struct BarProgressBarRenderer {
  /// When true, sizes are formatted via `human_download_size` instead of
  /// printed as raw numbers.
  pub display_human_download_size: bool,
}
impl ProgressBarRenderer for BarProgressBarRenderer {
  /// Renders an indicatif-style bar for the first display entry: an optional
  /// "Download <msg> <pos>/<total>" line followed by a line of the shape
  /// "[elapsed] [####>----]<bytes><counts>".
  fn render(&self, data: ProgressData) -> String {
    // In `ProgressBarRenderer` we only care about first entry.
    let Some(display_entry) = &data.display_entries.first() else {
      return String::new();
    };
    // " pos/total" text plus the maximum width it can occupy, so space can
    // be reserved and the bar doesn't jitter as the numbers grow.
    let (bytes_text, bytes_text_max_width) = {
      let total_size = display_entry.total_size;
      let pos = display_entry.position;
      if total_size == 0 {
        (String::new(), 0)
      } else {
        let (pos_str, total_size_str) = if self.display_human_download_size {
          (
            human_download_size(pos, total_size),
            human_download_size(total_size, total_size),
          )
        } else {
          (pos.to_string(), total_size.to_string())
        };
        (
          format!(" {}/{}", pos_str, total_size_str),
          2 + total_size_str.len() * 2,
        )
      }
    };
    // " (done/total)" entry counter, only shown when there are multiple entries.
    let (total_text, total_text_max_width) = if data.total_entries <= 1 {
      (String::new(), 0)
    } else {
      let total_entries_str = data.total_entries.to_string();
      (
        format!(
          " ({}/{})",
          data.total_entries - data.pending_entries,
          data.total_entries
        ),
        4 + total_entries_str.len() * 2,
      )
    };
    let elapsed_text = get_elapsed_text(data.duration);
    let mut text = String::new();
    if !display_entry.message.is_empty() {
      writeln!(
        &mut text,
        "{} {}{}",
        colors::green("Download"),
        display_entry.message,
        bytes_text,
      )
      .unwrap();
    }
    text.push_str(&elapsed_text);
    // Clamp the usable width so very narrow/wide terminals stay readable.
    let max_width = (data.terminal_width as i32 - 5).clamp(10, 75) as usize;
    let same_line_text_width =
      elapsed_text.len() + total_text_max_width + bytes_text_max_width + 3; // space, open and close brace
    let total_bars = if same_line_text_width > max_width {
      1
    } else {
      max_width - same_line_text_width
    };
    let completed_bars =
      (total_bars as f64 * data.percent_done).floor() as usize;
    text.push_str(" [");
    // `write!` into the existing buffer rather than `push_str(&format!(...))`
    // to avoid allocating an intermediate string; writing to a `String` is
    // infallible, hence the unwraps.
    if completed_bars != total_bars {
      if completed_bars > 0 {
        write!(
          &mut text,
          "{}",
          colors::cyan(format!("{}{}", "#".repeat(completed_bars - 1), ">"))
        )
        .unwrap();
      }
      write!(
        &mut text,
        "{}",
        colors::intense_blue("-".repeat(total_bars - completed_bars))
      )
      .unwrap();
    } else {
      write!(&mut text, "{}", colors::cyan("#".repeat(completed_bars)))
        .unwrap();
    }
    text.push(']');
    // suffix
    if display_entry.message.is_empty() {
      write!(&mut text, "{}", colors::gray(bytes_text)).unwrap();
    }
    write!(&mut text, "{}", colors::gray(total_text)).unwrap();
    text
  }
}
/// Renderer that shows a spinner plus a textual list of what is being
/// downloaded, instead of a graphical bar.
#[derive(Debug)]
pub struct TextOnlyProgressBarRenderer {
  // Index into SPINNER_CHARS, advanced on every render call.
  last_tick: AtomicUsize,
  // When this renderer was created; used for the elapsed-time display.
  start_time: std::time::Instant,
}
impl Default for TextOnlyProgressBarRenderer {
  fn default() -> Self {
    Self {
      // AtomicUsize::default() starts the spinner at index 0.
      last_tick: Default::default(),
      start_time: std::time::Instant::now(),
    }
  }
}
// Spinner animation frames; a filled segment sweeps right then drains left.
const SPINNER_CHARS: [&str; 13] = [
  "▰▱▱▱▱▱",
  "▰▰▱▱▱▱",
  "▰▰▰▱▱▱",
  "▰▰▰▰▱▱",
  "▰▰▰▰▰▱",
  "▰▰▰▰▰▰",
  "▰▰▰▰▰▰",
  "▱▰▰▰▰▰",
  "▱▱▰▰▰▰",
  "▱▱▱▰▰▰",
  "▱▱▱▱▰▰",
  "▱▱▱▱▱▰",
  "▱▱▱▱▱▱",
];
impl ProgressBarRenderer for TextOnlyProgressBarRenderer {
  /// Renders "<Prompt> <spinner> [MM:SS] done/total" plus a second line
  /// with the current entry's (shortened) message and byte counts.
  fn render(&self, data: ProgressData) -> String {
    // Advance the spinner one frame per render.
    // NOTE(review): this load/store pair is not a single atomic op; it
    // assumes renders are serialized (presumably by the draw thread) —
    // confirm before calling concurrently.
    let last_tick = {
      let last_tick = self.last_tick.load(Ordering::Relaxed);
      let last_tick = (last_tick + 1) % SPINNER_CHARS.len();
      self.last_tick.store(last_tick, Ordering::Relaxed);
      last_tick
    };
    let current_time = std::time::Instant::now();
    // Prefer the first entry that has something worth showing.
    let non_empty_entry = data
      .display_entries
      .iter()
      .find(|d| !d.message.is_empty() || d.total_size != 0);
    // NOTE(review): indexing [0] panics on an empty `display_entries`;
    // callers appear to guard against rendering with no entries — confirm.
    let prompt = match non_empty_entry {
      Some(entry) => entry.prompt,
      None => data.display_entries[0].prompt,
    };
    let mut display_str =
      format!("{} {} ", prompt.as_text(), SPINNER_CHARS[last_tick]);
    // Elapsed time is measured from renderer creation, not from `data.duration`.
    let elapsed_time = current_time - self.start_time;
    let fmt_elapsed_time = get_elapsed_text(elapsed_time);
    let total_text = if data.total_entries <= 1 {
      String::new()
    } else {
      format!(
        " {}/{}",
        data.total_entries - data.pending_entries,
        data.total_entries
      )
    };
    display_str.push_str(&format!("{}{}\n", fmt_elapsed_time, total_text));
    if let Some(display_entry) = non_empty_entry {
      let bytes_text = {
        let total_size = display_entry.total_size;
        let pos = display_entry.position;
        if total_size == 0 {
          String::new()
        } else {
          format!(
            " {}/{}",
            human_download_size(pos, total_size),
            human_download_size(total_size, total_size)
          )
        }
      };
      // TODO(@marvinhagemeister): We're trying to reconstruct the original
      // specifier from the resolved one, but we lack the information about
      // private registries URLs and other things here.
      let message = display_entry
        .message
        .replace("https://registry.npmjs.org/", "npm:")
        .replace("https://jsr.io/", "jsr:")
        .replace("%2f", "/")
        .replace("%2F", "/");
      display_str.push_str(
        &colors::gray(format!(" {}{}\n", message, bytes_text)).to_string(),
      );
    } else {
      // prevent cursor from going up
      display_str.push('\n');
    }
    display_str
  }
}
/// Formats an elapsed duration as a zero-padded `[MM:SS]` string.
///
/// Minutes are not capped at 60, so long runs render as e.g. `[200:02]`.
fn get_elapsed_text(elapsed: Duration) -> String {
  let total_secs = elapsed.as_secs();
  format!("[{:0>2}:{:0>2}]", total_secs / 60, total_secs % 60)
}
#[cfg(test)]
mod test {
  use std::time::Duration;
  use pretty_assertions::assert_eq;
  use test_util::assert_contains;
  use super::*;
  // Pins the [MM:SS] formatting, including unbounded minutes past 59.
  #[test]
  fn should_get_elapsed_text() {
    assert_eq!(get_elapsed_text(Duration::from_secs(1)), "[00:01]");
    assert_eq!(get_elapsed_text(Duration::from_secs(20)), "[00:20]");
    assert_eq!(get_elapsed_text(Duration::from_secs(59)), "[00:59]");
    assert_eq!(get_elapsed_text(Duration::from_secs(60)), "[01:00]");
    assert_eq!(
      get_elapsed_text(Duration::from_secs(60 * 5 + 23)),
      "[05:23]"
    );
    assert_eq!(
      get_elapsed_text(Duration::from_secs(60 * 59 + 59)),
      "[59:59]"
    );
    // minutes roll past 59 rather than carrying into hours
    assert_eq!(get_elapsed_text(Duration::from_secs(60 * 60)), "[60:00]");
    assert_eq!(
      get_elapsed_text(Duration::from_secs(60 * 60 * 3 + 20 * 60 + 2)),
      "[200:02]"
    );
    assert_eq!(
      get_elapsed_text(Duration::from_secs(60 * 60 * 99)),
      "[5940:00]"
    );
  }
  const BYTES_TO_KIB: u64 = 2u64.pow(10);
  // Exercises the bar renderer through its main layout cases: message line,
  // partial bar, tiny terminal, full bar, and unknown total size.
  #[test]
  fn should_render_bar_progress() {
    let renderer = BarProgressBarRenderer {
      display_human_download_size: true,
    };
    let mut data = ProgressData {
      display_entries: vec![ProgressDataDisplayEntry {
        prompt: ProgressMessagePrompt::Download,
        message: "data".to_string(),
        position: 0,
        total_size: 10 * BYTES_TO_KIB,
      }],
      duration: Duration::from_secs(1),
      pending_entries: 1,
      total_entries: 1,
      percent_done: 0f64,
      terminal_width: 50,
    };
    let text = renderer.render(data.clone());
    let text = test_util::strip_ansi_codes(&text);
    assert_eq!(
      text,
      concat!(
        "Download data 0.00KiB/10.00KiB\n",
        "[00:01] [-----------------]",
      ),
    );
    // empty message: bytes move to the suffix after the bar
    data.percent_done = 0.5f64;
    data.display_entries[0].position = 5 * BYTES_TO_KIB;
    data.display_entries[0].message = "".to_string();
    data.total_entries = 3;
    let text = renderer.render(data.clone());
    let text = test_util::strip_ansi_codes(&text);
    assert_eq!(text, "[00:01] [####>------] 5.00KiB/10.00KiB (2/3)",);
    // just ensure this doesn't panic
    data.terminal_width = 0;
    let text = renderer.render(data.clone());
    let text = test_util::strip_ansi_codes(&text);
    assert_eq!(text, "[00:01] [-] 5.00KiB/10.00KiB (2/3)",);
    // 100%: solid fill with no ">" head
    data.terminal_width = 50;
    data.pending_entries = 0;
    data.display_entries[0].position = 10 * BYTES_TO_KIB;
    data.percent_done = 1.0f64;
    let text = renderer.render(data.clone());
    let text = test_util::strip_ansi_codes(&text);
    assert_eq!(text, "[00:01] [###########] 10.00KiB/10.00KiB (3/3)",);
    // unknown total size: byte counts omitted, bar fills remaining width
    data.display_entries[0].position = 0;
    data.display_entries[0].total_size = 0;
    data.pending_entries = 0;
    data.total_entries = 1;
    let text = renderer.render(data);
    let text = test_util::strip_ansi_codes(&text);
    assert_eq!(text, "[00:01] [###################################]",);
  }
  // The text-only renderer is stateful (spinner ticks per render), so these
  // assertions encode the frame index reached after each call.
  #[test]
  fn should_render_text_only_progress() {
    let renderer = TextOnlyProgressBarRenderer::default();
    let mut data = ProgressData {
      display_entries: vec![ProgressDataDisplayEntry {
        prompt: ProgressMessagePrompt::Blocking,
        message: "data".to_string(),
        position: 0,
        total_size: 10 * BYTES_TO_KIB,
      }],
      duration: Duration::from_secs(1),
      pending_entries: 1,
      total_entries: 3,
      percent_done: 0f64,
      terminal_width: 50,
    };
    let text = renderer.render(data.clone());
    let text = test_util::strip_ansi_codes(&text);
    assert_contains!(text, "Blocking ▰▰▱▱▱▱");
    assert_contains!(text, "2/3\n data 0.00KiB/10.00KiB\n");
    // no message and no size: second line collapses to just a newline guard
    data.pending_entries = 0;
    data.total_entries = 1;
    data.display_entries[0].position = 0;
    data.display_entries[0].total_size = 0;
    let text = renderer.render(data);
    let text = test_util::strip_ansi_codes(&text);
    assert_contains!(text, "Blocking ▰▰▰▱▱▱");
    assert_contains!(text, "\n data\n");
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/util/progress_bar/mod.rs | cli/util/progress_bar/mod.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::sync::Arc;
use std::sync::atomic::AtomicU64;
use std::sync::atomic::Ordering;
use std::time::Instant;
use deno_core::parking_lot::Mutex;
use deno_runtime::ops::tty::ConsoleSize;
use self::renderer::ProgressBarRenderer;
use self::renderer::ProgressData;
use self::renderer::ProgressDataDisplayEntry;
use super::draw_thread::DrawThread;
use super::draw_thread::DrawThreadGuard;
use super::draw_thread::DrawThreadRenderer;
use crate::colors;
mod renderer;
// Inspired by Indicatif, but this custom implementation allows
// for more control over what's going on under the hood.
/// The kind of activity a progress entry represents; determines the
/// colored label shown before the progress output.
#[derive(Debug, Clone, Copy)]
pub enum ProgressMessagePrompt {
  Download,
  Blocking,
  Initialize,
  Cleaning,
}
impl ProgressMessagePrompt {
  /// Returns the colored, human-readable label for this prompt kind.
  /// `Blocking` renders in cyan; every other kind renders in green.
  pub fn as_text(&self) -> String {
    match self {
      Self::Download => colors::green("Download").to_string(),
      Self::Blocking => colors::cyan("Blocking").to_string(),
      Self::Initialize => colors::green("Initialize").to_string(),
      Self::Cleaning => colors::green("Cleaning").to_string(),
    }
  }
}
/// RAII guard for a single progress entry. Dropping the guard marks the
/// entry as finished and removes it from the progress bar.
#[derive(Debug)]
pub struct UpdateGuard {
  // `None` when progress bars are unsupported (non-TTY); all guard
  // operations then become no-ops.
  maybe_entry: Option<Arc<ProgressBarEntry>>,
}
impl Drop for UpdateGuard {
  fn drop(&mut self) {
    if let Some(entry) = &self.maybe_entry {
      entry.finish();
    }
  }
}
impl UpdateGuard {
  /// Updates the entry's current progress position; no-op when progress
  /// bars are unsupported.
  pub fn set_position(&self, value: u64) {
    if let Some(entry) = self.maybe_entry.as_deref() {
      entry.set_position(value);
    }
  }

  /// Updates the entry's total expected size; no-op when progress bars
  /// are unsupported.
  pub fn set_total_size(&self, value: u64) {
    if let Some(entry) = self.maybe_entry.as_deref() {
      entry.set_total_size(value);
    }
  }
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ProgressBarStyle {
  /// Shows a progress bar with human readable download size
  DownloadBars,
  /// Shows a progress bar with numeric progress count
  ProgressBars,
  /// Shows a list of currently downloaded files.
  TextOnly,
}
/// One tracked unit of work within the progress bar. Position and total
/// size are atomics so guards can update them without locking.
#[derive(Debug)]
struct ProgressBarEntry {
  // Monotonically increasing id assigned at creation; entries stay
  // sorted by id in the internal state.
  id: usize,
  prompt: ProgressMessagePrompt,
  pub message: String,
  pos: AtomicU64,
  total_size: AtomicU64,
  // Back-reference to the owning bar so `finish` can deregister itself.
  progress_bar: ProgressBarInner,
}
impl ProgressBarEntry {
  /// Current progress position (e.g. bytes downloaded so far).
  pub fn position(&self) -> u64 {
    self.pos.load(Ordering::Relaxed)
  }

  /// Overwrites the current progress position.
  pub fn set_position(&self, new_pos: u64) {
    self.pos.store(new_pos, Ordering::Relaxed);
  }

  /// Total expected size, or `0` when unknown.
  pub fn total_size(&self) -> u64 {
    self.total_size.load(Ordering::Relaxed)
  }

  /// Overwrites the total expected size.
  pub fn set_total_size(&self, new_size: u64) {
    self.total_size.store(new_size, Ordering::Relaxed);
  }

  /// Removes this entry from its owning progress bar.
  pub fn finish(&self) {
    self.progress_bar.finish_entry(self.id);
  }

  /// Completion fraction clamped to `0.0..=1.0`; `0.0` when the total
  /// size is unknown.
  pub fn percent(&self) -> f64 {
    let total = self.total_size.load(Ordering::Relaxed) as f64;
    if total == 0f64 {
      return 0f64;
    }
    let current = self.pos.load(Ordering::Relaxed) as f64;
    // position past the total still reports at most 100%
    (current / total).min(1f64)
  }
}
/// Mutable state of a progress bar, shared behind a mutex.
#[derive(Debug)]
struct InternalState {
  /// If this guard exists, then it means the progress
  /// bar is displaying in the draw thread.
  draw_thread_guard: Option<DrawThreadGuard>,
  // When the current draw session started; reset each time the bar
  // re-attaches to the draw thread.
  start_time: Instant,
  // Number of live entries keeping the draw thread attached.
  keep_alive_count: usize,
  // Total entries ever added (also serves as the next entry id).
  total_entries: usize,
  // Unfinished entries, sorted by id.
  entries: Vec<Arc<ProgressBarEntry>>,
  // When true, rendering is suppressed until a non-deferred entry arrives.
  is_deferring_display: bool,
}
/// Cloneable handle to the shared progress bar state plus its renderer.
#[derive(Clone, Debug)]
struct ProgressBarInner {
  state: Arc<Mutex<InternalState>>,
  renderer: Arc<dyn ProgressBarRenderer>,
}
impl ProgressBarInner {
  /// Creates an empty progress bar backed by the given renderer.
  fn new(renderer: Arc<dyn ProgressBarRenderer>) -> Self {
    Self {
      state: Arc::new(Mutex::new(InternalState {
        draw_thread_guard: None,
        start_time: Instant::now(),
        keep_alive_count: 0,
        total_entries: 0,
        entries: Vec::new(),
        is_deferring_display: false,
      })),
      renderer,
    }
  }
  /// A deferred entry will only be shown once another entry
  /// is added to the progress bar.
  pub fn add_deferred_entry(
    &self,
    kind: ProgressMessagePrompt,
    message: String,
  ) -> Arc<ProgressBarEntry> {
    let mut internal_state = self.state.lock();
    // only defer when this would be the first visible entry
    if internal_state.entries.is_empty() {
      internal_state.is_deferring_display = true;
    }
    self.add_entry_internal(&mut internal_state, kind, message)
  }
  /// Adds a visible entry, cancelling any pending deferral.
  pub fn add_entry(
    &self,
    kind: ProgressMessagePrompt,
    message: String,
  ) -> Arc<ProgressBarEntry> {
    let mut internal_state = self.state.lock();
    internal_state.is_deferring_display = false;
    self.add_entry_internal(&mut internal_state, kind, message)
  }
  // Allocates the next id, registers the entry, and ensures the draw
  // thread is running. Caller must hold the state lock.
  fn add_entry_internal(
    &self,
    internal_state: &mut InternalState,
    kind: ProgressMessagePrompt,
    message: String,
  ) -> Arc<ProgressBarEntry> {
    let id = internal_state.total_entries;
    let entry = Arc::new(ProgressBarEntry {
      id,
      prompt: kind,
      message,
      pos: Default::default(),
      total_size: Default::default(),
      progress_bar: self.clone(),
    });
    internal_state.entries.push(entry.clone());
    internal_state.total_entries += 1;
    internal_state.keep_alive_count += 1;
    self.maybe_start_draw_thread(internal_state);
    entry
  }
  // Removes a finished entry. Ids are assigned monotonically and entries
  // are pushed in order, so the vec stays sorted and binary search works.
  fn finish_entry(&self, entry_id: usize) {
    let mut internal_state = self.state.lock();
    if let Ok(index) = internal_state
      .entries
      .binary_search_by(|e| e.id.cmp(&entry_id))
    {
      internal_state.entries.remove(index);
      if internal_state.entries.is_empty() {
        internal_state.is_deferring_display = false;
      }
      self.decrement_keep_alive(&mut internal_state);
    }
  }
  fn decrement_keep_alive(&self, state: &mut InternalState) {
    state.keep_alive_count -= 1;
    if state.keep_alive_count == 0 {
      // drop the guard to remove this from the draw thread
      state.draw_thread_guard.take();
    }
  }
  // Attaches this bar to the draw thread if it isn't already attached,
  // resetting the elapsed-time origin for the new session.
  fn maybe_start_draw_thread(&self, internal_state: &mut InternalState) {
    if internal_state.draw_thread_guard.is_none()
      && internal_state.keep_alive_count > 0
    {
      internal_state.start_time = Instant::now();
      internal_state.draw_thread_guard =
        Some(DrawThread::add_entry(Arc::new(self.clone())));
    }
  }
}
impl DrawThreadRenderer for ProgressBarInner {
  /// Called by the draw thread each frame: snapshots the current state
  /// under the lock, then renders outside of it.
  fn render(&self, size: &ConsoleSize) -> String {
    let data = {
      let state = self.state.lock();
      // nothing to show yet (or display is deferred) — render blank
      if state.entries.is_empty() || state.is_deferring_display {
        return String::new();
      }
      let display_entries = state
        .entries
        .iter()
        .map(|e| ProgressDataDisplayEntry {
          prompt: e.prompt,
          message: e.message.to_string(),
          position: e.position(),
          total_size: e.total_size(),
        })
        .collect::<Vec<_>>();
      ProgressData {
        duration: state.start_time.elapsed(),
        terminal_width: size.cols,
        pending_entries: state.entries.len(),
        total_entries: state.total_entries,
        display_entries,
        // Average per-entry completion: each finished entry counts as
        // 1.0, pending entries contribute their own fraction. Division
        // is safe here — entries is non-empty, so total_entries >= 1.
        percent_done: {
          let mut total_percent_sum = 0f64;
          for entry in &state.entries {
            total_percent_sum += entry.percent();
          }
          total_percent_sum +=
            (state.total_entries - state.entries.len()) as f64;
          total_percent_sum / (state.total_entries as f64)
        },
      }
    };
    // renderer runs without holding the state lock
    self.renderer.render(data)
  }
}
/// Public, cheaply-cloneable handle to a progress bar.
#[derive(Clone, Debug)]
pub struct ProgressBar {
  inner: ProgressBarInner,
}
// Lets the npm installer report its activity through this progress bar.
impl deno_npm_installer::Reporter for ProgressBar {
  type Guard = UpdateGuard;
  type ClearGuard = UpdateGuard;
  fn on_blocking(&self, message: &str) -> Self::Guard {
    self.update_with_prompt(ProgressMessagePrompt::Blocking, message)
  }
  fn on_initializing(&self, message: &str) -> Self::Guard {
    self.update_with_prompt(ProgressMessagePrompt::Initialize, message)
  }
  fn clear_guard(&self) -> Self::ClearGuard {
    self.deferred_keep_initialize_alive()
  }
}
impl ProgressBar {
  /// Checks if progress bars are supported
  pub fn are_supported() -> bool {
    DrawThread::is_supported()
  }

  /// Creates a progress bar whose output format is determined by `style`.
  pub fn new(style: ProgressBarStyle) -> Self {
    Self {
      inner: ProgressBarInner::new(match style {
        ProgressBarStyle::DownloadBars => {
          Arc::new(renderer::BarProgressBarRenderer {
            display_human_download_size: true,
          })
        }
        ProgressBarStyle::ProgressBars => {
          Arc::new(renderer::BarProgressBarRenderer {
            display_human_download_size: false,
          })
        }
        ProgressBarStyle::TextOnly => {
          Arc::new(renderer::TextOnlyProgressBarRenderer::default())
        }
      }),
    }
  }

  /// Adds a download entry; the returned guard removes it when dropped.
  pub fn update(&self, msg: &str) -> UpdateGuard {
    self.update_with_prompt(ProgressMessagePrompt::Download, msg)
  }

  /// Adds an entry with the given prompt kind. In non-TTY environments the
  /// message is logged once instead and the returned guard is a no-op.
  pub fn update_with_prompt(
    &self,
    kind: ProgressMessagePrompt,
    msg: &str,
  ) -> UpdateGuard {
    // only check if progress bars are supported once we go
    // to update so that we lazily initialize the progress bar
    if ProgressBar::are_supported() {
      let entry = self.inner.add_entry(kind, msg.to_string());
      UpdateGuard {
        maybe_entry: Some(entry),
      }
    } else {
      // if we're not running in TTY, fallback to using logger crate
      if !msg.is_empty() {
        // idiomatic shorthand for log!(Level::Info, ...)
        log::info!("{} {}", kind.as_text(), msg);
      }
      UpdateGuard { maybe_entry: None }
    }
  }

  /// Keeps the bar alive with a hidden "Initialize" entry; it only becomes
  /// visible once another (non-deferred) entry is added.
  pub fn deferred_keep_initialize_alive(&self) -> UpdateGuard {
    self.deferred_update_with_prompt(ProgressMessagePrompt::Initialize, "")
  }

  /// Add an entry to the progress bar that will only be shown
  /// once another entry has been added.
  pub fn deferred_update_with_prompt(
    &self,
    kind: ProgressMessagePrompt,
    msg: &str,
  ) -> UpdateGuard {
    // only check if progress bars are supported once we go
    // to update so that we lazily initialize the progress bar
    if ProgressBar::are_supported() {
      let entry = self.inner.add_deferred_entry(kind, msg.to_string());
      UpdateGuard {
        maybe_entry: Some(entry),
      }
    } else {
      // do not display anything for a deferred update
      UpdateGuard { maybe_entry: None }
    }
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/util/sync/mod.rs | cli/util/sync/mod.rs | // Copyright 2018-2025 the Deno authors. MIT license.
// Synchronization utilities used across the CLI.
mod async_flag;

pub use async_flag::AsyncFlag;
// Re-exported so callers depend on this module rather than deno_core directly.
pub use deno_core::unsync::sync::AtomicFlag;
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/util/sync/async_flag.rs | cli/util/sync/async_flag.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::sync::Arc;
use tokio::sync::Semaphore;
/// A one-way flag that async tasks can await. Built on a semaphore that
/// starts with zero permits; `raise` adds a permit, releasing waiters.
#[derive(Debug, Clone)]
pub struct AsyncFlag(Arc<Semaphore>);
impl Default for AsyncFlag {
  fn default() -> Self {
    // zero permits: the flag starts lowered
    Self(Arc::new(Semaphore::new(0)))
  }
}
impl AsyncFlag {
  /// Raises the flag, releasing current and future `wait_raised` callers.
  pub fn raise(&self) {
    self.0.add_permits(1);
  }

  /// Resolves once the flag has been raised. The acquired permit is
  /// returned to the semaphore on drop, so the flag stays raised.
  pub async fn wait_raised(&self) {
    let _permit = self.0.acquire().await;
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/lsp/config.rs | cli/lsp/config.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::collections::BTreeMap;
use std::collections::BTreeSet;
use std::collections::HashMap;
use std::ops::Deref;
use std::ops::DerefMut;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use deno_ast::MediaType;
use deno_config::deno_json::DenoJsonCache;
use deno_config::deno_json::FmtConfig;
use deno_config::deno_json::FmtOptionsConfig;
use deno_config::deno_json::NodeModulesDirMode;
use deno_config::deno_json::TestConfig;
use deno_config::glob::FilePatterns;
use deno_config::glob::PathOrPatternSet;
use deno_config::workspace::VendorEnablement;
use deno_config::workspace::Workspace;
use deno_config::workspace::WorkspaceCache;
use deno_config::workspace::WorkspaceDirectory;
use deno_config::workspace::WorkspaceDirectoryEmptyOptions;
use deno_config::workspace::WorkspaceDiscoverOptions;
use deno_core::ModuleSpecifier;
use deno_core::anyhow::anyhow;
use deno_core::error::AnyError;
use deno_core::parking_lot::Mutex;
use deno_core::serde::Deserialize;
use deno_core::serde::Serialize;
use deno_core::serde::de::DeserializeOwned;
use deno_core::serde_json;
use deno_core::serde_json::Value;
use deno_core::url::Url;
use deno_lib::args::has_flag_env_var;
use deno_lib::util::hash::FastInsecureHasher;
use deno_npm::npm_rc::ResolvedNpmRc;
use deno_npm_cache::NpmCacheSetting;
use deno_npm_installer::LifecycleScriptsConfig;
use deno_npm_installer::NpmInstallerFactory;
use deno_npm_installer::NpmInstallerFactoryOptions;
use deno_npm_installer::graph::NpmCachingStrategy;
use deno_npm_installer::lifecycle_scripts::NullLifecycleScriptsExecutor;
use deno_package_json::PackageJsonCache;
use deno_package_json::PackageJsonCacheResult;
use deno_path_util::url_to_file_path;
use deno_resolver::factory::ConfigDiscoveryOption;
use deno_resolver::factory::ResolverFactory;
use deno_resolver::factory::ResolverFactoryOptions;
use deno_resolver::factory::WorkspaceFactory;
use deno_resolver::factory::WorkspaceFactoryOptions;
use deno_resolver::workspace::SpecifiedImportMap;
use deno_runtime::deno_node::PackageJson;
use indexmap::IndexSet;
use lsp_types::ClientCapabilities;
use lsp_types::Uri;
use node_resolver::NodeResolverOptions;
use tower_lsp::lsp_types as lsp;
use super::logging::lsp_log;
use super::lsp_custom;
use super::urls::uri_to_url;
use super::urls::url_to_uri;
use crate::args::CliLockfile;
use crate::args::ConfigFile;
use crate::cache::DenoDir;
use crate::file_fetcher::CliFileFetcher;
use crate::http_util::HttpClientProvider;
use crate::lsp::logging::lsp_warn;
use crate::npm::CliNpmCacheHttpClient;
use crate::sys::CliSys;
use crate::util::fs::canonicalize_path_maybe_not_exists;
use crate::util::progress_bar::ProgressBar;
use crate::util::progress_bar::ProgressBarStyle;
// Configuration section name under which clients send Deno LSP settings.
pub const SETTINGS_SECTION: &str = "deno";

// Serde default helper for settings that are enabled unless set otherwise.
fn is_true() -> bool {
  true
}
/// Wrapper that defaults if it fails to deserialize. Good for individual
/// settings.
#[derive(Debug, Default, Clone, Eq, PartialEq)]
pub struct SafeValue<T> {
  inner: T,
}
impl<'de, T: Default + for<'de2> Deserialize<'de2>> Deserialize<'de>
  for SafeValue<T>
{
  fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
  where
    D: serde::Deserializer<'de>,
  {
    // Malformed input falls back to `T::default()` instead of failing the
    // whole settings payload.
    Ok(Self {
      inner: Deserialize::deserialize(deserializer).unwrap_or_default(),
    })
  }
}
impl<T: Serialize> Serialize for SafeValue<T> {
  fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
  where
    S: serde::Serializer,
  {
    // serializes transparently as the inner value
    self.inner.serialize(serializer)
  }
}
impl<T> Deref for SafeValue<T> {
  type Target = T;
  fn deref(&self) -> &Self::Target {
    &self.inner
  }
}
impl<T> DerefMut for SafeValue<T> {
  fn deref_mut(&mut self) -> &mut T {
    &mut self.inner
  }
}
impl<T> SafeValue<T> {
  // NOTE(review): despite the name, this is a plain borrow of the inner
  // value, not an `Option::as_deref`-style conversion.
  pub fn as_deref(&self) -> &T {
    &self.inner
  }
}
/// Settings controlling which code lenses the server provides.
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)]
#[serde(rename_all = "camelCase")]
pub struct CodeLensSettings {
  /// Flag for providing implementation code lenses.
  #[serde(default)]
  pub implementations: bool,
  /// Flag for providing reference code lenses.
  #[serde(default)]
  pub references: bool,
  /// Flag for providing reference code lens on all functions. For this to have
  /// an impact, the `references` flag needs to be `true`.
  #[serde(default)]
  pub references_all_functions: bool,
  /// Flag for providing test code lens on `Deno.test` statements. There is
  /// also the `test_args` setting, but this is not used by the server.
  #[serde(default = "is_true")]
  pub test: bool,
}
// Hand-written so defaults mirror the per-field serde defaults above
// (`test` defaults to true).
impl Default for CodeLensSettings {
  fn default() -> Self {
    Self {
      implementations: false,
      references: false,
      references_all_functions: false,
      test: true,
    }
  }
}
/// Deno-specific completion settings (currently just import completions).
#[derive(Debug, Default, Clone, Deserialize, Serialize, PartialEq, Eq)]
#[serde(rename_all = "camelCase")]
pub struct DenoCompletionSettings {
  #[serde(default)]
  pub imports: ImportCompletionSettings,
}
/// Whether class member snippet completions are offered (default: on).
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)]
#[serde(rename_all = "camelCase")]
pub struct ClassMemberSnippets {
  #[serde(default = "is_true")]
  pub enabled: bool,
}
impl Default for ClassMemberSnippets {
  fn default() -> Self {
    Self { enabled: true }
  }
}
/// Whether object literal method snippet completions are offered
/// (default: on).
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)]
#[serde(rename_all = "camelCase")]
pub struct ObjectLiteralMethodSnippets {
  #[serde(default = "is_true")]
  pub enabled: bool,
}
impl Default for ObjectLiteralMethodSnippets {
  fn default() -> Self {
    Self { enabled: true }
  }
}
/// Per-language completion settings; everything except
/// `complete_function_calls` defaults to enabled.
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)]
#[serde(rename_all = "camelCase")]
pub struct CompletionSettings {
  #[serde(default)]
  pub complete_function_calls: bool,
  #[serde(default = "is_true")]
  pub include_automatic_optional_chain_completions: bool,
  #[serde(default = "is_true")]
  pub include_completions_for_import_statements: bool,
  #[serde(default = "is_true")]
  pub names: bool,
  #[serde(default = "is_true")]
  pub paths: bool,
  #[serde(default = "is_true")]
  pub auto_imports: bool,
  #[serde(default = "is_true")]
  pub enabled: bool,
  #[serde(default)]
  pub class_member_snippets: ClassMemberSnippets,
  #[serde(default)]
  pub object_literal_method_snippets: ObjectLiteralMethodSnippets,
}
// Mirrors the per-field serde defaults above.
impl Default for CompletionSettings {
  fn default() -> Self {
    Self {
      complete_function_calls: false,
      include_automatic_optional_chain_completions: true,
      include_completions_for_import_statements: true,
      names: true,
      paths: true,
      auto_imports: true,
      enabled: true,
      class_member_snippets: Default::default(),
      object_literal_method_snippets: Default::default(),
    }
  }
}
/// Inlay hint settings, grouped by the kind of hint; all kinds default
/// to disabled.
#[derive(Debug, Default, Clone, Deserialize, Serialize, PartialEq, Eq)]
#[serde(rename_all = "camelCase")]
pub struct InlayHintsSettings {
  #[serde(default)]
  pub parameter_names: InlayHintsParamNamesOptions,
  #[serde(default)]
  pub parameter_types: InlayHintsParamTypesOptions,
  #[serde(default)]
  pub variable_types: InlayHintsVarTypesOptions,
  #[serde(default)]
  pub property_declaration_types: InlayHintsPropDeclTypesOptions,
  #[serde(default)]
  pub function_like_return_types: InlayHintsFuncLikeReturnTypesOptions,
  #[serde(default)]
  pub enum_member_values: InlayHintsEnumMemberValuesOptions,
}
/// Parameter-name hints: which arguments get hints and whether hints are
/// suppressed when the argument already matches the parameter name.
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)]
#[serde(rename_all = "camelCase")]
pub struct InlayHintsParamNamesOptions {
  #[serde(default)]
  pub enabled: InlayHintsParamNamesEnabled,
  #[serde(default = "is_true")]
  pub suppress_when_argument_matches_name: bool,
}
impl Default for InlayHintsParamNamesOptions {
  fn default() -> Self {
    Self {
      enabled: InlayHintsParamNamesEnabled::None,
      suppress_when_argument_matches_name: true,
    }
  }
}
/// Scope of parameter-name hints: off, literal arguments only, or all.
#[derive(Debug, Default, Clone, Deserialize, Serialize, PartialEq, Eq)]
#[serde(rename_all = "camelCase")]
pub enum InlayHintsParamNamesEnabled {
  #[default]
  None,
  Literals,
  All,
}
#[derive(Debug, Default, Clone, Deserialize, Serialize, PartialEq, Eq)]
#[serde(rename_all = "camelCase")]
pub struct InlayHintsParamTypesOptions {
  #[serde(default)]
  pub enabled: bool,
}
/// Variable-type hints, with optional suppression when the inferred type
/// matches the variable name.
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)]
#[serde(rename_all = "camelCase")]
pub struct InlayHintsVarTypesOptions {
  #[serde(default)]
  pub enabled: bool,
  #[serde(default = "is_true")]
  pub suppress_when_type_matches_name: bool,
}
impl Default for InlayHintsVarTypesOptions {
  fn default() -> Self {
    Self {
      enabled: false,
      suppress_when_type_matches_name: true,
    }
  }
}
#[derive(Debug, Default, Clone, Deserialize, Serialize, PartialEq, Eq)]
#[serde(rename_all = "camelCase")]
pub struct InlayHintsPropDeclTypesOptions {
  #[serde(default)]
  pub enabled: bool,
}
#[derive(Debug, Default, Clone, Deserialize, Serialize, PartialEq, Eq)]
#[serde(rename_all = "camelCase")]
pub struct InlayHintsFuncLikeReturnTypesOptions {
  #[serde(default)]
  pub enabled: bool,
}
#[derive(Debug, Default, Clone, Deserialize, Serialize, PartialEq, Eq)]
#[serde(rename_all = "camelCase")]
pub struct InlayHintsEnumMemberValuesOptions {
  #[serde(default)]
  pub enabled: bool,
}
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)]
#[serde(rename_all = "camelCase")]
pub struct ImportCompletionSettings {
  /// A flag that indicates if non-explicitly set origins should be checked for
  /// supporting import suggestions.
  #[serde(default = "is_true")]
  pub auto_discover: bool,
  /// A map of origins which have had explicitly set if import suggestions are
  /// enabled.
  #[serde(default)]
  pub hosts: HashMap<String, bool>,
}
impl Default for ImportCompletionSettings {
  fn default() -> Self {
    Self {
      auto_discover: true,
      hosts: HashMap::default(),
    }
  }
}
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)]
#[serde(rename_all = "camelCase")]
pub struct TestingSettings {
  /// A vector of arguments which should be used when running the tests for
  /// a workspace.
  #[serde(default)]
  pub args: Vec<String>,
}
impl Default for TestingSettings {
  fn default() -> Self {
    Self {
      // NOTE(review): this default differs from serde's field default
      // (an empty vec) — deserialized-but-unset `args` ends up empty,
      // while Default::default() yields these flags. Confirm intended.
      args: vec!["--allow-all".to_string(), "--no-check".to_string()],
    }
  }
}
// Serde default helper; duplicate of `is_true` above, kept because serde
// attributes reference these functions by name.
fn default_to_true() -> bool {
  true
}
// Serde default for `document_preload_limit`.
fn default_document_preload_limit() -> usize {
  1000
}
// Deserializes an optional string, treating the empty string as `None`.
fn empty_string_none<'de, D: serde::Deserializer<'de>>(
  d: D,
) -> Result<Option<String>, D::Error> {
  let o: Option<String> = Option::deserialize(d)?;
  Ok(o.filter(|s| !s.is_empty()))
}
/// Preferred style for auto-import module specifiers.
#[derive(
  Debug, Default, Clone, Copy, Deserialize, Serialize, PartialEq, Eq,
)]
#[serde(rename_all = "kebab-case")]
pub enum ImportModuleSpecifier {
  NonRelative,
  ProjectRelative,
  Relative,
  #[default]
  Shortest,
}
/// How JSX attribute completions insert their value placeholder.
#[derive(
  Debug, Default, Clone, Copy, Deserialize, Serialize, PartialEq, Eq,
)]
#[serde(rename_all = "kebab-case")]
pub enum JsxAttributeCompletionStyle {
  #[default]
  Auto,
  Braces,
  None,
}
/// Quote style to use for generated/edited code.
#[derive(
  Debug, Default, Clone, Copy, Deserialize, Serialize, PartialEq, Eq,
)]
#[serde(rename_all = "kebab-case")]
pub enum QuoteStyle {
  #[default]
  Auto,
  Double,
  Single,
}
impl From<&FmtOptionsConfig> for QuoteStyle {
  /// Derives the quote style from the formatter config: single quotes
  /// only when explicitly enabled, double quotes otherwise.
  fn from(config: &FmtOptionsConfig) -> Self {
    if config.single_quote == Some(true) {
      QuoteStyle::Single
    } else {
      QuoteStyle::Double
    }
  }
}
/// Per-language editor preferences forwarded to the TypeScript service.
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)]
#[serde(rename_all = "camelCase")]
pub struct LanguagePreferences {
  #[serde(default)]
  pub import_module_specifier: ImportModuleSpecifier,
  #[serde(default)]
  pub jsx_attribute_completion_style: JsxAttributeCompletionStyle,
  #[serde(default)]
  pub auto_import_file_exclude_patterns: Vec<String>,
  #[serde(default = "is_true")]
  pub use_aliases_for_renames: bool,
  #[serde(default)]
  pub quote_style: QuoteStyle,
  #[serde(default)]
  pub prefer_type_only_auto_imports: bool,
}
// Mirrors the per-field serde defaults above.
impl Default for LanguagePreferences {
  fn default() -> Self {
    LanguagePreferences {
      import_module_specifier: Default::default(),
      jsx_attribute_completion_style: Default::default(),
      auto_import_file_exclude_patterns: vec![],
      use_aliases_for_renames: true,
      quote_style: Default::default(),
      prefer_type_only_auto_imports: false,
    }
  }
}
/// Whether suggestion-diagnostic code actions are offered (default: on).
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)]
#[serde(rename_all = "camelCase")]
pub struct SuggestionActionsSettings {
  #[serde(default = "is_true")]
  pub enabled: bool,
}
impl Default for SuggestionActionsSettings {
  fn default() -> Self {
    SuggestionActionsSettings { enabled: true }
  }
}
/// Controls whether imports get rewritten when a file is moved/renamed.
#[derive(Debug, Default, Clone, Deserialize, Serialize, PartialEq, Eq)]
#[serde(rename_all = "camelCase")]
pub struct UpdateImportsOnFileMoveOptions {
  #[serde(default)]
  pub enabled: UpdateImportsOnFileMoveEnabled,
}
#[derive(
  Debug, Default, Clone, Copy, Deserialize, Serialize, PartialEq, Eq,
)]
#[serde(rename_all = "kebab-case")]
pub enum UpdateImportsOnFileMoveEnabled {
  Always,
  #[default]
  Prompt,
  Never,
}
/// The per-language (JavaScript/TypeScript) subset of workspace settings.
#[derive(Debug, Default, Clone, Deserialize, Serialize, PartialEq, Eq)]
#[serde(rename_all = "camelCase")]
pub struct LanguageWorkspaceSettings {
  #[serde(default)]
  pub inlay_hints: InlayHintsSettings,
  #[serde(default)]
  pub preferences: LanguagePreferences,
  #[serde(default)]
  pub suggest: CompletionSettings,
  #[serde(default)]
  pub suggestion_actions: SuggestionActionsSettings,
  #[serde(default)]
  pub update_imports_on_file_move: UpdateImportsOnFileMoveOptions,
}
/// Inspector setting: either a boolean toggle (using the default address)
/// or an explicit `host:port` string.
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)]
#[serde(rename_all = "camelCase")]
#[serde(untagged)]
pub enum InspectSetting {
  Bool(bool),
  String(String),
}
impl Default for InspectSetting {
  fn default() -> Self {
    // inspector is off by default
    InspectSetting::Bool(false)
  }
}
impl InspectSetting {
  /// Resolves this setting to an inspector socket address, if enabled.
  /// `true` maps to the default address `127.0.0.1:9222`.
  pub fn to_address(&self) -> Option<String> {
    match self {
      InspectSetting::Bool(true) => Some("127.0.0.1:9222".to_string()),
      InspectSetting::Bool(false) => None,
      InspectSetting::String(addr) => Some(addr.clone()),
    }
  }
}
/// Deno language server specific settings that are applied to a workspace.
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)]
#[serde(rename_all = "camelCase")]
pub struct WorkspaceSettings {
  /// A flag that indicates if Deno is enabled for the workspace.
  pub enable: Option<bool>,
  /// A list of paths, using the root_uri as a base that should be Deno
  /// disabled.
  #[serde(default)]
  pub disable_paths: Vec<String>,
  /// A list of paths, using the root_uri as a base that should be Deno enabled.
  pub enable_paths: Option<Vec<String>>,
  /// An option that points to a path string of the path to utilise as the
  /// cache/DENO_DIR for the language server.
  #[serde(default, deserialize_with = "empty_string_none")]
  pub cache: Option<String>,
  /// Cache local modules and their dependencies on `textDocument/didSave`
  /// notifications corresponding to them.
  #[serde(default = "default_to_true")]
  pub cache_on_save: bool,
  /// Override the default stores used to validate certificates. This overrides
  /// the environment variable `DENO_TLS_CA_STORE` if present.
  pub certificate_stores: Option<Vec<String>>,
  /// An option that points to a path string of the config file to apply to
  /// code within the workspace.
  #[serde(default, deserialize_with = "empty_string_none")]
  pub config: Option<String>,
  /// An option that points to a path string of the import map to apply to the
  /// code within the workspace.
  #[serde(default, deserialize_with = "empty_string_none")]
  pub import_map: Option<String>,
  /// Code lens specific settings for the workspace.
  #[serde(default)]
  pub code_lens: CodeLensSettings,
  /// A flag that indicates if internal debug logging should be made available.
  #[serde(default)]
  pub internal_debug: bool,
  /// Internal inspector setting: off, on (default address), or `host:port`.
  #[serde(default)]
  pub internal_inspect: InspectSetting,
  /// Write logs to a file in a project-local directory.
  #[serde(default)]
  pub log_file: bool,
  /// A flag that indicates if linting is enabled for the workspace.
  #[serde(default = "default_to_true")]
  pub lint: bool,
  /// Limits the number of files that can be preloaded by the language server.
  #[serde(default = "default_document_preload_limit")]
  pub document_preload_limit: usize,
  /// Deno-specific completion settings (e.g. import suggestions).
  #[serde(default)]
  pub suggest: DenoCompletionSettings,
  /// Testing settings for the workspace.
  #[serde(default)]
  pub testing: TestingSettings,
  /// An option which sets the cert file to use when attempting to fetch remote
  /// resources. This overrides `DENO_CERT` if present.
  #[serde(default, deserialize_with = "empty_string_none")]
  pub tls_certificate: Option<String>,
  /// An option, if set, will unsafely ignore certificate errors when fetching
  /// remote resources.
  #[serde(default)]
  pub unsafely_ignore_certificate_errors: Option<Vec<String>>,
  /// Unstable feature flags; wrapped in `SafeValue` so malformed input
  /// falls back to an empty list instead of failing deserialization.
  #[serde(default)]
  pub unstable: SafeValue<Vec<String>>,
  /// JavaScript-specific language settings.
  #[serde(default)]
  pub javascript: LanguageWorkspaceSettings,
  /// TypeScript-specific language settings.
  #[serde(default)]
  pub typescript: LanguageWorkspaceSettings,
  /// Optional tracing configuration for the language server.
  #[serde(default)]
  pub tracing: Option<super::trace::TracingConfigOrEnabled>,
}
// NOTE: keep these values in sync with the `#[serde(default ...)]` attributes
// on `WorkspaceSettings` so a default-constructed instance matches one parsed
// from an empty configuration object.
impl Default for WorkspaceSettings {
  fn default() -> Self {
    WorkspaceSettings {
      enable: None,
      disable_paths: vec![],
      enable_paths: None,
      cache: None,
      cache_on_save: true,
      certificate_stores: None,
      config: None,
      import_map: None,
      code_lens: Default::default(),
      internal_debug: false,
      internal_inspect: Default::default(),
      log_file: false,
      lint: true,
      document_preload_limit: default_document_preload_limit(),
      suggest: Default::default(),
      testing: Default::default(),
      tls_certificate: None,
      unsafely_ignore_certificate_errors: None,
      unstable: Default::default(),
      javascript: Default::default(),
      typescript: Default::default(),
      tracing: Default::default(),
    }
  }
}
impl WorkspaceSettings {
  /// Build settings from the raw `deno`, `javascript` and `typescript`
  /// configuration sections, migrating deprecated `deno.inlayHints.*` and
  /// `deno.suggest.*` options into the language-specific settings while
  /// emitting a deprecation warning for each one that is set.
  pub fn from_raw_settings(
    deno: Value,
    javascript: Value,
    typescript: Value,
  ) -> Self {
    // Deserialize `value`, falling back to `T::default()` when the value is
    // null or fails to parse (logging a warning in the latter case).
    fn parse_or_default<T: Default + DeserializeOwned>(
      value: Value,
      description: &str,
    ) -> T {
      if value.is_null() {
        return T::default();
      }
      match serde_json::from_value(value) {
        Ok(v) => v,
        Err(err) => {
          lsp_warn!("Couldn't parse {description}: {err}");
          T::default()
        }
      }
    }
    // Pull out the deprecated sections before parsing so they can be
    // migrated into `settings.javascript` / `settings.typescript` below.
    let deno_inlay_hints =
      deno.as_object().and_then(|o| o.get("inlayHints").cloned());
    let deno_suggest = deno.as_object().and_then(|o| o.get("suggest").cloned());
    let mut settings: Self = parse_or_default(deno, "settings under \"deno\"");
    settings.javascript =
      parse_or_default(javascript, "settings under \"javascript\"");
    settings.typescript =
      parse_or_default(typescript, "settings under \"typescript\"");
    // Migrate each deprecated `deno.inlayHints.*` option that differs from
    // its default into both language sections.
    if let Some(inlay_hints) = deno_inlay_hints {
      let inlay_hints: InlayHintsSettings =
        parse_or_default(inlay_hints, "settings under \"deno.inlayHints\"");
      if inlay_hints.parameter_names.enabled != Default::default() {
        lsp_warn!(
          "\"deno.inlayHints.parameterNames.enabled\" is deprecated. Instead use \"javascript.inlayHints.parameterNames.enabled\" and \"typescript.inlayHints.parameterNames.enabled\"."
        );
        settings.javascript.inlay_hints.parameter_names.enabled =
          inlay_hints.parameter_names.enabled.clone();
        settings.typescript.inlay_hints.parameter_names.enabled =
          inlay_hints.parameter_names.enabled;
      }
      if !inlay_hints
        .parameter_names
        .suppress_when_argument_matches_name
      {
        lsp_warn!(
          "\"deno.inlayHints.parameterNames.suppressWhenArgumentMatchesName\" is deprecated. Instead use \"javascript.inlayHints.parameterNames.suppressWhenArgumentMatchesName\" and \"typescript.inlayHints.parameterNames.suppressWhenArgumentMatchesName\"."
        );
        settings
          .javascript
          .inlay_hints
          .parameter_names
          .suppress_when_argument_matches_name = inlay_hints
          .parameter_names
          .suppress_when_argument_matches_name;
        settings
          .typescript
          .inlay_hints
          .parameter_names
          .suppress_when_argument_matches_name = inlay_hints
          .parameter_names
          .suppress_when_argument_matches_name;
      }
      if inlay_hints.parameter_types.enabled {
        lsp_warn!(
          "\"deno.inlayHints.parameterTypes.enabled\" is deprecated. Instead use \"javascript.inlayHints.parameterTypes.enabled\" and \"typescript.inlayHints.parameterTypes.enabled\"."
        );
        settings.javascript.inlay_hints.parameter_types.enabled =
          inlay_hints.parameter_types.enabled;
        settings.typescript.inlay_hints.parameter_types.enabled =
          inlay_hints.parameter_types.enabled;
      }
      if inlay_hints.variable_types.enabled {
        lsp_warn!(
          "\"deno.inlayHints.variableTypes.enabled\" is deprecated. Instead use \"javascript.inlayHints.variableTypes.enabled\" and \"typescript.inlayHints.variableTypes.enabled\"."
        );
        settings.javascript.inlay_hints.variable_types.enabled =
          inlay_hints.variable_types.enabled;
        settings.typescript.inlay_hints.variable_types.enabled =
          inlay_hints.variable_types.enabled;
      }
      if !inlay_hints.variable_types.suppress_when_type_matches_name {
        lsp_warn!(
          "\"deno.inlayHints.variableTypes.suppressWhenTypeMatchesName\" is deprecated. Instead use \"javascript.inlayHints.variableTypes.suppressWhenTypeMatchesName\" and \"typescript.inlayHints.variableTypes.suppressWhenTypeMatchesName\"."
        );
        settings
          .javascript
          .inlay_hints
          .variable_types
          .suppress_when_type_matches_name =
          inlay_hints.variable_types.suppress_when_type_matches_name;
        settings
          .typescript
          .inlay_hints
          .variable_types
          .suppress_when_type_matches_name =
          inlay_hints.variable_types.suppress_when_type_matches_name;
      }
      if inlay_hints.property_declaration_types.enabled {
        lsp_warn!(
          "\"deno.inlayHints.propertyDeclarationTypes.enabled\" is deprecated. Instead use \"javascript.inlayHints.propertyDeclarationTypes.enabled\" and \"typescript.inlayHints.propertyDeclarationTypes.enabled\"."
        );
        settings
          .javascript
          .inlay_hints
          .property_declaration_types
          .enabled = inlay_hints.property_declaration_types.enabled;
        settings
          .typescript
          .inlay_hints
          .property_declaration_types
          .enabled = inlay_hints.property_declaration_types.enabled;
      }
      if inlay_hints.function_like_return_types.enabled {
        lsp_warn!(
          "\"deno.inlayHints.functionLikeReturnTypes.enabled\" is deprecated. Instead use \"javascript.inlayHints.functionLikeReturnTypes.enabled\" and \"typescript.inlayHints.functionLikeReturnTypes.enabled\"."
        );
        settings
          .javascript
          .inlay_hints
          .function_like_return_types
          .enabled = inlay_hints.function_like_return_types.enabled;
        settings
          .typescript
          .inlay_hints
          .function_like_return_types
          .enabled = inlay_hints.function_like_return_types.enabled;
      }
      if inlay_hints.enum_member_values.enabled {
        lsp_warn!(
          "\"deno.inlayHints.enumMemberValues.enabled\" is deprecated. Instead use \"javascript.inlayHints.enumMemberValues.enabled\" and \"typescript.inlayHints.enumMemberValues.enabled\"."
        );
        settings.javascript.inlay_hints.enum_member_values.enabled =
          inlay_hints.enum_member_values.enabled;
        settings.typescript.inlay_hints.enum_member_values.enabled =
          inlay_hints.enum_member_values.enabled;
      }
    }
    // Migrate deprecated `deno.suggest.*` options likewise.
    if let Some(suggest) = deno_suggest {
      let suggest: CompletionSettings =
        parse_or_default(suggest, "settings under \"deno.suggest\"");
      if suggest.complete_function_calls {
        lsp_warn!(
          "\"deno.suggest.completeFunctionCalls\" is deprecated. Instead use \"javascript.suggest.completeFunctionCalls\" and \"typescript.suggest.completeFunctionCalls\"."
        );
        settings.javascript.suggest.complete_function_calls =
          suggest.complete_function_calls;
        settings.typescript.suggest.complete_function_calls =
          suggest.complete_function_calls;
      }
      if !suggest.names {
        lsp_warn!(
          "\"deno.suggest.names\" is deprecated. Instead use \"javascript.suggest.names\" and \"typescript.suggest.names\"."
        );
        settings.javascript.suggest.names = suggest.names;
        settings.typescript.suggest.names = suggest.names;
      }
      if !suggest.paths {
        lsp_warn!(
          "\"deno.suggest.paths\" is deprecated. Instead use \"javascript.suggest.paths\" and \"typescript.suggest.paths\"."
        );
        settings.javascript.suggest.paths = suggest.paths;
        settings.typescript.suggest.paths = suggest.paths;
      }
      if !suggest.auto_imports {
        lsp_warn!(
          "\"deno.suggest.autoImports\" is deprecated. Instead use \"javascript.suggest.autoImports\" and \"typescript.suggest.autoImports\"."
        );
        settings.javascript.suggest.auto_imports = suggest.auto_imports;
        settings.typescript.suggest.auto_imports = suggest.auto_imports;
      }
    }
    settings
  }
  /// Build settings from LSP initialization options, where the `javascript`
  /// and `typescript` sections are nested inside the `deno` object.
  pub fn from_initialization_options(options: Value) -> Self {
    let deno = options;
    let javascript = deno
      .as_object()
      .and_then(|o| o.get("javascript").cloned())
      .unwrap_or_default();
    let typescript = deno
      .as_object()
      .and_then(|o| o.get("typescript").cloned())
      .unwrap_or_default();
    Self::from_raw_settings(deno, javascript, typescript)
  }
}
/// Resolved workspace settings: the global ("unscoped") settings plus any
/// per-workspace-folder overrides.
#[derive(Debug, Default, Clone)]
pub struct Settings {
  /// Settings that apply when no workspace folder override matches.
  pub unscoped: Arc<WorkspaceSettings>,
  /// Per-folder settings; `None` means the folder has no specific overrides.
  pub by_workspace_folder: BTreeMap<Arc<Url>, Option<Arc<WorkspaceSettings>>>,
  /// The first workspace folder, used as a fallback scope.
  pub first_folder: Option<Arc<Url>>,
}
impl Settings {
  /// Determine whether Deno should be enabled for `path`, returning `None`
  /// when the decision should be deferred.
  ///
  /// Precedence: `disablePaths` wins over `enablePaths`, which wins over the
  /// scope's `enable` flag.
  pub fn path_enabled(&self, path: &Path) -> Option<bool> {
    let (settings, mut folder_uri) = self.get_for_path(path);
    folder_uri = folder_uri.or(self.first_folder.as_ref());
    let mut disable_paths = vec![];
    let mut enable_paths = None;
    if let Some(folder_uri) = folder_uri
      && let Ok(folder_path) = url_to_file_path(folder_uri)
    {
      // Resolve the configured relative paths against the folder root.
      disable_paths = settings
        .disable_paths
        .iter()
        .map(|p| folder_path.join(p))
        .collect::<Vec<_>>();
      enable_paths = settings.enable_paths.as_ref().map(|enable_paths| {
        enable_paths
          .iter()
          .map(|p| folder_path.join(p))
          .collect::<Vec<_>>()
      });
    }
    if disable_paths.iter().any(|p| path.starts_with(p)) {
      Some(false)
    } else if let Some(enable_paths) = &enable_paths {
      for enable_path in enable_paths {
        // Also enable if the checked path is a dir containing an enabled path.
        if path.starts_with(enable_path) || enable_path.starts_with(path) {
          return Some(true);
        }
      }
      Some(false)
    } else {
      settings.enable
    }
  }
  /// Returns `None` if the value should be deferred to the presence of a
  /// `deno.json` file.
  pub fn specifier_enabled(&self, specifier: &ModuleSpecifier) -> Option<bool> {
    let Ok(path) = url_to_file_path(specifier) else {
      // Non-file URLs are not disabled by these settings.
      return Some(true);
    };
    self.path_enabled(&path)
  }
  /// The settings that apply outside of any workspace folder override.
  pub fn get_unscoped(&self) -> &WorkspaceSettings {
    &self.unscoped
  }
  /// Find the workspace folder settings containing `path`, falling back to
  /// the unscoped settings paired with the first folder.
  pub fn get_for_path(
    &self,
    path: &Path,
  ) -> (&WorkspaceSettings, Option<&Arc<Url>>) {
    // Iterate the (URL-sorted) map in reverse so later-sorting, more deeply
    // nested folder URLs are checked before their parents.
    for (folder_uri, settings) in self.by_workspace_folder.iter().rev() {
      if let Some(settings) = settings {
        let Ok(folder_path) = url_to_file_path(folder_uri) else {
          continue;
        };
        if path.starts_with(folder_path) {
          return (settings, Some(folder_uri));
        }
      }
    }
    (&self.unscoped, self.first_folder.as_ref())
  }
  /// Like `get_for_path`, but for an LSP `Uri`.
  pub fn get_for_uri(
    &self,
    uri: &Uri,
  ) -> (&WorkspaceSettings, Option<&Arc<Url>>) {
    self.get_for_specifier(&uri_to_url(uri))
  }
  /// Like `get_for_path`, but for a module specifier; non-file specifiers
  /// get the unscoped settings.
  pub fn get_for_specifier(
    &self,
    specifier: &ModuleSpecifier,
  ) -> (&WorkspaceSettings, Option<&Arc<Url>>) {
    let Ok(path) = url_to_file_path(specifier) else {
      return (&self.unscoped, self.first_folder.as_ref());
    };
    self.get_for_path(&path)
  }
  /// Hash of only the settings that affect enablement/preloading, so callers
  /// can detect when enablement must be recomputed.
  pub fn enable_settings_hash(&self) -> u64 {
    let mut hasher = FastInsecureHasher::new_without_deno_version();
    let unscoped = self.get_unscoped();
    hasher.write_hashable(unscoped.enable);
    hasher.write_hashable(&unscoped.enable_paths);
    hasher.write_hashable(&unscoped.disable_paths);
    hasher.write_hashable(unscoped.document_preload_limit);
    for (folder_uri, settings) in &self.by_workspace_folder {
      hasher.write_hashable(folder_uri);
      hasher.write_hashable(
        settings
          .as_ref()
          .map(|s| (&s.enable, &s.enable_paths, &s.disable_paths)),
      );
    }
    hasher.write_hashable(&self.first_folder);
    hasher.finish()
  }
}
/// Top-level language server configuration state.
#[derive(Clone, Debug, Default)]
pub struct Config {
  /// Capabilities negotiated with the client.
  pub client_capabilities: Arc<ClientCapabilities>,
  /// Parsed client settings (unscoped + per-folder).
  pub settings: Arc<Settings>,
  /// Workspace folders as (normalized URL, raw LSP folder) pairs.
  pub workspace_folders: Arc<Vec<(Arc<Url>, lsp::WorkspaceFolder)>>,
  /// Discovered config file tree.
  pub tree: ConfigTree,
}
impl Config {
#[cfg(test)]
pub fn new_with_roots(root_urls: impl IntoIterator<Item = Url>) -> Self {
use super::urls::url_to_uri;
let mut config = Self::default();
let mut folders = vec![];
for root_url in root_urls {
let root_uri = url_to_uri(&root_url).unwrap();
let name = root_url.path_segments().and_then(|mut s| s.next_back());
let name = name.unwrap_or_default().to_string();
folders.push((
Arc::new(root_url),
lsp::WorkspaceFolder {
uri: root_uri,
name,
},
));
}
config.set_workspace_folders(folders);
config
}
  /// Replace the workspace folder list, preserving the unscoped settings but
  /// resetting all per-folder settings to `None` (unset) until the client
  /// supplies them again.
  pub fn set_workspace_folders(
    &mut self,
    folders: Vec<(Arc<Url>, lsp::WorkspaceFolder)>,
  ) {
    self.settings = Arc::new(Settings {
      unscoped: self.settings.unscoped.clone(),
      by_workspace_folder: folders
        .iter()
        .map(|(s, _)| (s.clone(), None))
        .collect(),
      first_folder: folders.first().map(|(s, _)| s.clone()),
    });
    self.workspace_folders = Arc::new(folders);
  }
  /// Replace the unscoped and per-folder settings. Folder settings are only
  /// kept for folders already present in `by_workspace_folder`; folders
  /// without an entry in `folder_settings` are left as `None`.
  pub fn set_workspace_settings(
    &mut self,
    unscoped: WorkspaceSettings,
    folder_settings: Vec<(Arc<Url>, WorkspaceSettings)>,
  ) {
    let mut by_folder = folder_settings.into_iter().collect::<HashMap<_, _>>();
    self.settings = Arc::new(Settings {
      unscoped: Arc::new(unscoped),
      by_workspace_folder: self
        .settings
        .by_workspace_folder
        .keys()
        .map(|s| (s.clone(), by_folder.remove(s).map(Arc::new)))
        .collect(),
      first_folder: self.settings.first_folder.clone(),
    });
  }
  /// The unscoped (global) workspace settings.
  pub fn workspace_settings(&self) -> &WorkspaceSettings {
    self.settings.get_unscoped()
  }
  /// The workspace settings that apply to the document at `uri`.
  pub fn workspace_settings_for_uri(&self, uri: &Uri) -> &WorkspaceSettings {
    self.settings.get_for_uri(uri).0
  }
  /// The workspace settings that apply to the module at `specifier`.
  pub fn workspace_settings_for_specifier(
    &self,
    specifier: &ModuleSpecifier,
  ) -> &WorkspaceSettings {
    self.settings.get_for_specifier(specifier).0
  }
  /// Select the `javascript` or `typescript` language settings based on the
  /// specifier's media type; returns `None` for non-script media types
  /// (JSON, CSS, etc.).
  pub fn language_settings_for_specifier(
    &self,
    specifier: &ModuleSpecifier,
  ) -> Option<&LanguageWorkspaceSettings> {
    let workspace_settings = self.workspace_settings_for_specifier(specifier);
    match MediaType::from_specifier(specifier) {
      MediaType::JavaScript
      | MediaType::Jsx
      | MediaType::Mjs
      | MediaType::Cjs => Some(&workspace_settings.javascript),
      MediaType::TypeScript
      | MediaType::Mts
      | MediaType::Cts
      | MediaType::Dts
      | MediaType::Dmts
      | MediaType::Dcts
      | MediaType::Tsx => Some(&workspace_settings.typescript),
      MediaType::Json
      | MediaType::Jsonc
      | MediaType::Json5
      | MediaType::Wasm
      | MediaType::Css
      | MediaType::Html
      | MediaType::SourceMap
      | MediaType::Sql
      | MediaType::Unknown => None,
    }
  }
/// Determine if any inlay hints are enabled. This allows short circuiting
/// when there are no inlay hints enabled.
pub fn enabled_inlay_hints_for_specifier(
&self,
specifier: &ModuleSpecifier,
) -> bool {
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | true |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/lsp/path_to_regex.rs | cli/lsp/path_to_regex.rs | // Copyright 2018-2025 the Deno authors. MIT license.
// The logic of this module is heavily influenced by path-to-regexp at:
// https://github.com/pillarjs/path-to-regexp/ which is licensed as follows:
// The MIT License (MIT)
//
// Copyright (c) 2014 Blake Embrey (hello@blakeembrey.com)
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
//
use std::collections::HashMap;
use std::fmt;
use std::fmt::Write as _;
use std::iter::Peekable;
use deno_core::anyhow::anyhow;
use deno_core::error::AnyError;
use fancy_regex::Regex as FancyRegex;
use once_cell::sync::Lazy;
use regex::Regex;
/// Matches characters that are significant in regular expressions and must be
/// backslash-escaped before being embedded in a pattern (see `escape_string`).
static ESCAPE_STRING_RE: Lazy<Regex> =
  lazy_regex::lazy_regex!(r"([.+*?=^!:${}()\[\]|/\\])");
/// The kind of a token produced by `lexer`.
#[derive(Debug, PartialEq, Eq)]
enum TokenType {
  /// `{` — opens a grouped segment.
  Open,
  /// `}` — closes a grouped segment.
  Close,
  /// A custom matching pattern in parentheses, e.g. `(\d+)`.
  Pattern,
  /// A named parameter, e.g. `:id`.
  Name,
  /// A single literal character.
  Char,
  /// A backslash-escaped character.
  EscapedChar,
  /// A repetition/optionality modifier: `*`, `+` or `?`.
  Modifier,
  /// End-of-input sentinel.
  End,
}
/// A single token produced by `lexer`.
#[derive(Debug)]
struct LexToken {
  token_type: TokenType,
  /// Position of the token in the source string (used in error messages).
  index: usize,
  /// The token's textual payload (e.g. a parameter name or pattern body).
  value: String,
}
/// Backslash-escape all regex-significant characters in `s`.
fn escape_string(s: &str) -> String {
  ESCAPE_STRING_RE.replace_all(s, r"\$1").into_owned()
}
/// Tokenize a path template string into `LexToken`s, always ending with a
/// single `End` sentinel token.
///
/// Errors on a trailing backslash, an unterminated/empty/capturing pattern
/// group, or a `:` that is not followed by a valid parameter name.
fn lexer(s: &str) -> Result<Vec<LexToken>, AnyError> {
  let mut tokens = Vec::new();
  let mut chars = s.chars().peekable();
  // `index` tracks the position of the *next* character, used for error
  // messages and token positions.
  let mut index = 0_usize;
  loop {
    match chars.next() {
      None => break,
      // Repetition/optionality modifiers are single-character tokens.
      Some(c) if c == '*' || c == '+' || c == '?' => {
        tokens.push(LexToken {
          token_type: TokenType::Modifier,
          index,
          value: c.to_string(),
        });
        index += 1;
      }
      // A backslash escapes the next character; the token's index points at
      // the escaped character, not the backslash.
      Some('\\') => {
        index += 1;
        let value = chars
          .next()
          .ok_or_else(|| anyhow!("Unexpected end of string at {}.", index))?;
        tokens.push(LexToken {
          token_type: TokenType::EscapedChar,
          index,
          value: value.to_string(),
        });
        index += 1;
      }
      Some('{') => {
        tokens.push(LexToken {
          token_type: TokenType::Open,
          index,
          value: '{'.to_string(),
        });
        index += 1;
      }
      Some('}') => {
        tokens.push(LexToken {
          token_type: TokenType::Close,
          index,
          value: '}'.to_string(),
        });
        index += 1;
      }
      // `:name` — consume a run of [0-9A-Za-z_] as the parameter name.
      Some(':') => {
        let mut name = String::new();
        while let Some(c) = chars.peek() {
          if (*c >= '0' && *c <= '9')
            || (*c >= 'A' && *c <= 'Z')
            || (*c >= 'a' && *c <= 'z')
            || *c == '_'
          {
            let ch = chars.next().unwrap();
            name.push(ch);
          } else {
            break;
          }
        }
        if name.is_empty() {
          return Err(anyhow!("Missing parameter name at {}", index));
        }
        let name_len = name.len();
        tokens.push(LexToken {
          token_type: TokenType::Name,
          index,
          value: name,
        });
        // Advance past the ':' plus the name characters.
        index += 1 + name_len;
      }
      // `(pattern)` — consume a balanced, non-capturing regex fragment.
      Some('(') => {
        // Tracks parenthesis nesting depth; starts at 1 for the '(' already
        // consumed.
        let mut count = 1;
        let mut pattern = String::new();
        if chars.peek() == Some(&'?') {
          return Err(anyhow!(
            "Pattern cannot start with \"?\" at {}.",
            index + 1
          ));
        }
        loop {
          let next_char = chars.peek();
          if next_char.is_none() {
            break;
          }
          // Escaped characters inside a pattern are copied verbatim (both
          // the backslash and the escaped character).
          if next_char == Some(&'\\') {
            pattern.push(chars.next().unwrap());
            pattern.push(
              chars
                .next()
                .ok_or_else(|| anyhow!("Unexpected termination of string."))?,
            );
            continue;
          }
          if next_char == Some(&')') {
            count -= 1;
            if count == 0 {
              chars.next();
              break;
            }
          } else if next_char == Some(&'(') {
            count += 1;
            pattern.push(chars.next().unwrap());
            // Nested groups must be non-capturing, i.e. start with "(?".
            if chars.peek() != Some(&'?') {
              return Err(anyhow!(
                "Capturing groups are not allowed at {}.",
                index + pattern.len()
              ));
            }
            continue;
          }
          pattern.push(chars.next().unwrap());
        }
        if count > 0 {
          return Err(anyhow!("Unbalanced pattern at {}.", index));
        }
        if pattern.is_empty() {
          return Err(anyhow!("Missing pattern at {}.", index));
        }
        let pattern_len = pattern.len();
        tokens.push(LexToken {
          token_type: TokenType::Pattern,
          index,
          value: pattern,
        });
        // Advance past both parentheses plus the pattern body.
        index += 2 + pattern_len;
      }
      // Any other character is a literal.
      Some(c) => {
        tokens.push(LexToken {
          token_type: TokenType::Char,
          index,
          value: c.to_string(),
        });
        index += 1;
      }
    }
  }
  tokens.push(LexToken {
    token_type: TokenType::End,
    index,
    value: "".to_string(),
  });
  Ok(tokens)
}
/// A parameter key: either a user-supplied name or an auto-assigned numeric
/// index for unnamed capture groups.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum StringOrNumber {
  String(String),
  Number(usize),
}

impl fmt::Display for StringOrNumber {
  /// Formats the contained value directly, with no surrounding decoration.
  fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
    match self {
      StringOrNumber::String(s) => f.write_str(s),
      StringOrNumber::Number(n) => write!(f, "{n}"),
    }
  }
}
/// A matched parameter value: a single string, or a vector of segments for
/// repeated (`*` / `+`) parameters.
#[derive(Debug, Clone)]
pub enum StringOrVec {
  String(String),
  Vec(Vec<String>),
}
impl StringOrVec {
  /// Parse a raw matched string for `key`. Repeated keys (`+`/`*` modifier)
  /// are split into segments on the key's `prefix + suffix` separator;
  /// otherwise the string is kept whole.
  pub fn from_str(s: &str, key: &Key) -> StringOrVec {
    match &key.modifier {
      Some(m) if m == "+" || m == "*" => {
        // The separator between repeated segments is the concatenation of
        // the key's prefix and suffix.
        let pat = format!(
          "{}{}",
          key.prefix.clone().unwrap_or_default(),
          key.suffix.clone().unwrap_or_default()
        );
        s.split(&pat)
          .map(String::from)
          .collect::<Vec<String>>()
          .into()
      }
      _ => s.into(),
    }
  }
  /// Render the value back into path text. Vec segments are joined with the
  /// key's prefix/suffix (or `/` and `""` without a key);
  /// `omit_initial_prefix` drops the prefix on the first segment only.
  pub fn to_string(
    &self,
    maybe_key: Option<&Key>,
    omit_initial_prefix: bool,
  ) -> String {
    match self {
      Self::String(s) => s.clone(),
      Self::Vec(v) => {
        let (prefix, suffix) = if let Some(key) = maybe_key {
          (
            key.prefix.clone().unwrap_or_default(),
            key.suffix.clone().unwrap_or_default(),
          )
        } else {
          ("/".to_string(), "".to_string())
        };
        let mut s = String::new();
        for (i, segment) in v.iter().enumerate() {
          if omit_initial_prefix && i == 0 {
            write!(s, "{segment}{suffix}").unwrap();
          } else {
            write!(s, "{prefix}{segment}{suffix}").unwrap();
          }
        }
        s
      }
    }
  }
}
impl Default for StringOrVec {
  fn default() -> Self {
    Self::String("".to_string())
  }
}
impl<'a> From<&'a str> for StringOrVec {
  fn from(s: &'a str) -> Self {
    Self::String(s.to_string())
  }
}
impl From<Vec<String>> for StringOrVec {
  fn from(v: Vec<String>) -> Self {
    Self::Vec(v)
  }
}
/// Meta data about a key.
#[derive(Debug, Clone)]
pub struct Key {
  /// Parameter name, or a numeric index for unnamed groups.
  pub name: StringOrNumber,
  /// Literal text preceding the parameter, if any (e.g. `/`).
  pub prefix: Option<String>,
  /// Literal text following the parameter, if any.
  pub suffix: Option<String>,
  /// The regex fragment this key matches against.
  pub pattern: String,
  /// Optional repetition/optionality modifier: `?`, `*` or `+`.
  pub modifier: Option<String>,
}
/// A token is a string (nothing special) or key metadata (capture group).
#[derive(Debug, Clone)]
pub enum Token {
  /// Literal text matched verbatim.
  String(String),
  /// A parameter that becomes a capture group in the regex.
  Key(Key),
}
/// Options for `parse`.
#[derive(Debug, Default)]
pub struct ParseOptions {
  /// Characters treated as segment delimiters (`/#?` when unset).
  delimiter: Option<String>,
  /// Characters that may act as an implicit parameter prefix (`./` when
  /// unset).
  prefixes: Option<String>,
}
/// Options for `Compiler::new`.
#[derive(Debug)]
pub struct TokensToCompilerOptions {
  /// When `false` (the default), validation regexes are case-insensitive.
  sensitive: bool,
  /// When `true` (the default), values are validated against each key's
  /// pattern in `Compiler::to_path`.
  validate: bool,
}
impl Default for TokensToCompilerOptions {
  fn default() -> Self {
    Self {
      sensitive: false,
      validate: true,
    }
  }
}
/// Options for `tokens_to_regex`.
#[derive(Debug)]
pub struct TokensToRegexOptions {
  /// When `false` (the default), the regex is case-insensitive.
  sensitive: bool,
  /// When `false` (the default), an optional trailing delimiter is allowed.
  strict: bool,
  /// Anchor the regex at the end of the input (default `true`).
  end: bool,
  /// Anchor the regex at the start of the input with `^` (default `true`).
  start: bool,
  /// Delimiter character set (`/#?` when unset).
  delimiter: Option<String>,
  /// Extra characters that may terminate a match, in addition to end of
  /// input.
  ends_with: Option<String>,
}
impl Default for TokensToRegexOptions {
  fn default() -> Self {
    Self {
      sensitive: false,
      strict: false,
      end: true,
      start: true,
      delimiter: None,
      ends_with: None,
    }
  }
}
/// Combined options for `string_to_regex` (parsing + regex generation).
#[derive(Debug, Default)]
pub struct PathToRegexOptions {
  /// Options forwarded to `parse`.
  parse_options: Option<ParseOptions>,
  /// Options forwarded to `tokens_to_regex`.
  token_to_regex_options: Option<TokensToRegexOptions>,
}
fn try_consume(
token_type: &TokenType,
it: &mut Peekable<impl Iterator<Item = LexToken>>,
) -> Option<String> {
if let Some(token) = it.peek()
&& &token.token_type == token_type
{
let token = it.next().unwrap();
return Some(token.value);
}
None
}
/// Consume the next token, which must be of type `token_type`; otherwise
/// return a descriptive error (naming the unexpected token, or reporting an
/// unexpected end of input).
fn must_consume(
  token_type: &TokenType,
  it: &mut Peekable<impl Iterator<Item = LexToken>>,
) -> Result<String, AnyError> {
  if let Some(value) = try_consume(token_type, it) {
    return Ok(value);
  }
  // The expected token was not next; pull the offending token (if any) to
  // produce a useful error message.
  let err = match it.next() {
    Some(token) => anyhow!(
      "Unexpected {:?} at {}, expected {:?}",
      token.token_type,
      token.index,
      token_type
    ),
    None => anyhow!("Unexpected end of tokens, expected {:?}", token_type),
  };
  Err(err)
}
/// Consume a contiguous run of `Char` and `EscapedChar` tokens, returning the
/// accumulated text, or `None` if no such tokens were available.
///
/// Mirrors path-to-regexp's `consumeText`: consumption continues while
/// *either* token kind is available. The previous implementation broke out of
/// the loop after a single unescaped character unless it was immediately
/// followed by an escaped one, truncating multi-character literal text inside
/// `{...}` groups (e.g. the prefix in `{abc:x}`).
fn consume_text(
  it: &mut Peekable<impl Iterator<Item = LexToken>>,
) -> Option<String> {
  let mut result = String::new();
  loop {
    let mut consumed = false;
    if let Some(value) = try_consume(&TokenType::Char, it) {
      result.push_str(&value);
      consumed = true;
    }
    if let Some(value) = try_consume(&TokenType::EscapedChar, it) {
      result.push_str(&value);
      consumed = true;
    }
    // Stop only once neither a plain nor an escaped character was consumed.
    if !consumed {
      break;
    }
  }
  if result.is_empty() {
    None
  } else {
    Some(result)
  }
}
/// Parse a string for the raw tokens.
///
/// Produces a sequence of literal `Token::String`s and `Token::Key`s from a
/// path template such as `/x/:a@:b/:c*`. Unnamed pattern groups are assigned
/// sequential numeric names.
pub fn parse(
  s: &str,
  maybe_options: Option<ParseOptions>,
) -> Result<Vec<Token>, AnyError> {
  let mut tokens = lexer(s)?.into_iter().peekable();
  let options = maybe_options.unwrap_or_default();
  let prefixes = options.prefixes.unwrap_or_else(|| "./".to_string());
  // Pattern used for keys that don't declare one: "anything up to the next
  // delimiter".
  let default_pattern = if let Some(delimiter) = options.delimiter {
    format!("[^{}]+?", escape_string(&delimiter))
  } else {
    "[^/#?]+?".to_string()
  };
  let mut result = Vec::new();
  // Counter for auto-named (numeric) keys.
  let mut key = 0_usize;
  // Accumulates literal characters until a key or group is encountered.
  let mut path = String::new();
  loop {
    let char = try_consume(&TokenType::Char, &mut tokens);
    let name = try_consume(&TokenType::Name, &mut tokens);
    let pattern = try_consume(&TokenType::Pattern, &mut tokens);
    // `:name` and/or `(pattern)` form a key; the preceding character may act
    // as its prefix if it is in `prefixes`.
    if name.is_some() || pattern.is_some() {
      let mut prefix = char.unwrap_or_default();
      if !prefixes.contains(&prefix) {
        path.push_str(&prefix);
        prefix = String::new();
      }
      // Flush any accumulated literal text before the key.
      if !path.is_empty() {
        result.push(Token::String(path.clone()));
        path = String::new();
      }
      let name = name.map(StringOrNumber::String).unwrap_or_else(|| {
        let default = StringOrNumber::Number(key);
        key += 1;
        default
      });
      let prefix = if prefix.is_empty() {
        None
      } else {
        Some(prefix)
      };
      result.push(Token::Key(Key {
        name,
        prefix,
        suffix: None,
        pattern: pattern.unwrap_or_else(|| default_pattern.clone()),
        modifier: try_consume(&TokenType::Modifier, &mut tokens),
      }));
      continue;
    }
    // Plain or escaped characters extend the current literal run.
    if let Some(value) = char {
      path.push_str(&value);
      continue;
    } else if let Some(value) =
      try_consume(&TokenType::EscapedChar, &mut tokens)
    {
      path.push_str(&value);
      continue;
    }
    if !path.is_empty() {
      result.push(Token::String(path.clone()));
      path = String::new();
    }
    // `{prefix :name (pattern) suffix}` — an explicitly grouped key.
    if try_consume(&TokenType::Open, &mut tokens).is_some() {
      let prefix = consume_text(&mut tokens);
      let maybe_name = try_consume(&TokenType::Name, &mut tokens);
      let maybe_pattern = try_consume(&TokenType::Pattern, &mut tokens);
      let suffix = consume_text(&mut tokens);
      must_consume(&TokenType::Close, &mut tokens)?;
      let name = maybe_name
        .clone()
        .map(StringOrNumber::String)
        .unwrap_or_else(|| {
          // Only pattern-bearing groups get an auto-assigned numeric name.
          if maybe_pattern.is_some() {
            let default = StringOrNumber::Number(key);
            key += 1;
            default
          } else {
            StringOrNumber::String("".to_string())
          }
        });
      let pattern = if maybe_name.is_some() && maybe_pattern.is_none() {
        default_pattern.clone()
      } else {
        maybe_pattern.unwrap_or_default()
      };
      result.push(Token::Key(Key {
        name,
        prefix,
        pattern,
        suffix,
        modifier: try_consume(&TokenType::Modifier, &mut tokens),
      }));
      continue;
    }
    // Nothing else matched: the only valid remaining token is `End`.
    must_consume(&TokenType::End, &mut tokens)?;
    break;
  }
  Ok(result)
}
/// Transform a vector of tokens into a regular expression, returning the
/// regular expression and optionally any keys that can be matched as part of
/// the expression.
pub fn tokens_to_regex(
  tokens: &[Token],
  maybe_options: Option<TokensToRegexOptions>,
) -> Result<(FancyRegex, Option<Vec<Key>>), AnyError> {
  let TokensToRegexOptions {
    sensitive,
    strict,
    end,
    start,
    delimiter,
    ends_with,
  } = maybe_options.unwrap_or_default();
  let has_ends_with = ends_with.is_some();
  // Character class of acceptable terminators (always includes end of input).
  let ends_with = format!(r"[{}]|$", ends_with.unwrap_or_default());
  let delimiter =
    format!(r"[{}]", delimiter.unwrap_or_else(|| "/#?".to_string()));
  let mut route = if start {
    "^".to_string()
  } else {
    String::new()
  };
  // Remember the last token: it affects the non-`end` boundary handling below.
  let maybe_end_token = tokens.iter().last().cloned();
  let mut keys: Vec<Key> = Vec::new();
  for token in tokens {
    let value = match token {
      Token::String(s) => s.to_string(),
      Token::Key(key) => {
        // Only pattern-bearing keys produce capture groups (and thus appear
        // in the returned key list).
        if !key.pattern.is_empty() {
          keys.push(key.clone());
        }
        let prefix = key
          .prefix
          .clone()
          .map(|s| escape_string(&s))
          .unwrap_or_default();
        let suffix = key
          .suffix
          .clone()
          .map(|s| escape_string(&s))
          .unwrap_or_default();
        if !key.pattern.is_empty() {
          if !prefix.is_empty() || !suffix.is_empty() {
            match &key.modifier {
              // Repeated keys: one required match plus any number of
              // suffix+prefix-separated repetitions; `*` makes the whole
              // group optional.
              Some(s) if s == "+" || s == "*" => {
                let modifier = if key.modifier == Some("*".to_string()) {
                  "?"
                } else {
                  ""
                };
                format!(
                  "(?:{}((?:{})(?:{}{}(?:{}))*){}){}",
                  prefix,
                  key.pattern,
                  suffix,
                  prefix,
                  key.pattern,
                  suffix,
                  modifier
                )
              }
              // Non-repeating key: prefix + capture + suffix, with an
              // optional `?` modifier applied to the whole group.
              _ => {
                let modifier = key.modifier.clone().unwrap_or_default();
                format!(
                  r"(?:{}({}){}){}",
                  prefix, key.pattern, suffix, modifier
                )
              }
            }
          } else {
            let modifier = key.modifier.clone().unwrap_or_default();
            format!(r"({}){}", key.pattern, modifier)
          }
        } else {
          // Pattern-less key: just the (non-capturing) prefix/suffix text.
          let modifier = key.modifier.clone().unwrap_or_default();
          format!(r"(?:{prefix}{suffix}){modifier}")
        }
      }
    };
    route.push_str(&value);
  }
  if end {
    // Anchored at the end: optionally allow one trailing delimiter, then
    // require a terminator (or end of input).
    if !strict {
      write!(route, r"{delimiter}?").unwrap();
    }
    if has_ends_with {
      write!(route, r"(?={ends_with})").unwrap();
    } else {
      route.push('$');
    }
  } else {
    // Unanchored: require a delimiter boundary unless the route already ends
    // with one.
    let is_end_delimited = match maybe_end_token {
      Some(Token::String(mut s)) => {
        if let Some(c) = s.pop() {
          delimiter.contains(c)
        } else {
          false
        }
      }
      Some(_) => false,
      None => true,
    };
    if !strict {
      write!(route, r"(?:{delimiter}(?={ends_with}))?").unwrap();
    }
    if !is_end_delimited {
      write!(route, r"(?={delimiter}|{ends_with})").unwrap();
    }
  }
  // Case-insensitive by default via an inline flag.
  let flags = if sensitive { "" } else { "(?i)" };
  let re = FancyRegex::new(&format!("{flags}{route}"))?;
  let maybe_keys = if keys.is_empty() { None } else { Some(keys) };
  Ok((re, maybe_keys))
}
/// Convert a path-like string into a regular expression, returning the regular
/// expression and optionally any keys that can be matched in the string.
pub fn string_to_regex(
  path: &str,
  maybe_options: Option<PathToRegexOptions>,
) -> Result<(FancyRegex, Option<Vec<Key>>), AnyError> {
  // Split the combined options into the two phases' option sets.
  let (parse_options, tokens_to_regex_options) = match maybe_options {
    Some(options) => (options.parse_options, options.token_to_regex_options),
    None => (None, None),
  };
  let tokens = parse(path, parse_options)?;
  tokens_to_regex(&tokens, tokens_to_regex_options)
}
/// Turns parsed tokens plus a parameter map back into a concrete path string
/// (the inverse of matching).
pub struct Compiler {
  /// Per-token validation regex; `None` for literal (non-key) tokens.
  matches: Vec<Option<Regex>>,
  tokens: Vec<Token>,
  /// Whether `to_path` validates values against each key's pattern.
  validate: bool,
}
impl Compiler {
  /// Create a compiler for `tokens`, pre-compiling one anchored validation
  /// regex per key token.
  pub fn new(
    tokens: &[Token],
    maybe_options: Option<TokensToCompilerOptions>,
  ) -> Self {
    let TokensToCompilerOptions {
      sensitive,
      validate,
    } = maybe_options.unwrap_or_default();
    let flags = if sensitive { "" } else { "(?i)" };
    let matches = tokens
      .iter()
      .map(|t| {
        if let Token::Key(k) = t {
          // Anchor the key's pattern so the whole value must match.
          Some(Regex::new(&format!("{}^(?:{})$", flags, k.pattern)).unwrap())
        } else {
          None
        }
      })
      .collect();
    Self {
      matches,
      tokens: tokens.to_vec(),
      validate,
    }
  }
  /// Convert a map of key values into a string.
  ///
  /// Errors when a required key is missing, a vector is supplied for a
  /// non-repeating key, an empty vector is supplied for a non-optional key,
  /// or (when validation is enabled) a value fails its key's pattern.
  pub fn to_path(
    &self,
    params: &HashMap<StringOrNumber, StringOrVec>,
  ) -> Result<String, AnyError> {
    let mut path = String::new();
    for (i, token) in self.tokens.iter().enumerate() {
      match token {
        Token::String(s) => path.push_str(s),
        Token::Key(k) => {
          let value = params.get(&k.name);
          // `?` and `*` keys may be absent/empty; `*` and `+` keys repeat.
          let optional = k.modifier == Some("?".to_string())
            || k.modifier == Some("*".to_string());
          let repeat = k.modifier == Some("*".to_string())
            || k.modifier == Some("+".to_string());
          match value {
            Some(StringOrVec::Vec(v)) => {
              if !repeat {
                return Err(anyhow!(
                  "Expected \"{:?}\" to not repeat, but got a vector",
                  k.name
                ));
              }
              if v.is_empty() {
                if !optional {
                  return Err(anyhow!(
                    "Expected \"{:?}\" to not be empty.",
                    k.name
                  ));
                }
              } else {
                let prefix = k.prefix.clone().unwrap_or_default();
                let suffix = k.suffix.clone().unwrap_or_default();
                // Each segment is validated (if enabled) and wrapped in the
                // key's prefix/suffix.
                for segment in v {
                  if !segment.is_empty()
                    && self.validate
                    && let Some(re) = &self.matches[i]
                    && !re.is_match(segment)
                  {
                    return Err(anyhow!(
                      "Expected all \"{:?}\" to match \"{}\", but got {}",
                      k.name,
                      k.pattern,
                      segment
                    ));
                  }
                  write!(path, "{prefix}{segment}{suffix}").unwrap();
                }
              }
            }
            Some(StringOrVec::String(s)) => {
              if self.validate
                && let Some(re) = &self.matches[i]
                && !re.is_match(s)
              {
                return Err(anyhow!(
                  "Expected \"{:?}\" to match \"{}\", but got \"{}\"",
                  k.name,
                  k.pattern,
                  s
                ));
              }
              let prefix = k.prefix.clone().unwrap_or_default();
              let suffix = k.suffix.clone().unwrap_or_default();
              write!(path, "{prefix}{s}{suffix}").unwrap();
            }
            None => {
              if !optional {
                let key_type = if repeat { "an array" } else { "a string" };
                return Err(anyhow!(
                  "Expected \"{:?}\" to be {}",
                  k.name,
                  key_type
                ));
              }
            }
          }
        }
      }
    }
    Ok(path)
  }
}
/// The parameters captured by a successful `Matcher::matches` call.
#[derive(Debug)]
pub struct MatchResult {
  pub params: HashMap<StringOrNumber, StringOrVec>,
}
impl MatchResult {
  /// Look up a captured parameter by its string name.
  pub fn get(&self, key: &str) -> Option<&StringOrVec> {
    self.params.get(&StringOrNumber::String(key.to_string()))
  }
}
/// Matches concrete paths against a compiled token pattern, extracting any
/// captured parameters.
#[derive(Debug)]
pub struct Matcher {
  /// Keys corresponding, in order, to the regex's capture groups.
  maybe_keys: Option<Vec<Key>>,
  re: FancyRegex,
}
impl Matcher {
  /// Build a matcher by converting `tokens` to a regular expression.
  pub fn new(
    tokens: &[Token],
    maybe_options: Option<TokensToRegexOptions>,
  ) -> Result<Self, AnyError> {
    let (re, maybe_keys) = tokens_to_regex(tokens, maybe_options)?;
    Ok(Self { maybe_keys, re })
  }
  /// Match a string path, optionally returning the match result.
  pub fn matches(&self, path: &str) -> Option<MatchResult> {
    // `captures` returns Result<Option<_>, _>; treat regex errors as no
    // match as well.
    let caps = self.re.captures(path).ok()??;
    let mut params = HashMap::new();
    if let Some(keys) = &self.maybe_keys {
      // Capture group i + 1 corresponds to key i (group 0 is the whole
      // match).
      for (i, key) in keys.iter().enumerate() {
        if let Some(m) = caps.get(i + 1) {
          // Repeated keys are split back into segments on the
          // prefix+suffix separator (mirrors `StringOrVec::from_str`).
          let value = if key.modifier == Some("*".to_string())
            || key.modifier == Some("+".to_string())
          {
            let pat = format!(
              "{}{}",
              key.prefix.clone().unwrap_or_default(),
              key.suffix.clone().unwrap_or_default()
            );
            m.as_str()
              .split(&pat)
              .map(String::from)
              .collect::<Vec<String>>()
              .into()
          } else {
            m.as_str().into()
          };
          params.insert(key.name.clone(), value);
        }
      }
    }
    Some(MatchResult { params })
  }
}
#[cfg(test)]
mod tests {
  use super::*;
  // (matched text, start byte offset, end byte offset) expected from `find`.
  type FixtureMatch<'a> = (&'a str, usize, usize);
  // An input string plus its expected match; `None` means "must not match".
  type Fixture<'a> = (&'a str, Option<FixtureMatch<'a>>);
  /// Compile `path` into a regex and assert that every fixture either
  /// matches with the expected text and span, or does not match at all.
  fn test_path(
    path: &str,
    maybe_options: Option<PathToRegexOptions>,
    fixtures: &[Fixture],
  ) {
    let result = string_to_regex(path, maybe_options);
    assert!(result.is_ok(), "Could not parse path: \"{path}\"");
    let (re, _) = result.unwrap();
    for (fixture, expected) in fixtures {
      let result = re.find(fixture);
      assert!(
        result.is_ok(),
        "Find failure for path \"{path}\" and fixture \"{fixture}\""
      );
      let actual = result.unwrap();
      if let Some((text, start, end)) = *expected {
        assert!(
          actual.is_some(),
          "Match failure for path \"{path}\" and fixture \"{fixture}\". Expected Some got None"
        );
        let actual = actual.unwrap();
        assert_eq!(
          actual.as_str(),
          text,
          "Match failure for path \"{}\" and fixture \"{}\". Expected \"{}\" got \"{}\".",
          path,
          fixture,
          text,
          actual.as_str()
        );
        assert_eq!(actual.start(), start);
        assert_eq!(actual.end(), end);
      } else {
        assert!(
          actual.is_none(),
          "Match failure for path \"{path}\" and fixture \"{fixture}\". Expected None got {actual:?}"
        );
      }
    }
  }
  // Round-trips a parsed pattern through `Compiler::to_path` with string
  // and repeated (`*`) parameters.
  #[test]
  fn test_compiler() {
    let tokens = parse("/x/:a@:b/:c*", None).expect("could not parse");
    let mut params = HashMap::<StringOrNumber, StringOrVec>::new();
    params.insert(
      StringOrNumber::String("a".to_string()),
      StringOrVec::String("y".to_string()),
    );
    params.insert(
      StringOrNumber::String("b".to_string()),
      StringOrVec::String("v1.0.0".to_string()),
    );
    params.insert(
      StringOrNumber::String("c".to_string()),
      StringOrVec::Vec(vec!["z".to_string(), "example.ts".to_string()]),
    );
    let compiler = Compiler::new(&tokens, None);
    let actual = compiler.to_path(&params);
    assert!(actual.is_ok());
    let actual = actual.unwrap();
    assert_eq!(actual, "/x/y@v1.0.0/z/example.ts".to_string());
  }
  // A trailing empty element in a repeated parameter produces a trailing
  // separator in the compiled path.
  #[test]
  fn test_compiler_ends_with_sep() {
    let tokens = parse("/x/:a@:b/:c*", None).expect("could not parse");
    let mut params = HashMap::<StringOrNumber, StringOrVec>::new();
    params.insert(
      StringOrNumber::String("a".to_string()),
      StringOrVec::String("y".to_string()),
    );
    params.insert(
      StringOrNumber::String("b".to_string()),
      StringOrVec::String("v1.0.0".to_string()),
    );
    params.insert(
      StringOrNumber::String("c".to_string()),
      StringOrVec::Vec(vec![
        "z".to_string(),
        "example".to_string(),
        "".to_string(),
      ]),
    );
    let compiler = Compiler::new(&tokens, None);
    let actual = compiler.to_path(&params);
    assert!(actual.is_ok());
    let actual = actual.unwrap();
    assert_eq!(actual, "/x/y@v1.0.0/z/example/".to_string());
  }
  // Table-driven matching tests, including case-sensitivity options.
  #[test]
  fn test_string_to_regex() {
    test_path("/", None, &[("/test", None), ("/", Some(("/", 0, 1)))]);
    test_path(
      "/test",
      None,
      &[
        ("/test", Some(("/test", 0, 5))),
        ("/route", None),
        ("/test/route", None),
        ("/test/", Some(("/test/", 0, 6))),
      ],
    );
    test_path(
      "/test/",
      None,
      &[
        ("/test", None),
        ("/test/", Some(("/test/", 0, 6))),
        ("/test//", Some(("/test//", 0, 7))),
      ],
    );
    // case-sensitive paths
    test_path(
      "/test",
      Some(PathToRegexOptions {
        parse_options: None,
        token_to_regex_options: Some(TokensToRegexOptions {
          sensitive: true,
          ..Default::default()
        }),
      }),
      &[("/test", Some(("/test", 0, 5))), ("/TEST", None)],
    );
    test_path(
      "/TEST",
      Some(PathToRegexOptions {
        parse_options: None,
        token_to_regex_options: Some(TokensToRegexOptions {
          sensitive: true,
          ..Default::default()
        }),
      }),
      &[("/TEST", Some(("/TEST", 0, 5))), ("/test", None)],
    );
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/lsp/diagnostics.rs | cli/lsp/diagnostics.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::collections::HashMap;
use std::str::FromStr;
use std::sync::Arc;
use std::thread;
use console_static_text::ansi::strip_ansi_codes;
use deno_ast::MediaType;
use deno_core::ModuleSpecifier;
use deno_core::anyhow::anyhow;
use deno_core::error::AnyError;
use deno_core::futures::StreamExt;
use deno_core::parking_lot::RwLock;
use deno_core::resolve_url;
use deno_core::serde::Deserialize;
use deno_core::serde_json;
use deno_core::serde_json::json;
use deno_core::url::Url;
use deno_graph::Resolution;
use deno_graph::ResolutionError;
use deno_graph::SpecifierError;
use deno_graph::source::ResolveError;
use deno_resolver::deno_json::CompilerOptionsKey;
use deno_resolver::graph::enhanced_resolution_error_message;
use deno_resolver::workspace::sloppy_imports_resolve;
use deno_runtime::deno_node;
use deno_runtime::tokio_util::create_basic_runtime;
use deno_semver::jsr::JsrPackageReqReference;
use deno_semver::npm::NpmPackageReqReference;
use deno_semver::package::PackageReq;
use import_map::ImportMap;
use import_map::ImportMapErrorKind;
use log::error;
use lsp_types::Uri;
use node_resolver::NodeResolutionKind;
use tokio::sync::mpsc;
use tokio_util::sync::CancellationToken;
use tokio_util::task::AbortOnDropHandle;
use tower_lsp::lsp_types as lsp;
use super::analysis;
use super::analysis::import_map_lookup;
use super::client::Client;
use super::documents::Document;
use super::documents::DocumentModule;
use super::documents::DocumentModules;
use super::language_server;
use super::language_server::StateSnapshot;
use super::performance::Performance;
use super::tsc::TsServer;
use crate::lsp::documents::OpenDocument;
use crate::lsp::language_server::OnceCellMap;
use crate::lsp::lint::LspLinter;
use crate::lsp::logging::lsp_warn;
use crate::lsp::urls::uri_to_url;
use crate::sys::CliSys;
use crate::tsc::DiagnosticCategory;
use crate::type_checker::ambient_modules_to_regex_string;
use crate::util::path::to_percent_decoded_str;
/// Message sent to the diagnostics thread asking it to regenerate
/// diagnostics against the given snapshot of language server state.
#[derive(Debug)]
pub struct DiagnosticsUpdateMessage {
  pub snapshot: Arc<StateSnapshot>,
}
/// Identifies which subsystem produced a diagnostic; rendered into the
/// LSP `source` field so clients can distinguish and filter them.
#[derive(Debug, Hash, PartialEq, Eq, Copy, Clone)]
pub enum DiagnosticSource {
  Deno,
  Lint,
  Ts,
}
impl DiagnosticSource {
  /// The string used for the `source` field of published LSP diagnostics.
  pub fn as_lsp_source(&self) -> &'static str {
    use DiagnosticSource::*;
    match self {
      Deno => "deno",
      Lint => "deno-lint",
      Ts => "deno-ts",
    }
  }
}
/// Whether batch start/end notifications should be emitted around each
/// diagnostics pass, controlled by an internal environment variable flag.
fn should_send_diagnostic_batch_notifications() -> bool {
  const FLAG: &str = "DENO_INTERNAL_DIAGNOSTIC_BATCH_NOTIFICATIONS";
  deno_lib::args::has_flag_env_var(FLAG)
}
/// Diagnostics retained for a single open document, tagged with the
/// document version they were computed against so stale publishes can be
/// discarded.
#[derive(Debug)]
struct DocumentDiagnosticsState {
  version: i32,
  // Only the `deno-ts` sourced diagnostics.
  ts_diagnostics: Arc<Vec<lsp::Diagnostic>>,
  // Only the "no-cache" / "not-installed-*" coded diagnostics.
  no_cache_diagnostics: Arc<Vec<lsp::Diagnostic>>,
}
/// Per-URI cache of the most recently published diagnostics.
#[derive(Debug, Default)]
pub struct DiagnosticsState {
  documents: RwLock<HashMap<Uri, DocumentDiagnosticsState>>,
}
impl DiagnosticsState {
  /// Record the diagnostics just published for `uri` at document `version`.
  ///
  /// Publishes computed against an older `version` than the one already
  /// recorded are ignored. Only the subsets queried later are retained:
  /// the `deno-ts` sourced diagnostics and the cache-related
  /// ("no-cache" / "not-installed-jsr" / "not-installed-npm") diagnostics.
  fn update(&self, uri: &Uri, version: i32, diagnostics: &[lsp::Diagnostic]) {
    let mut documents = self.documents.write();
    // Ignore results computed against an older version of the document.
    if documents.get(uri).is_some_and(|s| version < s.version) {
      return;
    }
    let mut ts_diagnostics = vec![];
    let mut no_cache_diagnostics = vec![];
    for diagnostic in diagnostics {
      if diagnostic.source.as_deref()
        == Some(DiagnosticSource::Ts.as_lsp_source())
      {
        ts_diagnostics.push(diagnostic.clone());
      }
      // Compare against the borrowed code string rather than allocating a
      // fresh `String` per candidate code for every diagnostic.
      if let Some(lsp::NumberOrString::String(code)) = &diagnostic.code
        && matches!(
          code.as_str(),
          "no-cache" | "not-installed-jsr" | "not-installed-npm"
        )
      {
        no_cache_diagnostics.push(diagnostic.clone());
      }
    }
    documents.insert(
      uri.clone(),
      DocumentDiagnosticsState {
        version,
        ts_diagnostics: Arc::new(ts_diagnostics),
        no_cache_diagnostics: Arc::new(no_cache_diagnostics),
      },
    );
  }
  /// Drop all recorded diagnostics for `uri`.
  pub fn clear(&self, uri: &Uri) {
    self.documents.write().remove(uri);
  }
  /// The last recorded `deno-ts` diagnostics for `uri`, or empty.
  pub fn ts_diagnostics(&self, uri: &Uri) -> Arc<Vec<lsp::Diagnostic>> {
    self
      .documents
      .read()
      .get(uri)
      .map(|s| s.ts_diagnostics.clone())
      .unwrap_or_default()
  }
  /// Whether `uri` currently has any cache-related diagnostics recorded.
  pub fn has_no_cache_diagnostics(&self, uri: &Uri) -> bool {
    self
      .documents
      .read()
      .get(uri)
      .map(|s| !s.no_cache_diagnostics.is_empty())
      .unwrap_or(false)
  }
  /// The last recorded cache-related diagnostics for `uri`, or empty.
  pub fn no_cache_diagnostics(&self, uri: &Uri) -> Arc<Vec<lsp::Diagnostic>> {
    self
      .documents
      .read()
      .get(uri)
      .map(|s| s.no_cache_diagnostics.clone())
      .unwrap_or_default()
  }
}
/// Owns the background diagnostics thread and the channel used to feed it
/// state snapshots; `channel` is `None` until `start()` has been called.
pub struct DiagnosticsServer {
  channel: Option<mpsc::UnboundedSender<DiagnosticsUpdateMessage>>,
  client: Client,
  performance: Arc<Performance>,
  ts_server: Arc<TsServer>,
  pub state: Arc<DiagnosticsState>,
}
// Hand-written `Debug` printing every field; kept manual rather than
// derived — presumably deliberate, confirm before replacing with a derive.
impl std::fmt::Debug for DiagnosticsServer {
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    f.debug_struct("DiagnosticsServer")
      .field("channel", &self.channel)
      .field("client", &self.client)
      .field("performance", &self.performance)
      .field("ts_server", &self.ts_server)
      .field("state", &self.state)
      .finish()
  }
}
impl DiagnosticsServer {
  /// Create a server that is not yet running; call `start()` before
  /// sending updates via `update()`.
  pub fn new(
    client: Client,
    performance: Arc<Performance>,
    ts_server: Arc<TsServer>,
  ) -> Self {
    DiagnosticsServer {
      channel: Default::default(),
      client,
      performance,
      ts_server,
      state: Default::default(),
    }
  }
  /// Spawn the dedicated diagnostics thread with its own tokio runtime.
  /// Each received snapshot starts a fresh diagnostics pass; starting a new
  /// pass drops (and thereby aborts) the previous in-flight one, so only
  /// the latest snapshot is ever being processed.
  #[allow(unused_must_use)]
  pub fn start(&mut self) {
    let (tx, mut rx) = mpsc::unbounded_channel::<DiagnosticsUpdateMessage>();
    self.channel = Some(tx);
    let client = self.client.clone();
    let state = self.state.clone();
    let performance = self.performance.clone();
    let ts_server = self.ts_server.clone();
    let should_send_batch_notifications =
      should_send_diagnostic_batch_notifications();
    let _join_handle = thread::spawn(move || {
      let runtime = create_basic_runtime();
      runtime.block_on(async {
        // Shared across passes so ambient-module regexes are built once
        // per (compiler options, notebook) pair.
        let ambient_modules_regex_cache = Arc::new(OnceCellMap::<
          (CompilerOptionsKey, Option<Arc<Uri>>),
          Option<regex::Regex>,
        >::new());
        let mut _previous_handle;
        while let Some(message) = rx.recv().await {
          let _mark = performance.measure_scope("lsp.update_diagnostics");
          let client = client.clone();
          let state = state.clone();
          let ts_server = ts_server.clone();
          let ambient_modules_regex_cache = ambient_modules_regex_cache.clone();
          let join_handle = tokio::task::spawn(async move {
            // Cancels all per-document work when this task is dropped
            // (i.e. when a newer snapshot supersedes it).
            let token = CancellationToken::new();
            let _drop_guard = token.drop_guard_ref();
            let DiagnosticsUpdateMessage { snapshot } = message;
            if should_send_batch_notifications {
              client.send_diagnostic_batch_start_notification();
            }
            let open_docs =
              snapshot.document_modules.documents.open_docs().cloned();
            // Generate diagnostics for every open document concurrently,
            // bounded below by `buffered`.
            deno_core::futures::stream::iter(open_docs.map(|document| {
              let snapshot = snapshot.clone();
              let ts_server = ts_server.clone();
              let ambient_modules_regex_cache =
                ambient_modules_regex_cache.clone();
              let token = token.clone();
              AbortOnDropHandle::new(tokio::task::spawn(async move {
                let diagnostics = generate_document_diagnostics(
                  &document,
                  &snapshot,
                  &ts_server,
                  &ambient_modules_regex_cache,
                  &token,
                )
                .await
                .unwrap_or_else(|err| {
                  lsp_warn!(
                    "Couldn't generate diagnostics for \"{}\": {err:#}",
                    document.uri.as_str()
                  );
                  vec![]
                });
                (document, diagnostics)
              }))
            }))
            // Limit concurrency to the available parallelism (or 8 when
            // it cannot be determined).
            .buffered(
              std::thread::available_parallelism()
                .map(From::from)
                .unwrap_or(8),
            )
            .for_each(|result| async {
              let (document, diagnostics) = match result {
                Ok(r) => r,
                Err(err) => {
                  lsp_warn!("Diagnostics task join error: {err:#}");
                  return;
                }
              };
              publish_document_diagnostics(
                &document,
                diagnostics,
                &client,
                &state,
                &token,
              )
              .await
            })
            .await;
            if should_send_batch_notifications {
              client.send_diagnostic_batch_end_notification();
            }
          });
          // Replacing the handle drops the previous AbortOnDropHandle,
          // aborting any still-running earlier pass.
          _previous_handle = AbortOnDropHandle::new(join_handle);
        }
      })
    });
  }
  /// Queue a snapshot for the diagnostics thread; errors if `start()` has
  /// not been called or the thread's receiver has gone away.
  pub fn update(
    &self,
    message: DiagnosticsUpdateMessage,
  ) -> Result<(), AnyError> {
    // todo(dsherret): instead of queuing up messages, it would be better to
    // instead only store the latest message (ex. maybe using a
    // tokio::sync::watch::channel)
    if let Some(tx) = &self.channel {
      tx.send(message).map_err(|err| err.into())
    } else {
      Err(anyhow!("diagnostics server not started"))
    }
  }
}
impl<'a> From<&'a crate::tsc::DiagnosticCategory> for lsp::DiagnosticSeverity {
fn from(category: &'a crate::tsc::DiagnosticCategory) -> Self {
match category {
crate::tsc::DiagnosticCategory::Error => lsp::DiagnosticSeverity::ERROR,
crate::tsc::DiagnosticCategory::Warning => {
lsp::DiagnosticSeverity::WARNING
}
crate::tsc::DiagnosticCategory::Suggestion => {
lsp::DiagnosticSeverity::HINT
}
crate::tsc::DiagnosticCategory::Message => {
lsp::DiagnosticSeverity::INFORMATION
}
}
}
}
impl<'a> From<&'a crate::tsc::Position> for lsp::Position {
  /// Convert a tsc position to an LSP position (same zero-based line and
  /// character fields; narrowed to `u32` exactly as before).
  fn from(pos: &'a crate::tsc::Position) -> Self {
    let line = pos.line as u32;
    let character = pos.character as u32;
    Self { line, character }
  }
}
/// Extract a human-readable message from a tsc diagnostic, preferring the
/// flat `message_text` over the nested message chain, with a placeholder
/// when neither is present.
fn get_diagnostic_message(diagnostic: &crate::tsc::Diagnostic) -> String {
  diagnostic
    .message_text
    .clone()
    .or_else(|| {
      diagnostic
        .message_chain
        .clone()
        .map(|chain| chain.format_message(0))
    })
    .unwrap_or_else(|| "[missing message]".to_string())
}
/// Build an LSP range from a pair of tsc positions.
fn to_lsp_range(
  start: &crate::tsc::Position,
  end: &crate::tsc::Position,
) -> lsp::Range {
  lsp::Range {
    start: lsp::Position::from(start),
    end: lsp::Position::from(end),
  }
}
/// Convert tsc related-information entries to their LSP equivalents,
/// skipping entries missing a file name or span. Entries whose specifier
/// cannot be resolved to a known module fall back to an "unknown:" URI.
fn to_lsp_related_information(
  related_information: &Option<Vec<crate::tsc::Diagnostic>>,
  module: &DocumentModule,
  document_modules: &DocumentModules,
) -> Option<Vec<lsp::DiagnosticRelatedInformation>> {
  let related = related_information.as_ref()?;
  let infos = related
    .iter()
    .filter_map(|ri| {
      let file_name = ri.file_name.as_ref()?;
      let start = ri.start.as_ref()?;
      let end = ri.end.as_ref()?;
      let uri = resolve_url(file_name)
        .ok()
        .and_then(|s| {
          document_modules.module_for_specifier(
            &s,
            module.scope.as_deref(),
            Some(&module.compiler_options_key),
          )
        })
        .map(|m| m.uri.as_ref().clone())
        .unwrap_or_else(|| Uri::from_str("unknown:").unwrap());
      Some(lsp::DiagnosticRelatedInformation {
        location: lsp::Location {
          uri,
          range: to_lsp_range(start, end),
        },
        message: get_diagnostic_message(ri),
      })
    })
    .collect();
  Some(infos)
}
/// Convert raw tsc diagnostics into LSP diagnostics, dropping any entry
/// without a start/end span.
pub fn ts_json_to_diagnostics(
  diagnostics: Vec<crate::tsc::Diagnostic>,
  module: &DocumentModule,
  document_modules: &DocumentModules,
) -> Vec<lsp::Diagnostic> {
  diagnostics
    .iter()
    .filter_map(|d| {
      let (start, end) = match (&d.start, &d.end) {
        (Some(start), Some(end)) => (start, end),
        _ => return None,
      };
      let tags = match d.code {
        // These are codes that indicate the variable is unused.
        2695 | 6133 | 6138 | 6192 | 6196 | 6198 | 6199 | 6205 | 7027
        | 7028 => Some(vec![lsp::DiagnosticTag::UNNECESSARY]),
        // These are codes that indicated the variable is deprecated.
        2789 | 6385 | 6387 => Some(vec![lsp::DiagnosticTag::DEPRECATED]),
        _ => None,
      };
      Some(lsp::Diagnostic {
        range: to_lsp_range(start, end),
        severity: Some((&d.category).into()),
        code: Some(lsp::NumberOrString::Number(d.code as i32)),
        code_description: None,
        source: Some(DiagnosticSource::Ts.as_lsp_source().to_string()),
        message: get_diagnostic_message(d),
        related_information: to_lsp_related_information(
          &d.related_information,
          module,
          document_modules,
        ),
        tags,
        data: None,
      })
    })
    .collect()
}
/// Produce lint diagnostics for a module, or an empty list when the module
/// is not diagnosable, excluded by the lint config, failed to parse, or
/// linting itself fails.
fn generate_document_lint_diagnostics(
  module: &DocumentModule,
  linter: &LspLinter,
  token: CancellationToken,
) -> Vec<lsp::Diagnostic> {
  let in_scope = module.is_diagnosable()
    && linter.lint_config.files.matches_specifier(&module.specifier);
  if !in_scope {
    return Vec::new();
  }
  let parsed_source = module
    .open_data
    .as_ref()
    .and_then(|d| d.parsed_source.as_ref());
  match parsed_source {
    Some(Ok(parsed_source)) => {
      analysis::get_lint_references(parsed_source, &linter.inner, token)
        .map(|references| {
          references.into_iter().map(|r| r.to_diagnostic()).collect()
        })
        .unwrap_or_default()
    }
    // A failed parse yields no lint diagnostics.
    Some(Err(_)) => Vec::new(),
    None => {
      error!("Missing file contents for: {}", &module.specifier);
      Vec::new()
    }
  }
}
/// `data` payload carrying a module specifier (used by cache/install fixes).
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct DiagnosticDataSpecifier {
  pub specifier: ModuleSpecifier,
}
/// `data` payload carrying a raw specifier string (used by the
/// "import-node-prefix-missing" fix).
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
struct DiagnosticDataStrSpecifier {
  pub specifier: String,
}
/// `data` payload carrying the redirect target (used by the "redirect" fix).
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
struct DiagnosticDataRedirect {
  pub redirect: ModuleSpecifier,
}
/// `data` payload for the "no-local" fix: the suggested target and the
/// quick-fix title text.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
struct DiagnosticDataNoLocal {
  pub to: ModuleSpecifier,
  pub message: String,
}
/// `data` payload for the "import-map-remap" fix.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
struct DiagnosticDataImportMapRemap {
  pub from: String,
  pub to: String,
}
/// An enum which represents diagnostic errors which originate from Deno itself.
pub enum DenoDiagnostic {
  /// A `x-deno-warning` is associated with the specifier and should be displayed
  /// as a warning to the user.
  DenoWarn(String),
  /// An informational diagnostic that indicates an existing specifier can be
  /// remapped to an import map import specifier.
  ImportMapRemap { from: String, to: String },
  /// The import attribute type is incorrect.
  InvalidAttributeType(String),
  /// A module requires an attribute type to be a valid import.
  NoAttributeType,
  /// A remote module was not found in the cache.
  NoCache(ModuleSpecifier),
  /// A jsr package reference was not found in the cache.
  NotInstalledJsr(PackageReq, ModuleSpecifier),
  /// An npm package reference was not found in the cache.
  NotInstalledNpm(PackageReq, ModuleSpecifier),
  /// An npm package reference was not exported by its package.
  NoExportNpm(NpmPackageReqReference),
  /// A local module was not found on the local file system.
  NoLocal(ModuleSpecifier),
  /// An error occurred when resolving the specifier string.
  ResolutionError(deno_graph::ResolutionError),
  /// Unknown `node:` specifier.
  UnknownNodeSpecifier(ModuleSpecifier),
  /// Bare specifier is used for `node:` specifier
  BareNodeSpecifier(String),
}
impl DenoDiagnostic {
  /// The stable code string attached to the published diagnostic; the same
  /// strings are matched again in `get_code_action`/`is_fixable` to
  /// round-trip a diagnostic back into its quick fix.
  fn code(&self) -> &str {
    match self {
      Self::DenoWarn(_) => "deno-warn",
      Self::ImportMapRemap { .. } => "import-map-remap",
      Self::InvalidAttributeType(_) => "invalid-attribute-type",
      Self::NoAttributeType => "no-attribute-type",
      Self::NoCache(_) => "no-cache",
      Self::NotInstalledJsr(_, _) => "not-installed-jsr",
      Self::NotInstalledNpm(_, _) => "not-installed-npm",
      Self::NoExportNpm(_) => "no-export-npm",
      Self::NoLocal(_) => "no-local",
      Self::ResolutionError(err) => {
        // A bare "fs"-style specifier that should be "node:fs" gets its
        // own fixable code; otherwise map the error kind to a code.
        if deno_resolver::graph::get_resolution_error_bare_node_specifier(err)
          .is_some()
        {
          "import-node-prefix-missing"
        } else {
          match err {
            ResolutionError::InvalidDowngrade { .. } => "invalid-downgrade",
            ResolutionError::InvalidJsrHttpsTypesImport { .. } => {
              "invalid-jsr-https-types-import"
            }
            ResolutionError::InvalidLocalImport { .. } => {
              "invalid-local-import"
            }
            ResolutionError::InvalidSpecifier { error, .. } => match error {
              SpecifierError::ImportPrefixMissing { .. } => {
                "import-prefix-missing"
              }
              SpecifierError::InvalidUrl(_) => "invalid-url",
            },
            ResolutionError::ResolverError { .. } => "resolver-error",
          }
        }
      }
      Self::UnknownNodeSpecifier(_) => "resolver-error",
      Self::BareNodeSpecifier(_) => "import-node-prefix-missing",
    }
  }
  /// A "static" method which for a diagnostic that originated from the
  /// structure returns a code action which can resolve the diagnostic.
  ///
  /// Most fixes read their parameters back out of the diagnostic's `data`
  /// payload (see the `DiagnosticData*` structs) and either edit the text
  /// at the diagnostic's range or issue a `deno.cache` command.
  pub fn get_code_action(
    uri: &Uri,
    specifier: &ModuleSpecifier,
    diagnostic: &lsp::Diagnostic,
  ) -> Result<lsp::CodeAction, AnyError> {
    if let Some(lsp::NumberOrString::String(code)) = &diagnostic.code {
      let code_action = match code.as_str() {
        "import-map-remap" => {
          let data = diagnostic
            .data
            .clone()
            .ok_or_else(|| anyhow!("Diagnostic is missing data"))?;
          let DiagnosticDataImportMapRemap { from, to } =
            serde_json::from_value(data)?;
          lsp::CodeAction {
            title: format!("Update \"{from}\" to \"{to}\" to use import map."),
            kind: Some(lsp::CodeActionKind::QUICKFIX),
            diagnostics: Some(vec![diagnostic.clone()]),
            edit: Some(lsp::WorkspaceEdit {
              changes: Some(HashMap::from([(
                uri.clone(),
                vec![lsp::TextEdit {
                  new_text: format!("\"{to}\""),
                  range: diagnostic.range,
                }],
              )])),
              ..Default::default()
            }),
            ..Default::default()
          }
        }
        // Appends an import attribute immediately after the specifier.
        "no-attribute-type" => lsp::CodeAction {
          title: "Insert import attribute.".to_string(),
          kind: Some(lsp::CodeActionKind::QUICKFIX),
          diagnostics: Some(vec![diagnostic.clone()]),
          edit: Some(lsp::WorkspaceEdit {
            changes: Some(HashMap::from([(
              uri.clone(),
              vec![lsp::TextEdit {
                new_text: " with { type: \"json\" }".to_string(),
                range: lsp::Range {
                  start: diagnostic.range.end,
                  end: diagnostic.range.end,
                },
              }],
            )])),
            ..Default::default()
          }),
          ..Default::default()
        },
        // These fixes run a command rather than editing text.
        "no-cache" | "not-installed-jsr" | "not-installed-npm" => {
          let data = diagnostic
            .data
            .clone()
            .ok_or_else(|| anyhow!("Diagnostic is missing data"))?;
          let data: DiagnosticDataSpecifier = serde_json::from_value(data)?;
          let title = if matches!(
            code.as_str(),
            "not-installed-jsr" | "not-installed-npm"
          ) {
            format!("Install \"{}\" and its dependencies.", data.specifier)
          } else {
            format!("Cache \"{}\" and its dependencies.", data.specifier)
          };
          lsp::CodeAction {
            title,
            kind: Some(lsp::CodeActionKind::QUICKFIX),
            diagnostics: Some(vec![diagnostic.clone()]),
            command: Some(lsp::Command {
              title: "".to_string(),
              command: "deno.cache".to_string(),
              arguments: Some(vec![json!([data.specifier]), json!(&specifier)]),
            }),
            ..Default::default()
          }
        }
        // Rewrites the specifier to the sloppy-imports suggestion carried
        // in the diagnostic data (only present when a suggestion exists).
        "no-local" => {
          let data = diagnostic
            .data
            .clone()
            .ok_or_else(|| anyhow!("Diagnostic is missing data"))?;
          let data: DiagnosticDataNoLocal = serde_json::from_value(data)?;
          lsp::CodeAction {
            title: data.message,
            kind: Some(lsp::CodeActionKind::QUICKFIX),
            diagnostics: Some(vec![diagnostic.clone()]),
            edit: Some(lsp::WorkspaceEdit {
              changes: Some(HashMap::from([(
                uri.clone(),
                vec![lsp::TextEdit {
                  new_text: format!(
                    "\"{}\"",
                    relative_specifier(&data.to, specifier)
                  ),
                  range: diagnostic.range,
                }],
              )])),
              ..Default::default()
            }),
            ..Default::default()
          }
        }
        // NOTE(review): `code()` never emits "redirect"; presumably this
        // code is attached to diagnostics produced elsewhere — confirm.
        "redirect" => {
          let data = diagnostic
            .data
            .clone()
            .ok_or_else(|| anyhow!("Diagnostic is missing data"))?;
          let data: DiagnosticDataRedirect = serde_json::from_value(data)?;
          lsp::CodeAction {
            title: "Update specifier to its redirected specifier.".to_string(),
            kind: Some(lsp::CodeActionKind::QUICKFIX),
            diagnostics: Some(vec![diagnostic.clone()]),
            edit: Some(lsp::WorkspaceEdit {
              changes: Some(HashMap::from([(
                uri.clone(),
                vec![lsp::TextEdit {
                  new_text: format!(
                    "\"{}\"",
                    specifier_text_for_redirected(&data.redirect, specifier)
                  ),
                  range: diagnostic.range,
                }],
              )])),
              ..Default::default()
            }),
            ..Default::default()
          }
        }
        // Prepends "node:" to a bare built-in specifier.
        "import-node-prefix-missing" => {
          let data = diagnostic
            .data
            .clone()
            .ok_or_else(|| anyhow!("Diagnostic is missing data"))?;
          let data: DiagnosticDataStrSpecifier = serde_json::from_value(data)?;
          lsp::CodeAction {
            title: format!("Update specifier to node:{}", data.specifier),
            kind: Some(lsp::CodeActionKind::QUICKFIX),
            diagnostics: Some(vec![diagnostic.clone()]),
            edit: Some(lsp::WorkspaceEdit {
              changes: Some(HashMap::from([(
                uri.clone(),
                vec![lsp::TextEdit {
                  new_text: format!("\"node:{}\"", data.specifier),
                  range: diagnostic.range,
                }],
              )])),
              ..Default::default()
            }),
            ..Default::default()
          }
        }
        _ => {
          return Err(anyhow!(
            "Unsupported diagnostic code (\"{}\") provided.",
            code
          ));
        }
      };
      Ok(code_action)
    } else {
      Err(anyhow!("Unsupported diagnostic code provided."))
    }
  }
  /// Given a reference to the code from an LSP diagnostic, determine if the
  /// diagnostic is fixable or not
  pub fn is_fixable(diagnostic: &lsp_types::Diagnostic) -> bool {
    if let Some(lsp::NumberOrString::String(code)) = &diagnostic.code {
      match code.as_str() {
        "import-map-remap"
        | "no-cache"
        | "not-installed-jsr"
        | "not-installed-npm"
        | "no-attribute-type"
        | "redirect"
        | "import-node-prefix-missing" => true,
        // "no-local" is only fixable when a sloppy-imports suggestion was
        // attached as data.
        "no-local" => diagnostic.data.is_some(),
        _ => false,
      }
    } else {
      false
    }
  }
  /// Convert to an lsp Diagnostic when the range the diagnostic applies to is
  /// provided.
  pub fn to_lsp_diagnostic(&self, range: &lsp::Range) -> lsp::Diagnostic {
    // Builds the "no-local" message, appending either the sloppy-imports
    // suggestion or a generic hint.
    fn no_local_message(
      specifier: &ModuleSpecifier,
      suggestion_message: Option<String>,
    ) -> String {
      let mut message = format!(
        "Unable to load a local module: {}\n",
        to_percent_decoded_str(specifier.as_ref())
      );
      if let Some(suggestion_message) = suggestion_message {
        message.push_str(&suggestion_message);
        message.push('.');
      } else {
        message.push_str("Please check the file path.");
      }
      message
    }
    // Each variant maps to (severity, message, optional `data` payload the
    // matching quick fix in `get_code_action` reads back).
    let (severity, message, data) = match self {
      Self::DenoWarn(message) => (lsp::DiagnosticSeverity::WARNING, message.to_string(), None),
      Self::ImportMapRemap { from, to } => (lsp::DiagnosticSeverity::HINT, format!("The import specifier can be remapped to \"{to}\" which will resolve it via the active import map."), Some(json!({ "from": from, "to": to }))),
      Self::InvalidAttributeType(assert_type) => (lsp::DiagnosticSeverity::ERROR, format!("The module is a JSON module and expected an attribute type of \"json\". Instead got \"{assert_type}\"."), None),
      Self::NoAttributeType => (lsp::DiagnosticSeverity::ERROR, "The module is a JSON module and not being imported with an import attribute. Consider adding `with { type: \"json\" }` to the import statement.".to_string(), None),
      Self::NoCache(specifier) => (lsp::DiagnosticSeverity::ERROR, format!("Uncached or missing remote URL: {specifier}"), Some(json!({ "specifier": specifier }))),
      Self::NotInstalledJsr(pkg_req, specifier) => (lsp::DiagnosticSeverity::ERROR, format!("JSR package \"{pkg_req}\" is not installed or doesn't exist."), Some(json!({ "specifier": specifier }))),
      Self::NotInstalledNpm(pkg_req, specifier) => (lsp::DiagnosticSeverity::ERROR, format!("npm package \"{pkg_req}\" is not installed or doesn't exist."), Some(json!({ "specifier": specifier }))),
      Self::NoExportNpm(pkg_ref) => (lsp::DiagnosticSeverity::ERROR, format!("NPM package \"{}\" does not define an export \"{}\".", pkg_ref.req(), pkg_ref.sub_path().unwrap_or(".")), None),
      Self::NoLocal(specifier) => {
        // Try a sloppy-imports resolution (e.g. missing extension) so the
        // message and quick-fix data can suggest the real file.
        let sloppy_resolution = sloppy_imports_resolve(specifier, deno_resolver::workspace::ResolutionKind::Execution, CliSys::default());
        let data = sloppy_resolution.as_ref().map(|(resolved, sloppy_reason)| {
          json!({
            "specifier": specifier,
            "to": resolved,
            "message": sloppy_reason.quick_fix_message_for_specifier(resolved),
          })
        });
        (lsp::DiagnosticSeverity::ERROR, no_local_message(specifier, sloppy_resolution.as_ref().map(|(resolved, sloppy_reason)| sloppy_reason.suggestion_message_for_specifier(resolved))), data)
      },
      Self::ResolutionError(err) => {
        let message = strip_ansi_codes(&enhanced_resolution_error_message(err)).into_owned();
        (
          lsp::DiagnosticSeverity::ERROR,
          message,
          deno_resolver::graph::get_resolution_error_bare_node_specifier(err)
            .map(|specifier| json!({ "specifier": specifier }))
        )},
      Self::UnknownNodeSpecifier(specifier) => (lsp::DiagnosticSeverity::ERROR, format!("No such built-in module: node:{}", specifier.path()), None),
      Self::BareNodeSpecifier(specifier) => (lsp::DiagnosticSeverity::WARNING, format!("\"{0}\" is resolved to \"node:{0}\". If you want to use a built-in Node module, add a \"node:\" prefix.", specifier), Some(json!({ "specifier": specifier }))),
    };
    lsp::Diagnostic {
      range: *range,
      severity: Some(severity),
      code: Some(lsp::NumberOrString::String(self.code().to_string())),
      source: Some(DiagnosticSource::Deno.as_lsp_source().to_string()),
      message,
      data,
      ..Default::default()
    }
  }
}
/// The specifier text to insert for a redirect fix: relative when both
/// URLs are file URLs, otherwise the absolute redirect target.
fn specifier_text_for_redirected(redirect: &Url, referrer: &Url) -> String {
  let both_file_urls =
    redirect.scheme() == "file" && referrer.scheme() == "file";
  if !both_file_urls {
    return redirect.to_string();
  }
  // use a relative specifier when it's going to a file url
  relative_specifier(redirect, referrer)
}
/// Express `specifier` relative to `referrer`, adding a "./" prefix when
/// the relative form does not already begin with a dot. Falls back to the
/// absolute form when no relative path can be computed.
fn relative_specifier(specifier: &Url, referrer: &Url) -> String {
  let Some(relative) = referrer.make_relative(specifier) else {
    return specifier.to_string();
  };
  if relative.starts_with('.') {
    relative
  } else {
    format!("./{}", relative)
  }
}
fn maybe_ambient_import_specifier(
diagnostic: &DenoDiagnostic,
) -> Option<String> {
match diagnostic {
DenoDiagnostic::NoCache(url) | DenoDiagnostic::NoLocal(url) => {
Some(url.to_string())
}
DenoDiagnostic::ResolutionError(err) => {
maybe_ambient_specifier_resolution_err(err)
}
_ => None,
}
}
/// For a resolution error, the bare specifier text that could match an
/// ambient module declaration: either an unprefixed import specifier or an
/// import-map bare specifier that failed to map. All other error kinds
/// yield `None`.
fn maybe_ambient_specifier_resolution_err(
  err: &ResolutionError,
) -> Option<String> {
  match err {
    ResolutionError::InvalidDowngrade { .. }
    | ResolutionError::InvalidJsrHttpsTypesImport { .. }
    | ResolutionError::InvalidLocalImport { .. } => None,
    ResolutionError::InvalidSpecifier { error, .. } => match error {
      SpecifierError::InvalidUrl(..) => None,
      SpecifierError::ImportPrefixMissing { specifier, .. } => {
        Some(specifier.to_string())
      }
    },
    ResolutionError::ResolverError { error, .. } => match &**error {
      ResolveError::Specifier(specifier_error) => match specifier_error {
        SpecifierError::InvalidUrl(..) => None,
        SpecifierError::ImportPrefixMissing { specifier, .. } => {
          Some(specifier.to_string())
        }
      },
      ResolveError::ImportMap(import_map_error) => {
        match import_map_error.as_kind() {
          ImportMapErrorKind::UnmappedBareSpecifier(spec, _) => {
            Some(spec.clone())
          }
          // Exhaustive on purpose: a new import-map error kind should
          // force a decision here rather than silently returning None.
          ImportMapErrorKind::JsonParse(_)
          | ImportMapErrorKind::ImportMapNotObject
          | ImportMapErrorKind::ImportsFieldNotObject
          | ImportMapErrorKind::ScopesFieldNotObject
          | ImportMapErrorKind::ScopePrefixNotObject(_)
          | ImportMapErrorKind::BlockedByNullEntry(_)
          | ImportMapErrorKind::SpecifierResolutionFailure { .. }
          | ImportMapErrorKind::SpecifierBacktracksAbovePrefix { .. } => None,
        }
      }
      ResolveError::Other(..) => None,
    },
  }
}
#[allow(clippy::too_many_arguments)]
fn diagnose_resolution(
snapshot: &language_server::StateSnapshot,
dependency_key: &str,
resolution: &Resolution,
is_dynamic: bool,
maybe_assert_type: Option<&str>,
referrer_module: &DocumentModule,
import_map: Option<&ImportMap>,
) -> (Vec<DenoDiagnostic>, Vec<DenoDiagnostic>) {
let mut diagnostics = vec![];
let mut deferred_diagnostics = vec![];
match resolution {
Resolution::Ok(resolved) => {
let specifier = &resolved.specifier;
let scoped_resolver = snapshot
.resolver
.get_scoped_resolver(referrer_module.scope.as_deref());
let managed_npm_resolver =
scoped_resolver.as_maybe_managed_npm_resolver();
for (_, headers) in scoped_resolver.redirect_chain_headers(specifier) {
if let Some(message) = headers.get("x-deno-warning") {
diagnostics.push(DenoDiagnostic::DenoWarn(message.clone()));
}
}
match snapshot.document_modules.module_for_specifier(
specifier,
referrer_module.scope.as_deref(),
Some(&referrer_module.compiler_options_key),
) {
Some(module) => {
if let Some(headers) = &module.headers
&& let Some(message) = headers.get("x-deno-warning")
{
diagnostics.push(DenoDiagnostic::DenoWarn(message.clone()));
}
if module.media_type == MediaType::Json {
match maybe_assert_type {
// The module has the correct assertion type, no diagnostic
Some("json" | "text" | "bytes") => (),
// The dynamic import statement is missing an attribute type, which
// we might not be able to statically detect, therefore we will
// not provide a potentially incorrect diagnostic.
None if is_dynamic => (),
// The module has an incorrect assertion type, diagnostic
Some(assert_type) => diagnostics.push(
DenoDiagnostic::InvalidAttributeType(assert_type.to_string()),
),
// The module is missing an attribute type, diagnostic
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | true |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/lsp/documents.rs | cli/lsp/documents.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::collections::BTreeMap;
use std::collections::HashMap;
use std::collections::HashSet;
use std::fs;
use std::future::Future;
use std::ops::Range;
use std::path::PathBuf;
use std::pin::Pin;
use std::str::FromStr;
use std::sync::Arc;
use std::sync::Weak;
use std::time::SystemTime;
use dashmap::DashMap;
use deno_ast::MediaType;
use deno_ast::ParsedSource;
use deno_ast::SourceTextInfo;
use deno_ast::swc::ecma_visit::VisitWith;
use deno_core::ModuleSpecifier;
use deno_core::error::AnyError;
use deno_core::futures::FutureExt;
use deno_core::futures::future;
use deno_core::futures::future::Shared;
use deno_core::parking_lot::RwLock;
use deno_core::resolve_url;
use deno_core::url::Position;
use deno_core::url::Url;
use deno_error::JsErrorBox;
use deno_graph::TypesDependency;
use deno_path_util::url_to_file_path;
use deno_resolver::deno_json::CompilerOptionsKey;
use deno_runtime::deno_node;
use deno_semver::jsr::JsrPackageReqReference;
use deno_semver::npm::NpmPackageReqReference;
use indexmap::IndexMap;
use indexmap::IndexSet;
use lsp_types::Uri;
use node_resolver::NodeResolutionKind;
use node_resolver::ResolutionMode;
use node_resolver::cache::NodeResolutionThreadLocalCache;
use once_cell::sync::Lazy;
use serde::Serialize;
use tower_lsp::lsp_types as lsp;
use weak_table::PtrWeakKeyHashMap;
use weak_table::WeakValueHashMap;
use super::cache::LspCache;
use super::cache::calculate_fs_version_at_path;
use super::config::Config;
use super::logging::lsp_warn;
use super::resolver::LspResolver;
use super::resolver::ScopeDepInfo;
use super::resolver::SingleReferrerGraphResolver;
use super::testing::TestCollector;
use super::testing::TestModule;
use super::text::LineIndex;
use super::tsc::ChangeKind;
use super::tsc::NavigationTree;
use super::urls::COMPONENT;
use super::urls::normalize_uri;
use super::urls::uri_is_file_like;
use super::urls::uri_to_file_path;
use super::urls::uri_to_url;
use super::urls::url_to_uri;
use crate::graph_util::CliJsrUrlProvider;
use crate::lsp::compiler_options::LspCompilerOptionsData;
use crate::lsp::compiler_options::LspCompilerOptionsResolver;
/// A document currently open in the editor, tracked via LSP
/// `textDocument/didOpen` / `didChange` notifications.
#[derive(Debug)]
pub struct OpenDocument {
  pub uri: Arc<Uri>,
  pub text: Arc<str>,
  // Precomputed index for LSP position <-> byte offset conversions.
  pub line_index: Arc<LineIndex>,
  // Editor-reported document version.
  pub version: i32,
  pub language_id: LanguageId,
  // Set when this document is a cell of a notebook document.
  pub notebook_uri: Option<Arc<Uri>>,
  // Filesystem version captured when the document was opened; `None` for
  // URIs that don't map to a readable file path.
  pub fs_version_on_open: Option<String>,
}
impl OpenDocument {
  /// Creates a new open document, computing its line index and capturing the
  /// file's version on disk at open time (when the URI maps to a file path).
  fn new(
    uri: Uri,
    version: i32,
    language_id: LanguageId,
    text: Arc<str>,
    notebook_uri: Option<Arc<Uri>>,
  ) -> Self {
    let line_index = Arc::new(LineIndex::new(&text));
    // Non-file URIs (or unreadable paths) simply have no fs version.
    let fs_version_on_open = uri_to_file_path(&uri)
      .ok()
      .and_then(calculate_fs_version_at_path);
    OpenDocument {
      uri: Arc::new(uri),
      text,
      line_index,
      version,
      language_id,
      notebook_uri,
      fs_version_on_open,
    }
  }

  /// Applies a batch of LSP content changes in order, producing a new
  /// document at `version`. A change with a range is an incremental edit; a
  /// change without a range replaces the entire text.
  fn with_change(
    &self,
    version: i32,
    changes: Vec<lsp::TextDocumentContentChangeEvent>,
  ) -> Result<Self, AnyError> {
    let mut text = self.text.to_string();
    let mut line_index = self.line_index.clone();
    // Tracks how much of `line_index` still matches `text`; an edit
    // invalidates every line at or after the edit's start line.
    let mut index_valid = IndexValid::All;
    for change in changes {
      if let Some(range) = change.range {
        // Rebuild the index if a previous edit invalidated the lines needed
        // to convert this range's positions into byte offsets.
        if !index_valid.covers(range.start.line) {
          line_index = Arc::new(LineIndex::new(&text));
        }
        index_valid = IndexValid::UpTo(range.start.line);
        let range = line_index.get_text_range(range)?;
        text.replace_range(Range::<usize>::from(range), &change.text);
      } else {
        // Full-document replacement.
        text = change.text;
        index_valid = IndexValid::UpTo(0);
      }
    }
    let text: Arc<str> = text.into();
    // Reuse the existing index only if no edit touched the text.
    let line_index = if index_valid == IndexValid::All {
      line_index
    } else {
      Arc::new(LineIndex::new(&text))
    };
    Ok(OpenDocument {
      uri: self.uri.clone(),
      text,
      line_index,
      version,
      language_id: self.language_id,
      notebook_uri: self.notebook_uri.clone(),
      fs_version_on_open: self.fs_version_on_open.clone(),
    })
  }

  /// Whether the language id is one diagnostics are produced for.
  pub fn is_diagnosable(&self) -> bool {
    self.language_id.is_diagnosable()
  }

  /// Whether the URI has a file-like scheme.
  pub fn is_file_like(&self) -> bool {
    uri_is_file_like(&self.uri)
  }

  /// Version string combining the on-disk version at open time with the
  /// editor version, so re-opened documents get distinct script versions.
  pub fn script_version(&self) -> String {
    let fs_version = self.fs_version_on_open.as_deref().unwrap_or("1");
    format!("{fs_version}+{}", self.version)
  }
}
/// Maps an `http(s)` URL into the `deno:/<scheme>/...` URI space used for
/// read-only remote documents, percent-encoding each path component.
///
/// Returns `None` for non-http(s) schemes, or (with a warning) when the
/// resulting string fails to parse as a URI.
fn remote_url_to_uri(url: &Url) -> Option<Uri> {
  if !matches!(url.scheme(), "http" | "https") {
    return None;
  }
  let mut string = String::with_capacity(url.as_str().len() + 6);
  string.push_str("deno:/");
  string.push_str(url.scheme());
  for p in url[Position::BeforeHost..].split('/') {
    string.push('/');
    // `PercentEncode` is an `Iterator<Item = &str>`, so extend the buffer
    // directly instead of allocating an intermediate `String` per segment.
    string.extend(percent_encoding::utf8_percent_encode(p, COMPONENT));
  }
  Uri::from_str(&string)
    .inspect_err(|err| {
      lsp_warn!("Couldn't convert remote URL \"{url}\" to URI: {err}")
    })
    .ok()
}
/// Maps an `asset:` URL to the `deno:/asset` URI space used by the client.
/// Returns `None` for other schemes, or (with a warning) on URI parse
/// failure.
fn asset_url_to_uri(url: &Url) -> Option<Uri> {
  if url.scheme() != "asset" {
    return None;
  }
  let uri_string = format!("deno:/asset{}", url.path());
  match Uri::from_str(&uri_string) {
    Ok(uri) => Some(uri),
    Err(err) => {
      lsp_warn!("Couldn't convert asset URL \"{url}\" to URI: {err}");
      None
    }
  }
}
/// Derives a stable `deno:/data_url/<hash><ext>` URI for a `data:` URL.
///
/// The URI is keyed by a checksum of the URL's path (and query, if any), and
/// carries a TS-style extension so the media type survives the mapping.
fn data_url_to_uri(url: &Url) -> Option<Uri> {
  let parsed = deno_media_type::data_url::RawDataUrl::parse(url).ok()?;
  let extension = match parsed.media_type() {
    MediaType::Unknown => "",
    media_type => media_type.as_ts_extension(),
  };
  // Include the query so distinct data URLs hash to distinct URIs.
  let mut hash_input = url.path().to_string();
  if let Some(query) = url.query() {
    hash_input.push('?');
    hash_input.push_str(query);
  }
  let hash = deno_lib::util::checksum::r#gen(&[hash_input.as_bytes()]);
  match Uri::from_str(&format!("deno:/data_url/{hash}{extension}")) {
    Ok(uri) => Some(uri),
    Err(err) => {
      lsp_warn!("Couldn't convert data url \"{url}\" to URI: {err}");
      None
    }
  }
}
/// Document text that is either borrowed from a `'static` asset or shared
/// via an `Arc`, avoiding copies in both cases.
#[derive(Debug, Clone)]
pub enum DocumentText {
  Static(&'static str),
  Arc(Arc<str>),
}

impl DocumentText {
  /// Will clone the string if static.
  pub fn to_arc(&self) -> Arc<str> {
    match self {
      Self::Static(s) => (*s).into(),
      Self::Arc(s) => s.clone(),
    }
  }
}

impl std::ops::Deref for DocumentText {
  type Target = str;
  fn deref(&self) -> &Self::Target {
    match self {
      Self::Static(s) => s,
      Self::Arc(s) => s,
    }
  }
}

impl Serialize for DocumentText {
  // Serializes as a plain string, regardless of the storage variant.
  fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
  where
    S: serde::Serializer,
  {
    (self as &str).serialize(serializer)
  }
}
/// Where a server-managed (not editor-open) document's content came from.
#[derive(Debug, Clone)]
pub enum ServerDocumentKind {
  /// Read directly from the local filesystem.
  Fs {
    fs_version: String,
    text: Arc<str>,
  },
  /// An http(s) module read from the global module cache.
  RemoteUrl {
    url: Arc<Url>,
    fs_cache_version: String,
    text: Arc<str>,
  },
  /// A decoded `data:` URL (immutable).
  DataUrl {
    url: Arc<Url>,
    text: Arc<str>,
  },
  /// A bundled static asset with `'static` text (immutable).
  Asset {
    url: Arc<Url>,
    text: &'static str,
  },
}
/// A document the server loaded itself (dependency, closed file, asset),
/// as opposed to one the editor has open.
#[derive(Debug)]
pub struct ServerDocument {
  pub uri: Arc<Uri>,
  pub media_type: MediaType,
  pub line_index: Arc<LineIndex>,
  // The content source; also determines the script version semantics.
  pub kind: ServerDocumentKind,
}
impl ServerDocument {
  /// Loads a file-schemed URI from disk. Returns `None` for non-file
  /// schemes or when reading/decoding the file fails.
  fn load(uri: &Uri) -> Option<Self> {
    let scheme = uri.scheme()?;
    if scheme.eq_lowercase("file") {
      let url = uri_to_url(uri);
      let path = url_to_file_path(&url).ok()?;
      let bytes = fs::read(&path).ok()?;
      let media_type = MediaType::from_specifier(&url);
      let text: Arc<str> =
        bytes_to_content(&url, media_type, bytes, None).ok()?.into();
      let fs_version = calculate_fs_version_at_path(&path)?;
      let line_index = Arc::new(LineIndex::new(&text));
      return Some(Self {
        uri: Arc::new(uri.clone()),
        media_type,
        line_index,
        kind: ServerDocumentKind::Fs { fs_version, text },
      });
    }
    None
  }

  /// Loads a remote (http/https) document from the module cache for the
  /// given scope. Returns `None` when the module isn't cached or can't be
  /// decoded.
  fn remote_url(
    uri: &Uri,
    url: Arc<Url>,
    scope: Option<&Url>,
    cache: &LspCache,
  ) -> Option<Self> {
    let media_type = MediaType::from_specifier(&url);
    let http_cache = cache.for_specifier(scope);
    let cache_key = http_cache.cache_item_key(&url).ok()?;
    let cache_entry = http_cache.get(&cache_key, None).ok()??;
    let (_, maybe_charset) =
      deno_graph::source::resolve_media_type_and_charset_from_headers(
        &url,
        Some(&cache_entry.metadata.headers),
      );
    // Use the cache file's mtime (ms since epoch) as the version string,
    // falling back to "1" when the mtime is unavailable.
    let fs_cache_version = (|| {
      let modified = http_cache.read_modified_time(&cache_key).ok()??;
      let duration = modified.duration_since(SystemTime::UNIX_EPOCH).ok()?;
      Some(duration.as_millis().to_string())
    })()
    .unwrap_or_else(|| "1".to_string());
    let text: Arc<str> = bytes_to_content(
      &url,
      media_type,
      cache_entry.content.into_owned(),
      maybe_charset,
    )
    .ok()?
    .into();
    let line_index = Arc::new(LineIndex::new(&text));
    Some(Self {
      uri: Arc::new(uri.clone()),
      media_type,
      line_index,
      kind: ServerDocumentKind::RemoteUrl {
        url,
        fs_cache_version,
        text,
      },
    })
  }

  /// Creates a document for a bundled static asset.
  ///
  /// Panics are acceptable here: the name and derived URL are
  /// compile-time-known asset identifiers, so parsing can't fail.
  fn asset(name: &str, text: &'static str) -> Self {
    let url = Arc::new(Url::parse(&format!("asset:///{name}")).unwrap());
    let uri = asset_url_to_uri(&url).unwrap();
    let media_type = MediaType::from_specifier(&url);
    let line_index = Arc::new(LineIndex::new(text));
    Self {
      uri: Arc::new(uri),
      media_type,
      line_index,
      kind: ServerDocumentKind::Asset { url, text },
    }
  }

  /// Decodes a `data:` URL into a document.
  fn data_url(uri: &Uri, url: Arc<Url>) -> Option<Self> {
    let raw_data_url =
      deno_media_type::data_url::RawDataUrl::parse(&url).ok()?;
    let media_type = raw_data_url.media_type();
    let text: Arc<str> = raw_data_url.decode().ok()?.into();
    let line_index = Arc::new(LineIndex::new(&text));
    Some(Self {
      uri: Arc::new(uri.clone()),
      media_type,
      line_index,
      kind: ServerDocumentKind::DataUrl { url, text },
    })
  }

  /// The document text; borrowed statically for assets, shared via `Arc`
  /// otherwise.
  pub fn text(&self) -> DocumentText {
    match &self.kind {
      ServerDocumentKind::Fs { text, .. } => DocumentText::Arc(text.clone()),
      ServerDocumentKind::RemoteUrl { text, .. } => {
        DocumentText::Arc(text.clone())
      }
      ServerDocumentKind::DataUrl { text, .. } => {
        DocumentText::Arc(text.clone())
      }
      ServerDocumentKind::Asset { text, .. } => DocumentText::Static(text),
    }
  }

  pub fn is_diagnosable(&self) -> bool {
    media_type_is_diagnosable(self.media_type)
  }

  pub fn is_file_like(&self) -> bool {
    uri_is_file_like(&self.uri)
  }

  /// Version string used for cache invalidation. Data URLs and assets are
  /// immutable, so they always report "1".
  pub fn script_version(&self) -> String {
    match &self.kind {
      ServerDocumentKind::Fs { fs_version, .. } => fs_version.clone(),
      ServerDocumentKind::RemoteUrl {
        fs_cache_version, ..
      } => fs_cache_version.clone(),
      ServerDocumentKind::DataUrl { .. } => "1".to_string(),
      ServerDocumentKind::Asset { .. } => "1".to_string(),
    }
  }
}
/// Immutable map of bundled asset documents, keyed by their URIs.
#[derive(Debug)]
pub struct AssetDocuments {
  inner: HashMap<Arc<Uri>, Arc<ServerDocument>>,
}

impl AssetDocuments {
  /// Looks up an asset document by URI.
  pub fn get(&self, k: &Uri) -> Option<&Arc<ServerDocument>> {
    self.inner.get(k)
  }
}
/// Lazily-built map of the static assets bundled with the binary (from
/// `crate::tsc`), keyed by their `deno:/asset` URIs.
pub static ASSET_DOCUMENTS: Lazy<AssetDocuments> =
  Lazy::new(|| AssetDocuments {
    inner: crate::tsc::LAZILY_LOADED_STATIC_ASSETS
      .iter()
      .map(|(k, v)| {
        let doc = Arc::new(ServerDocument::asset(k, v.source.as_str()));
        let uri = doc.uri.clone();
        (uri, doc)
      })
      .collect(),
  });
/// Either an editor-open document or a server-loaded one; most accessors
/// delegate to whichever variant is held.
#[derive(Debug, Clone)]
pub enum Document {
  Open(Arc<OpenDocument>),
  Server(Arc<ServerDocument>),
}
impl Document {
  /// Returns the open document, if this is the `Open` variant.
  pub fn open(&self) -> Option<&Arc<OpenDocument>> {
    if let Self::Open(doc) = self {
      Some(doc)
    } else {
      None
    }
  }

  /// Returns the server document, if this is the `Server` variant.
  pub fn server(&self) -> Option<&Arc<ServerDocument>> {
    if let Self::Server(doc) = self {
      Some(doc)
    } else {
      None
    }
  }

  /// The document's URI, regardless of variant.
  pub fn uri(&self) -> &Arc<Uri> {
    match self {
      Self::Open(doc) => &doc.uri,
      Self::Server(doc) => &doc.uri,
    }
  }

  /// The document's current text.
  pub fn text(&self) -> DocumentText {
    match self {
      Self::Open(doc) => DocumentText::Arc(doc.text.clone()),
      Self::Server(doc) => doc.text(),
    }
  }

  /// The line index for position <-> offset conversions.
  pub fn line_index(&self) -> &Arc<LineIndex> {
    match self {
      Self::Open(doc) => &doc.line_index,
      Self::Server(doc) => &doc.line_index,
    }
  }

  /// Version string used for cache invalidation.
  pub fn script_version(&self) -> String {
    match self {
      Self::Open(doc) => doc.script_version(),
      Self::Server(doc) => doc.script_version(),
    }
  }

  /// Whether diagnostics should be produced for this document.
  pub fn is_diagnosable(&self) -> bool {
    match self {
      Self::Open(doc) => doc.is_diagnosable(),
      Self::Server(doc) => doc.is_diagnosable(),
    }
  }

  /// Whether the document's URI has a file-like scheme.
  pub fn is_file_like(&self) -> bool {
    match self {
      Self::Open(doc) => doc.is_file_like(),
      Self::Server(doc) => doc.is_file_like(),
    }
  }
}
/// The collection of all documents known to the language server: editor-open
/// documents, server-loaded documents, and the lookup tables relating them.
#[derive(Debug, Default, Clone)]
pub struct Documents {
  // Editor-open documents, in open order.
  open: IndexMap<Uri, Arc<OpenDocument>>,
  // Server-loaded documents, lazily populated and evicted.
  server: Arc<DashMap<Uri, Arc<ServerDocument>>>,
  // Notebook URI -> its cell URIs, in cell order.
  cells_by_notebook_uri: BTreeMap<Arc<Uri>, Vec<Arc<Uri>>>,
  // file:// URL -> the non-file-schemed URI the client opened it under.
  file_like_uris_by_url: Arc<DashMap<Url, Arc<Uri>>>,
  /// These URLs can not be recovered from the URIs we assign them without these
  /// maps. We want to be able to discard old documents from here but keep these
  /// mappings.
  data_urls_by_uri: Arc<DashMap<Uri, Arc<Url>>>,
  remote_urls_by_uri: Arc<DashMap<Uri, Arc<Url>>>,
}
impl Documents {
  /// Registers a newly-opened document, displacing any server-loaded entry
  /// for the same URI.
  fn open(
    &mut self,
    uri: Uri,
    version: i32,
    language_id: LanguageId,
    text: Arc<str>,
    notebook_uri: Option<Arc<Uri>>,
  ) -> Arc<OpenDocument> {
    let uri = normalize_uri(&uri);
    // The open document supersedes any server copy.
    self.server.remove(&uri);
    let doc = Arc::new(OpenDocument::new(
      uri.as_ref().clone(),
      version,
      language_id,
      text,
      notebook_uri,
    ));
    self.open.insert(uri.into_owned(), doc.clone());
    // If the client opened this under a non-file scheme but it maps to a
    // file URL, remember the mapping so specifier lookups can find it.
    if !doc.uri.scheme().is_some_and(|s| s.eq_lowercase("file")) {
      let url = uri_to_url(&doc.uri);
      if url.scheme() == "file" {
        self.file_like_uris_by_url.insert(url, doc.uri.clone());
      }
    }
    doc
  }

  /// Applies content changes to an open document, replacing the stored
  /// entry. Errors if the URI isn't an open document.
  fn change(
    &mut self,
    uri: &Uri,
    version: i32,
    changes: Vec<lsp::TextDocumentContentChangeEvent>,
  ) -> Result<Arc<OpenDocument>, AnyError> {
    let uri = normalize_uri(uri);
    // Remove-then-reinsert so the old entry is dropped even if the change
    // application fails part-way... note this keeps insertion order stable
    // only via reinsertion at the end.
    let Some((uri, doc)) = self.open.shift_remove_entry(uri.as_ref()) else {
      return Err(
        JsErrorBox::new(
          "NotFound",
          format!(
            "The URI \"{}\" does not refer to an open document.",
            uri.as_str()
          ),
        )
        .into(),
      );
    };
    let doc = Arc::new(doc.with_change(version, changes)?);
    self.open.insert(uri, doc.clone());
    Ok(doc)
  }

  /// Removes an open document (and its file-like URI mapping), returning it.
  fn close(&mut self, uri: &Uri) -> Result<Arc<OpenDocument>, AnyError> {
    let uri = normalize_uri(uri);
    self
      .file_like_uris_by_url
      .retain(|_, u| u.as_ref() != uri.as_ref());
    let doc = self.open.shift_remove(uri.as_ref()).ok_or_else(|| {
      JsErrorBox::new(
        "NotFound",
        format!(
          "The URI \"{}\" does not refer to an open document.",
          uri.as_str()
        ),
      )
    })?;
    Ok(doc)
  }

  /// Opens a notebook and each of its cells as individual open documents,
  /// recording the cell order under the notebook URI.
  fn open_notebook(
    &mut self,
    uri: Uri,
    cells: Vec<lsp::TextDocumentItem>,
  ) -> Vec<Arc<OpenDocument>> {
    let uri = Arc::new(normalize_uri(&uri).into_owned());
    let mut documents = Vec::with_capacity(cells.len());
    for cell in cells {
      let language_id = cell.language_id.parse().unwrap_or_else(|err| {
        lsp_warn!("{:#}", err);
        LanguageId::Unknown
      });
      if language_id == LanguageId::Unknown {
        lsp_warn!(
          "Unsupported language id \"{}\" received for document \"{}\".",
          cell.language_id,
          cell.uri.as_str()
        );
      }
      let document = self.open(
        cell.uri.clone(),
        cell.version,
        language_id,
        cell.text.into(),
        Some(uri.clone()),
      );
      documents.push(document);
    }
    self
      .cells_by_notebook_uri
      .insert(uri, documents.iter().map(|d| d.uri.clone()).collect());
    documents
  }

  /// Applies a notebook change: structural cell add/remove/reorder first,
  /// then per-cell content changes. Returns the affected cell documents
  /// paired with what happened to each.
  pub fn change_notebook(
    &mut self,
    uri: &Uri,
    structure: Option<lsp::NotebookDocumentCellChangeStructure>,
    content: Option<Vec<lsp::NotebookDocumentChangeTextContent>>,
  ) -> Vec<(Arc<OpenDocument>, ChangeKind)> {
    let uri = Arc::new(normalize_uri(uri).into_owned());
    let mut documents_with_change_kinds = Vec::new();
    if let Some(structure) = structure {
      // Splice the new cell ordering into the recorded cell list per the
      // LSP's array-change description (start index + delete count).
      if let Some(cells) = self.cells_by_notebook_uri.get_mut(&uri) {
        cells.splice(
          structure.array.start as usize
            ..(structure.array.start + structure.array.delete_count) as usize,
          structure
            .array
            .cells
            .into_iter()
            .flatten()
            .map(|c| Arc::new(normalize_uri(&c.document).into_owned())),
        );
      }
      for closed in structure.did_close.into_iter().flatten() {
        let document = match self.close(&closed.uri) {
          Ok(d) => d,
          Err(err) => {
            lsp_warn!("{:#}", err);
            continue;
          }
        };
        documents_with_change_kinds.push((document, ChangeKind::Closed));
      }
      for opened in structure.did_open.into_iter().flatten() {
        let language_id = opened.language_id.parse().unwrap_or_else(|err| {
          lsp_warn!("{:#}", err);
          LanguageId::Unknown
        });
        if language_id == LanguageId::Unknown {
          lsp_warn!(
            "Unsupported language id \"{}\" received for document \"{}\".",
            opened.language_id,
            opened.uri.as_str()
          );
        }
        let document = self.open(
          opened.uri,
          opened.version,
          language_id,
          opened.text.into(),
          Some(uri.clone()),
        );
        documents_with_change_kinds.push((document, ChangeKind::Opened));
      }
    }
    for changed in content.into_iter().flatten() {
      let document = match self.change(
        &changed.document.uri,
        changed.document.version,
        changed.changes,
      ) {
        Ok(d) => d,
        Err(err) => {
          lsp_warn!("{:#}", err);
          continue;
        }
      };
      documents_with_change_kinds.push((document, ChangeKind::Modified));
    }
    documents_with_change_kinds
  }

  /// Closes a notebook, closing each of its cells. Unknown notebooks log a
  /// warning and return no documents.
  pub fn close_notebook(&mut self, uri: &Uri) -> Vec<Arc<OpenDocument>> {
    let uri = normalize_uri(uri);
    let Some(cell_uris) = self.cells_by_notebook_uri.remove(uri.as_ref())
    else {
      lsp_warn!(
        "The URI \"{}\" does not refer to an open notebook document.",
        uri.as_str(),
      );
      return Default::default();
    };
    let mut documents = Vec::with_capacity(cell_uris.len());
    for cell_uri in cell_uris {
      let document = match self.close(&cell_uri) {
        Ok(d) => d,
        Err(err) => {
          lsp_warn!("{:#}", err);
          continue;
        }
      };
      documents.push(document);
    }
    documents
  }

  /// Looks up a document, preferring open documents, then assets, then
  /// cached server documents, finally loading from disk (or a remembered
  /// data URL) and caching the result.
  pub fn get(&self, uri: &Uri) -> Option<Document> {
    let uri = normalize_uri(uri);
    if let Some(doc) = self.open.get(uri.as_ref()) {
      return Some(Document::Open(doc.clone()));
    }
    if let Some(doc) = ASSET_DOCUMENTS.get(&uri) {
      return Some(Document::Server(doc.clone()));
    }
    if let Some(doc) = self.server.get(&uri) {
      return Some(Document::Server(doc.clone()));
    }
    let doc = match ServerDocument::load(&uri) {
      Some(doc) => doc,
      _ => match self.data_urls_by_uri.get(&uri) {
        Some(data_url) => {
          ServerDocument::data_url(&uri, data_url.value().clone())?
        }
        _ => {
          return None;
        }
      },
    };
    let doc = Arc::new(doc);
    self.server.insert(uri.into_owned(), doc.clone());
    Some(Document::Server(doc))
  }

  /// This will not create any server entries, only retrieve existing entries.
  pub fn inspect(&self, uri: &Uri) -> Option<Document> {
    let uri = normalize_uri(uri);
    if let Some(doc) = self.open.get(uri.as_ref()) {
      return Some(Document::Open(doc.clone()));
    }
    if let Some(doc) = self.server.get(&uri) {
      return Some(Document::Server(doc.clone()));
    }
    None
  }

  /// Looks up a document by module specifier, translating each supported
  /// scheme (file/asset/http(s)/data) into its URI space first. Remote and
  /// data URLs record their URL under the derived URI so it can be recovered
  /// later (see field docs on `Documents`).
  pub fn get_for_specifier(
    &self,
    specifier: &Url,
    scope: Option<&Url>,
    cache: &LspCache,
  ) -> Option<Document> {
    let scheme = specifier.scheme();
    if scheme == "file" {
      // Prefer the URI the client actually opened this file under, if any.
      let uri = self
        .file_like_uris_by_url
        .get(specifier)
        .map(|e| e.value().clone())
        .or_else(|| url_to_uri(specifier).ok().map(Arc::new))?;
      self.get(&uri)
    } else if scheme == "asset" {
      let uri = asset_url_to_uri(specifier)?;
      self.get(&uri)
    } else if scheme == "http" || scheme == "https" {
      // A vendored copy on disk takes precedence over the module cache.
      if let Some(vendored_specifier) =
        cache.vendored_specifier(specifier, scope)
      {
        let uri = url_to_uri(&vendored_specifier).ok()?;
        self.get(&uri)
      } else {
        let uri = remote_url_to_uri(specifier)?;
        if let Some(doc) = self.server.get(&uri) {
          return Some(Document::Server(doc.clone()));
        }
        let url = Arc::new(specifier.clone());
        self.remote_urls_by_uri.insert(uri.clone(), url.clone());
        let doc =
          Arc::new(ServerDocument::remote_url(&uri, url, scope, cache)?);
        self.server.insert(uri, doc.clone());
        Some(Document::Server(doc))
      }
    } else if scheme == "data" {
      let uri = data_url_to_uri(specifier)?;
      if let Some(doc) = self.server.get(&uri) {
        return Some(Document::Server(doc.clone()));
      }
      let url = Arc::new(specifier.clone());
      self.data_urls_by_uri.insert(uri.clone(), url.clone());
      let doc = Arc::new(ServerDocument::data_url(&uri, url)?);
      self.server.insert(uri, doc.clone());
      Some(Document::Server(doc))
    } else {
      None
    }
  }

  /// The notebook URI -> cell URIs mapping, in cell order.
  pub fn cells_by_notebook_uri(&self) -> &BTreeMap<Arc<Uri>, Vec<Arc<Uri>>> {
    &self.cells_by_notebook_uri
  }

  /// Iterates over all open documents.
  pub fn open_docs(&self) -> impl Iterator<Item = &Arc<OpenDocument>> {
    self.open.values()
  }

  /// Snapshots all server-loaded documents.
  pub fn server_docs(&self) -> Vec<Arc<ServerDocument>> {
    self.server.iter().map(|e| e.value().clone()).collect()
  }

  /// Snapshots all documents, open documents first.
  pub fn docs(&self) -> Vec<Document> {
    self
      .open
      .values()
      .map(|d| Document::Open(d.clone()))
      .chain(
        self
          .server
          .iter()
          .map(|e| Document::Server(e.value().clone())),
      )
      .collect()
  }

  /// Snapshots all documents matching `predicate`, open documents first.
  pub fn filtered_docs(
    &self,
    predicate: impl FnMut(&Document) -> bool,
  ) -> Vec<Document> {
    self
      .open
      .values()
      .map(|d| Document::Open(d.clone()))
      .chain(
        self
          .server
          .iter()
          .map(|e| Document::Server(e.value().clone())),
      )
      .filter(predicate)
      .collect()
  }

  /// Evicts a server-loaded document from the cache.
  pub fn remove_server_doc(&self, uri: &Uri) {
    self.server.remove(uri);
  }
}
/// Extra module data that only exists while the backing document is open in
/// the editor.
#[derive(Debug)]
pub struct DocumentModuleOpenData {
  pub version: i32,
  // `None` when the media type isn't diagnosable (no parse was attempted).
  pub parsed_source: Option<ParsedSourceResult>,
}
/// A document analyzed as a module within a particular scope: parsed
/// dependencies, resolution info, and lazily-computed per-module caches.
#[derive(Debug)]
pub struct DocumentModule {
  pub uri: Arc<Uri>,
  // Present only while the document is open in the editor.
  pub open_data: Option<DocumentModuleOpenData>,
  pub notebook_uri: Option<Arc<Uri>>,
  pub script_version: String,
  pub specifier: Arc<Url>,
  // The scope (workspace folder root) this module was analyzed under.
  pub scope: Option<Arc<Url>>,
  pub compiler_options_key: CompilerOptionsKey,
  pub media_type: MediaType,
  // Cached HTTP headers for remote modules, if cached.
  pub headers: Option<HashMap<String, String>>,
  pub text: DocumentText,
  pub line_index: Arc<LineIndex>,
  pub resolution_mode: ResolutionMode,
  // Dependency map extracted from the parsed module graph entry.
  pub dependencies: Arc<IndexMap<String, deno_graph::Dependency>>,
  pub types_dependency: Option<Arc<TypesDependency>>,
  // Lazily-computed caches, filled on first request.
  pub navigation_tree: tokio::sync::OnceCell<Arc<NavigationTree>>,
  pub semantic_tokens_full: tokio::sync::OnceCell<lsp::SemanticTokens>,
  text_info_cell: once_cell::sync::OnceCell<SourceTextInfo>,
  test_module_fut: Option<TestModuleFut>,
}
impl DocumentModule {
  /// Analyzes `document` as a module: resolves the media type (headers and
  /// editor language id taken into account), parses diagnosable media types,
  /// and extracts dependency info.
  #[allow(clippy::too_many_arguments)]
  pub fn new(
    document: &Document,
    specifier: Arc<Url>,
    compiler_options_key: CompilerOptionsKey,
    compiler_options_data: &LspCompilerOptionsData,
    scope: Option<Arc<Url>>,
    resolver: &LspResolver,
    config: &Config,
    cache: &LspCache,
  ) -> Self {
    let text = document.text();
    // Remote modules may carry content-type headers that override the
    // extension-based media type.
    let headers = matches!(specifier.scheme(), "http" | "https")
      .then(|| {
        let http_cache = cache.for_specifier(scope.as_deref());
        let cache_key = http_cache.cache_item_key(&specifier).ok()?;
        let cache_entry = http_cache.get(&cache_key, None).ok()??;
        Some(cache_entry.metadata.headers)
      })
      .flatten();
    let open_document = document.open();
    let media_type = resolve_media_type(
      &specifier,
      headers.as_ref(),
      open_document.map(|d| d.language_id),
    );
    // Only parse media types we'd diagnose; everything else gets defaults.
    let (parsed_source, maybe_module, resolution_mode) =
      if media_type_is_diagnosable(media_type) {
        parse_and_analyze_module(
          specifier.as_ref().clone(),
          text.to_arc(),
          headers.as_ref(),
          media_type,
          scope.as_deref(),
          compiler_options_data,
          resolver,
        )
      } else {
        (None, None, ResolutionMode::Import)
      };
    let maybe_module = maybe_module.and_then(Result::ok);
    let dependencies = maybe_module
      .as_ref()
      .map(|m| Arc::new(m.dependencies.clone()))
      .unwrap_or_default();
    let types_dependency = maybe_module
      .as_ref()
      .and_then(|m| Some(Arc::new(m.maybe_types_dependency.clone()?)));
    let test_module_fut =
      get_maybe_test_module_fut(parsed_source.as_ref(), config);
    DocumentModule {
      uri: document.uri().clone(),
      open_data: open_document.map(|d| DocumentModuleOpenData {
        version: d.version,
        parsed_source,
      }),
      notebook_uri: open_document.and_then(|d| d.notebook_uri.clone()),
      script_version: document.script_version(),
      specifier,
      scope,
      compiler_options_key,
      media_type,
      headers,
      text,
      line_index: document.line_index().clone(),
      resolution_mode,
      dependencies,
      types_dependency,
      navigation_tree: Default::default(),
      semantic_tokens_full: Default::default(),
      text_info_cell: Default::default(),
      test_module_fut,
    }
  }

  pub fn is_diagnosable(&self) -> bool {
    media_type_is_diagnosable(self.media_type)
  }

  /// Finds the dependency whose specifier range covers the given position,
  /// returning the raw specifier string, the dependency, and the range.
  pub fn dependency_at_position(
    &self,
    position: &lsp::Position,
  ) -> Option<(&str, &deno_graph::Dependency, &deno_graph::Range)> {
    let position = deno_graph::Position {
      line: position.line as usize,
      character: position.character as usize,
    };
    self
      .dependencies
      .iter()
      .find_map(|(s, dep)| dep.includes(position).map(|r| (s.as_str(), dep, r)))
  }

  /// Source text info for the module, reusing the parsed source's when
  /// available, otherwise lazily constructing and caching one.
  pub fn text_info(&self) -> &SourceTextInfo {
    // try to get the text info from the parsed source and if
    // not then create one in the cell
    self
      .open_data
      .as_ref()
      .and_then(|d| d.parsed_source.as_ref())
      .and_then(|p| p.as_ref().ok())
      .map(|p| p.text_info_lazy())
      .unwrap_or_else(|| {
        self
          .text_info_cell
          .get_or_init(|| SourceTextInfo::new(self.text.to_arc()))
      })
  }

  /// Resolves the collected test module for this document, if a collection
  /// future was set up at construction.
  pub async fn test_module(&self) -> Option<Arc<TestModule>> {
    self.test_module_fut.clone()?.await
  }
}
/// Dependency info keyed by scope root (`None` = unscoped).
type DepInfoByScope = BTreeMap<Option<Arc<Url>>, Arc<ScopeDepInfo>>;
/// Caches analyzed modules keyed weakly by their backing documents, so
/// module entries disappear automatically once a document is dropped.
#[derive(Debug, Default)]
struct WeakDocumentModuleMap {
  open: RwLock<PtrWeakKeyHashMap<Weak<OpenDocument>, Arc<DocumentModule>>>,
  server: RwLock<PtrWeakKeyHashMap<Weak<ServerDocument>, Arc<DocumentModule>>>,
  // Secondary index by specifier, holding the modules weakly.
  by_specifier: RwLock<WeakValueHashMap<Arc<Url>, Weak<DocumentModule>>>,
}
impl WeakDocumentModuleMap {
  /// Looks up the cached module for a document, if its entry is still live.
  fn get(&self, document: &Document) -> Option<Arc<DocumentModule>> {
    match document {
      Document::Open(doc) => self.open.read().get(doc).cloned(),
      Document::Server(doc) => self.server.read().get(doc).cloned(),
    }
  }

  /// Whether a live module exists for the given specifier.
  fn contains_specifier(&self, specifier: &Url) -> bool {
    self.by_specifier.read().contains_key(specifier)
  }

  /// Snapshots all live modules (open-document modules first, then
  /// server-document modules).
  fn inspect_values(&self) -> Vec<Arc<DocumentModule>> {
    let mut values: Vec<_> = self.open.read().values().cloned().collect();
    values.extend(self.server.read().values().cloned());
    values
  }

  /// Caches `module` under its document and indexes it by specifier.
  /// Always returns `Some(module)` for caller convenience.
  fn insert(
    &self,
    document: &Document,
    module: Arc<DocumentModule>,
  ) -> Option<Arc<DocumentModule>> {
    match document {
      Document::Open(doc) => {
        self.open.write().insert(doc.clone(), module.clone());
      }
      Document::Server(doc) => {
        self.server.write().insert(doc.clone(), module.clone());
      }
    }
    let specifier = module.specifier.clone();
    self.by_specifier.write().insert(specifier, module.clone());
    Some(module)
  }

  /// Drops entries whose weak references have expired.
  fn remove_expired(&self) {
    // IMPORTANT: Maintain this order based on weak ref relations.
    self.open.write().remove_expired();
    self.server.write().remove_expired();
    self.by_specifier.write().remove_expired();
  }
}
/// A document's assigned scope root (if any) plus its compiler options key.
type ScopeInfo = (Option<Arc<Url>>, CompilerOptionsKey);
/// Documents plus the per-scope module analysis caches and the configuration
/// they were computed against.
#[derive(Debug, Default, Clone)]
pub struct DocumentModules {
  pub documents: Documents,
  config: Arc<Config>,
  compiler_options_resolver: Arc<LspCompilerOptionsResolver>,
  resolver: Arc<LspResolver>,
  cache: Arc<LspCache>,
  workspace_files: Arc<IndexSet<PathBuf>>,
  // Lazily recomputed; reset whenever documents or config change.
  dep_info_by_scope: once_cell::sync::OnceCell<Arc<DepInfoByScope>>,
  // Module caches: one for unscoped documents, one per config scope.
  modules_unscoped: Arc<WeakDocumentModuleMap>,
  modules_by_scope: Arc<BTreeMap<Arc<Url>, Arc<WeakDocumentModuleMap>>>,
  // Remembers which scope each document URI was assigned to.
  assigned_scopes: Arc<DashMap<Arc<Uri>, ScopeInfo>>,
}
impl DocumentModules {
pub fn update_config(
&mut self,
config: &Config,
compiler_options_resolver: &Arc<LspCompilerOptionsResolver>,
resolver: &Arc<LspResolver>,
cache: &LspCache,
workspace_files: &Arc<IndexSet<PathBuf>>,
) {
self.config = Arc::new(config.clone());
self.compiler_options_resolver = compiler_options_resolver.clone();
self.cache = Arc::new(cache.clone());
self.resolver = resolver.clone();
self.workspace_files = workspace_files.clone();
self.modules_unscoped = Default::default();
self.modules_by_scope = Arc::new(
self
.config
.tree
.data_by_scope()
.keys()
.map(|s| (s.clone(), Default::default()))
.collect(),
);
self.dep_info_by_scope = Default::default();
self.assigned_scopes = Default::default();
node_resolver::PackageJsonThreadLocalCache::clear();
NodeResolutionThreadLocalCache::clear();
// Clean up non-existent documents.
self.documents.server.retain(|_, d| {
let Some(module) =
self.inspect_primary_module(&Document::Server(d.clone()))
else {
return false;
};
let Ok(path) = url_to_file_path(&module.specifier) else {
// Remove non-file schemed docs (deps). They may not be dependencies
// anymore after updating resolvers.
return false;
};
if !config.specifier_enabled(&module.specifier) {
return false;
}
path.is_file()
});
}
pub fn open_document(
&mut self,
uri: Uri,
version: i32,
language_id: LanguageId,
text: Arc<str>,
notebook_uri: Option<Arc<Uri>>,
) -> Arc<OpenDocument> {
self.dep_info_by_scope = Default::default();
self
.documents
.open(uri, version, language_id, text, notebook_uri)
}
pub fn change_document(
&mut self,
uri: &Uri,
version: i32,
changes: Vec<lsp::TextDocumentContentChangeEvent>,
) -> Result<Arc<OpenDocument>, AnyError> {
self.dep_info_by_scope = Default::default();
let document = self.documents.change(uri, version, changes)?;
Ok(document)
}
/// Returns if the document is diagnosable.
pub fn close_document(
&mut self,
uri: &Uri,
) -> Result<Arc<OpenDocument>, AnyError> {
self.dep_info_by_scope = Default::default();
let document = self.documents.close(uri)?;
// If applicable, try to load the closed document as a server document so
// it's still included as a ts root etc..
if uri.scheme().is_some_and(|s| s.eq_lowercase("file"))
&& self.config.uri_enabled(uri)
{
self.documents.get(uri);
}
Ok(document)
}
pub fn open_notebook_document(
&mut self,
uri: Uri,
cells: Vec<lsp::TextDocumentItem>,
) -> Vec<Arc<OpenDocument>> {
self.dep_info_by_scope = Default::default();
self.documents.open_notebook(uri, cells)
}
pub fn change_notebook_document(
&mut self,
uri: &Uri,
structure: Option<lsp::NotebookDocumentCellChangeStructure>,
content: Option<Vec<lsp::NotebookDocumentChangeTextContent>>,
) -> Vec<(Arc<OpenDocument>, ChangeKind)> {
self.dep_info_by_scope = Default::default();
self.documents.change_notebook(uri, structure, content)
}
pub fn close_notebook_document(
&mut self,
uri: &Uri,
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | true |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/lsp/repl.rs | cli/lsp/repl.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::collections::HashMap;
use deno_ast::LineAndColumnIndex;
use deno_ast::ModuleSpecifier;
use deno_ast::SourceTextInfo;
use deno_core::anyhow::anyhow;
use deno_core::error::AnyError;
use deno_core::serde_json;
use lsp_types::Uri;
use tokio_util::sync::CancellationToken;
use tower_lsp::LanguageServer;
use tower_lsp::lsp_types::ClientCapabilities;
use tower_lsp::lsp_types::ClientInfo;
use tower_lsp::lsp_types::CompletionContext;
use tower_lsp::lsp_types::CompletionParams;
use tower_lsp::lsp_types::CompletionResponse;
use tower_lsp::lsp_types::CompletionTextEdit;
use tower_lsp::lsp_types::CompletionTriggerKind;
use tower_lsp::lsp_types::DidChangeTextDocumentParams;
use tower_lsp::lsp_types::DidCloseTextDocumentParams;
use tower_lsp::lsp_types::DidOpenTextDocumentParams;
use tower_lsp::lsp_types::InitializeParams;
use tower_lsp::lsp_types::InitializedParams;
use tower_lsp::lsp_types::PartialResultParams;
use tower_lsp::lsp_types::Position;
use tower_lsp::lsp_types::Range;
use tower_lsp::lsp_types::TextDocumentContentChangeEvent;
use tower_lsp::lsp_types::TextDocumentIdentifier;
use tower_lsp::lsp_types::TextDocumentItem;
use tower_lsp::lsp_types::TextDocumentPositionParams;
use tower_lsp::lsp_types::VersionedTextDocumentIdentifier;
use tower_lsp::lsp_types::WorkDoneProgressParams;
use super::client::Client;
use super::config::ClassMemberSnippets;
use super::config::CompletionSettings;
use super::config::DenoCompletionSettings;
use super::config::ImportCompletionSettings;
use super::config::LanguageWorkspaceSettings;
use super::config::ObjectLiteralMethodSnippets;
use super::config::TestingSettings;
use super::config::WorkspaceSettings;
use super::urls::uri_parse_unencoded;
use super::urls::url_to_uri;
/// A completion for the REPL: the replacement text and the byte range of the
/// current line it replaces.
#[derive(Debug)]
pub struct ReplCompletionItem {
  pub new_text: String,
  pub range: std::ops::Range<usize>,
}
/// An in-process language server driven by the REPL to provide completions
/// against a synthetic, ever-growing REPL document.
pub struct ReplLanguageServer {
  language_server: super::language_server::LanguageServer,
  // Monotonically increasing version of the synthetic document.
  document_version: i32,
  // Text committed so far (lines already evaluated).
  document_text: String,
  // The current, not-yet-committed line (kept newline-terminated).
  pending_text: String,
  // The cwd captured at open; the synthetic document URI derives from it.
  cwd_uri: ModuleSpecifier,
}
impl ReplLanguageServer {
pub async fn new_initialized() -> Result<ReplLanguageServer, AnyError> {
// downgrade info and warn lsp logging to debug
super::logging::set_lsp_log_level(log::Level::Debug);
super::logging::set_lsp_warn_level(log::Level::Debug);
let language_server =
super::language_server::LanguageServer::new(Client::new_for_repl());
let cwd_uri = get_cwd_uri()?;
#[allow(deprecated)]
language_server
.initialize(InitializeParams {
process_id: None,
root_path: None,
root_uri: Some(url_to_uri(&cwd_uri).unwrap()),
initialization_options: Some(
serde_json::to_value(get_repl_workspace_settings()).unwrap(),
),
capabilities: ClientCapabilities {
workspace: None,
text_document: None,
window: None,
general: None,
experimental: None,
offset_encoding: None,
notebook_document: None,
},
trace: None,
workspace_folders: None,
client_info: Some(ClientInfo {
name: "Deno REPL".to_string(),
version: None,
}),
locale: None,
work_done_progress_params: Default::default(),
})
.await?;
language_server.initialized(InitializedParams {}).await;
let server = ReplLanguageServer {
language_server,
document_version: 0,
document_text: String::new(),
pending_text: String::new(),
cwd_uri,
};
server.open_current_document().await;
Ok(server)
}
pub async fn commit_text(&mut self, line_text: &str) {
self.did_change(line_text).await;
self.document_text.push_str(&self.pending_text);
self.pending_text = String::new();
}
pub async fn completions(
&mut self,
line_text: &str,
position: usize,
token: CancellationToken,
) -> Vec<ReplCompletionItem> {
self.did_change(line_text).await;
let text_info = deno_ast::SourceTextInfo::from_string(format!(
"{}{}",
self.document_text, self.pending_text
));
let before_line_len = self.document_text.len();
let position = text_info.range().start + before_line_len + position;
let line_and_column = text_info.line_and_column_index(position);
let response = self
.language_server
.completion(
CompletionParams {
text_document_position: TextDocumentPositionParams {
text_document: TextDocumentIdentifier {
uri: self.get_document_uri(),
},
position: Position {
line: line_and_column.line_index as u32,
character: line_and_column.column_index as u32,
},
},
work_done_progress_params: WorkDoneProgressParams {
work_done_token: None,
},
partial_result_params: PartialResultParams {
partial_result_token: None,
},
context: Some(CompletionContext {
trigger_kind: CompletionTriggerKind::INVOKED,
trigger_character: None,
}),
},
token,
)
.await
.ok()
.unwrap_or_default();
let mut items = match response {
Some(CompletionResponse::Array(items)) => items,
Some(CompletionResponse::List(list)) => list.items,
None => Vec::new(),
};
items.sort_by_key(|item| {
if let Some(sort_text) = &item.sort_text {
sort_text.clone()
} else {
item.label.clone()
}
});
items
.into_iter()
.filter_map(|item| {
item.text_edit.and_then(|edit| match edit {
CompletionTextEdit::Edit(edit) => Some(ReplCompletionItem {
new_text: edit.new_text,
range: lsp_range_to_std_range(&text_info, &edit.range),
}),
CompletionTextEdit::InsertAndReplace(_) => None,
})
})
.filter(|item| {
// filter the results to only exact matches
let text = &text_info.text_str()[item.range.clone()];
item.new_text.starts_with(text)
})
.map(|mut item| {
// convert back to a line position
item.range.start -= before_line_len;
item.range.end -= before_line_len;
item
})
.collect()
}
async fn did_change(&mut self, new_text: &str) {
self.check_cwd_change().await;
let new_text = if new_text.ends_with('\n') {
new_text.to_string()
} else {
format!("{new_text}\n")
};
self.document_version += 1;
let current_line_count =
self.document_text.chars().filter(|c| *c == '\n').count() as u32;
let pending_line_count =
self.pending_text.chars().filter(|c| *c == '\n').count() as u32;
self
.language_server
.did_change(DidChangeTextDocumentParams {
text_document: VersionedTextDocumentIdentifier {
uri: self.get_document_uri(),
version: self.document_version,
},
content_changes: vec![TextDocumentContentChangeEvent {
range: Some(Range {
start: Position::new(current_line_count, 0),
end: Position::new(current_line_count + pending_line_count, 0),
}),
range_length: None,
text: new_text.to_string(),
}],
})
.await;
self.pending_text = new_text;
}
  /// Detects a change of the process working directory and, when one has
  /// occurred, re-roots the virtual REPL document under the new cwd by
  /// closing the old document and opening a fresh one.
  async fn check_cwd_change(&mut self) {
    // handle if the cwd changes, if the cwd is deleted in the case of
    // get_cwd_uri() erroring, then keep using it as the base
    let cwd_uri = get_cwd_uri().unwrap_or_else(|_| self.cwd_uri.clone());
    if self.cwd_uri != cwd_uri {
      // close the document rooted at the *old* cwd before switching over
      // (get_document_uri() still reflects the old self.cwd_uri here)
      self
        .language_server
        .did_close(DidCloseTextDocumentParams {
          text_document: TextDocumentIdentifier {
            uri: self.get_document_uri(),
          },
        })
        .await;
      self.cwd_uri = cwd_uri;
      // the version counter restarts for the newly opened document
      self.document_version = 0;
      self.open_current_document().await;
    }
  }
async fn open_current_document(&self) {
self
.language_server
.did_open(DidOpenTextDocumentParams {
text_document: TextDocumentItem {
uri: self.get_document_uri(),
language_id: "typescript".to_string(),
version: self.document_version,
text: format!("{}{}", self.document_text, self.pending_text),
},
})
.await;
}
fn get_document_uri(&self) -> Uri {
uri_parse_unencoded(self.cwd_uri.join("$deno$repl.mts").unwrap().as_str())
.unwrap()
}
}
/// Converts an LSP line/character range into a byte range within the
/// provided source text.
fn lsp_range_to_std_range(
  text_info: &SourceTextInfo,
  range: &Range,
) -> std::ops::Range<usize> {
  // both endpoints are converted identically, so share the logic
  let to_byte_index = |line: u32, character: u32| {
    text_info
      .loc_to_source_pos(LineAndColumnIndex {
        line_index: line as usize,
        column_index: character as usize,
      })
      .as_byte_index(text_info.range().start)
  };
  let start = to_byte_index(range.start.line, range.start.character);
  let end = to_byte_index(range.end.line, range.end.character);
  start..end
}
/// Resolves the current working directory as a directory URL.
fn get_cwd_uri() -> Result<ModuleSpecifier, AnyError> {
  let cwd = std::env::current_dir()?;
  match ModuleSpecifier::from_directory_path(&cwd) {
    Ok(uri) => Ok(uri),
    Err(_) => Err(anyhow!("Could not get URI from {}", cwd.display())),
  }
}
pub fn get_repl_workspace_settings() -> WorkspaceSettings {
WorkspaceSettings {
enable: Some(true),
disable_paths: vec![],
enable_paths: None,
config: None,
certificate_stores: None,
cache: None,
cache_on_save: false,
import_map: None,
code_lens: Default::default(),
internal_debug: false,
internal_inspect: Default::default(),
log_file: false,
lint: false,
document_preload_limit: 0, // don't pre-load any modules as it's expensive and not useful for the repl
tls_certificate: None,
unsafely_ignore_certificate_errors: None,
unstable: Default::default(),
suggest: DenoCompletionSettings {
imports: ImportCompletionSettings {
auto_discover: false,
hosts: HashMap::from([("https://deno.land".to_string(), true)]),
},
},
testing: TestingSettings { args: vec![] },
javascript: LanguageWorkspaceSettings {
suggest: CompletionSettings {
auto_imports: false,
class_member_snippets: ClassMemberSnippets { enabled: false },
complete_function_calls: false,
enabled: true,
include_automatic_optional_chain_completions: false,
include_completions_for_import_statements: true,
names: false,
object_literal_method_snippets: ObjectLiteralMethodSnippets {
enabled: false,
},
paths: false,
},
..Default::default()
},
typescript: LanguageWorkspaceSettings {
suggest: CompletionSettings {
auto_imports: false,
class_member_snippets: ClassMemberSnippets { enabled: false },
complete_function_calls: false,
enabled: true,
include_automatic_optional_chain_completions: false,
include_completions_for_import_statements: true,
names: false,
object_literal_method_snippets: ObjectLiteralMethodSnippets {
enabled: false,
},
paths: false,
},
..Default::default()
},
tracing: Default::default(),
}
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/lsp/registries.rs | cli/lsp/registries.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::collections::HashMap;
use std::path::PathBuf;
use std::sync::Arc;
use deno_cache_dir::file_fetcher::CacheSetting;
use deno_core::ModuleSpecifier;
use deno_core::anyhow::anyhow;
use deno_core::error::AnyError;
use deno_core::serde::Deserialize;
use deno_core::serde_json;
use deno_core::serde_json::Value;
use deno_core::serde_json::json;
use deno_core::url::ParseError;
use deno_core::url::Position;
use deno_core::url::Url;
use deno_graph::Dependency;
use deno_resolver::file_fetcher::FetchOptions;
use deno_resolver::file_fetcher::FetchPermissionsOptionRef;
use deno_resolver::loader::MemoryFilesRc;
use log::error;
use once_cell::sync::Lazy;
use tower_lsp::lsp_types as lsp;
use super::completions::IMPORT_COMMIT_CHARS;
use super::logging::lsp_log;
use super::path_to_regex::Compiler;
use super::path_to_regex::Key;
use super::path_to_regex::MatchResult;
use super::path_to_regex::Matcher;
use super::path_to_regex::StringOrNumber;
use super::path_to_regex::StringOrVec;
use super::path_to_regex::Token;
use super::path_to_regex::parse;
use super::path_to_regex::string_to_regex;
use crate::cache::GlobalHttpCache;
use crate::cache::HttpCache;
use crate::file_fetcher::CliFileFetcher;
use crate::file_fetcher::CreateCliFileFetcherOptions;
use crate::file_fetcher::TextDecodedFile;
use crate::file_fetcher::create_cli_file_fetcher;
use crate::http_util::HttpClientProvider;
use crate::sys::CliSys;
/// Well-known path at which a registry publishes its import-intellisense
/// configuration document.
const CONFIG_PATH: &str = "/.well-known/deno-import-intellisense.json";
/// Character set that gets percent-encoded when interpolating a variable
/// value into a `${{variable}}` URL template placeholder.
const COMPONENT: &percent_encoding::AsciiSet = &percent_encoding::CONTROLS
  .add(b' ')
  .add(b'"')
  .add(b'#')
  .add(b'<')
  .add(b'>')
  .add(b'?')
  .add(b'`')
  .add(b'{')
  .add(b'}')
  .add(b'/')
  .add(b':')
  .add(b';')
  .add(b'=')
  .add(b'@')
  .add(b'[')
  .add(b'\\')
  .add(b']')
  .add(b'^')
  .add(b'|')
  .add(b'$')
  .add(b'&')
  .add(b'+')
  .add(b',');
/// Commit characters applied to registry-provided completion items.
const REGISTRY_IMPORT_COMMIT_CHARS: &[&str] = &["\"", "'"];
/// Matches `${name}` and `${{name}}` replacement variables in a template
/// URL, capturing the variable name.
static REPLACEMENT_VARIABLE_RE: Lazy<regex::Regex> =
  lazy_regex::lazy_regex!(r"\$\{\{?(\w+)\}?\}");
/// Serializes just the origin (scheme/host/port) of the given URL.
fn base_url(url: &Url) -> String {
  let origin = url.origin();
  origin.ascii_serialization()
}
/// The kind of schema token a completion request falls on.
#[derive(Debug)]
enum CompletionType {
  /// The offset falls on a literal string token of the schema.
  Literal(String),
  /// The offset falls on a key (variable) token of the schema.
  Key {
    key: Key,
    /// The key's prefix when the offset falls within it.
    prefix: Option<String>,
    /// Index of the key token within the schema's token list.
    index: usize,
  },
}
/// Determine if a completion at a given offset is a string literal or a key/
/// variable.
///
/// Walks the schema tokens in order, accumulating the rendered character
/// length of each, until the token containing `char_offset` is found.
fn get_completion_type(
  char_offset: usize,
  tokens: &[Token],
  match_result: &MatchResult,
) -> Option<CompletionType> {
  let mut char_count = 0_usize;
  for (index, token) in tokens.iter().enumerate() {
    match token {
      Token::String(s) => {
        char_count += s.chars().count();
        if char_offset < char_count {
          return Some(CompletionType::Literal(s.clone()));
        }
      }
      Token::Key(k) => {
        if let Some(prefix) = &k.prefix {
          char_count += prefix.chars().count();
          if char_offset < char_count {
            // the offset falls within the key's prefix (e.g. a "/")
            return Some(CompletionType::Key {
              key: k.clone(),
              prefix: Some(prefix.clone()),
              index,
            });
          }
        }
        if char_offset < char_count {
          return None;
        }
        if let StringOrNumber::String(name) = &k.name {
          let value = match_result
            .get(name)
            .map(|s| s.to_string(Some(k), false))
            .unwrap_or_default();
          char_count += value.chars().count();
          // note: `<=` (not `<`) so an offset at the very end of the
          // matched value still completes against this key
          if char_offset <= char_count {
            return Some(CompletionType::Key {
              key: k.clone(),
              prefix: None,
              index,
            });
          }
        }
        if let Some(suffix) = &k.suffix {
          char_count += suffix.chars().count();
          if char_offset <= char_count {
            return Some(CompletionType::Literal(suffix.clone()));
          }
        }
      }
    }
  }
  None
}
/// Builds the `data` payload of a completion item, pointing the client at
/// the registry's documentation endpoint for the given variable so the
/// item can be lazily resolved. Returns `None` when the registry declares
/// no documentation endpoint or the endpoint URL cannot be formed.
fn get_data(
  registry: &RegistryConfiguration,
  base: &ModuleSpecifier,
  variable: &Key,
  value: &str,
) -> Option<Value> {
  let url = registry.get_documentation_url_for_key(variable)?;
  let specifier = get_endpoint(url, base, variable, Some(value)).ok()?;
  Some(json!({ "documentation": specifier }))
}
/// Like [`get_data`], but interpolates a match result's captured values
/// into the documentation endpoint template before resolving it.
fn get_data_with_match(
  registry: &RegistryConfiguration,
  base: &ModuleSpecifier,
  tokens: &[Token],
  match_result: &MatchResult,
  variable: &Key,
  value: &str,
) -> Option<Value> {
  let url = registry.get_documentation_url_for_key(variable)?;
  let specifier = get_endpoint_with_match(
    variable,
    url,
    base,
    tokens,
    match_result,
    Some(value),
  )
  .ok()?;
  Some(json!({ "documentation": specifier }))
}
/// Convert a single-variable templated string into a fully qualified URL
/// which can be fetched to provide additional data.
fn get_endpoint(
  url: &str,
  base: &Url,
  variable: &Key,
  maybe_value: Option<&str>,
) -> Result<ModuleSpecifier, AnyError> {
  let interpolated = replace_variable(url, variable, maybe_value);
  parse_url_with_base(&interpolated, base)
}
/// Convert a templated URL string into a fully qualified URL which can be
/// fetched to provide additional data. If `maybe_value` is some, then the
/// variable will replaced in the template prior to other matched variables
/// being replaced, otherwise the supplied variable will be blanked out if
/// present in the template.
fn get_endpoint_with_match(
  variable: &Key,
  url: &str,
  base: &Url,
  tokens: &[Token],
  match_result: &MatchResult,
  maybe_value: Option<&str>,
) -> Result<ModuleSpecifier, AnyError> {
  let mut url = url.to_string();
  let has_value = maybe_value.is_some();
  // substitute the target variable first so the match-result values below
  // cannot clobber it
  if has_value {
    url = replace_variable(&url, variable, maybe_value);
  }
  for (key, value) in match_result.params.iter() {
    if let StringOrNumber::String(name) = key {
      let maybe_key = tokens.iter().find_map(|t| match t {
        Token::Key(k) if k.name == *key => Some(k),
        _ => None,
      });
      // `${name}` is replaced verbatim...
      url =
        url.replace(&format!("${{{name}}}"), &value.to_string(maybe_key, true));
      // ...while `${{name}}` is percent-encoded
      url = url.replace(
        &format!("${{{{{name}}}}}"),
        &percent_encoding::percent_encode(
          value.to_string(maybe_key, true).as_bytes(),
          COMPONENT,
        )
        .to_string(),
      );
    }
  }
  // without a value, blank out any remaining occurrence of the variable
  if !has_value {
    url = replace_variable(&url, variable, None);
  }
  parse_url_with_base(&url, base)
}
/// Based on the registry's preselect hint, returns `Some(true)` when the
/// item matches it and `None` otherwise (so the field is omitted).
fn get_preselect(item: String, preselect: Option<String>) -> Option<bool> {
  (preselect == Some(item)).then_some(true)
}
/// Extracts the variable names referenced via `${...}` / `${{...}}`
/// placeholders in a template string.
fn parse_replacement_variables<S: AsRef<str>>(s: S) -> Vec<String> {
  let mut names = Vec::new();
  for captures in REPLACEMENT_VARIABLE_RE.captures_iter(s.as_ref()) {
    if let Some(name) = captures.get(1) {
      names.push(name.as_str().to_string());
    }
  }
  names
}
/// Attempt to parse a URL along with a base, where the base will be used if the
/// URL requires one.
fn parse_url_with_base(
  url: &str,
  base: &ModuleSpecifier,
) -> Result<ModuleSpecifier, AnyError> {
  match Url::parse(url) {
    Ok(url) => Ok(url),
    // a relative URL: resolve it against the provided base
    Err(ParseError::RelativeUrlWithoutBase) => {
      base.join(url).map_err(|err| err.into())
    }
    // any other parse failure is propagated as-is
    Err(err) => Err(err.into()),
  }
}
/// Replaces `${name}` and `${{name}}` occurrences of `variable` in a
/// templated URL string with the supplied value, or "blanks" them out
/// (replaces with the empty string) when no value is supplied.
fn replace_variable(
  url: &str,
  variable: &Key,
  maybe_value: Option<&str>,
) -> String {
  let value = maybe_value.unwrap_or("");
  if let StringOrNumber::String(name) = &variable.name {
    // `str::replace` works directly on the borrowed input, so there is no
    // need for the intermediate owned copy the old code allocated
    url
      .replace(&format!("${{{name}}}"), value)
      .replace(&format!("${{{{{name}}}}}"), value)
  } else {
    // a numeric key never appears as a `${...}` placeholder, so there is
    // nothing to substitute
    url.to_string()
  }
}
/// Validate a registry configuration JSON structure.
///
/// Checks that the version is supported, that every key in each registry's
/// schema has a matching variable declaration, and that variable URL
/// templates only reference keys appearing to their left in the schema
/// (plus, for v2 configs, themselves).
fn validate_config(config: &RegistryConfigurationJson) -> Result<(), AnyError> {
  if config.version < 1 || config.version > 2 {
    return Err(anyhow!(
      "Invalid registry configuration. Expected version 1 or 2 got {}.",
      config.version
    ));
  }
  for registry in &config.registries {
    let (_, keys) = string_to_regex(&registry.schema, None)?;
    // string-named keys extracted from the schema, in schema order
    let key_names: Vec<String> = keys
      .map(|keys| {
        keys
          .iter()
          .filter_map(|k| {
            if let StringOrNumber::String(s) = &k.name {
              Some(s.clone())
            } else {
              None
            }
          })
          .collect()
      })
      .unwrap_or_default();
    // every schema key must have a variable declaration
    for key_name in &key_names {
      if !registry
        .variables
        .iter()
        .map(|var| var.key.to_owned())
        .any(|x| x == *key_name)
      {
        return Err(anyhow!(
          "Invalid registry configuration. Registry with schema \"{}\" is missing variable declaration for key \"{}\".",
          registry.schema,
          key_name
        ));
      }
    }
    for variable in &registry.variables {
      // every variable must appear as a path parameter in the schema
      let key_index = key_names.iter().position(|key| *key == variable.key);
      let key_index = key_index.ok_or_else(||anyhow!("Invalid registry configuration. Registry with schema \"{}\" is missing a path parameter in schema for variable \"{}\".", registry.schema, variable.key))?;
      let replacement_variables = parse_replacement_variables(&variable.url);
      // only keys to the left of this variable in the schema may be
      // referenced by its URL template
      let limited_keys = key_names.get(0..key_index).unwrap();
      for v in replacement_variables {
        // v1 configs disallow self references entirely
        if variable.key == v && config.version == 1 {
          return Err(anyhow!(
            "Invalid registry configuration. Url \"{}\" (for variable \"{}\" in registry with schema \"{}\") uses variable \"{}\", which is not allowed because that would be a self reference.",
            variable.url,
            variable.key,
            registry.schema,
            v
          ));
        }
        let key_index = limited_keys.iter().position(|key| key == &v);
        if key_index.is_none() && variable.key != v {
          return Err(anyhow!(
            "Invalid registry configuration. Url \"{}\" (for variable \"{}\" in registry with schema \"{}\") uses variable \"{}\", which is not allowed because the schema defines \"{}\" to the right of \"{}\".",
            variable.url,
            variable.key,
            registry.schema,
            v,
            v,
            variable.key
          ));
        }
      }
    }
  }
  Ok(())
}
/// A single variable declaration within a registry configuration.
#[derive(Debug, Clone, Deserialize)]
pub struct RegistryConfigurationVariable {
  /// The name of the variable.
  key: String,
  /// An optional URL/API endpoint that can provide optional documentation for a
  /// completion item when requested by the language server.
  documentation: Option<String>,
  /// The URL with variable substitutions of the endpoint that will provide
  /// completions for the variable.
  url: String,
}
/// A single registry entry in an import-intellisense configuration.
#[derive(Debug, Clone, Deserialize)]
pub struct RegistryConfiguration {
  /// A Express-like path which describes how URLs are composed for a registry.
  schema: String,
  /// The variables denoted in the `schema` should have a variable entry.
  variables: Vec<RegistryConfigurationVariable>,
}
impl RegistryConfiguration {
  /// Returns the completion endpoint URL declared for the variable named
  /// by `key`, if any. Only string-named keys can match a declaration.
  fn get_url_for_key(&self, key: &Key) -> Option<&str> {
    let StringOrNumber::String(name) = &key.name else {
      return None;
    };
    // compare by &str instead of cloning each variable's key into a
    // freshly-allocated StringOrNumber per comparison
    self.variables.iter().find_map(|v| {
      if v.key == *name {
        Some(v.url.as_str())
      } else {
        None
      }
    })
  }

  /// Returns the documentation endpoint URL declared for the variable
  /// named by `key`, if any.
  fn get_documentation_url_for_key(&self, key: &Key) -> Option<&str> {
    let StringOrNumber::String(name) = &key.name else {
      return None;
    };
    self.variables.iter().find_map(|v| {
      if v.key == *name {
        v.documentation.as_deref()
      } else {
        None
      }
    })
  }
}
/// A structure that represents the configuration of an origin and its module
/// registries.
#[derive(Debug, Deserialize)]
struct RegistryConfigurationJson {
  /// Configuration format version; versions 1 and 2 are accepted.
  version: u32,
  registries: Vec<RegistryConfiguration>,
}
/// The "rich" form of a variable-items response from a registry endpoint.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
struct VariableItemsList {
  pub items: Vec<String>,
  /// When true, the returned completion list is marked incomplete.
  #[serde(default)]
  pub is_incomplete: bool,
  /// Item the client should preselect, if any.
  pub preselect: Option<String>,
}
/// A variable-items response: either a bare array of strings or the richer
/// list form with preselect/incomplete metadata.
#[derive(Debug, Deserialize)]
#[serde(untagged)]
enum VariableItems {
  Simple(Vec<String>),
  List(VariableItemsList),
}
/// A structure which holds the information about currently configured module
/// registries and can provide completion information for URLs that match
/// one of the enabled registries.
#[derive(Debug, Clone)]
pub struct ModuleRegistry {
  /// Registry configurations keyed by serialized origin.
  origins: HashMap<String, Vec<RegistryConfiguration>>,
  pub location: PathBuf,
  pub file_fetcher: Arc<CliFileFetcher>,
  /// Global HTTP cache shared with `file_fetcher`; also used directly to
  /// negative-cache failed config fetches.
  http_cache: Arc<GlobalHttpCache>,
}
impl ModuleRegistry {
  /// Creates a module registry rooted at `location`, wiring up a file
  /// fetcher backed by the global HTTP cache.
  pub fn new(
    location: PathBuf,
    http_client_provider: Arc<HttpClientProvider>,
  ) -> Self {
    // the http cache should always be the global one for registry completions
    let http_cache =
      Arc::new(GlobalHttpCache::new(CliSys::default(), location.clone()));
    let file_fetcher = create_cli_file_fetcher(
      Default::default(),
      http_cache.clone().into(),
      http_client_provider,
      MemoryFilesRc::default(),
      CliSys::default(),
      CreateCliFileFetcherOptions {
        allow_remote: true,
        // respect server cache headers so registry data stays fresh
        cache_setting: CacheSetting::RespectHeaders,
        download_log_level: super::logging::lsp_log_level(),
        progress_bar: None,
      },
    );
    Self {
      origins: HashMap::new(),
      location,
      file_fetcher: Arc::new(file_fetcher),
      http_cache,
    }
  }
/// Disable a registry, removing its configuration, if any, from memory.
pub fn disable(&mut self, origin: &str) {
let Ok(origin_url) = Url::parse(origin) else {
return;
};
let origin = base_url(&origin_url);
self.origins.remove(&origin);
}
/// Check to see if the given origin has a registry configuration.
pub async fn check_origin(&self, origin: &str) -> Result<(), AnyError> {
let origin_url = Url::parse(origin)?;
let specifier = origin_url.join(CONFIG_PATH)?;
self.fetch_config(&specifier).await?;
Ok(())
}
  /// Fetch and validate the specifier to a registry configuration, resolving
  /// with the configuration if valid.
  ///
  /// On a fetch failure an empty body is negative-cached for a week before
  /// the error is propagated, so repeated requests fail fast offline.
  async fn fetch_config(
    &self,
    specifier: &ModuleSpecifier,
  ) -> Result<Vec<RegistryConfiguration>, AnyError> {
    let fetch_result = self.file_fetcher
      .fetch_with_options(
        specifier,
        FetchPermissionsOptionRef::AllowAll,
        FetchOptions {
          local: Default::default(),
          maybe_auth: None,
          maybe_accept: Some("application/vnd.deno.reg.v2+json, application/vnd.deno.reg.v1+json;q=0.9, application/json;q=0.8"),
          maybe_cache_setting: None,
        }
      )
      .await;
    // if there is an error fetching, we will cache an empty file, so that
    // subsequent requests are served an empty doc which will error without
    // needing to connect to the remote URL. We will cache it for 1 week.
    if fetch_result.is_err() {
      let mut headers_map = HashMap::new();
      headers_map.insert(
        "cache-control".to_string(),
        "max-age=604800, immutable".to_string(),
      );
      self.http_cache.set(specifier, headers_map, &[])?;
    }
    let file = TextDecodedFile::decode(fetch_result?)?;
    let config: RegistryConfigurationJson = serde_json::from_str(&file.source)?;
    validate_config(&config)?;
    Ok(config.registries)
  }
  /// Enable a registry by attempting to retrieve its configuration and
  /// validating it.
  ///
  /// Already-enabled origins are left untouched; on a fetch/validation
  /// failure the error is only logged and the origin stays disabled.
  pub async fn enable(&mut self, origin: &str) {
    let Ok(origin_url) = Url::parse(origin) else {
      return;
    };
    let origin = base_url(&origin_url);
    #[allow(clippy::map_entry)]
    // we can't use entry().or_insert_with() because we can't use async closures
    if !self.origins.contains_key(&origin) {
      let Ok(specifier) = origin_url.join(CONFIG_PATH) else {
        return;
      };
      match self.fetch_config(&specifier).await {
        Ok(configs) => {
          self.origins.insert(origin, configs);
        }
        Err(err) => {
          lsp_log!(
            "  Error fetching registry config for \"{}\": {}",
            origin,
            err.to_string()
          );
          self.origins.remove(&origin);
        }
      }
    }
  }
#[cfg(test)]
/// This is only used during testing, as it directly provides the full URL
/// for obtaining the registry configuration, versus "guessing" at it.
async fn enable_custom(&mut self, specifier: &str) -> Result<(), AnyError> {
let specifier = Url::parse(specifier)?;
let origin = base_url(&specifier);
#[allow(clippy::map_entry)]
if !self.origins.contains_key(&origin) {
let configs = self.fetch_config(&specifier).await?;
self.origins.insert(origin, configs);
}
Ok(())
}
  /// Resolves hover documentation for a dependency by matching its
  /// specifier against the enabled registries' schemas and fetching the
  /// registry's documentation endpoint for the final schema key.
  pub async fn get_hover(&self, dependency: &Dependency) -> Option<String> {
    // prefer the code specifier, falling back to the type specifier
    let maybe_code = dependency.get_code();
    let maybe_type = dependency.get_type();
    let specifier = match (maybe_code, maybe_type) {
      (Some(specifier), _) => Some(specifier),
      (_, Some(specifier)) => Some(specifier),
      _ => None,
    }?;
    let origin = base_url(specifier);
    let registries = self.origins.get(&origin)?;
    let path = &specifier[Position::BeforePath..];
    for registry in registries {
      let tokens = parse(&registry.schema, None).ok()?;
      let matcher = Matcher::new(&tokens, None).ok()?;
      if let Some(match_result) = matcher.matches(path) {
        // documentation is only available when the schema ends in a key
        let key = if let Some(Token::Key(key)) = tokens.iter().last() {
          Some(key)
        } else {
          None
        }?;
        let url = registry.get_documentation_url_for_key(key)?;
        let endpoint = get_endpoint_with_match(
          key,
          url,
          specifier,
          &tokens,
          &match_result,
          None,
        )
        .ok()?;
        let file_fetcher = self.file_fetcher.clone();
        let file = {
          let file = file_fetcher
            .fetch_bypass_permissions(&endpoint)
            .await
            .ok()?;
          TextDecodedFile::decode(file).ok()?
        };
        // the endpoint responds with an LSP documentation value
        let documentation: lsp::Documentation =
          serde_json::from_str(&file.source).ok()?;
        return match documentation {
          lsp::Documentation::String(doc) => Some(doc),
          lsp::Documentation::MarkupContent(lsp::MarkupContent {
            value,
            ..
          }) => Some(value),
        };
      }
    }
    None
  }
  /// For a string specifier from the client, provide a set of completions, if
  /// any, for the specifier.
  ///
  /// For each registry of the specifier's origin, progressively shorter
  /// prefixes of the schema's tokens are matched against the specifier's
  /// path; the first matching prefix determines which literal or variable
  /// completions are offered. Returns `None` when nothing matched at all
  /// (so other completion sources may run), otherwise a possibly-empty
  /// list (to suppress other sources).
  #[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))]
  pub async fn get_completions(
    &self,
    text: &str,
    range: &lsp::Range,
    resolved: Option<&ModuleSpecifier>,
    specifier_exists: impl Fn(&ModuleSpecifier) -> bool,
  ) -> Option<lsp::CompletionList> {
    let resolved = resolved
      .map(Cow::Borrowed)
      .or_else(|| ModuleSpecifier::parse(text).ok().map(Cow::Owned))?;
    let resolved_str = resolved.as_str();
    let origin = base_url(&resolved);
    let origin_char_count = origin.chars().count();
    let registries = self.origins.get(&origin)?;
    let path = &resolved[Position::BeforePath..];
    // character offset of the cursor within the path portion
    let path_char_offset = resolved_str.chars().count() - origin_char_count;
    let mut completions = HashMap::<String, lsp::CompletionItem>::new();
    let mut is_incomplete = false;
    let mut did_match = false;
    for registry in registries {
      let tokens = parse(&registry.schema, None)
        .map_err(|e| {
          error!(
            "Error parsing registry schema for origin \"{}\". {}",
            origin, e
          );
        })
        .ok()?;
      let mut i = tokens.len();
      // name of the schema's final key; items completing it are files
      let last_key_name = StringOrNumber::String(
        tokens
          .iter()
          .last()
          .map(|t| {
            if let Token::Key(key) = t
              && let StringOrNumber::String(s) = &key.name
            {
              return s.clone();
            }
            "".to_string()
          })
          .unwrap_or_default(),
      );
      // try matching against progressively shorter token prefixes
      loop {
        let matcher = Matcher::new(&tokens[..i], None)
          .map_err(|e| {
            error!(
              "Error creating matcher for schema for origin \"{}\". {}",
              origin, e
            );
          })
          .ok()?;
        if let Some(match_result) = matcher.matches(path) {
          did_match = true;
          let completion_type =
            get_completion_type(path_char_offset, &tokens, &match_result);
          match completion_type {
            // the cursor is on a literal schema segment: suggest it verbatim
            Some(CompletionType::Literal(s)) => {
              let label = s;
              let full_text = format!("{text}{label}");
              let text_edit =
                Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                  range: *range,
                  new_text: full_text.clone(),
                }));
              let filter_text = Some(full_text);
              completions.insert(
                label.clone(),
                lsp::CompletionItem {
                  label,
                  kind: Some(lsp::CompletionItemKind::FOLDER),
                  filter_text,
                  sort_text: Some("1".to_string()),
                  text_edit,
                  commit_characters: Some(
                    REGISTRY_IMPORT_COMMIT_CHARS
                      .iter()
                      .map(|&c| c.into())
                      .collect(),
                  ),
                  ..Default::default()
                },
              );
            }
            // the cursor is on a variable: query the registry for items
            Some(CompletionType::Key { key, prefix, index }) => {
              let maybe_url = registry.get_url_for_key(&key);
              if let Some(url) = maybe_url
                && let Some(items) = self
                  .get_variable_items(
                    &key,
                    url,
                    &resolved,
                    &tokens,
                    &match_result,
                  )
                  .await
              {
                let compiler = Compiler::new(&tokens[..=index], None);
                let base = Url::parse(&origin).ok()?;
                let (items, preselect, incomplete) = match items {
                  VariableItems::List(list) => {
                    (list.items, list.preselect, list.is_incomplete)
                  }
                  VariableItems::Simple(items) => (items, None, false),
                };
                if incomplete {
                  is_incomplete = true;
                }
                for (idx, item) in items.into_iter().enumerate() {
                  let mut label = if let Some(p) = &prefix {
                    format!("{p}{item}")
                  } else {
                    item.clone()
                  };
                  if label.ends_with('/') {
                    label.pop();
                  }
                  // items completing the final key are files, others folders
                  let kind =
                    if key.name == last_key_name && !item.ends_with('/') {
                      Some(lsp::CompletionItemKind::FILE)
                    } else {
                      Some(lsp::CompletionItemKind::FOLDER)
                    };
                  let mut params = match_result.params.clone();
                  params.insert(
                    key.name.clone(),
                    StringOrVec::from_str(&item, &key),
                  );
                  let mut path = compiler.to_path(&params).unwrap_or_default();
                  if path.ends_with('/') {
                    path.pop();
                  }
                  let item_specifier = base.join(&path).ok()?;
                  let full_text = if let Some(suffix) =
                    item_specifier.as_str().strip_prefix(resolved_str)
                  {
                    format!("{text}{suffix}")
                  } else {
                    item_specifier.to_string()
                  };
                  let text_edit =
                    Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                      range: *range,
                      new_text: full_text.to_string(),
                    }));
                  // offer to cache final, not-yet-fetched specifiers
                  let command = if key.name == last_key_name
                    && !item.ends_with('/')
                    && !specifier_exists(&item_specifier)
                  {
                    Some(lsp::Command {
                      title: "".to_string(),
                      command: "deno.cache".to_string(),
                      arguments: Some(vec![
                        json!([item_specifier]),
                        json!(&resolved),
                      ]),
                    })
                  } else {
                    None
                  };
                  let detail = Some(format!("({})", key.name));
                  let filter_text = Some(full_text.to_string());
                  let sort_text = Some(format!("{:0>10}", idx + 1));
                  let preselect =
                    get_preselect(item.clone(), preselect.clone());
                  let data = get_data_with_match(
                    registry,
                    &resolved,
                    &tokens,
                    &match_result,
                    &key,
                    &item,
                  );
                  let commit_characters = if is_incomplete {
                    Some(
                      REGISTRY_IMPORT_COMMIT_CHARS
                        .iter()
                        .map(|&c| c.into())
                        .collect(),
                    )
                  } else {
                    Some(
                      IMPORT_COMMIT_CHARS.iter().map(|&c| c.into()).collect(),
                    )
                  };
                  completions.insert(
                    item,
                    lsp::CompletionItem {
                      label,
                      kind,
                      detail,
                      sort_text,
                      filter_text,
                      text_edit,
                      command,
                      preselect,
                      data,
                      commit_characters,
                      ..Default::default()
                    },
                  );
                }
              }
            }
            None => (),
          }
          break;
        }
        i -= 1;
        // If we have fallen though to the first token, and we still
        // didn't get a match
        if i == 0 {
          match &tokens[i] {
            // so if the first token is a string literal, we will return
            // that as a suggestion
            Token::String(s) => {
              if s.starts_with(path) {
                let label = s.to_string();
                let kind = Some(lsp::CompletionItemKind::FOLDER);
                let mut url = resolved.as_ref().clone();
                url.set_path(s);
                let full_text = if let Some(suffix) =
                  url.as_str().strip_prefix(resolved_str)
                {
                  format!("{text}{suffix}")
                } else {
                  url.into()
                };
                let text_edit =
                  Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                    range: *range,
                    new_text: full_text.to_string(),
                  }));
                let filter_text = Some(full_text.to_string());
                completions.insert(
                  s.to_string(),
                  lsp::CompletionItem {
                    label,
                    kind,
                    filter_text,
                    sort_text: Some("1".to_string()),
                    text_edit,
                    preselect: Some(true),
                    commit_characters: Some(
                      REGISTRY_IMPORT_COMMIT_CHARS
                        .iter()
                        .map(|&c| c.into())
                        .collect(),
                    ),
                    ..Default::default()
                  },
                );
              }
            }
            // if the token though is a key, and the key has a prefix, and
            // the path matches the prefix, we will go and get the items
            // for that first key and return them.
            Token::Key(k) => {
              if let Some(prefix) = &k.prefix {
                let maybe_url = registry.get_url_for_key(k);
                if let Some(url) = maybe_url
                  && let Some(items) = self.get_items(url).await
                {
                  let base = Url::parse(&origin).ok()?;
                  let (items, preselect, incomplete) = match items {
                    VariableItems::List(list) => {
                      (list.items, list.preselect, list.is_incomplete)
                    }
                    VariableItems::Simple(items) => (items, None, false),
                  };
                  if incomplete {
                    is_incomplete = true;
                  }
                  for (idx, item) in items.into_iter().enumerate() {
                    let path = format!("{prefix}{item}");
                    let kind = Some(lsp::CompletionItemKind::FOLDER);
                    let item_specifier = base.join(&path).ok()?;
                    let full_text = if let Some(suffix) =
                      item_specifier.as_str().strip_prefix(resolved_str)
                    {
                      format!("{text}{suffix}")
                    } else {
                      item_specifier.to_string()
                    };
                    let text_edit =
                      Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                        range: *range,
                        new_text: full_text.clone(),
                      }));
                    let command = if k.name == last_key_name
                      && !specifier_exists(&item_specifier)
                    {
                      Some(lsp::Command {
                        title: "".to_string(),
                        command: "deno.cache".to_string(),
                        arguments: Some(vec![
                          json!([item_specifier]),
                          json!(&resolved),
                        ]),
                      })
                    } else {
                      None
                    };
                    let detail = Some(format!("({})", k.name));
                    let filter_text = Some(full_text.to_string());
                    let sort_text = Some(format!("{:0>10}", idx + 1));
                    let preselect =
                      get_preselect(item.clone(), preselect.clone());
                    let data = get_data(registry, &resolved, k, &path);
                    let commit_characters = if is_incomplete {
                      Some(
                        REGISTRY_IMPORT_COMMIT_CHARS
                          .iter()
                          .map(|&c| c.into())
                          .collect(),
                      )
                    } else {
                      Some(
                        IMPORT_COMMIT_CHARS.iter().map(|&c| c.into()).collect(),
                      )
                    };
                    completions.insert(
                      item.clone(),
                      lsp::CompletionItem {
                        label: item,
                        kind,
                        detail,
                        sort_text,
                        filter_text,
                        text_edit,
                        command,
                        preselect,
                        data,
                        commit_characters,
                        ..Default::default()
                      },
                    );
                  }
                }
              }
            }
          }
          break;
        }
      }
    }
    // If we return None, other sources of completions will be looked for
    // but if we did at least match part of a registry, we should send an
    // empty vector so that no-completions will be sent back to the client
    if completions.is_empty() && !did_match {
      None
    } else {
      Some(lsp::CompletionList {
        items: completions.into_values().collect(),
        is_incomplete,
      })
    }
  }
pub async fn get_documentation(
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | true |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/lsp/lsp_custom.rs | cli/lsp/lsp_custom.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use deno_core::serde::Deserialize;
use deno_core::serde::Serialize;
use tower_lsp::lsp_types as lsp;
/// Method name of the custom `deno/performance` request.
pub const PERFORMANCE_REQUEST: &str = "deno/performance";
/// Method name of the custom `deno/taskDefinitions` request.
pub const TASK_REQUEST: &str = "deno/taskDefinitions";
/// Method name of the custom `deno/virtualTextDocument` request.
pub const VIRTUAL_TEXT_DOCUMENT: &str = "deno/virtualTextDocument";
/// A named task along with the document it was defined in (`source_uri`).
#[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct TaskDefinition {
  pub name: String,
  pub command: Option<String>,
  pub description: Option<String>,
  pub source_uri: lsp::Uri,
}
/// Payload of the `deno/registryState` notification.
#[derive(Debug, Deserialize, Serialize)]
pub struct RegistryStateNotificationParams {
  pub origin: String,
  pub suggestions: bool,
}

/// Marker type for the `deno/registryState` notification.
pub enum RegistryStateNotification {}

impl lsp::notification::Notification for RegistryStateNotification {
  type Params = RegistryStateNotificationParams;
  const METHOD: &'static str = "deno/registryState";
}
/// Parameters of the `deno/virtualTextDocument` request.
#[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct VirtualTextDocumentParams {
  pub text_document: lsp::TextDocumentIdentifier,
}
/// Configuration files associated with one workspace scope.
#[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct DenoConfigurationData {
  pub scope_uri: lsp::Uri,
  pub workspace_root_scope_uri: Option<lsp::Uri>,
  pub deno_json: Option<lsp::TextDocumentIdentifier>,
  pub package_json: Option<lsp::TextDocumentIdentifier>,
}
/// Payload of the `deno/didRefreshDenoConfigurationTree` notification.
#[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct DidRefreshDenoConfigurationTreeNotificationParams {
  pub data: Vec<DenoConfigurationData>,
  pub deno_dir_npm_folder_uri: Option<lsp::Uri>,
}

/// Marker type for the `deno/didRefreshDenoConfigurationTree` notification.
pub enum DidRefreshDenoConfigurationTreeNotification {}

impl lsp::notification::Notification
  for DidRefreshDenoConfigurationTreeNotification
{
  type Params = DidRefreshDenoConfigurationTreeNotificationParams;
  const METHOD: &'static str = "deno/didRefreshDenoConfigurationTree";
}
/// How a configuration file changed on disk.
#[derive(Debug, Eq, Hash, PartialEq, Copy, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub enum DenoConfigurationChangeType {
  Added,
  Changed,
  Removed,
}

impl DenoConfigurationChangeType {
  /// Maps an LSP file-change event to a configuration change type.
  pub fn from_file_change_type(file_event: lsp::FileChangeType) -> Self {
    match file_event {
      lsp::FileChangeType::CREATED => Self::Added,
      lsp::FileChangeType::CHANGED => Self::Changed,
      lsp::FileChangeType::DELETED => Self::Removed,
      // `lsp::FileChangeType` is a newtype of constants, so a wildcard
      // arm is required; treat unknown values as a change
      _ => Self::Changed, // non-exhaustable enum
    }
  }
}
/// Which kind of configuration file an event refers to.
#[derive(Debug, Eq, Hash, PartialEq, Copy, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub enum DenoConfigurationType {
  DenoJson,
  PackageJson,
}
/// A single configuration-file change within a scope.
#[derive(Debug, Eq, Hash, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct DenoConfigurationChangeEvent {
  pub scope_uri: lsp::Uri,
  pub file_uri: lsp::Uri,
  #[serde(rename = "type")]
  pub typ: DenoConfigurationChangeType,
  pub configuration_type: DenoConfigurationType,
}
/// Payload of the `deno/didChangeDenoConfiguration` notification.
#[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct DidChangeDenoConfigurationNotificationParams {
  pub changes: Vec<DenoConfigurationChangeEvent>,
}

// TODO(nayeemrmn): This is being replaced by
// `DidRefreshDenoConfigurationTreeNotification` for Deno > v2.0.0. Remove it
// soon.
pub enum DidChangeDenoConfigurationNotification {}

impl lsp::notification::Notification
  for DidChangeDenoConfigurationNotification
{
  type Params = DidChangeDenoConfigurationNotificationParams;
  const METHOD: &'static str = "deno/didChangeDenoConfiguration";
}
/// Marker type for the notification carrying the result of a Deno
/// upgrade check.
pub enum DidUpgradeCheckNotification {}

impl lsp::notification::Notification for DidUpgradeCheckNotification {
  type Params = DidUpgradeCheckNotificationParams;
  const METHOD: &'static str = "deno/didUpgradeCheck";
}

/// Details about a newer Deno version discovered by the upgrade check.
#[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct UpgradeAvailable {
  pub latest_version: String,
  pub is_canary: bool,
}

#[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct DidUpgradeCheckNotificationParams {
  // Presumably `None` when no newer version was found — confirm at the
  // site that constructs this.
  pub upgrade_available: Option<UpgradeAvailable>,
}
/// This notification is only sent for testing purposes
/// in order to group diagnostics.
///
/// Marks the start of a diagnostics batch; carries no payload.
pub enum DiagnosticBatchStartNotification {}

impl lsp::notification::Notification for DiagnosticBatchStartNotification {
  type Params = ();
  const METHOD: &'static str = "deno/internalTestDiagnosticBatchStart";
}

/// This notification is only sent for testing purposes
/// in order to group diagnostics.
///
/// Marks the end of a diagnostics batch; carries no payload.
pub enum DiagnosticBatchEndNotification {}

impl lsp::notification::Notification for DiagnosticBatchEndNotification {
  type Params = ();
  const METHOD: &'static str = "deno/internalTestDiagnosticBatchEnd";
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/lsp/code_lens.rs | cli/lsp/code_lens.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::cell::RefCell;
use std::collections::HashSet;
use std::rc::Rc;
use std::sync::Arc;
use deno_ast::ParsedSource;
use deno_ast::SourceRange;
use deno_ast::SourceRangedForSpanned;
use deno_ast::swc::ast;
use deno_ast::swc::ecma_visit::Visit;
use deno_ast::swc::ecma_visit::VisitWith;
use deno_core::ModuleSpecifier;
use deno_core::error::AnyError;
use deno_core::serde::Deserialize;
use deno_core::serde::Serialize;
use deno_core::serde_json;
use deno_core::serde_json::json;
use lazy_regex::lazy_regex;
use lsp_types::Uri;
use once_cell::sync::Lazy;
use regex::Regex;
use tokio_util::sync::CancellationToken;
use tower_lsp::jsonrpc::Error as LspError;
use tower_lsp::jsonrpc::Result as LspResult;
use tower_lsp::lsp_types as lsp;
use super::analysis::source_range_to_lsp_range;
use super::config::CodeLensSettings;
use super::language_server;
use super::text::LineIndex;
use super::tsc;
use super::tsc::NavigationTree;
/// Matches the `abstract` keyword in tsc's `kind_modifiers` string.
static ABSTRACT_MODIFIER: Lazy<Regex> = lazy_regex!(r"\babstract\b");

/// Matches the `export` keyword in tsc's `kind_modifiers` string.
static EXPORT_MODIFIER: Lazy<Regex> = lazy_regex!(r"\bexport\b");
/// Identifies what an unresolved code lens is for, so `resolve_code_lens()`
/// can dispatch to the right resolver.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Deserialize, Serialize)]
pub enum CodeLensSource {
  #[serde(rename = "implementations")]
  Implementations,
  #[serde(rename = "references")]
  References,
}

/// Payload stored in a code lens's `data` field between the collect and
/// resolve phases.
#[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct CodeLensData {
  pub source: CodeLensSource,
  pub uri: Uri,
}
/// AST visitor state used to collect "Run Test"/"Debug" code lenses for
/// `Deno.test(...)`-style calls within a module.
struct DenoTestCollector {
  // Accumulated lenses; two are pushed per discovered test (run + debug).
  code_lenses: Vec<lsp::CodeLens>,
  parsed_source: ParsedSource,
  specifier: ModuleSpecifier,
  // Names of local variables bound to `Deno.test` (via destructuring or
  // direct assignment), so bare calls like `test(...)` are recognized.
  test_vars: HashSet<String>,
}
impl DenoTestCollector {
  pub fn new(specifier: ModuleSpecifier, parsed_source: ParsedSource) -> Self {
    Self {
      code_lenses: Vec::new(),
      parsed_source,
      specifier,
      test_vars: HashSet::new(),
    }
  }

  /// Adds the standard pair of lenses ("Run Test" and "Debug") for a test
  /// with the given name at the given source range.
  fn add_code_lenses<N: AsRef<str>>(&mut self, name: N, range: &SourceRange) {
    let range =
      source_range_to_lsp_range(range, self.parsed_source.text_info_lazy());
    self.add_code_lens(&name, range, "▶\u{fe0e} Run Test", false);
    self.add_code_lens(&name, range, "Debug", true);
  }

  /// Pushes a single `deno.client.test` command lens. `inspect` selects
  /// whether the client should run the test under the debugger.
  fn add_code_lens<N: AsRef<str>>(
    &mut self,
    name: &N,
    range: lsp::Range,
    title: &str,
    inspect: bool,
  ) {
    let options = json!({
      "inspect": inspect,
    });
    self.code_lenses.push(lsp::CodeLens {
      range,
      command: Some(lsp::Command {
        title: title.to_string(),
        command: "deno.client.test".to_string(),
        arguments: Some(vec![
          json!(self.specifier),
          json!(name.as_ref()),
          options,
        ]),
      }),
      data: None,
    });
  }

  /// Extracts the test name from the first argument of a recognized test
  /// call and emits lenses for it. Supported argument shapes: an options
  /// object with a `name` property, a named function expression, a string
  /// literal, or a template literal with no substitutions.
  fn check_call_expr(&mut self, node: &ast::CallExpr, range: &SourceRange) {
    if let Some(expr) = node.args.first().map(|es| es.expr.as_ref()) {
      match expr {
        // e.g. `Deno.test({ name: "...", fn() {} })`
        ast::Expr::Object(obj_lit) => {
          for prop in &obj_lit.props {
            if let ast::PropOrSpread::Prop(prop) = prop
              && let ast::Prop::KeyValue(key_value_prop) = prop.as_ref()
              && let ast::PropName::Ident(ast::IdentName { sym, .. }) =
                &key_value_prop.key
              && sym == "name"
            {
              match key_value_prop.value.as_ref() {
                ast::Expr::Lit(ast::Lit::Str(lit_str)) => {
                  let name = lit_str.value.to_string_lossy().to_string();
                  self.add_code_lenses(name, range);
                }
                // Only template literals without interpolation have a
                // statically-known name.
                ast::Expr::Tpl(tpl) if tpl.quasis.len() == 1 => {
                  let name = tpl.quasis.first().unwrap().raw.to_string();
                  self.add_code_lenses(name, range);
                }
                _ => {}
              }
            }
          }
        }
        // e.g. `Deno.test(function myTest() {})` — use the function name.
        ast::Expr::Fn(fn_expr) => {
          if let Some(ast::Ident { sym, .. }) = fn_expr.ident.as_ref() {
            let name = sym.to_string();
            self.add_code_lenses(name, range);
          }
        }
        // e.g. `Deno.test("name", ...)`
        ast::Expr::Lit(ast::Lit::Str(lit_str)) => {
          let name = lit_str.value.to_string_lossy().to_string();
          self.add_code_lenses(name, range);
        }
        // e.g. `Deno.test(`name`, ...)` without interpolation.
        ast::Expr::Tpl(tpl) if tpl.quasis.len() == 1 => {
          let name = tpl.quasis.first().unwrap().raw.to_string();
          self.add_code_lenses(name, range);
        }
        _ => (),
      }
    }
  }

  /// Move out the code lenses from the collector.
  fn take(self) -> Vec<lsp::CodeLens> {
    self.code_lenses
  }
}
impl Visit for DenoTestCollector {
  fn visit_call_expr(&mut self, node: &ast::CallExpr) {
    let ast::Callee::Expr(callee_expr) = &node.callee else {
      return;
    };
    // Flatten the last (up to) three segments of the callee property chain
    // into `prop_chain`, right-aligned — e.g. `Deno.test.only` fills all
    // three slots while a bare `describe` fills only the last one.
    let mut prop_chain = ["", "", ""];
    let mut current_segment = callee_expr.as_ref();
    // Range of the right-most identifier; this is where the lens is shown.
    let mut rightmost_symbol_range = None;
    for (i, name) in prop_chain.iter_mut().enumerate().rev() {
      match current_segment {
        ast::Expr::Ident(ident) => {
          *name = ident.sym.as_str();
          rightmost_symbol_range.get_or_insert_with(|| ident.range());
          break;
        }
        ast::Expr::Member(member_expr) => {
          // More than three segments can't be a recognized test call.
          if i == 0 {
            return;
          }
          let ast::MemberProp::Ident(right) = &member_expr.prop else {
            return;
          };
          *name = right.sym.as_str();
          rightmost_symbol_range.get_or_insert_with(|| right.range());
          current_segment = &member_expr.obj;
        }
        _ => return,
      }
    }
    let Some(rightmost_symbol_range) = rightmost_symbol_range else {
      debug_assert!(false, "rightmost symbol range should always be defined");
      return;
    };
    // Only proceed for the known test call shapes.
    match prop_chain {
      ["", "Deno", "test"]
        | ["Deno", "test", "ignore" | "only"]
        | ["", "", "describe"]
        | ["", "describe", "ignore" | "only" | "skip"] => {}
      // A bare call to a variable previously bound to `Deno.test`.
      ["", "", s] if self.test_vars.contains(s) => {}
      _ => return,
    };
    self.check_call_expr(node, &rightmost_symbol_range);
  }

  fn visit_var_decl(&mut self, node: &ast::VarDecl) {
    for decl in &node.decls {
      if let Some(init) = &decl.init {
        match init.as_ref() {
          // Identify destructured assignments of `test` from `Deno`
          ast::Expr::Ident(ident) => {
            if ident.sym == "Deno"
              && let ast::Pat::Object(object_pat) = &decl.name
            {
              for prop in &object_pat.props {
                match prop {
                  // `const { test } = Deno;`
                  ast::ObjectPatProp::Assign(prop) => {
                    let name = prop.key.sym.to_string();
                    if name == "test" {
                      self.test_vars.insert(name);
                    }
                  }
                  // `const { test: alias } = Deno;` — track the alias.
                  ast::ObjectPatProp::KeyValue(prop) => {
                    if let ast::PropName::Ident(key_ident) = &prop.key
                      && key_ident.sym == "test"
                      && let ast::Pat::Ident(value_ident) = &prop.value.as_ref()
                    {
                      self.test_vars.insert(value_ident.id.sym.to_string());
                    }
                  }
                  _ => (),
                }
              }
            }
          }
          // Identify variable assignments where the init is `Deno.test`
          ast::Expr::Member(member_expr) => {
            if let ast::Expr::Ident(obj_ident) = member_expr.obj.as_ref()
              && obj_ident.sym == "Deno"
              && let ast::MemberProp::Ident(prop_ident) = &member_expr.prop
              && prop_ident.sym == "test"
              && let ast::Pat::Ident(binding_ident) = &decl.name
            {
              self.test_vars.insert(binding_ident.id.sym.to_string());
            }
          }
          _ => (),
        }
      }
    }
  }
}
/// Resolves an "implementations" code lens by querying for implementation
/// locations at the lens position and attaching a
/// `deno.client.showReferences` command (or a non-clickable command when
/// there are no implementations).
async fn resolve_implementation_code_lens(
  code_lens: lsp::CodeLens,
  data: CodeLensData,
  language_server: &language_server::Inner,
  token: &CancellationToken,
) -> LspResult<lsp::CodeLens> {
  let locations = language_server
    .goto_implementation(
      lsp::request::GotoImplementationParams {
        text_document_position_params: lsp::TextDocumentPositionParams {
          text_document: lsp::TextDocumentIdentifier {
            uri: data.uri.clone(),
          },
          position: code_lens.range.start,
        },
        work_done_progress_params: Default::default(),
        partial_result_params: Default::default(),
      },
      token,
    )
    .await?
    .map(|r| match r {
      lsp::GotoDefinitionResponse::Scalar(location) => vec![location],
      lsp::GotoDefinitionResponse::Array(locations) => locations,
      // Normalize definition links down to plain locations.
      lsp::GotoDefinitionResponse::Link(links) => links
        .into_iter()
        .map(|l| lsp::Location {
          uri: l.target_uri,
          range: l.target_selection_range,
        })
        .collect(),
    })
    // Idiom: `unwrap_or_default()` instead of `unwrap_or(Vec::new())`.
    .unwrap_or_default();
  let title = if locations.len() == 1 {
    "1 implementation".to_string()
  } else {
    format!("{} implementations", locations.len())
  };
  let command = if locations.is_empty() {
    // An empty command string renders the title as plain text.
    lsp::Command {
      title,
      command: String::new(),
      arguments: None,
    }
  } else {
    lsp::Command {
      title,
      command: "deno.client.showReferences".to_string(),
      arguments: Some(vec![
        json!(data.uri),
        json!(code_lens.range.start),
        json!(locations),
      ]),
    }
  };
  Ok(lsp::CodeLens {
    range: code_lens.range,
    command: Some(command),
    data: None,
  })
}
/// Resolves a "references" code lens by counting references to the symbol
/// at the lens position and attaching a client command to display them.
async fn resolve_references_code_lens(
  code_lens: lsp::CodeLens,
  data: CodeLensData,
  language_server: &language_server::Inner,
  token: &CancellationToken,
) -> LspResult<lsp::CodeLens> {
  let position = code_lens.range.start;
  let params = lsp::ReferenceParams {
    text_document_position: lsp::TextDocumentPositionParams {
      text_document: lsp::TextDocumentIdentifier {
        uri: data.uri.clone(),
      },
      position,
    },
    work_done_progress_params: Default::default(),
    partial_result_params: Default::default(),
    context: lsp::ReferenceContext {
      include_declaration: false,
    },
  };
  let locations = language_server
    .references(params, token)
    .await?
    .unwrap_or_default();
  let title = match locations.len() {
    1 => "1 reference".to_string(),
    count => format!("{} references", count),
  };
  // An empty command string renders the title as non-clickable text.
  let command = if locations.is_empty() {
    lsp::Command {
      title,
      command: String::new(),
      arguments: None,
    }
  } else {
    lsp::Command {
      title,
      command: "deno.client.showReferences".to_string(),
      arguments: Some(vec![json!(data.uri), json!(position), json!(locations)]),
    }
  };
  Ok(lsp::CodeLens {
    range: code_lens.range,
    command: Some(command),
    data: None,
  })
}
/// Resolves an unresolved code lens produced by `collect_tsc()`.
///
/// The lens's `data` field (a serialized `CodeLensData`) determines which
/// resolver runs. A resolve request without a `data` field is reported as
/// an invalid-params error rather than panicking the server (the original
/// `unwrap()` would abort on a malformed client request).
pub async fn resolve_code_lens(
  code_lens: lsp::CodeLens,
  language_server: &language_server::Inner,
  token: &CancellationToken,
) -> LspResult<lsp::CodeLens> {
  let data_value = code_lens.data.clone().ok_or_else(|| {
    LspError::invalid_params("Code lens is missing the `data` field.")
  })?;
  let data: CodeLensData =
    serde_json::from_value(data_value).map_err(|err| {
      LspError::invalid_params(format!(
        "Unable to parse code lens data: {:#}",
        err
      ))
    })?;
  match data.source {
    CodeLensSource::Implementations => {
      resolve_implementation_code_lens(code_lens, data, language_server, token)
        .await
    }
    CodeLensSource::References => {
      resolve_references_code_lens(code_lens, data, language_server, token)
        .await
    }
  }
}
/// Collect `Deno.test()`-style code lenses for a module by walking its AST
/// with a `DenoTestCollector`.
pub fn collect_test(
  specifier: &ModuleSpecifier,
  parsed_source: &ParsedSource,
  _token: &CancellationToken,
) -> Result<Vec<lsp::CodeLens>, AnyError> {
  // TODO(nayeemrmn): Do cancellation checks while collecting tests.
  let mut collector =
    DenoTestCollector::new(specifier.clone(), parsed_source.clone());
  parsed_source.program().visit_with(&mut collector);
  Ok(collector.take())
}
/// Return tsc navigation tree code lenses.
///
/// Walks the navigation tree and emits unresolved lenses (later completed
/// by `resolve_code_lens()`) for implementations and/or references,
/// according to the user's code lens settings. `i` is the current tree
/// item and `mp` its parent, if any.
pub fn collect_tsc(
  uri: &Uri,
  code_lens_settings: &CodeLensSettings,
  line_index: Arc<LineIndex>,
  navigation_tree: &NavigationTree,
  token: &CancellationToken,
) -> Result<Vec<lsp::CodeLens>, AnyError> {
  // Shared accumulator mutated from within the walk callback.
  let code_lenses = Rc::new(RefCell::new(Vec::new()));
  navigation_tree.walk(token, &|i, mp| {
    let mut code_lenses = code_lenses.borrow_mut();

    // TSC Implementations Code Lens
    if code_lens_settings.implementations {
      let source = CodeLensSource::Implementations;
      match i.kind {
        tsc::ScriptElementKind::InterfaceElement => {
          code_lenses.push(i.to_code_lens(line_index.clone(), uri, source));
        }
        // Classes and class members only get an implementations lens when
        // marked `abstract`.
        tsc::ScriptElementKind::ClassElement
        | tsc::ScriptElementKind::MemberFunctionElement
        | tsc::ScriptElementKind::MemberVariableElement
        | tsc::ScriptElementKind::MemberGetAccessorElement
        | tsc::ScriptElementKind::MemberSetAccessorElement => {
          if ABSTRACT_MODIFIER.is_match(&i.kind_modifiers) {
            code_lenses.push(i.to_code_lens(line_index.clone(), uri, source));
          }
        }
        _ => (),
      }
    }

    // TSC References Code Lens
    if code_lens_settings.references {
      let source = CodeLensSource::References;
      // Direct children of an enum always get a references lens.
      if let Some(parent) = &mp
        && parent.kind == tsc::ScriptElementKind::EnumElement
      {
        code_lenses.push(i.to_code_lens(line_index.clone(), uri, source));
      }
      match i.kind {
        tsc::ScriptElementKind::FunctionElement => {
          if code_lens_settings.references_all_functions {
            code_lenses.push(i.to_code_lens(line_index.clone(), uri, source));
          }
        }
        // Variables only when the `export` modifier is present.
        tsc::ScriptElementKind::ConstElement
        | tsc::ScriptElementKind::LetElement
        | tsc::ScriptElementKind::VariableElement => {
          if EXPORT_MODIFIER.is_match(&i.kind_modifiers) {
            code_lenses.push(i.to_code_lens(line_index.clone(), uri, source));
          }
        }
        // "<class>" appears to be a synthetic name (likely an anonymous
        // class expression) — skipped here.
        tsc::ScriptElementKind::ClassElement => {
          if i.text != "<class>" {
            code_lenses.push(i.to_code_lens(line_index.clone(), uri, source));
          }
        }
        tsc::ScriptElementKind::InterfaceElement
        | tsc::ScriptElementKind::TypeElement
        | tsc::ScriptElementKind::EnumElement => {
          code_lenses.push(i.to_code_lens(line_index.clone(), uri, source));
        }
        // Members: only when nested in a class/interface/type and not at
        // the same start position as the parent (presumably to avoid a
        // duplicate lens on the same location).
        tsc::ScriptElementKind::LocalFunctionElement
        | tsc::ScriptElementKind::MemberFunctionElement
        | tsc::ScriptElementKind::MemberGetAccessorElement
        | tsc::ScriptElementKind::MemberSetAccessorElement
        | tsc::ScriptElementKind::ConstructorImplementationElement
        | tsc::ScriptElementKind::MemberVariableElement => {
          if let Some(parent) = &mp
            && parent.spans[0].start != i.spans[0].start
          {
            match parent.kind {
              tsc::ScriptElementKind::ClassElement
              | tsc::ScriptElementKind::InterfaceElement
              | tsc::ScriptElementKind::TypeElement => {
                code_lenses.push(i.to_code_lens(
                  line_index.clone(),
                  uri,
                  source,
                ));
              }
              _ => (),
            }
          }
        }
        _ => (),
      }
    }
  })?;
  Ok(Rc::try_unwrap(code_lenses).unwrap().into_inner())
}
#[cfg(test)]
mod tests {
  use deno_ast::MediaType;
  use deno_core::resolve_url;

  use super::*;

  /// Builds the pair of lenses ("Run Test" followed by "Debug") that the
  /// collector is expected to emit for a single test, given the position
  /// of the right-most callee symbol and the test name.
  fn expected_lens_pair(
    line: u32,
    start: u32,
    end: u32,
    name: &str,
  ) -> Vec<lsp::CodeLens> {
    let range = lsp::Range {
      start: lsp::Position {
        line,
        character: start,
      },
      end: lsp::Position {
        line,
        character: end,
      },
    };
    [("▶\u{fe0e} Run Test", false), ("Debug", true)]
      .into_iter()
      .map(|(title, inspect)| lsp::CodeLens {
        range,
        command: Some(lsp::Command {
          title: title.to_string(),
          command: "deno.client.test".to_string(),
          arguments: Some(vec![
            json!("https://deno.land/x/mod.ts"),
            json!(name),
            json!({
              "inspect": inspect,
            }),
          ]),
        }),
        data: None,
      })
      .collect()
  }

  #[test]
  fn test_deno_test_collector() {
    let specifier = resolve_url("https://deno.land/x/mod.ts").unwrap();
    let source = r#"
      Deno.test({
        name: "test a",
        fn() {}
      });

      Deno.test(function useFnName() {});

      Deno.test("test b", function anotherTest() {});

      Deno.test.ignore("test ignore", () => {});

      Deno.test.only("test only", () => {});

      Deno.test(`test template literal name`, () => {});

      describe("test describe", () => {});

      describe.ignore("test describe ignore", () => {});

      describe.only("test describe only", () => {});

      describe.skip("test describe skip", () => {});
    "#;
    let parsed_module = deno_ast::parse_module(deno_ast::ParseParams {
      specifier: specifier.clone(),
      text: source.into(),
      media_type: MediaType::TypeScript,
      capture_tokens: true,
      scope_analysis: true,
      maybe_syntax: None,
    })
    .unwrap();
    let mut collector =
      DenoTestCollector::new(specifier, parsed_module.clone());
    parsed_module.program().visit_with(&mut collector);
    // One entry per discovered test, in source order:
    // (line, start character, end character, test name).
    let cases = [
      (1, 11, 15, "test a"),
      (6, 11, 15, "useFnName"),
      (8, 11, 15, "test b"),
      (10, 16, 22, "test ignore"),
      (12, 16, 20, "test only"),
      (14, 11, 15, "test template literal name"),
      (16, 6, 14, "test describe"),
      (18, 15, 21, "test describe ignore"),
      (20, 15, 19, "test describe only"),
      (22, 15, 19, "test describe skip"),
    ];
    let expected: Vec<lsp::CodeLens> = cases
      .iter()
      .flat_map(|&(line, start, end, name)| {
        expected_lens_pair(line, start, end, name)
      })
      .collect();
    assert_eq!(collector.take(), expected);
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/lsp/analysis.rs | cli/lsp/analysis.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::cmp::Ordering;
use std::collections::HashMap;
use std::collections::HashSet;
use std::path::Path;
use std::str::FromStr;
use std::sync::Arc;
use deno_ast::SourceRange;
use deno_ast::SourceRangedForSpanned;
use deno_ast::SourceTextInfo;
use deno_core::ModuleSpecifier;
use deno_core::anyhow::anyhow;
use deno_core::error::AnyError;
use deno_core::resolve_url;
use deno_core::serde::Deserialize;
use deno_core::serde::Serialize;
use deno_core::serde_json;
use deno_core::serde_json::json;
use deno_core::url::Url;
use deno_error::JsErrorBox;
use deno_lint::diagnostic::LintDiagnosticRange;
use deno_npm::NpmPackageId;
use deno_path_util::url_to_file_path;
use deno_resolver::npm::managed::NpmResolutionCell;
use deno_runtime::deno_node::PathClean;
use deno_semver::SmallStackString;
use deno_semver::StackString;
use deno_semver::Version;
use deno_semver::jsr::JsrPackageNvReference;
use deno_semver::jsr::JsrPackageReqReference;
use deno_semver::npm::NpmPackageReqReference;
use deno_semver::package::PackageNv;
use deno_semver::package::PackageNvReference;
use deno_semver::package::PackageReq;
use deno_semver::package::PackageReqReference;
use import_map::ImportMap;
use lsp_types::Uri;
use node_resolver::InNpmPackageChecker;
use node_resolver::NodeResolutionKind;
use node_resolver::ResolutionMode;
use once_cell::sync::Lazy;
use regex::Regex;
use tokio_util::sync::CancellationToken;
use tower_lsp::lsp_types as lsp;
use tower_lsp::lsp_types::Position;
use tower_lsp::lsp_types::Range;
use super::diagnostics::DenoDiagnostic;
use super::diagnostics::DiagnosticSource;
use super::documents::DocumentModule;
use super::documents::DocumentModules;
use super::language_server;
use super::resolver::LspResolver;
use super::tsc;
use crate::args::jsr_url;
use crate::lsp::urls::uri_to_url;
use crate::tools::lint::CliLinter;
use crate::util::path::relative_specifier;
/// Diagnostic error codes which actually are the same, and so when grouping
/// fixes we treat them the same.
static FIX_ALL_ERROR_CODES: Lazy<HashMap<&'static str, &'static str>> =
  Lazy::new(|| ([("2339", "2339"), ("2345", "2339")]).into_iter().collect());

/// Fixes which help determine if there is a preferred fix when there are
/// multiple fixes available.
///
/// Maps a tsc fix name to a `(priority, bool)` tuple. NOTE(review): the
/// boolean's exact meaning isn't visible in this file section — confirm
/// against the consumer of this map.
static PREFERRED_FIXES: Lazy<HashMap<&'static str, (u32, bool)>> =
  Lazy::new(|| {
    ([
      ("annotateWithTypeFromJSDoc", (1, false)),
      ("constructorForDerivedNeedSuperCall", (1, false)),
      ("extendsInterfaceBecomesImplements", (1, false)),
      ("awaitInSyncFunction", (1, false)),
      ("classIncorrectlyImplementsInterface", (3, false)),
      ("classDoesntImplementInheritedAbstractMember", (3, false)),
      ("unreachableCode", (1, false)),
      ("unusedIdentifier", (1, false)),
      ("forgottenThisPropertyAccess", (1, false)),
      ("spelling", (2, false)),
      ("addMissingAwait", (1, false)),
      ("fixImport", (0, true)),
    ])
    .into_iter()
    .collect()
  });

/// Captures the specifier text of static imports (`from "..."`) and
/// dynamic `import("...")` calls.
static IMPORT_SPECIFIER_RE: Lazy<Regex> = lazy_regex::lazy_regex!(
  r#"\sfrom\s+["']([^"']*)["']|import\s*\(\s*["']([^"']*)["']\s*\)"#
);

/// File extensions considered to be importable module files.
const SUPPORTED_EXTENSIONS: &[&str] = &[
  ".ts", ".tsx", ".js", ".jsx", ".mjs", ".mts", ".cjs", ".cts", ".d.ts",
  ".d.mts", ".d.cts",
];
/// A single text edit belonging to a lint quick fix.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct DataQuickFixChange {
  pub range: Range,
  pub new_text: String,
}

/// A quick fix that's stored in the diagnostic's data field.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct DataQuickFix {
  pub description: String,
  pub changes: Vec<DataQuickFixChange>,
}
/// Category of self-generated diagnostic messages (those not coming from
/// TypeScript).
#[derive(Debug, PartialEq, Eq)]
pub enum Category {
  /// A lint diagnostic, where the first element is the message.
  Lint {
    message: String,
    code: String,
    hint: Option<String>,
    quick_fixes: Vec<DataQuickFix>,
  },
}

/// A structure to hold a reference to a diagnostic message.
#[derive(Debug, PartialEq, Eq)]
pub struct Reference {
  category: Category,
  range: Range,
}
impl Reference {
  /// Converts this reference into an LSP diagnostic. Lint hits are
  /// surfaced as warnings under the lint diagnostic source, with the hint
  /// (if any) appended to the message on a new line and any quick fixes
  /// serialized into the diagnostic's `data` field.
  pub fn to_diagnostic(&self) -> lsp::Diagnostic {
    match &self.category {
      Category::Lint {
        message,
        code,
        hint,
        quick_fixes,
      } => lsp::Diagnostic {
        range: self.range,
        severity: Some(lsp::DiagnosticSeverity::WARNING),
        code: Some(lsp::NumberOrString::String(code.to_string())),
        code_description: None,
        source: Some(DiagnosticSource::Lint.as_lsp_source().to_string()),
        message: {
          // Append the hint on its own line, if present.
          let mut msg = message.to_string();
          if let Some(hint) = hint {
            msg.push('\n');
            msg.push_str(hint);
          }
          msg
        },
        related_information: None,
        tags: None, // we should tag unused code
        data: if quick_fixes.is_empty() {
          None
        } else {
          serde_json::to_value(quick_fixes).ok()
        },
      },
    }
  }
}
/// Converts a lint diagnostic's range into an LSP range using the text
/// info bundled with the diagnostic.
fn as_lsp_range_from_lint_diagnostic(
  diagnostic_range: &LintDiagnosticRange,
) -> Range {
  as_lsp_range(diagnostic_range.range, &diagnostic_range.text_info)
}

/// Converts a byte-offset source range into a zero-based line/character
/// LSP range.
fn as_lsp_range(
  source_range: SourceRange,
  text_info: &SourceTextInfo,
) -> Range {
  let to_position = |pos| {
    let lc = text_info.line_and_column_index(pos);
    Position {
      line: lc.line_index as u32,
      character: lc.column_index as u32,
    }
  };
  Range {
    start: to_position(source_range.start),
    end: to_position(source_range.end),
  }
}
/// Runs the linter over an already-parsed module and converts each
/// diagnostic that has a range into a `Reference`, carrying along any
/// quick fixes.
pub fn get_lint_references(
  parsed_source: &deno_ast::ParsedSource,
  linter: &CliLinter,
  token: CancellationToken,
) -> Result<Vec<Reference>, AnyError> {
  let lint_diagnostics = linter.lint_with_ast(parsed_source, token)?;
  let mut references = Vec::new();
  for diagnostic in lint_diagnostics {
    // Diagnostics without a range can't be surfaced in the editor.
    let Some(range) = diagnostic.range.as_ref() else {
      continue;
    };
    let mut quick_fixes = Vec::new();
    for fix in diagnostic.details.fixes {
      let mut changes = Vec::new();
      for change in fix.changes {
        changes.push(DataQuickFixChange {
          range: as_lsp_range(change.range, &range.text_info),
          new_text: change.new_text.to_string(),
        });
      }
      quick_fixes.push(DataQuickFix {
        description: fix.description.to_string(),
        changes,
      });
    }
    references.push(Reference {
      range: as_lsp_range_from_lint_diagnostic(range),
      category: Category::Lint {
        message: diagnostic.details.message,
        code: diagnostic.details.code.to_string(),
        hint: diagnostic.details.hint,
        quick_fixes,
      },
    });
  }
  Ok(references)
}
/// Renders an optional LSP diagnostic code as a plain string; an absent
/// code becomes the empty string.
fn code_as_string(code: &Option<lsp::NumberOrString>) -> String {
  match code {
    Some(lsp::NumberOrString::String(text)) => text.clone(),
    Some(lsp::NumberOrString::Number(value)) => value.to_string(),
    None => String::new(),
  }
}
/// Given a specifier and a referring specifier, determine if a value in the
/// import map could be used as an import specifier that resolves using the
/// import map.
///
/// This was inlined from the import_map crate in order to ignore more
/// entries.
pub fn import_map_lookup(
  import_map: &ImportMap,
  specifier: &Url,
  referrer: &Url,
) -> Option<String> {
  let specifier_str = specifier.as_str();
  for entry in import_map.entries_for_referrer(referrer) {
    if let Some(address) = entry.value {
      let address_str = address.as_str();
      if referrer.as_str().starts_with(address_str) {
        // ignore when the referrer has a common base with the
        // import map entry (ex. `./src/a.ts` importing `./src/b.ts`
        // and there's a `"$src/": "./src/"` import map entry)
        continue;
      }
      // Exact match: the entry's key maps directly to this specifier.
      if address_str == specifier_str {
        return Some(entry.raw_key.to_string());
      }
      // Prefix match: a trailing-slash entry maps a whole directory, so
      // rewrite the matching prefix of the specifier to the entry key.
      if address_str.ends_with('/') && specifier_str.starts_with(address_str) {
        return Some(specifier_str.replace(address_str, entry.raw_key));
      }
    }
  }
  None
}
/// Rewrites imports in quick fixes and code changes to be Deno specific.
pub struct TsResponseImportMapper<'a> {
  document_modules: &'a DocumentModules,
  // Passed to `LspResolver::get_scoped_resolver()` to select the
  // appropriate scoped resolver, when present.
  scope: Option<Arc<ModuleSpecifier>>,
  maybe_import_map: Option<&'a ImportMap>,
  resolver: &'a LspResolver,
  tsc_specifier_map: &'a tsc::TscSpecifierMap,
}
impl<'a> TsResponseImportMapper<'a> {
  /// Creates a mapper for the given scope, caching the scope's import map
  /// (if any) up front so later lookups don't repeat the fetch.
  pub fn new(
    document_modules: &'a DocumentModules,
    scope: Option<Arc<ModuleSpecifier>>,
    resolver: &'a LspResolver,
    tsc_specifier_map: &'a tsc::TscSpecifierMap,
  ) -> Self {
    let maybe_import_map = resolver
      .get_scoped_resolver(scope.as_deref())
      .as_workspace_resolver()
      .maybe_import_map();
    Self {
      document_modules,
      scope,
      maybe_import_map,
      resolver,
      tsc_specifier_map,
    }
  }
  /// Maps a resolved specifier returned by tsc back to a form a user would
  /// write in source: a configured dependency key, an import-map key, a
  /// `jsr:`/`npm:` specifier, or a bare workspace specifier. Returns `None`
  /// when no mapping applies.
  pub fn check_specifier(
    &self,
    specifier: &ModuleSpecifier,
    referrer: &ModuleSpecifier,
  ) -> Option<String> {
    // Joins e.g. "npm:" + req + Some("sub") -> "npm:<req>/sub".
    fn concat_npm_specifier(
      prefix: &str,
      pkg_req: &PackageReq,
      sub_path: Option<&str>,
    ) -> String {
      let result = format!("{}{}", prefix, pkg_req);
      match sub_path {
        Some(path) => format!("{}/{}", result, path),
        None => result,
      }
    }
    // Node built-ins pass through unchanged.
    if specifier.scheme() == "node" {
      return Some(specifier.to_string());
    }
    let scoped_resolver =
      self.resolver.get_scoped_resolver(self.scope.as_deref());
    if let Some(dep_name) =
      scoped_resolver.resource_url_to_configured_dep_key(specifier, referrer)
    {
      return Some(dep_name);
    }
    // Registry URLs under the JSR base are decoded into name/version/path
    // segments and turned back into a jsr specifier.
    if let Some(jsr_path) = specifier.as_str().strip_prefix(jsr_url().as_str())
    {
      let mut segments = jsr_path.split('/');
      let name = if jsr_path.starts_with('@') {
        // Scoped package: "@scope/name".
        let scope = segments.next()?;
        let name = segments.next()?;
        capacity_builder::StringBuilder::<StackString>::build(|builder| {
          builder.append(scope);
          builder.append("/");
          builder.append(name);
        })
        .unwrap()
      } else {
        StackString::from(segments.next()?)
      };
      let version = Version::parse_standard(segments.next()?).ok()?;
      let nv = PackageNv { name, version };
      let path = segments.collect::<Vec<_>>().join("/");
      let export = scoped_resolver.jsr_lookup_export_for_path(&nv, &path)?;
      // The root export "." maps to no sub path.
      let sub_path = (export != ".")
        .then_some(export)
        .map(SmallStackString::from_string);
      let mut req = None;
      // Prefer a version requirement already present in the import map that
      // this exact version satisfies.
      req = req.or_else(|| {
        let import_map = self.maybe_import_map?;
        for entry in import_map.entries_for_referrer(referrer) {
          let Some(value) = entry.raw_value else {
            continue;
          };
          let Ok(req_ref) = JsrPackageReqReference::from_str(value) else {
            continue;
          };
          let req = req_ref.req();
          if req.name == nv.name
            && req.version_req.tag().is_none()
            && req.version_req.matches(&nv.version)
          {
            return Some(req.clone());
          }
        }
        None
      });
      req = req.or_else(|| scoped_resolver.jsr_lookup_req_for_nv(&nv));
      let spec_str = if let Some(req) = req {
        let req_ref = PackageReqReference { req, sub_path };
        JsrPackageReqReference::new(req_ref).to_string()
      } else {
        // No requirement known; fall back to the exact name@version form.
        let nv_ref = PackageNvReference { nv, sub_path };
        JsrPackageNvReference::new(nv_ref).to_string()
      };
      let specifier = ModuleSpecifier::parse(&spec_str).ok()?;
      if let Some(import_map) = self.maybe_import_map {
        if let Some(result) =
          import_map_lookup(import_map, &specifier, referrer)
        {
          return Some(result);
        }
        // Also try the "jsr:/..." (leading-slash) form, which import maps
        // may use as a key.
        if let Some(req_ref_str) = specifier.as_str().strip_prefix("jsr:")
          && !req_ref_str.starts_with('/')
        {
          let specifier_str = format!("jsr:/{req_ref_str}");
          if let Ok(specifier) = ModuleSpecifier::parse(&specifier_str)
            && let Some(result) =
              import_map_lookup(import_map, &specifier, referrer)
          {
            return Some(result);
          }
        }
      }
      return Some(spec_str);
    }
    if let Some(npm_resolver) = scoped_resolver.as_maybe_managed_npm_resolver()
    {
      let match_specifier = || {
        let in_npm_pkg = scoped_resolver
          .as_in_npm_pkg_checker()
          .in_npm_package(specifier);
        if !in_npm_pkg {
          return None;
        }
        let pkg_id = npm_resolver
          .resolve_pkg_id_from_specifier(specifier)
          .ok()??;
        // "@types/foo" packages are mapped back to the requirements of the
        // runtime package they provide types for, when possible.
        let pkg_reqs =
          maybe_reverse_definitely_typed(&pkg_id, npm_resolver.resolution())
            .unwrap_or_else(|| {
              npm_resolver
                .resolution()
                .resolve_pkg_reqs_from_pkg_id(&pkg_id)
            });
        if pkg_reqs.is_empty() {
          return None;
        }
        // check if any pkg reqs match what is found in an import map
        let sub_path = npm_resolver
          .resolve_pkg_folder_from_pkg_id(&pkg_id)
          .ok()
          .and_then(|pkg_folder| {
            self.resolve_package_path(specifier, &pkg_folder)
          })?;
        let sub_path = Some(sub_path).filter(|s| !s.is_empty());
        if let Some(import_map) = self.maybe_import_map {
          let pkg_reqs = pkg_reqs.iter().collect::<HashSet<_>>();
          let mut matches = Vec::new();
          for entry in import_map.entries_for_referrer(referrer) {
            if let Some(value) = entry.raw_value
              && let Ok(package_ref) = NpmPackageReqReference::from_str(value)
              && pkg_reqs.contains(package_ref.req())
            {
              let sub_path = sub_path.as_deref().unwrap_or("");
              let value_sub_path = package_ref.sub_path().unwrap_or("");
              if let Some(key_sub_path) = sub_path.strip_prefix(value_sub_path)
              {
                // keys that don't end in a slash can't be mapped to a subpath
                if entry.raw_key.ends_with('/') || key_sub_path.is_empty() {
                  matches.push(format!("{}{}", entry.raw_key, key_sub_path));
                }
              }
            }
          }
          // select the shortest match
          matches.sort_by_key(|a| a.len());
          if let Some(matched) = matches.first() {
            return Some(matched.to_string());
          }
        }
        // if not found in the import map, return the first pkg req
        if let Some(pkg_req) = pkg_reqs.first() {
          return Some(concat_npm_specifier(
            "npm:",
            pkg_req,
            sub_path.as_deref(),
          ));
        }
        None
      };
      if let Some(result) = match_specifier() {
        return Some(result);
      }
    }
    if let Some(bare_package_specifier) =
      scoped_resolver.jsr_lookup_bare_specifier_for_workspace_file(specifier)
    {
      return Some(bare_package_specifier);
    }
    // check if the import map has this specifier
    if let Some(import_map) = self.maybe_import_map
      && let Some(result) = import_map_lookup(import_map, specifier, referrer)
    {
      return Some(result);
    }
    None
  }
  /// Resolves the package-relative export path (via package.json `exports`)
  /// for `specifier`, given the package's root folder. Returns `Some("")`
  /// when the package has no `exports` field.
  fn resolve_package_path(
    &self,
    specifier: &ModuleSpecifier,
    package_root_folder: &Path,
  ) -> Option<String> {
    let scoped_resolver = self.resolver.get_scoped_resolver(Some(specifier));
    let package_json = scoped_resolver
      .as_pkg_json_resolver()
      // the specifier might have a closer package.json, but we
      // want the root of the package's package.json
      .get_closest_package_json(&package_root_folder.join("package.json"))
      .ok()
      .flatten()?;
    let Some(exports) = &package_json.exports else {
      return Some("".to_string());
    };
    let root_folder = package_json.path.parent()?;
    let specifier_path = url_to_file_path(specifier).ok()?;
    let mut search_paths = vec![specifier_path.clone()];
    // TypeScript will provide a .js extension for quick fixes, so do
    // a search for the .d.ts file instead
    if specifier_path.extension().and_then(|e| e.to_str()) == Some("js") {
      search_paths.insert(0, specifier_path.with_extension("d.ts"));
    } else if let Some(file_name) =
      specifier_path.file_name().and_then(|f| f.to_str())
    {
      // In some other cases, typescript will provide the .d.ts extension, but the
      // export might not have a .d.ts defined. In that case, look for the corresponding
      // JavaScript file after not being able to find the .d.ts file.
      if let Some(file_stem) = file_name.strip_suffix(".d.ts") {
        search_paths
          .push(specifier_path.with_file_name(format!("{}.js", file_stem)));
      } else if let Some(file_stem) = file_name.strip_suffix(".d.cts") {
        search_paths
          .push(specifier_path.with_file_name(format!("{}.cjs", file_stem)));
      } else if let Some(file_stem) = file_name.strip_suffix(".d.mts") {
        search_paths
          .push(specifier_path.with_file_name(format!("{}.mjs", file_stem)));
      }
    }
    for search_path in search_paths {
      if let Some(result) =
        try_reverse_map_package_json_exports(root_folder, &search_path, exports)
      {
        return Some(result);
      }
    }
    None
  }
  /// Iterate over the supported extensions, concatenating the extension on the
  /// specifier, returning the first specifier that is resolve-able, otherwise
  /// None if none match.
  pub fn check_unresolved_specifier(
    &self,
    specifier: &str,
    referrer: &ModuleSpecifier,
    resolution_mode: ResolutionMode,
    new_file_hints: &[Url],
  ) -> Option<String> {
    // Try the specifier as-is first, then with each supported extension
    // substituted for a trailing ".js".
    let specifier_stem = specifier.strip_suffix(".js").unwrap_or(specifier);
    let specifiers = std::iter::once(Cow::Borrowed(specifier)).chain(
      SUPPORTED_EXTENSIONS
        .iter()
        .map(|ext| Cow::Owned(format!("{specifier_stem}{ext}"))),
    );
    let scoped_resolver =
      self.resolver.get_scoped_resolver(self.scope.as_deref());
    for specifier in specifiers {
      if let Some(specifier) = scoped_resolver
        .as_cli_resolver()
        .resolve(
          &specifier,
          referrer,
          deno_graph::Position::zeroed(),
          resolution_mode,
          NodeResolutionKind::Types,
        )
        .ok()
        .and_then(|s| self.tsc_specifier_map.normalize(s.as_str()).ok())
        .filter(|s| {
          // `new_file_hints` lets not-yet-created files count as existing.
          new_file_hints.contains(s)
            || self
              .document_modules
              .specifier_exists(s, self.scope.as_deref())
        })
        && let Some(specifier) = self
          .check_specifier(&specifier, referrer)
          .or_else(|| relative_specifier(referrer, &specifier))
          .filter(|s| !s.contains("/node_modules/"))
      {
        return Some(specifier);
      }
    }
    None
  }
  /// Returns `true` when `specifier_text` resolves from `referrer` to a
  /// module that is not inside a node_modules folder.
  pub fn is_valid_import(
    &self,
    specifier_text: &str,
    referrer: &ModuleSpecifier,
    resolution_mode: ResolutionMode,
  ) -> bool {
    self
      .resolver
      .get_scoped_resolver(self.scope.as_deref())
      .as_cli_resolver()
      .resolve(
        specifier_text,
        referrer,
        deno_graph::Position::zeroed(),
        resolution_mode,
        NodeResolutionKind::Types,
      )
      .ok()
      .filter(|s| {
        let specifier = self
          .tsc_specifier_map
          .normalize(s.as_str())
          .map(Cow::Owned)
          .unwrap_or(Cow::Borrowed(s));
        !specifier.as_str().contains("/node_modules/")
      })
      .is_some()
  }
}
/// If `pkg_id` is a DefinitelyTyped package (`@types/*`), returns the
/// package requirements that resolve to the runtime package it provides
/// types for. Returns `None` for non-`@types` packages or when no
/// requirement maps to the runtime package.
fn maybe_reverse_definitely_typed(
  pkg_id: &NpmPackageId,
  resolution: &NpmResolutionCell,
) -> Option<Vec<PackageReq>> {
  let stripped = pkg_id.nv.name.strip_prefix("@types/")?;
  // "@types/scope__name" is the DefinitelyTyped encoding of "@scope/name".
  let runtime_name = if stripped.contains("__") {
    Cow::Owned(format!("@{}", stripped.replace("__", "/")))
  } else {
    Cow::Borrowed(stripped)
  };
  let mut matching = Vec::new();
  for (req, nv) in resolution.package_reqs() {
    if *nv.name == runtime_name {
      matching.push(req);
    }
  }
  if matching.is_empty() {
    None
  } else {
    Some(matching)
  }
}
/// Given a package root and an on-disk target path, searches the package.json
/// `exports` map (recursively, through condition objects) for an entry whose
/// resolved path equals `target_path`, returning the export's sub path
/// (without the leading "./"). Condition keys such as "types" contribute no
/// path segment.
fn try_reverse_map_package_json_exports(
  root_path: &Path,
  target_path: &Path,
  exports: &serde_json::Map<String, serde_json::Value>,
) -> Option<String> {
  use deno_core::serde_json::Value;
  fn try_reverse_map_package_json_exports_inner(
    root_path: &Path,
    target_path: &Path,
    exports: &serde_json::Map<String, Value>,
  ) -> Option<String> {
    for (key, value) in exports {
      match value {
        Value::String(str) => {
          // Leaf: compare the export's cleaned absolute path to the target.
          if root_path.join(str).clean() == target_path {
            return Some(if let Some(suffix) = key.strip_prefix("./") {
              suffix.to_string()
            } else {
              String::new() // condition (ex. "types"), ignore
            });
          }
        }
        Value::Object(obj) => {
          // Nested object: recurse, then prepend this key's sub path if it
          // is a "./..." export key (condition keys add nothing).
          if let Some(result) = try_reverse_map_package_json_exports_inner(
            root_path,
            target_path,
            obj,
          ) {
            return Some(if let Some(suffix) = key.strip_prefix("./") {
              if result.is_empty() {
                suffix.to_string()
              } else {
                format!("{}/{}", suffix, result)
              }
            } else {
              result // condition (ex. "types"), ignore
            });
          }
        }
        _ => {}
      }
    }
    None
  }
  try_reverse_map_package_json_exports_inner(root_path, target_path, exports)
}
/// For a set of tsc changes, scan them for any that contain something that
/// looks like an import and rewrite the import specifier to include the
/// extension.
///
/// Returns an error only when `token` is cancelled part way through.
pub fn fix_ts_import_changes(
  changes: &[tsc::FileTextChanges],
  module: &DocumentModule,
  language_server: &language_server::Inner,
  token: &CancellationToken,
) -> Result<Vec<tsc::FileTextChanges>, AnyError> {
  let mut r = Vec::new();
  // Files that this set of changes creates won't exist yet, so pass them as
  // hints so the specifier check treats them as resolvable.
  let new_file_hints = changes
    .iter()
    .filter(|c| c.is_new_file.unwrap_or(false))
    .filter_map(|c| resolve_url(&c.file_name).ok())
    .collect::<Vec<_>>();
  for change in changes {
    if token.is_cancelled() {
      return Err(anyhow!("request cancelled"));
    }
    let is_new_file = change.is_new_file.unwrap_or(false);
    let Ok(target_specifier) = resolve_url(&change.file_name) else {
      continue;
    };
    let target_module = if is_new_file {
      None
    } else {
      let Some(target_module) =
        language_server.document_modules.module_for_specifier(
          &target_specifier,
          module.scope.as_deref(),
          Some(&module.compiler_options_key),
        )
      else {
        continue;
      };
      Some(target_module)
    };
    // New files have no module yet; default to import resolution for them.
    let resolution_mode = target_module
      .as_ref()
      .map(|m| m.resolution_mode)
      .unwrap_or(ResolutionMode::Import);
    let import_mapper = language_server.get_ts_response_import_mapper(module);
    let mut text_changes = Vec::new();
    for text_change in &change.text_changes {
      let lines = text_change.new_text.split('\n');
      let new_lines: Vec<String> = lines
        .map(|line| {
          // This assumes that there's only one import per line.
          if let Some(captures) = IMPORT_SPECIFIER_RE.captures(line) {
            let specifier =
              captures.iter().skip(1).find_map(|s| s).unwrap().as_str();
            if let Some(new_specifier) = import_mapper
              .check_unresolved_specifier(
                specifier,
                &target_specifier,
                resolution_mode,
                &new_file_hints,
              )
            {
              line.replace(specifier, &new_specifier)
            } else {
              line.to_string()
            }
          } else {
            line.to_string()
          }
        })
        .collect();
      text_changes.push(tsc::TextChange {
        span: text_change.span.clone(),
        // `join` already yields a `String`; no extra conversion needed.
        new_text: new_lines.join("\n"),
      });
    }
    r.push(tsc::FileTextChanges {
      file_name: change.file_name.clone(),
      text_changes,
      is_new_file: change.is_new_file,
    });
  }
  Ok(r)
}
/// Rewrites the import specifiers produced by tsc for a file rename so that
/// they resolve under Deno's rules. Non-file-scheme or unparsable `new_uri`
/// values yield an empty result; individual changes whose target module
/// can't be found are dropped.
pub fn fix_ts_import_changes_for_file_rename(
  changes: Vec<tsc::FileTextChanges>,
  new_uri: &str,
  old_module: &DocumentModule,
  language_server: &language_server::Inner,
  token: &CancellationToken,
) -> Result<Vec<tsc::FileTextChanges>, AnyError> {
  let Ok(new_uri) = Uri::from_str(new_uri) else {
    return Ok(Vec::new());
  };
  if !new_uri.scheme().is_some_and(|s| s.eq_lowercase("file")) {
    return Ok(Vec::new());
  }
  // The renamed file may not exist yet; pass it as a hint so specifiers
  // pointing at it are still considered resolvable.
  let new_file_hints = [uri_to_url(&new_uri)];
  let mut r = Vec::with_capacity(changes.len());
  for mut change in changes {
    if token.is_cancelled() {
      return Err(anyhow!("request cancelled"));
    }
    let Ok(target_specifier) = resolve_url(&change.file_name) else {
      continue;
    };
    let Some(target_module) =
      language_server.document_modules.module_for_specifier(
        &target_specifier,
        old_module.scope.as_deref(),
        Some(&old_module.compiler_options_key),
      )
    else {
      continue;
    };
    let import_mapper =
      language_server.get_ts_response_import_mapper(&target_module);
    for text_change in &mut change.text_changes {
      // Here each text change's new_text is the specifier itself; replace
      // it wholesale when a better form is found.
      if let Some(new_specifier) = import_mapper.check_unresolved_specifier(
        &text_change.new_text,
        &target_module.specifier,
        target_module.resolution_mode,
        &new_file_hints,
      ) {
        text_change.new_text = new_specifier;
      }
    }
    r.push(change);
  }
  Ok(r)
}
/// Fix tsc import code actions so that the module specifier is correct for
/// resolution by Deno (includes the extension).
///
/// Returns the action unchanged when it isn't an import fix or no specifier
/// can be extracted, a rewritten copy when a better specifier is found, and
/// `None` when the specifier is not a valid import at all (the action is
/// dropped).
fn fix_ts_import_action<'a>(
  action: &'a tsc::CodeFixAction,
  module: &DocumentModule,
  language_server: &language_server::Inner,
) -> Option<Cow<'a, tsc::CodeFixAction>> {
  if !matches!(
    action.fix_name.as_str(),
    "import" | "fixMissingFunctionDeclaration"
  ) {
    return Some(Cow::Borrowed(action));
  }
  // Pull the specifier out of the first text change of the first file change.
  let specifier = (|| {
    let text_change = action.changes.first()?.text_changes.first()?;
    let captures = IMPORT_SPECIFIER_RE.captures(&text_change.new_text)?;
    Some(captures.get(1)?.as_str())
  })();
  let Some(specifier) = specifier else {
    return Some(Cow::Borrowed(action));
  };
  let import_mapper = language_server.get_ts_response_import_mapper(module);
  if let Some(new_specifier) = import_mapper.check_unresolved_specifier(
    specifier,
    &module.specifier,
    module.resolution_mode,
    &action
      .changes
      .iter()
      .filter(|c| c.is_new_file.unwrap_or(false))
      .filter_map(|c| resolve_url(&c.file_name).ok())
      .collect::<Vec<_>>(),
  ) {
    // Substitute the new specifier into the description and every text
    // change, leaving the rest of the action intact.
    let description = action.description.replace(specifier, &new_specifier);
    let changes = action
      .changes
      .iter()
      .map(|c| {
        let text_changes = c
          .text_changes
          .iter()
          .map(|tc| tsc::TextChange {
            span: tc.span.clone(),
            new_text: tc.new_text.replace(specifier, &new_specifier),
          })
          .collect();
        tsc::FileTextChanges {
          file_name: c.file_name.clone(),
          text_changes,
          is_new_file: c.is_new_file,
        }
      })
      .collect();
    Some(Cow::Owned(tsc::CodeFixAction {
      description,
      changes,
      commands: None,
      fix_name: action.fix_name.clone(),
      fix_id: None,
      fix_all_description: None,
    }))
  } else if !import_mapper.is_valid_import(
    specifier,
    &module.specifier,
    module.resolution_mode,
  ) {
    None
  } else {
    Some(Cow::Borrowed(action))
  }
}
/// Determines if two TypeScript diagnostic codes are effectively equivalent,
/// i.e. they fall into the same "fix all" error-code group.
fn is_equivalent_code(
  a: &Option<lsp::NumberOrString>,
  b: &Option<lsp::NumberOrString>,
) -> bool {
  let code_a = code_as_string(a);
  let code_b = code_as_string(b);
  let group_a = FIX_ALL_ERROR_CODES.get(code_a.as_str());
  let group_b = FIX_ALL_ERROR_CODES.get(code_b.as_str());
  group_a == group_b
}
/// Return a boolean flag to indicate if the specified action is the preferred
/// action for a given set of actions.
fn is_preferred(
  action: &tsc::CodeFixAction,
  actions: &[CodeActionKind],
  fix_priority: u32,
  only_one: bool,
) -> bool {
  // The action is preferred only if every other action in the set concedes
  // preference to it.
  actions.iter().all(|i| {
    if let CodeActionKind::Tsc(_, a) = i {
      // Comparing against itself never blocks preference.
      if action == a {
        return true;
      }
      // "Fix all" style actions (with a fix_id) don't compete.
      if a.fix_id.is_some() {
        return true;
      }
      if let Some((other_fix_priority, _)) =
        PREFERRED_FIXES.get(a.fix_name.as_str())
      {
        // A higher-priority competitor (lower number wins elsewhere;
        // ordering below encodes the rule) blocks preference.
        match other_fix_priority.cmp(&fix_priority) {
          Ordering::Less => return true,
          Ordering::Greater => return false,
          Ordering::Equal => (),
        }
        // Equal priority and the same fix kind: only one may be preferred.
        if only_one && action.fix_name == a.fix_name {
          return false;
        }
      }
      true
    } else if let CodeActionKind::Deno(_) = i {
      // This is to make sure 'Remove import' isn't preferred over 'Cache
      // dependencies'.
      false
    } else {
      true
    }
  })
}
/// Convert changes returned from a TypeScript quick fix action into edits
/// for an LSP CodeAction. Changes that cannot be converted to a text
/// document edit are skipped.
pub fn ts_changes_to_edit(
  changes: &[tsc::FileTextChanges],
  module: &DocumentModule,
  language_server: &language_server::Inner,
) -> Result<Option<lsp::WorkspaceEdit>, AnyError> {
  let text_document_edits = changes
    .iter()
    .filter_map(|change| {
      change.to_text_document_edit(module, language_server)
    })
    .collect::<Vec<_>>();
  Ok(Some(lsp::WorkspaceEdit {
    changes: None,
    document_changes: Some(lsp::DocumentChanges::Edits(text_document_edits)),
    change_annotations: None,
  }))
}
#[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
/// Data attached to a code action so it can be resolved later.
pub struct CodeActionData {
  /// Document the action applies to.
  pub uri: Uri,
  /// tsc fix id identifying the fix this action represents.
  pub fix_id: String,
}
#[derive(Debug, Clone)]
/// The origin of a collected code action.
enum CodeActionKind {
  /// An action for a Deno diagnostic.
  Deno(lsp::CodeAction),
  /// An action for a deno-lint diagnostic.
  DenoLint(lsp::CodeAction),
  /// An action from tsc, kept alongside the original tsc fix so it can be
  /// compared against other tsc fixes.
  Tsc(lsp::CodeAction, tsc::CodeFixAction),
}
#[derive(Debug, Hash, PartialEq, Eq)]
/// Key identifying a "fix all" action; currently only tsc fix ids.
enum FixAllKind {
  Tsc(String),
}
#[derive(Debug, Default)]
/// Accumulates code actions from the various providers before they are
/// returned to the client.
pub struct CodeActionCollection {
  // Individual quick-fix style actions, in insertion order.
  actions: Vec<CodeActionKind>,
  // "Fix all" actions, deduplicated by kind.
  fix_all_actions: HashMap<FixAllKind, CodeActionKind>,
}
impl CodeActionCollection {
pub fn add_deno_fix_action(
&mut self,
uri: &Uri,
specifier: &ModuleSpecifier,
diagnostic: &lsp::Diagnostic,
) -> Result<(), AnyError> {
let code_action =
DenoDiagnostic::get_code_action(uri, specifier, diagnostic)?;
self.actions.push(CodeActionKind::Deno(code_action));
Ok(())
}
pub fn add_deno_lint_actions(
&mut self,
uri: &Uri,
module: &DocumentModule,
diagnostic: &lsp::Diagnostic,
) -> Result<(), AnyError> {
if let Some(data_quick_fixes) = diagnostic
.data
.as_ref()
.and_then(|d| serde_json::from_value::<Vec<DataQuickFix>>(d.clone()).ok())
{
for quick_fix in data_quick_fixes {
let mut changes = HashMap::new();
changes.insert(
uri.clone(),
quick_fix
.changes
.into_iter()
.map(|change| lsp::TextEdit {
new_text: change.new_text.clone(),
range: change.range,
})
.collect(),
);
let code_action = lsp::CodeAction {
title: quick_fix.description.to_string(),
kind: Some(lsp::CodeActionKind::QUICKFIX),
diagnostics: Some(vec![diagnostic.clone()]),
command: None,
is_preferred: None,
disabled: None,
data: None,
edit: Some(lsp::WorkspaceEdit {
changes: Some(changes),
change_annotations: None,
document_changes: None,
}),
};
self.actions.push(CodeActionKind::DenoLint(code_action));
}
}
self.add_deno_lint_ignore_action(uri, module, diagnostic)
}
fn add_deno_lint_ignore_action(
&mut self,
uri: &Uri,
module: &DocumentModule,
diagnostic: &lsp::Diagnostic,
) -> Result<(), AnyError> {
let code = diagnostic
.code
.as_ref()
.map(|v| match v {
lsp::NumberOrString::String(v) => v.to_owned(),
_ => "".to_string(),
})
.unwrap();
let text_info = module.text_info();
let line_content = text_info
.line_text(diagnostic.range.start.line as usize)
.to_string();
let mut changes = HashMap::new();
changes.insert(
uri.clone(),
vec![lsp::TextEdit {
new_text: prepend_whitespace(
format!("// deno-lint-ignore {code}\n"),
Some(line_content),
),
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | true |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/lsp/semantic_tokens.rs | cli/lsp/semantic_tokens.rs | // Copyright 2018-2025 the Deno authors. MIT license.
// The logic of this module is heavily influenced by
// https://github.com/microsoft/vscode/blob/main/extensions/typescript-language-features/src/languageFeatures/semanticTokens.ts
// and https://github.com/microsoft/vscode/blob/main/src/vs/workbench/api/common/extHostTypes.ts
// for the SemanticTokensBuilder implementation.
use std::ops::Index;
use std::ops::IndexMut;
use tower_lsp::lsp_types as lsp;
use tower_lsp::lsp_types::SemanticToken;
use tower_lsp::lsp_types::SemanticTokenModifier;
use tower_lsp::lsp_types::SemanticTokenType;
use tower_lsp::lsp_types::SemanticTokens;
use tower_lsp::lsp_types::SemanticTokensLegend;
pub const MODIFIER_MASK: u32 = 255;
pub const TYPE_OFFSET: u32 = 8;
// The numeric value of each variant doubles as the index into the
// `token_types` legend vector built in `get_legend`.
enum TokenType {
  Class = 0,
  Enum = 1,
  Interface = 2,
  Namespace = 3,
  TypeParameter = 4,
  Type = 5,
  Parameter = 6,
  Variable = 7,
  EnumMember = 8,
  Property = 9,
  Function = 10,
  Method = 11,
}
// Allow indexing a Vec directly with a TokenType (via its discriminant).
impl<T> Index<TokenType> for Vec<T> {
  type Output = T;
  fn index(&self, idx: TokenType) -> &T {
    &self[idx as usize]
  }
}
// Mutable counterpart of the TokenType indexing impl above.
impl<T> IndexMut<TokenType> for Vec<T> {
  fn index_mut(&mut self, idx: TokenType) -> &mut T {
    &mut self[idx as usize]
  }
}
// The numeric value of each variant doubles as the index into the
// `token_modifiers` legend vector built in `get_legend` (and the bit
// position in a token's modifier bitset).
enum TokenModifier {
  Declaration = 0,
  Static = 1,
  Async = 2,
  Readonly = 3,
  DefaultLibrary = 4,
  Local = 5,
}
// Allow indexing a Vec directly with a TokenModifier (via its discriminant).
impl<T> Index<TokenModifier> for Vec<T> {
  type Output = T;
  fn index(&self, idx: TokenModifier) -> &T {
    &self[idx as usize]
  }
}
// Mutable counterpart of the TokenModifier indexing impl above.
impl<T> IndexMut<TokenModifier> for Vec<T> {
  fn index_mut(&mut self, idx: TokenModifier) -> &mut T {
    &mut self[idx as usize]
  }
}
pub fn get_legend() -> SemanticTokensLegend {
let mut token_types = vec![SemanticTokenType::from(""); 12];
token_types[TokenType::Class] = "class".into();
token_types[TokenType::Enum] = "enum".into();
token_types[TokenType::Interface] = "interface".into();
token_types[TokenType::Namespace] = "namespace".into();
token_types[TokenType::TypeParameter] = "typeParameter".into();
token_types[TokenType::Type] = "type".into();
token_types[TokenType::Parameter] = "parameter".into();
token_types[TokenType::Variable] = "variable".into();
token_types[TokenType::EnumMember] = "enumMember".into();
token_types[TokenType::Property] = "property".into();
token_types[TokenType::Function] = "function".into();
token_types[TokenType::Method] = "method".into();
let mut token_modifiers = vec![SemanticTokenModifier::from(""); 6];
token_modifiers[TokenModifier::Async] = "async".into();
token_modifiers[TokenModifier::Declaration] = "declaration".into();
token_modifiers[TokenModifier::Readonly] = "readonly".into();
token_modifiers[TokenModifier::Static] = "static".into();
token_modifiers[TokenModifier::Local] = "local".into();
token_modifiers[TokenModifier::DefaultLibrary] = "defaultLibrary".into();
SemanticTokensLegend {
token_types,
token_modifiers,
}
}
/// Incrementally builds an LSP semantic tokens payload from `push` calls.
pub struct SemanticTokensBuilder {
  // Line/char of the most recently pushed token, used for delta encoding.
  prev_line: u32,
  prev_char: u32,
  // While true, `data` holds delta-encoded tuples; once an out-of-order
  // push arrives, `data` is converted to absolute positions.
  data_is_sorted_and_delta_encoded: bool,
  // Flat token tuples: 5 u32s per token.
  data: Vec<u32>,
}
impl SemanticTokensBuilder {
  /// Creates an empty builder; data starts out delta-encoded.
  pub fn new() -> Self {
    Self {
      prev_line: 0,
      prev_char: 0,
      data_is_sorted_and_delta_encoded: true,
      data: Vec::new(),
    }
  }
  /// Appends a token at the given absolute line/char. Tokens pushed in
  /// order are stored delta-encoded; the first out-of-order push switches
  /// the internal buffer to absolute positions (sorted lazily in `build`).
  pub fn push(
    &mut self,
    line: u32,
    char: u32,
    length: u32,
    token_type: u32,
    token_modifiers: u32,
  ) {
    if self.data_is_sorted_and_delta_encoded
      && (line < self.prev_line
        || (line == self.prev_line && char < self.prev_char))
    {
      // push calls were ordered and are no longer ordered
      self.data_is_sorted_and_delta_encoded = false;
      // Remove delta encoding from data
      let token_count = self.data.len() / 5;
      let mut prev_line = 0;
      let mut prev_char = 0;
      for i in 0..token_count {
        let mut line = self.data[5 * i];
        let mut char = self.data[5 * i + 1];
        if line == 0 {
          // on the same line as previous token
          line = prev_line;
          char += prev_char;
        } else {
          // on a different line than previous token
          line += prev_line;
        }
        self.data[5 * i] = line;
        self.data[5 * i + 1] = char;
        prev_line = line;
        prev_char = char;
      }
    }
    // When still delta-encoding, store positions relative to the previous
    // token (char is only relative within the same line).
    let mut push_line = line;
    let mut push_char = char;
    if self.data_is_sorted_and_delta_encoded && !self.data.is_empty() {
      push_line -= self.prev_line;
      if push_line == 0 {
        push_char -= self.prev_char;
      }
    }
    self.data.reserve(5);
    self.data.push(push_line);
    self.data.push(push_char);
    self.data.push(length);
    self.data.push(token_type);
    self.data.push(token_modifiers);
    self.prev_line = line;
    self.prev_char = char;
  }
  /// Converts the raw 5-tuple buffer to `SemanticToken`s. Delta-encoded
  /// data is copied straight through; absolute data is sorted by position
  /// and re-delta-encoded.
  fn data_to_semantic_token_vec(
    data: &[u32],
    data_is_sorted_and_delta_encoded: bool,
  ) -> Vec<SemanticToken> {
    let token_count = data.len() / 5;
    let mut result: Vec<SemanticToken> = Vec::with_capacity(token_count);
    if data_is_sorted_and_delta_encoded {
      for i in 0..token_count {
        let src_offset = 5 * i;
        result.push(SemanticToken {
          delta_line: data[src_offset],
          delta_start: data[src_offset + 1],
          length: data[src_offset + 2],
          token_type: data[src_offset + 3],
          token_modifiers_bitset: data[src_offset + 4],
        });
      }
      return result;
    }
    // Sort token indices by (line, char) without moving the data itself.
    let mut pos: Vec<usize> = (0..token_count).collect();
    pos.sort_by(|a, b| {
      let a_line = data[5 * a];
      let b_line = data[5 * b];
      if a_line == b_line {
        let a_char = data[5 * a + 1];
        let b_char = data[5 * b + 1];
        return a_char.cmp(&b_char);
      }
      a_line.cmp(&b_line)
    });
    // Re-encode sorted absolute positions as deltas.
    let mut prev_line = 0;
    let mut prev_char = 0;
    for i in pos.iter() {
      let src_offset = 5 * i;
      let line = data[src_offset];
      let char = data[src_offset + 1];
      let length = data[src_offset + 2];
      let token_type = data[src_offset + 3];
      let token_modifiers_bitset = data[src_offset + 4];
      let delta_line = line - prev_line;
      let delta_start = if delta_line == 0 {
        char - prev_char
      } else {
        char
      };
      result.push(SemanticToken {
        delta_line,
        delta_start,
        length,
        token_type,
        token_modifiers_bitset,
      });
      prev_line = line;
      prev_char = char;
    }
    result
  }
  /// Finalizes into an LSP `SemanticTokens` payload.
  pub fn build(&self, result_id: Option<String>) -> SemanticTokens {
    SemanticTokens {
      result_id,
      data: SemanticTokensBuilder::data_to_semantic_token_vec(
        &self.data,
        self.data_is_sorted_and_delta_encoded,
      ),
    }
  }
}
/// Returns the subset of delta-encoded `tokens` whose start position falls
/// within `range` (tokens starting exactly at `range.end` are included),
/// re-basing the first kept token's deltas so the result stands alone.
pub fn tokens_within_range(
  tokens: &SemanticTokens,
  range: lsp::Range,
) -> SemanticTokens {
  let mut line = 0;
  let mut character = 0;
  let mut first_token_line = 0;
  let mut first_token_char = 0;
  // Sentinel: start index == data.len() means "nothing kept yet".
  let mut keep_start_idx = tokens.data.len();
  let mut keep_end_idx = keep_start_idx;
  for (i, token) in tokens.data.iter().enumerate() {
    // Decode the delta encoding to an absolute position as we scan.
    if token.delta_line != 0 {
      character = 0;
    }
    line += token.delta_line;
    character += token.delta_start;
    let token_start = lsp::Position::new(line, character);
    if i < keep_start_idx && token_start >= range.start {
      keep_start_idx = i;
      first_token_line = line;
      first_token_char = character;
    }
    if token_start > range.end {
      keep_end_idx = i;
      break;
    }
  }
  if keep_end_idx == keep_start_idx {
    return SemanticTokens {
      result_id: None,
      data: Vec::new(),
    };
  }
  let mut data = tokens.data[keep_start_idx..keep_end_idx].to_vec();
  // we need to adjust the delta_line and delta_start on the first token
  // as it is relative to 0 now, not the previous token
  let first_token = &mut data[0];
  first_token.delta_line = first_token_line;
  first_token.delta_start = first_token_char;
  SemanticTokens {
    result_id: None,
    data,
  }
}
#[cfg(test)]
mod tests {
  use super::*;
  // Tokens pushed in order stay delta-encoded as given.
  #[test]
  fn test_semantic_tokens_builder_simple() {
    let mut builder = SemanticTokensBuilder::new();
    builder.push(1, 0, 5, 1, 1);
    builder.push(1, 10, 4, 2, 2);
    builder.push(2, 2, 3, 2, 2);
    assert_eq!(
      builder.build(None).data,
      vec![
        SemanticToken {
          delta_line: 1,
          delta_start: 0,
          length: 5,
          token_type: 1,
          token_modifiers_bitset: 1
        },
        SemanticToken {
          delta_line: 0,
          delta_start: 10,
          length: 4,
          token_type: 2,
          token_modifiers_bitset: 2
        },
        SemanticToken {
          delta_line: 1,
          delta_start: 2,
          length: 3,
          token_type: 2,
          token_modifiers_bitset: 2
        }
      ]
    );
  }
  // An out-of-order push on a new line triggers re-sorting in build().
  #[test]
  fn test_semantic_tokens_builder_out_of_order_1() {
    let mut builder = SemanticTokensBuilder::new();
    builder.push(2, 0, 5, 1, 1);
    builder.push(2, 10, 1, 2, 2);
    builder.push(2, 15, 2, 3, 3);
    builder.push(1, 0, 4, 4, 4);
    assert_eq!(
      builder.build(None).data,
      vec![
        SemanticToken {
          delta_line: 1,
          delta_start: 0,
          length: 4,
          token_type: 4,
          token_modifiers_bitset: 4
        },
        SemanticToken {
          delta_line: 1,
          delta_start: 0,
          length: 5,
          token_type: 1,
          token_modifiers_bitset: 1
        },
        SemanticToken {
          delta_line: 0,
          delta_start: 10,
          length: 1,
          token_type: 2,
          token_modifiers_bitset: 2
        },
        SemanticToken {
          delta_line: 0,
          delta_start: 5,
          length: 2,
          token_type: 3,
          token_modifiers_bitset: 3
        }
      ]
    );
  }
  // An out-of-order push within the same line is also re-sorted.
  #[test]
  fn test_semantic_tokens_builder_out_of_order_2() {
    let mut builder = SemanticTokensBuilder::new();
    builder.push(2, 10, 5, 1, 1);
    builder.push(2, 2, 4, 2, 2);
    assert_eq!(
      builder.build(None).data,
      vec![
        SemanticToken {
          delta_line: 2,
          delta_start: 2,
          length: 4,
          token_type: 2,
          token_modifiers_bitset: 2
        },
        SemanticToken {
          delta_line: 0,
          delta_start: 8,
          length: 5,
          token_type: 1,
          token_modifiers_bitset: 1
        }
      ]
    );
  }
  // Tokens strictly inside the range are kept; the first kept token's
  // deltas are re-based to absolute values.
  #[test]
  fn test_tokens_within_range() {
    let mut builder = SemanticTokensBuilder::new();
    builder.push(1, 0, 5, 0, 0);
    builder.push(2, 1, 1, 1, 0);
    builder.push(2, 2, 3, 2, 0);
    builder.push(2, 5, 5, 3, 0);
    builder.push(3, 0, 4, 4, 0);
    builder.push(5, 2, 3, 5, 0);
    let tokens = builder.build(None);
    let range = lsp::Range {
      start: lsp::Position {
        line: 2,
        character: 2,
      },
      end: lsp::Position {
        line: 4,
        character: 0,
      },
    };
    let result = tokens_within_range(&tokens, range);
    assert_eq!(
      result.data,
      vec![
        // line 2 char 2
        SemanticToken {
          delta_line: 2,
          delta_start: 2,
          length: 3,
          token_type: 2,
          token_modifiers_bitset: 0
        },
        // line 2 char 5
        SemanticToken {
          delta_line: 0,
          delta_start: 3,
          length: 5,
          token_type: 3,
          token_modifiers_bitset: 0
        },
        // line 3 char 0
        SemanticToken {
          delta_line: 1,
          delta_start: 0,
          length: 4,
          token_type: 4,
          token_modifiers_bitset: 0
        }
      ]
    );
  }
  // A token starting exactly at range.end is included.
  #[test]
  fn test_tokens_within_range_include_end() {
    let mut builder = SemanticTokensBuilder::new();
    builder.push(1, 0, 1, 0, 0);
    builder.push(2, 1, 2, 1, 0);
    builder.push(2, 3, 3, 2, 0);
    builder.push(3, 0, 4, 3, 0);
    let tokens = builder.build(None);
    let range = lsp::Range {
      start: lsp::Position {
        line: 2,
        character: 2,
      },
      end: lsp::Position {
        line: 3,
        character: 4,
      },
    };
    let result = tokens_within_range(&tokens, range);
    assert_eq!(
      result.data,
      vec![
        // line 2 char 3
        SemanticToken {
          delta_line: 2,
          delta_start: 3,
          length: 3,
          token_type: 2,
          token_modifiers_bitset: 0
        },
        // line 3 char 0
        SemanticToken {
          delta_line: 1,
          delta_start: 0,
          length: 4,
          token_type: 3,
          token_modifiers_bitset: 0
        }
      ]
    );
  }
  // A range that matches no tokens (or empty input) yields an empty result.
  #[test]
  fn test_tokens_within_range_empty() {
    let mut builder = SemanticTokensBuilder::new();
    builder.push(1, 0, 1, 0, 0);
    builder.push(2, 1, 2, 1, 0);
    builder.push(2, 3, 3, 2, 0);
    builder.push(3, 0, 4, 3, 0);
    let tokens = builder.build(None);
    let range = lsp::Range {
      start: lsp::Position {
        line: 3,
        character: 2,
      },
      end: lsp::Position {
        line: 3,
        character: 4,
      },
    };
    let result = tokens_within_range(&tokens, range);
    assert_eq!(result.data, vec![]);
    assert_eq!(
      tokens_within_range(&SemanticTokens::default(), range).data,
      vec![]
    );
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/lsp/trace.rs | cli/lsp/trace.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::fmt;
#[cfg(feature = "lsp-tracing")]
pub use real_tracing::*;
use serde::Deserialize;
use serde::Serialize;
#[cfg(not(feature = "lsp-tracing"))]
pub use stub_tracing::*;
/// Guard returned by `init_tracing_subscriber`; with the `lsp-tracing`
/// feature enabled, dropping it (while not defused) shuts the tracer down.
pub(crate) struct TracingGuard {
  #[allow(dead_code)]
  guard: (),
  // TODO(nathanwhit): use default guard here so we can change tracing after init
  // but needs wiring through the subscriber to the TSC thread, as it can't be a global default
  // #[allow(dead_code)] tracing::dispatcher::DefaultGuard,
  #[allow(dead_code)]
  defused: bool,
}
impl fmt::Debug for TracingGuard {
  // No fields worth exposing; a field-less debug tuple renders as just the
  // type name, so write it directly.
  fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
    f.write_str("TracingGuard")
  }
}
#[cfg(feature = "lsp-tracing")]
mod real_tracing {
use deno_core::anyhow;
pub use opentelemetry::Context;
use opentelemetry::KeyValue;
use opentelemetry::trace::TracerProvider;
use opentelemetry_otlp::WithExportConfig;
use opentelemetry_sdk::Resource;
use opentelemetry_semantic_conventions::resource::SERVICE_NAME;
pub use tracing::Span;
use tracing::level_filters::LevelFilter;
pub use tracing::span::EnteredSpan;
use tracing_opentelemetry::OpenTelemetryLayer;
pub use tracing_opentelemetry::OpenTelemetrySpanExt as SpanExt;
use tracing_subscriber::fmt::format::FmtSpan;
use tracing_subscriber::layer::SubscriberExt;
use super::TracingCollector;
use super::TracingConfig;
use super::TracingGuard;
pub(crate) fn make_tracer(
endpoint: Option<&str>,
) -> Result<opentelemetry_sdk::trace::Tracer, anyhow::Error> {
let endpoint = endpoint.unwrap_or("http://localhost:4317");
let exporter = opentelemetry_otlp::SpanExporter::builder()
.with_tonic()
.with_endpoint(endpoint)
.build()?;
let provider = opentelemetry_sdk::trace::Builder::default()
.with_batch_exporter(exporter, opentelemetry_sdk::runtime::Tokio)
.with_resource(Resource::new(vec![KeyValue::new(
SERVICE_NAME,
"deno-lsp",
)]))
.build();
opentelemetry::global::set_tracer_provider(provider.clone());
Ok(provider.tracer("deno-lsp-tracer"))
}
/// Installs the global `tracing` subscriber according to `config`.
///
/// Exactly one of the two layers ends up active, selected by
/// `config.collector`: `OpenTelemetry` exports spans via the OTLP tracer
/// (see `make_tracer`), `Logging` writes span events to stderr.
///
/// Errors if tracing is not enabled, if a user-supplied filter directive
/// fails to parse, or if the OpenTelemetry tracer cannot be built.
pub(crate) fn init_tracing_subscriber(
  config: &TracingConfig,
) -> Result<TracingGuard, anyhow::Error> {
  if !config.enable {
    return Err(anyhow::anyhow!("Tracing is not enabled"));
  }
  // Default to INFO unless overridden by an explicit filter directive or
  // the DENO_LSP_TRACE environment variable.
  let filter = tracing_subscriber::EnvFilter::builder()
    .with_default_directive(LevelFilter::INFO.into());
  let filter = if let Some(directive) = config.filter.as_ref() {
    filter.parse(directive)?
  } else {
    filter.with_env_var("DENO_LSP_TRACE").from_env()?
  };
  let open_telemetry_layer = match config.collector {
    TracingCollector::OpenTelemetry => Some(OpenTelemetryLayer::new(
      make_tracer(config.collector_endpoint.as_deref())?,
    )),
    _ => None,
  };
  let logging_layer = match config.collector {
    TracingCollector::Logging => Some(
      tracing_subscriber::fmt::layer()
        .with_writer(std::io::stderr)
        // Include span events in the log output.
        // Without this, only events get logged (and at the moment we have none).
        .with_span_events(FmtSpan::NEW | FmtSpan::CLOSE),
    ),
    _ => None,
  };
  // NOTE(review): this panics if a global subscriber was already set;
  // presumably this is only called once per process — confirm with callers.
  tracing::subscriber::set_global_default(
    tracing_subscriber::registry()
      .with(filter)
      .with(logging_layer)
      .with(open_telemetry_layer),
  )
  .unwrap();
  // The guard carries no payload here; the tracer shutdown happens in
  // `TracingGuard::drop`.
  let guard = ();
  Ok(TracingGuard {
    guard,
    defused: false,
  })
}
impl Drop for TracingGuard {
  fn drop(&mut self) {
    // A "defused" guard skips shutdown (e.g. when tracing should outlive
    // this handle).
    if !self.defused {
      crate::lsp::logging::lsp_debug!("Shutting down tracing");
      // Shutting down the tracer provider flushes pending spans and can
      // block, so run it on a blocking task off the async runtime.
      tokio::task::spawn_blocking(|| {
        opentelemetry::global::shutdown_tracer_provider()
      });
    }
  }
}
}
#[cfg(not(feature = "lsp-tracing"))]
mod stub_tracing {
pub trait SpanExt {
#[allow(dead_code)]
fn set_parent(&self, _context: Context);
fn context(&self) -> Context;
}
#[derive(Debug, Clone)]
pub struct Span {}
impl SpanExt for Span {
#[allow(dead_code)]
fn set_parent(&self, _context: Context) {}
fn context(&self) -> Context {
Context {}
}
}
impl Span {
pub fn entered(self) -> EnteredSpan {
EnteredSpan {}
}
pub fn current() -> Self {
Self {}
}
}
#[derive(Debug)]
pub struct EnteredSpan {}
#[derive(Clone, Debug)]
pub struct Context {}
pub(crate) fn init_tracing_subscriber(
_config: &super::TracingConfig,
) -> Result<super::TracingGuard, deno_core::anyhow::Error> {
Ok(super::TracingGuard {
defused: false,
guard: {},
})
}
}
/// Destination for collected traces.
#[derive(
  Debug, Clone, Deserialize, Serialize, PartialEq, Eq, Copy, Default,
)]
#[serde(rename_all = "camelCase")]
pub(crate) enum TracingCollector {
  /// Export spans to an OpenTelemetry collector (the default).
  #[default]
  OpenTelemetry,
  /// Write collected traces to stderr.
  Logging,
}
/// Configuration consumed by `init_tracing_subscriber`.
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq, Default)]
#[serde(default, rename_all = "camelCase")]
pub(crate) struct TracingConfig {
  /// Enable tracing.
  pub(crate) enable: bool,
  /// The collector to use. Defaults to `OpenTelemetry`.
  /// If `Logging` is used, the collected traces will be written to stderr.
  pub(crate) collector: TracingCollector,
  /// The filter to use. Defaults to `INFO`.
  pub(crate) filter: Option<String>,
  /// The endpoint to use for the OpenTelemetry collector.
  pub(crate) collector_endpoint: Option<String>,
}
/// Accepts either a full [`TracingConfig`] object or a bare boolean
/// (deserialized untagged); the boolean form toggles `enable` on an
/// otherwise-default config.
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)]
#[serde(untagged)]
pub(crate) enum TracingConfigOrEnabled {
  Config(TracingConfig),
  Enabled(bool),
}
impl From<TracingConfig> for TracingConfigOrEnabled {
fn from(value: TracingConfig) -> Self {
TracingConfigOrEnabled::Config(value)
}
}
impl From<TracingConfigOrEnabled> for TracingConfig {
  /// Lowers to a concrete config; a bare boolean becomes the default
  /// config with only `enable` set.
  fn from(value: TracingConfigOrEnabled) -> Self {
    match value {
      TracingConfigOrEnabled::Config(config) => config,
      TracingConfigOrEnabled::Enabled(enable) => Self {
        enable,
        ..Self::default()
      },
    }
  }
}
impl TracingConfigOrEnabled {
pub(crate) fn enabled(&self) -> bool {
match self {
TracingConfigOrEnabled::Config(config) => config.enable,
TracingConfigOrEnabled::Enabled(enabled) => *enabled,
}
}
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/lsp/lint.rs | cli/lsp/lint.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::path::PathBuf;
use std::sync::Arc;
use dashmap::DashMap;
use deno_config::glob::FilePatterns;
use deno_config::workspace::WorkspaceDirLintConfig;
use deno_core::error::AnyError;
use deno_core::url::Url;
use deno_lint::linter::LintConfig;
use deno_resolver::deno_json::CompilerOptionsKey;
use deno_runtime::tokio_util::create_basic_runtime;
use once_cell::sync::Lazy;
use crate::args::LintFlags;
use crate::args::LintOptions;
use crate::lsp::compiler_options::LspCompilerOptionsResolver;
use crate::lsp::config::Config;
use crate::lsp::documents::DocumentModule;
use crate::lsp::logging::lsp_log;
use crate::lsp::logging::lsp_warn;
use crate::lsp::resolver::LspResolver;
use crate::tools::lint::CliLinter;
use crate::tools::lint::CliLinterOptions;
use crate::tools::lint::LintRuleProvider;
use crate::tools::lint::PluginHostProxy;
/// A linter paired with the resolved lint configuration it was built from,
/// so callers can consult the config (e.g. file patterns) alongside it.
#[derive(Debug)]
pub struct LspLinter {
  pub inner: CliLinter,
  pub lint_config: WorkspaceDirLintConfig,
}
/// Lazily builds and caches one [`LspLinter`] per
/// (compiler options key, workspace scope) pair.
#[derive(Debug, Default)]
pub struct LspLinterResolver {
  config: Config,
  compiler_options_resolver: Arc<LspCompilerOptionsResolver>,
  resolver: Arc<LspResolver>,
  // Cache keyed by (compiler options key, scope); populated on demand in
  // `for_module()`.
  linters: DashMap<(CompilerOptionsKey, Option<Arc<Url>>), Arc<LspLinter>>,
}
impl LspLinterResolver {
pub fn new(
config: &Config,
compiler_options_resolver: &Arc<LspCompilerOptionsResolver>,
resolver: &Arc<LspResolver>,
) -> Self {
Self {
config: config.clone(),
compiler_options_resolver: compiler_options_resolver.clone(),
resolver: resolver.clone(),
linters: Default::default(),
}
}
pub fn for_module(&self, module: &DocumentModule) -> Arc<LspLinter> {
self
.linters
.entry((module.compiler_options_key.clone(), module.scope.clone()))
.or_insert_with(|| {
let config_data = module
.scope
.as_ref()
.and_then(|s| self.config.tree.data_for_specifier(s));
let workspace_resolver = self
.resolver
.get_scoped_resolver(config_data.map(|d| d.scope.as_ref()))
.as_workspace_resolver()
.clone();
let lint_rule_provider =
LintRuleProvider::new(Some(workspace_resolver));
let lint_config = config_data
.and_then(|d| {
d.member_dir
.to_lint_config(FilePatterns::new_with_base(
d.member_dir.dir_path(),
))
.inspect_err(|err| {
lsp_warn!("Couldn't read lint configuration: {}", err)
})
.ok()
})
.unwrap_or_else(|| WorkspaceDirLintConfig {
rules: Default::default(),
plugins: Default::default(),
files: FilePatterns::new_with_base(PathBuf::from("/")),
});
let lint_options =
LintOptions::resolve(lint_config.clone(), &LintFlags::default())
.inspect_err(|err| {
lsp_warn!("Failed to resolve linter options: {}", err)
})
.ok()
.unwrap_or_default();
let compiler_options_data = self
.compiler_options_resolver
.for_key(&module.compiler_options_key)
.expect("Key should be in sync with resolver.");
let deno_lint_config = if compiler_options_data
.compiler_options
.0
.get("jsx")
.and_then(|v| v.as_str())
== Some("react")
{
let default_jsx_factory = compiler_options_data
.compiler_options
.0
.get("jsxFactory")
.and_then(|v| v.as_str());
let default_jsx_fragment_factory = compiler_options_data
.compiler_options
.0
.get("jsxFragmentFactory")
.and_then(|v| v.as_str());
LintConfig {
default_jsx_factory: default_jsx_factory.map(String::from),
default_jsx_fragment_factory: default_jsx_fragment_factory
.map(String::from),
}
} else {
LintConfig {
default_jsx_factory: None,
default_jsx_fragment_factory: None,
}
};
let mut plugin_runner = None;
if !lint_options.plugins.is_empty() {
let load_plugins_result = LOAD_PLUGINS_THREAD.load_plugins(
lint_options.plugins.clone(),
lint_options.rules.exclude.clone(),
);
match load_plugins_result {
Ok(runner) => {
plugin_runner = Some(Arc::new(runner));
}
Err(err) => {
lsp_warn!("Failed to load lint plugins: {}", err);
}
}
}
let inner = CliLinter::new(CliLinterOptions {
configured_rules: lint_rule_provider.resolve_lint_rules(
lint_options.rules,
config_data.map(|d| d.member_dir.as_ref()),
),
fix: false,
deno_lint_config,
maybe_plugin_runner: plugin_runner,
});
Arc::new(LspLinter { inner, lint_config })
})
.clone()
}
}
/// Request sent to the plugin-loading thread; the result comes back over
/// the per-request `response_tx` channel.
#[derive(Debug)]
struct LoadPluginsRequest {
  plugins: Vec<Url>,
  exclude: Option<Vec<String>>,
  // Synchronous sender: `load_plugins()` blocks on the paired receiver.
  response_tx: std::sync::mpsc::Sender<Result<PluginHostProxy, AnyError>>,
}
/// Dedicated thread (with its own basic Tokio runtime) that services lint
/// plugin load requests; see `LoadPluginsThread::create`.
#[derive(Debug)]
struct LoadPluginsThread {
  // Both are `Option` so `Drop` can take them: dropping the sender ends the
  // worker's receive loop before the handle is joined.
  join_handle: Option<std::thread::JoinHandle<()>>,
  request_tx: Option<tokio::sync::mpsc::UnboundedSender<LoadPluginsRequest>>,
}
impl LoadPluginsThread {
  /// Spawns the plugin-loading thread. The thread runs a basic Tokio
  /// runtime and services [`LoadPluginsRequest`]s until the request
  /// channel is closed (see the `Drop` impl below).
  fn create() -> Self {
    let (request_tx, mut request_rx) =
      tokio::sync::mpsc::unbounded_channel::<LoadPluginsRequest>();
    let join_handle = std::thread::spawn(move || {
      create_basic_runtime().block_on(async move {
        while let Some(request) = request_rx.recv().await {
          let result = crate::tools::lint::create_runner_and_load_plugins(
            request.plugins,
            crate::tools::lint::PluginLogger::new(|msg, _is_err| {
              lsp_log!("plugin runner - {}", msg);
            }),
            request.exclude,
          )
          .await;
          request.response_tx.send(result).unwrap();
        }
      });
    });
    Self {
      join_handle: Some(join_handle),
      request_tx: Some(request_tx),
    }
  }

  /// Sends a load request to the worker thread and blocks until the
  /// plugins have loaded (or failed to load).
  fn load_plugins(
    &self,
    plugins: Vec<Url>,
    exclude: Option<Vec<String>>,
  ) -> Result<PluginHostProxy, AnyError> {
    let request_tx = self.request_tx.as_ref().unwrap();
    let (response_tx, response_rx) = std::sync::mpsc::channel();
    let _ = request_tx.send(LoadPluginsRequest {
      plugins,
      exclude,
      response_tx,
    });
    // Blocks the caller until the worker replies.
    response_rx.recv().unwrap()
  }
}
impl Drop for LoadPluginsThread {
  fn drop(&mut self) {
    // Closing the request channel ends the worker's receive loop, after
    // which joining the thread is guaranteed to complete.
    drop(self.request_tx.take());
    self.join_handle.take().unwrap().join().unwrap();
  }
}
/// Shared plugin-loading thread, created lazily on first use.
static LOAD_PLUGINS_THREAD: Lazy<LoadPluginsThread> =
  Lazy::new(LoadPluginsThread::create);
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/lsp/parent_process_checker.rs | cli/lsp/parent_process_checker.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::time::Duration;
/// Starts a thread that will check for the existence of the
/// provided process id. Once that process no longer exists
/// it will terminate the current process.
pub fn start(parent_process_id: u32) {
  // use a separate thread in case the runtime gets hung up
  std::thread::spawn(move || {
    loop {
      // Poll every 10 seconds; a prompt exit is not critical here.
      std::thread::sleep(Duration::from_secs(10));
      if !is_process_active(parent_process_id) {
        deno_runtime::exit(1);
      }
    }
  });
}
/// Returns true if a process with `process_id` currently exists.
#[cfg(unix)]
fn is_process_active(process_id: u32) -> bool {
  // TODO(bartlomieju):
  #[allow(clippy::undocumented_unsafe_blocks)]
  unsafe {
    // signal of 0 checks for the existence of the process id
    // (kill(pid, 0) performs error checking only; no signal is sent)
    libc::kill(process_id as i32, 0) == 0
  }
}
/// Returns true if a process with `process_id` currently exists and has
/// not yet exited.
#[cfg(windows)]
fn is_process_active(process_id: u32) -> bool {
  use winapi::shared::minwindef::DWORD;
  use winapi::shared::minwindef::FALSE;
  use winapi::shared::ntdef::NULL;
  use winapi::shared::winerror::WAIT_TIMEOUT;
  use winapi::um::handleapi::CloseHandle;
  use winapi::um::processthreadsapi::OpenProcess;
  use winapi::um::synchapi::WaitForSingleObject;
  use winapi::um::winnt::SYNCHRONIZE;
  // SAFETY: winapi calls
  unsafe {
    let process = OpenProcess(SYNCHRONIZE, FALSE, process_id as DWORD);
    if process == NULL {
      // OpenProcess failed (e.g. the process no longer exists); there is
      // no valid handle to close, so don't call CloseHandle on NULL.
      return false;
    }
    // WAIT_TIMEOUT means the process object is not signaled, i.e. the
    // process has not exited yet.
    let result = WaitForSingleObject(process, 0) == WAIT_TIMEOUT;
    CloseHandle(process);
    result
  }
}
#[cfg(test)]
mod test {
use std::process::Command;
use test_util::deno_exe_path;
use super::is_process_active;
#[test]
fn process_active() {
// launch a long running process
let mut child = Command::new(deno_exe_path()).arg("lsp").spawn().unwrap();
let pid = child.id();
assert!(is_process_active(pid));
child.kill().unwrap();
child.wait().unwrap();
assert!(!is_process_active(pid));
}
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/lsp/search.rs | cli/lsp/search.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::sync::Arc;
use deno_core::error::AnyError;
use deno_semver::Version;
use deno_semver::package::PackageNv;
/// Abstraction over a package registry: name search, version listing and
/// export listing for a specific package version.
#[async_trait::async_trait(?Send)]
pub trait PackageSearchApi {
  /// Returns package names matching `query`.
  async fn search(&self, query: &str) -> Result<Arc<Vec<String>>, AnyError>;
  /// Returns the known versions of package `name`.
  async fn versions(&self, name: &str) -> Result<Arc<Vec<Version>>, AnyError>;
  /// Returns the exports of the exact package-version `nv`.
  async fn exports(&self, nv: &PackageNv)
  -> Result<Arc<Vec<String>>, AnyError>;
}
#[cfg(test)]
pub mod tests {
use std::collections::BTreeMap;
use deno_core::anyhow::anyhow;
use super::*;
#[derive(Debug, Default)]
pub struct TestPackageSearchApi {
/// [(name -> [(version -> [export])])]
package_versions: BTreeMap<String, BTreeMap<Version, Vec<String>>>,
}
impl TestPackageSearchApi {
pub fn with_package_version(
mut self,
name: &str,
version: &str,
exports: &[&str],
) -> Self {
let exports_by_version =
self.package_versions.entry(name.to_string()).or_default();
exports_by_version.insert(
Version::parse_standard(version).unwrap(),
exports.iter().map(|s| s.to_string()).collect(),
);
self
}
}
#[async_trait::async_trait(?Send)]
impl PackageSearchApi for TestPackageSearchApi {
async fn search(&self, query: &str) -> Result<Arc<Vec<String>>, AnyError> {
let names = self
.package_versions
.keys()
.filter_map(|n| n.contains(query).then(|| n.clone()))
.collect::<Vec<_>>();
Ok(Arc::new(names))
}
async fn versions(
&self,
name: &str,
) -> Result<Arc<Vec<Version>>, AnyError> {
let Some(exports_by_version) = self.package_versions.get(name) else {
return Err(anyhow!("Package not found."));
};
Ok(Arc::new(exports_by_version.keys().rev().cloned().collect()))
}
async fn exports(
&self,
nv: &PackageNv,
) -> Result<Arc<Vec<String>>, AnyError> {
let Some(exports_by_version) =
self.package_versions.get(nv.name.as_str())
else {
return Err(anyhow!("Package not found."));
};
let Some(exports) = exports_by_version.get(&nv.version) else {
return Err(anyhow!("Package version not found."));
};
Ok(Arc::new(exports.clone()))
}
}
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/lsp/client.rs | cli/lsp/client.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::sync::Arc;
use async_trait::async_trait;
use deno_core::anyhow::anyhow;
use deno_core::anyhow::bail;
use deno_core::error::AnyError;
use deno_core::serde_json::json;
use deno_core::unsync::spawn;
use lsp_types::Uri;
use tower_lsp::lsp_types as lsp;
use tower_lsp::lsp_types::ConfigurationItem;
use super::config::SETTINGS_SECTION;
use super::config::WorkspaceSettings;
use super::lsp_custom;
use super::testing::lsp_custom as testing_lsp_custom;
use crate::lsp::logging::lsp_warn;
use crate::lsp::repl::get_repl_workspace_settings;
/// Notification payloads the server pushes to the client for the testing
/// feature (see `Client::send_test_notification`).
#[derive(Debug)]
pub enum TestingNotification {
  Module(testing_lsp_custom::TestModuleNotificationParams),
  DeleteModule(testing_lsp_custom::TestModuleDeleteNotificationParams),
  Progress(testing_lsp_custom::TestRunProgressParams),
}
/// Cheaply-cloneable handle to the LSP client — either a real tower-lsp
/// client or the no-op REPL stub (see `from_tower` / `new_for_repl`).
#[derive(Clone)]
pub struct Client(Arc<dyn ClientTrait>);

impl std::fmt::Debug for Client {
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    // The inner trait object isn't `Debug`; just print the type name.
    f.debug_tuple("Client").finish()
  }
}
impl Client {
  /// Wraps a real tower-lsp client.
  pub fn from_tower(client: tower_lsp::Client) -> Self {
    Self(Arc::new(TowerClient(client)))
  }

  /// Creates a no-op client for use by the REPL language server.
  pub fn new_for_repl() -> Self {
    Self(Arc::new(ReplClient))
  }

  /// Gets additional methods that should only be called outside
  /// the LSP's lock to prevent deadlocking scenarios.
  pub fn when_outside_lsp_lock(&self) -> OutsideLockClient {
    OutsideLockClient(self.0.clone())
  }

  pub async fn publish_diagnostics(
    &self,
    uri: Uri,
    diags: Vec<lsp::Diagnostic>,
    version: Option<i32>,
  ) {
    self.0.publish_diagnostics(uri, diags, version).await;
  }

  pub fn send_registry_state_notification(
    &self,
    params: lsp_custom::RegistryStateNotificationParams,
  ) {
    // do on a task in case the caller currently is in the lsp lock
    let client = self.0.clone();
    spawn(async move {
      client.send_registry_state_notification(params).await;
    });
  }

  pub fn send_diagnostic_batch_start_notification(&self) {
    // do on a task in case the caller currently is in the lsp lock
    let client = self.0.clone();
    spawn(async move {
      client.send_diagnostic_batch_start_notification().await;
    });
  }

  pub fn send_diagnostic_batch_end_notification(&self) {
    // do on a task in case the caller currently is in the lsp lock
    let client = self.0.clone();
    spawn(async move {
      client.send_diagnostic_batch_end_notification().await;
    });
  }

  pub fn send_test_notification(&self, params: TestingNotification) {
    // do on a task in case the caller currently is in the lsp lock
    let client = self.0.clone();
    spawn(async move {
      client.send_test_notification(params).await;
    });
  }

  pub fn send_did_refresh_deno_configuration_tree_notification(
    &self,
    params: lsp_custom::DidRefreshDenoConfigurationTreeNotificationParams,
  ) {
    // do on a task in case the caller currently is in the lsp lock
    let client = self.0.clone();
    spawn(async move {
      client
        .send_did_refresh_deno_configuration_tree_notification(params)
        .await;
    });
  }

  pub fn send_did_change_deno_configuration_notification(
    &self,
    params: lsp_custom::DidChangeDenoConfigurationNotificationParams,
  ) {
    // do on a task in case the caller currently is in the lsp lock
    let client = self.0.clone();
    spawn(async move {
      client
        .send_did_change_deno_configuration_notification(params)
        .await;
    });
  }

  pub fn send_did_upgrade_check_notification(
    &self,
    params: lsp_custom::DidUpgradeCheckNotificationParams,
  ) {
    // do on a task in case the caller currently is in the lsp lock
    let client = self.0.clone();
    spawn(async move {
      client.send_did_upgrade_check_notification(params).await;
    });
  }

  pub fn show_message(
    &self,
    message_type: lsp::MessageType,
    message: impl std::fmt::Display,
  ) {
    // do on a task in case the caller currently is in the lsp lock
    let client = self.0.clone();
    let message = message.to_string();
    spawn(async move {
      client.show_message(message_type, message).await;
    });
  }

  pub fn refresh_diagnostics(&self) {
    // do on a task in case the caller currently is in the lsp lock
    let client = self.0.clone();
    spawn(async move {
      if let Err(err) = client.refresh_diagnostics().await {
        // (fixed: stray `$` before the format arg produced a literal `$`
        // in the warning text)
        lsp_warn!("Client failed to refresh diagnostics: {err:#}");
      }
    });
  }
}
/// DANGER: The methods on this client should only be called outside
/// the LSP's lock. The reason is you never want to call into the client
/// while holding the lock because the client might call back into the
/// server and cause a deadlock.
pub struct OutsideLockClient(Arc<dyn ClientTrait>);

impl OutsideLockClient {
  /// Registers server capabilities with the client (request/response).
  pub async fn register_capability(
    &self,
    registrations: Vec<lsp::Registration>,
  ) -> Result<(), AnyError> {
    self.0.register_capability(registrations).await
  }

  /// Fetches workspace settings for each scope URI; the result is in the
  /// same order as `scopes`.
  pub async fn workspace_configuration(
    &self,
    scopes: Vec<Option<lsp::Uri>>,
  ) -> Result<Vec<WorkspaceSettings>, AnyError> {
    self.0.workspace_configuration(scopes).await
  }
}
#[async_trait]
trait ClientTrait: Send + Sync {
async fn publish_diagnostics(
&self,
uri: lsp::Uri,
diagnostics: Vec<lsp::Diagnostic>,
version: Option<i32>,
);
async fn send_registry_state_notification(
&self,
params: lsp_custom::RegistryStateNotificationParams,
);
async fn send_diagnostic_batch_start_notification(&self);
async fn send_diagnostic_batch_end_notification(&self);
async fn send_test_notification(&self, params: TestingNotification);
async fn send_did_refresh_deno_configuration_tree_notification(
&self,
params: lsp_custom::DidRefreshDenoConfigurationTreeNotificationParams,
);
async fn send_did_change_deno_configuration_notification(
&self,
params: lsp_custom::DidChangeDenoConfigurationNotificationParams,
);
async fn send_did_upgrade_check_notification(
&self,
params: lsp_custom::DidUpgradeCheckNotificationParams,
);
async fn refresh_diagnostics(&self) -> Result<(), AnyError>;
async fn workspace_configuration(
&self,
scopes: Vec<Option<lsp::Uri>>,
) -> Result<Vec<WorkspaceSettings>, AnyError>;
async fn show_message(&self, message_type: lsp::MessageType, text: String);
async fn register_capability(
&self,
registrations: Vec<lsp::Registration>,
) -> Result<(), AnyError>;
}
#[derive(Clone)]
struct TowerClient(tower_lsp::Client);
#[async_trait]
impl ClientTrait for TowerClient {
async fn publish_diagnostics(
&self,
uri: lsp::Uri,
diagnostics: Vec<lsp::Diagnostic>,
version: Option<i32>,
) {
self.0.publish_diagnostics(uri, diagnostics, version).await
}
async fn send_registry_state_notification(
&self,
params: lsp_custom::RegistryStateNotificationParams,
) {
self
.0
.send_notification::<lsp_custom::RegistryStateNotification>(params)
.await
}
async fn send_diagnostic_batch_start_notification(&self) {
self
.0
.send_notification::<lsp_custom::DiagnosticBatchStartNotification>(())
.await
}
async fn send_diagnostic_batch_end_notification(&self) {
self
.0
.send_notification::<lsp_custom::DiagnosticBatchEndNotification>(())
.await
}
async fn send_test_notification(&self, notification: TestingNotification) {
match notification {
TestingNotification::Module(params) => {
self
.0
.send_notification::<testing_lsp_custom::TestModuleNotification>(
params,
)
.await
}
TestingNotification::DeleteModule(params) => self
.0
.send_notification::<testing_lsp_custom::TestModuleDeleteNotification>(
params,
)
.await,
TestingNotification::Progress(params) => {
self
.0
.send_notification::<testing_lsp_custom::TestRunProgressNotification>(
params,
)
.await
}
}
}
async fn send_did_refresh_deno_configuration_tree_notification(
&self,
params: lsp_custom::DidRefreshDenoConfigurationTreeNotificationParams,
) {
self
.0
.send_notification::<lsp_custom::DidRefreshDenoConfigurationTreeNotification>(
params,
)
.await
}
async fn send_did_change_deno_configuration_notification(
&self,
params: lsp_custom::DidChangeDenoConfigurationNotificationParams,
) {
self
.0
.send_notification::<lsp_custom::DidChangeDenoConfigurationNotification>(
params,
)
.await
}
async fn send_did_upgrade_check_notification(
&self,
params: lsp_custom::DidUpgradeCheckNotificationParams,
) {
self
.0
.send_notification::<lsp_custom::DidUpgradeCheckNotification>(params)
.await
}
async fn refresh_diagnostics(&self) -> Result<(), AnyError> {
self
.0
.send_request::<lsp::request::WorkspaceDiagnosticRefresh>(())
.await
.map_err(|err| anyhow!("{err:#}"))
}
async fn workspace_configuration(
&self,
scopes: Vec<Option<lsp::Uri>>,
) -> Result<Vec<WorkspaceSettings>, AnyError> {
let config_response = self
.0
.configuration(
scopes
.iter()
.flat_map(|scope_uri| {
vec![
ConfigurationItem {
scope_uri: scope_uri.clone(),
section: Some(SETTINGS_SECTION.to_string()),
},
ConfigurationItem {
scope_uri: scope_uri.clone(),
section: Some("javascript".to_string()),
},
ConfigurationItem {
scope_uri: scope_uri.clone(),
section: Some("typescript".to_string()),
},
]
})
.collect(),
)
.await;
match config_response {
Ok(configs) => {
let mut configs = configs.into_iter();
let mut result = Vec::with_capacity(scopes.len());
for _ in 0..scopes.len() {
let deno = json!(configs.next());
let javascript = json!(configs.next());
let typescript = json!(configs.next());
result.push(WorkspaceSettings::from_raw_settings(
deno, javascript, typescript,
));
}
Ok(result)
}
Err(err) => {
bail!("Error getting workspace configurations: {}", err)
}
}
}
async fn show_message(
&self,
message_type: lsp::MessageType,
message: String,
) {
self.0.show_message(message_type, message).await
}
async fn register_capability(
&self,
registrations: Vec<lsp::Registration>,
) -> Result<(), AnyError> {
self
.0
.register_capability(registrations)
.await
.map_err(|err| anyhow!("{}", err))
}
}
#[derive(Clone)]
struct ReplClient;
#[async_trait]
impl ClientTrait for ReplClient {
async fn publish_diagnostics(
&self,
_uri: lsp::Uri,
_diagnostics: Vec<lsp::Diagnostic>,
_version: Option<i32>,
) {
}
async fn send_registry_state_notification(
&self,
_params: lsp_custom::RegistryStateNotificationParams,
) {
}
async fn send_diagnostic_batch_start_notification(&self) {}
async fn send_diagnostic_batch_end_notification(&self) {}
async fn send_test_notification(&self, _params: TestingNotification) {}
async fn send_did_refresh_deno_configuration_tree_notification(
&self,
_params: lsp_custom::DidRefreshDenoConfigurationTreeNotificationParams,
) {
}
async fn send_did_change_deno_configuration_notification(
&self,
_params: lsp_custom::DidChangeDenoConfigurationNotificationParams,
) {
}
async fn send_did_upgrade_check_notification(
&self,
_params: lsp_custom::DidUpgradeCheckNotificationParams,
) {
}
async fn refresh_diagnostics(&self) -> Result<(), AnyError> {
Ok(())
}
async fn workspace_configuration(
&self,
scopes: Vec<Option<lsp::Uri>>,
) -> Result<Vec<WorkspaceSettings>, AnyError> {
Ok(vec![get_repl_workspace_settings(); scopes.len()])
}
async fn show_message(
&self,
_message_type: lsp::MessageType,
_message: String,
) {
}
async fn register_capability(
&self,
_registrations: Vec<lsp::Registration>,
) -> Result<(), AnyError> {
Ok(())
}
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/lsp/text.rs | cli/lsp/text.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use deno_core::error::AnyError;
use dissimilar::Chunk;
use dissimilar::diff;
use text_size::TextRange;
use text_size::TextSize;
use tower_lsp::jsonrpc;
use tower_lsp::lsp_types as lsp;
use tower_lsp::lsp_types::TextEdit;
use crate::util::text_encoding::Utf16Map;
/// Maps between UTF-8 offsets and LSP's UTF-16 line/character positions
/// for a fixed text, backed by [`Utf16Map`].
#[derive(Debug, Clone, Default, Eq, PartialEq)]
pub struct LineIndex {
  inner: Utf16Map,
}
impl LineIndex {
  /// Builds the index for `text`.
  pub fn new(text: &str) -> LineIndex {
    LineIndex {
      inner: Utf16Map::new(text),
    }
  }

  /// Convert a u16 based range to a u8 TextRange.
  pub fn get_text_range(
    &self,
    range: lsp::Range,
  ) -> Result<TextRange, AnyError> {
    let start = self.offset(range.start)?;
    let end = self.offset(range.end)?;
    Ok(TextRange::new(start, end))
  }

  /// Return a u8 offset based on a u16 position.
  pub fn offset(&self, position: lsp::Position) -> Result<TextSize, AnyError> {
    self.inner.offset(position.line, position.character)
  }

  /// Convert an lsp Position into a tsc/TypeScript "position", which is really
  /// an u16 byte offset from the start of the string represented as an u32.
  pub fn offset_tsc(&self, position: lsp::Position) -> jsonrpc::Result<u32> {
    self
      .inner
      .offset_utf16(position.line, position.character)
      .map(|ts| ts.into())
      .map_err(|err| jsonrpc::Error::invalid_params(err.to_string()))
  }

  /// Returns a u16 position based on a u16 offset, which TypeScript offsets are
  /// returned as u16.
  pub fn position_utf16(&self, offset: TextSize) -> lsp::Position {
    let lc = self.inner.position_utf16(offset);
    lsp::Position {
      line: lc.line_index as u32,
      character: lc.column_index as u32,
    }
  }

  /// Length of the given line in UTF-16 code units.
  pub fn line_length_utf16(&self, line: u32) -> TextSize {
    self.inner.line_length_utf16(line)
  }

  /// Total text length in UTF-16 code units.
  pub fn text_content_length_utf16(&self) -> TextSize {
    self.inner.text_content_length_utf16()
  }
}
/// Compare two strings and return a vector of text edit records which are
/// supported by the Language Server Protocol.
///
/// `line_index` must be the index of `a` (the old text); the returned
/// ranges/positions are UTF-16 based as required by LSP.
pub fn get_edits(a: &str, b: &str, line_index: &LineIndex) -> Vec<TextEdit> {
  if a == b {
    return vec![];
  }
  // Heuristic to detect things like large JSON or minified files. `diff()` is
  // expensive.
  let b_lines = b.chars().filter(|c| *c == '\n').count();
  if b_lines > 10000 || b_lines > line_index.inner.utf8_offsets_len() * 3 {
    // Fall back to a single whole-document replacement edit.
    // NOTE(review): `a.len()` is a UTF-8 byte count fed to
    // `position_utf16`, which takes a UTF-16 offset elsewhere in this file
    // — verify behavior for non-ASCII documents.
    return vec![TextEdit {
      range: lsp::Range {
        start: lsp::Position::new(0, 0),
        end: line_index.position_utf16(TextSize::from(a.len() as u32)),
      },
      new_text: b.to_string(),
    }];
  }
  let chunks = diff(a, b);
  let mut text_edits = Vec::<TextEdit>::new();
  let mut iter = chunks.iter().peekable();
  // Cursor into `a`, counted in UTF-16 code units (see `encode_utf16`
  // counts below).
  let mut a_pos = TextSize::from(0);
  loop {
    let chunk = iter.next();
    match chunk {
      None => break,
      Some(Chunk::Equal(e)) => {
        // Unchanged text: just advance the cursor; no edit emitted.
        a_pos += TextSize::from(e.encode_utf16().count() as u32);
      }
      Some(Chunk::Delete(d)) => {
        let start = line_index.position_utf16(a_pos);
        a_pos += TextSize::from(d.encode_utf16().count() as u32);
        let end = line_index.position_utf16(a_pos);
        let range = lsp::Range { start, end };
        match iter.peek() {
          Some(Chunk::Insert(i)) => {
            // A delete immediately followed by an insert collapses into a
            // single replacement edit; consume the insert chunk here.
            iter.next();
            text_edits.push(TextEdit {
              range,
              new_text: i.to_string(),
            });
          }
          _ => text_edits.push(TextEdit {
            range,
            new_text: "".to_string(),
          }),
        }
      }
      Some(Chunk::Insert(i)) => {
        // Pure insertion: zero-width range at the current cursor.
        let pos = line_index.position_utf16(a_pos);
        let range = lsp::Range {
          start: pos,
          end: pos,
        };
        text_edits.push(TextEdit {
          range,
          new_text: i.to_string(),
        });
      }
    }
  }
  text_edits
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_get_edits() {
let a = "abcdefg";
let b = "a\nb\nchije\nfg\n";
let actual = get_edits(a, b, &LineIndex::new(a));
assert_eq!(
actual,
vec![
TextEdit {
range: lsp::Range {
start: lsp::Position {
line: 0,
character: 1
},
end: lsp::Position {
line: 0,
character: 5
}
},
new_text: "\nb\nchije\n".to_string()
},
TextEdit {
range: lsp::Range {
start: lsp::Position {
line: 0,
character: 7
},
end: lsp::Position {
line: 0,
character: 7
}
},
new_text: "\n".to_string()
},
]
);
}
#[test]
fn test_get_edits_mbc() {
let a = "const bar = \"👍🇺🇸😃\";\nconsole.log('hello deno')\n";
let b = "const bar = \"👍🇺🇸😃\";\nconsole.log(\"hello deno\");\n";
let actual = get_edits(a, b, &LineIndex::new(a));
assert_eq!(
actual,
vec![
TextEdit {
range: lsp::Range {
start: lsp::Position {
line: 1,
character: 12
},
end: lsp::Position {
line: 1,
character: 13
}
},
new_text: "\"".to_string()
},
TextEdit {
range: lsp::Range {
start: lsp::Position {
line: 1,
character: 23
},
end: lsp::Position {
line: 1,
character: 25
}
},
new_text: "\");".to_string()
},
]
)
}
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/lsp/refactor.rs | cli/lsp/refactor.rs | // Copyright 2018-2025 the Deno authors. MIT license.
// The logic of this module is heavily influenced by
// https://github.com/microsoft/vscode/blob/main/extensions/typescript-language-features/src/languageFeatures/refactor.ts
use deno_core::serde::Deserialize;
use deno_core::serde::Serialize;
use lsp_types::Uri;
use once_cell::sync::Lazy;
use tower_lsp::lsp_types as lsp;
/// A refactor code action kind paired with a predicate that decides which
/// refactor tags map to it.
pub struct RefactorCodeActionKind {
  pub kind: lsp::CodeActionKind,
  // Returns true when a refactor name/tag belongs to this action kind;
  // see the `Lazy` statics below for the concrete predicates.
  matches_callback: Box<dyn Fn(&str) -> bool + Send + Sync>,
}

impl RefactorCodeActionKind {
  /// Whether `tag` belongs to this action kind.
  pub fn matches(&self, tag: &str) -> bool {
    (self.matches_callback)(tag)
  }
}
pub static EXTRACT_FUNCTION: Lazy<RefactorCodeActionKind> =
Lazy::new(|| RefactorCodeActionKind {
kind: [lsp::CodeActionKind::REFACTOR_EXTRACT.as_str(), "function"]
.join(".")
.into(),
matches_callback: Box::new(|tag: &str| tag.starts_with("function_")),
});
pub static EXTRACT_CONSTANT: Lazy<RefactorCodeActionKind> =
Lazy::new(|| RefactorCodeActionKind {
kind: [lsp::CodeActionKind::REFACTOR_EXTRACT.as_str(), "constant"]
.join(".")
.into(),
matches_callback: Box::new(|tag: &str| tag.starts_with("constant_")),
});
pub static EXTRACT_TYPE: Lazy<RefactorCodeActionKind> =
Lazy::new(|| RefactorCodeActionKind {
kind: [lsp::CodeActionKind::REFACTOR_EXTRACT.as_str(), "type"]
.join(".")
.into(),
matches_callback: Box::new(|tag: &str| {
tag.starts_with("Extract to type alias")
}),
});
pub static EXTRACT_INTERFACE: Lazy<RefactorCodeActionKind> =
Lazy::new(|| RefactorCodeActionKind {
kind: [lsp::CodeActionKind::REFACTOR_EXTRACT.as_str(), "interface"]
.join(".")
.into(),
matches_callback: Box::new(|tag: &str| {
tag.starts_with("Extract to interface")
}),
});
pub static MOVE_NEWFILE: Lazy<RefactorCodeActionKind> =
Lazy::new(|| RefactorCodeActionKind {
kind: [lsp::CodeActionKind::REFACTOR.as_str(), "move", "newFile"]
.join(".")
.into(),
matches_callback: Box::new(|tag: &str| {
tag.starts_with("Move to a new file")
}),
});
pub static REWRITE_IMPORT: Lazy<RefactorCodeActionKind> =
Lazy::new(|| RefactorCodeActionKind {
kind: [lsp::CodeActionKind::REFACTOR_REWRITE.as_str(), "import"]
.join(".")
.into(),
matches_callback: Box::new(|tag: &str| {
tag.starts_with("Convert namespace import")
|| tag.starts_with("Convert named imports")
}),
});
pub static REWRITE_EXPORT: Lazy<RefactorCodeActionKind> =
Lazy::new(|| RefactorCodeActionKind {
kind: [lsp::CodeActionKind::REFACTOR_REWRITE.as_str(), "export"]
.join(".")
.into(),
matches_callback: Box::new(|tag: &str| {
tag.starts_with("Convert default export")
|| tag.starts_with("Convert named export")
}),
});
pub static REWRITE_ARROW_BRACES: Lazy<RefactorCodeActionKind> =
Lazy::new(|| RefactorCodeActionKind {
kind: [
lsp::CodeActionKind::REFACTOR_REWRITE.as_str(),
"arrow",
"braces",
]
.join(".")
.into(),
matches_callback: Box::new(|tag: &str| {
tag.starts_with("Add or remove braces in an arrow function")
}),
});
pub static REWRITE_PARAMETERS_TO_DESTRUCTURED: Lazy<RefactorCodeActionKind> =
Lazy::new(|| RefactorCodeActionKind {
kind: [
lsp::CodeActionKind::REFACTOR_REWRITE.as_str(),
"parameters",
"toDestructured",
]
.join(".")
.into(),
matches_callback: Box::new(|tag: &str| {
tag.starts_with("Convert parameters to destructured object")
}),
});
pub static REWRITE_PROPERTY_GENERATEACCESSORS: Lazy<RefactorCodeActionKind> =
Lazy::new(|| RefactorCodeActionKind {
kind: [
lsp::CodeActionKind::REFACTOR_REWRITE.as_str(),
"property",
"generateAccessors",
]
.join(".")
.into(),
matches_callback: Box::new(|tag: &str| {
tag.starts_with("Generate 'get' and 'set' accessors")
}),
});
pub static ALL_KNOWN_REFACTOR_ACTION_KINDS: Lazy<
Vec<&'static RefactorCodeActionKind>,
> = Lazy::new(|| {
vec![
&EXTRACT_FUNCTION,
&EXTRACT_CONSTANT,
&EXTRACT_TYPE,
&EXTRACT_INTERFACE,
&MOVE_NEWFILE,
&REWRITE_IMPORT,
&REWRITE_EXPORT,
&REWRITE_ARROW_BRACES,
&REWRITE_PARAMETERS_TO_DESTRUCTURED,
&REWRITE_PROPERTY_GENERATEACCESSORS,
]
});
/// Data attached to a refactor code action identifying the refactor and
/// its location, so the action can be resolved later.
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct RefactorCodeActionData {
  pub uri: Uri,
  pub range: lsp::Range,
  pub refactor_name: String,
  pub action_name: String,
}
/// Keeps all applicable (non-disabled) actions and appends at most
/// `number_of_invalid` disabled ones, preferring the common
/// extract-constant / extract-function refactors over the rest.
pub fn prune_invalid_actions(
  actions: Vec<lsp::CodeAction>,
  number_of_invalid: usize,
) -> Vec<lsp::CodeAction> {
  let mut result = Vec::<lsp::CodeAction>::new();
  let mut disabled_common = Vec::<lsp::CodeAction>::new();
  let mut disabled_other = Vec::<lsp::CodeAction>::new();
  for action in actions {
    if action.disabled.is_none() {
      result.push(action);
    } else {
      let kind =
        action.kind.as_ref().map(|k| k.as_str()).unwrap_or_default();
      // These are the common refactors that we should always show if
      // applicable.
      if kind.starts_with(EXTRACT_CONSTANT.kind.as_str())
        || kind.starts_with(EXTRACT_FUNCTION.kind.as_str())
      {
        disabled_common.push(action);
      } else {
        // Remaining refactors are shown only if the limit still allows.
        disabled_other.push(action);
      }
    }
  }
  // Within the limit, common disabled refactors take priority.
  result.extend(
    disabled_common
      .into_iter()
      .chain(disabled_other)
      .take(number_of_invalid),
  );
  result
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/lsp/mod.rs | cli/lsp/mod.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use deno_core::error::AnyError;
pub use repl::ReplCompletionItem;
pub use repl::ReplLanguageServer;
use tower_lsp::LspService;
use tower_lsp::Server;
use crate::lsp::language_server::LanguageServer;
mod analysis;
mod cache;
mod capabilities;
mod client;
mod code_lens;
mod compiler_options;
mod completions;
mod config;
mod diagnostics;
mod documents;
mod jsr;
pub mod language_server;
mod lint;
mod logging;
mod lsp_custom;
mod npm;
mod parent_process_checker;
mod path_to_regex;
mod performance;
mod refactor;
mod registries;
mod repl;
mod resolver;
mod search;
mod semantic_tokens;
mod testing;
mod text;
mod trace;
mod tsc;
mod urls;
/// Starts the language server, serving LSP messages over stdin/stdout until
/// the client disconnects.
pub async fn start() -> Result<(), AnyError> {
  let stdin = tokio::io::stdin();
  let stdout = tokio::io::stdout();
  let builder = LspService::build(|client| {
    language_server::LanguageServer::new(client::Client::from_tower(client))
  })
  .custom_method(
    lsp_custom::PERFORMANCE_REQUEST,
    LanguageServer::performance_request,
  )
  .custom_method(lsp_custom::TASK_REQUEST, LanguageServer::task_definitions)
  .custom_method(testing::TEST_RUN_REQUEST, LanguageServer::test_run_request)
  .custom_method(
    testing::TEST_RUN_CANCEL_REQUEST,
    LanguageServer::test_run_cancel_request,
  )
  // Register the virtual text document handler exactly once. The original
  // chained this identical registration three times; the duplicates were
  // redundant re-registrations of the same method name.
  .custom_method(
    lsp_custom::VIRTUAL_TEXT_DOCUMENT,
    LanguageServer::virtual_text_document,
  );
  let (service, socket, pending) = builder.finish();
  Server::new(stdin, stdout, socket, pending)
    .concurrency_level(32)
    .serve(service)
    .await;
  Ok(())
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/lsp/completions.rs | cli/lsp/completions.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use deno_ast::LineAndColumnIndex;
use deno_ast::SourceTextInfo;
use deno_core::ModuleSpecifier;
use deno_core::resolve_path;
use deno_core::resolve_url;
use deno_core::serde::Deserialize;
use deno_core::serde::Serialize;
use deno_core::serde_json::json;
use deno_core::url::Position;
use deno_path_util::url_to_file_path;
use deno_runtime::deno_node::SUPPORTED_BUILTIN_NODE_MODULES;
use deno_semver::jsr::JsrPackageReqReference;
use deno_semver::package::PackageNv;
use import_map::ImportMap;
use indexmap::IndexSet;
use lsp_types::CompletionList;
use node_resolver::NodeResolutionKind;
use node_resolver::ResolutionMode;
use once_cell::sync::Lazy;
use regex::Regex;
use tower_lsp::lsp_types as lsp;
use super::client::Client;
use super::config::Config;
use super::config::WorkspaceSettings;
use super::documents::DocumentModule;
use super::documents::DocumentModules;
use super::documents::ServerDocumentKind;
use super::jsr::CliJsrSearchApi;
use super::lsp_custom;
use super::npm::CliNpmSearchApi;
use super::registries::ModuleRegistry;
use super::resolver::LspResolver;
use super::search::PackageSearchApi;
use super::tsc;
use crate::jsr::JsrFetchResolver;
use crate::util::path::is_importable_ext;
use crate::util::path::relative_specifier;
// Matches a leading `file://` (optionally followed by a Windows drive
// segment like `/C:`) so it can be stripped from import map keys.
static FILE_PROTO_RE: Lazy<Regex> =
  lazy_regex::lazy_regex!(r#"^file:/{2}(?:/[A-Za-z]:)?"#);
// Relative path stems offered when the import specifier is empty.
const CURRENT_PATH: &str = ".";
const PARENT_PATH: &str = "..";
const LOCAL_PATHS: &[&str] = &[CURRENT_PATH, PARENT_PATH];
// Quote characters that commit an import completion in the editor.
pub(crate) const IMPORT_COMMIT_CHARS: &[&str] = &["\"", "'"];
/// Serializable data carried on completion items.
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct CompletionItemData {
  // Pre-computed documentation text for the item, if any.
  #[serde(skip_serializing_if = "Option::is_none")]
  pub documentation: Option<String>,
  // Data for items originating from the TS language service.
  // NOTE(review): presumably used when resolving the item — confirm against
  // the `completionItem/resolve` handler.
  #[serde(skip_serializing_if = "Option::is_none")]
  pub tsc: Option<tsc::CompletionItemData>,
}
/// Check if the origin can be auto-configured for completions, and if so, send
/// a notification to the client.
async fn check_auto_config_registry(
  url_str: &str,
  workspace_settings: &WorkspaceSettings,
  client: &Client,
  module_registries: &ModuleRegistry,
) {
  // check to see if auto discovery is enabled
  if workspace_settings.suggest.imports.auto_discover
    && let Ok(specifier) = resolve_url(url_str)
  {
    let scheme = specifier.scheme();
    let path = &specifier[Position::BeforePath..];
    // Only consider http(s) URLs where the typed string still ends with the
    // URL's path portion (i.e. at least the full origin has been typed).
    if scheme.starts_with("http") && !path.is_empty() && url_str.ends_with(path)
    {
      // check to see if this origin is already explicitly set
      let in_config =
        workspace_settings
          .suggest
          .imports
          .hosts
          .iter()
          .any(|(h, _)| {
            resolve_url(h).map(|u| u.origin()) == Ok(specifier.origin())
          });
      // if it isn't in the configuration, we will check to see if it supports
      // suggestions and send a notification to the client.
      if !in_config {
        let origin = specifier.origin().ascii_serialization();
        let suggestions = module_registries.check_origin(&origin).await.is_ok();
        // we are only sending registry state when enabled now, but changing
        // the custom notification would make older versions of the plugin
        // incompatible.
        // TODO(@kitsonk) clean up protocol when doing v2 of suggestions
        if suggestions {
          client.send_registry_state_notification(
            lsp_custom::RegistryStateNotificationParams {
              origin,
              suggestions,
            },
          );
        }
      }
    }
  }
}
/// Ranges from the graph for specifiers include the leading and maybe trailing quote,
/// which we want to ignore when replacing text.
fn to_narrow_lsp_range(
  text_info: &SourceTextInfo,
  range: deno_graph::PositionRange,
) -> lsp::Range {
  // Convert the graph's line/column positions to byte offsets so the actual
  // characters at the range boundaries can be inspected.
  let end_byte_index = text_info
    .loc_to_source_pos(LineAndColumnIndex {
      line_index: range.end.line,
      column_index: range.end.character,
    })
    .as_byte_index(text_info.range().start);
  let start_byte_index = text_info
    .loc_to_source_pos(LineAndColumnIndex {
      line_index: range.start.line,
      column_index: range.start.character,
    })
    .as_byte_index(text_info.range().start);
  let text_bytes = text_info.text_str().as_bytes();
  // A width-1 range holds only the leading quote, i.e. an empty specifier.
  // NOTE(review): `end_byte_index - 1` assumes the range is never zero-width
  // (it always includes the leading quote) — confirm with callers.
  let is_empty = end_byte_index - 1 == start_byte_index;
  let has_trailing_quote =
    !is_empty && matches!(text_bytes[end_byte_index - 1], b'"' | b'\'');
  lsp::Range {
    start: lsp::Position {
      line: range.start.line as u32,
      // skip the leading quote
      character: (range.start.character + 1) as u32,
    },
    end: lsp::Position {
      line: range.end.line as u32,
      character: if has_trailing_quote {
        range.end.character - 1 // do not include it
      } else {
        range.end.character
      } as u32,
    },
  }
}
/// Given a specifier, a position, and a snapshot, optionally return a
/// completion response, which will be valid import completions for the specific
/// context.
#[allow(clippy::too_many_arguments)]
#[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))]
pub async fn get_import_completions(
module: &DocumentModule,
position: &lsp::Position,
config: &Config,
client: &Client,
module_registries: &ModuleRegistry,
jsr_search_api: &CliJsrSearchApi,
npm_search_api: &CliNpmSearchApi,
document_modules: &DocumentModules,
resolver: &LspResolver,
) -> Option<lsp::CompletionResponse> {
let maybe_import_map = resolver
.get_scoped_resolver(module.scope.as_deref())
.as_workspace_resolver()
.maybe_import_map();
let (text, _, graph_range) = module.dependency_at_position(position)?;
let resolution_mode = graph_range
.resolution_mode
.map(node_resolver::ResolutionMode::from_deno_graph)
.unwrap_or_else(|| module.resolution_mode);
let range = to_narrow_lsp_range(module.text_info(), graph_range.range);
let scoped_resolver = resolver.get_scoped_resolver(module.scope.as_deref());
let resolved = scoped_resolver
.as_cli_resolver()
.resolve(
text,
&module.specifier,
deno_graph::Position::zeroed(),
resolution_mode,
NodeResolutionKind::Execution,
)
.ok();
match get_jsr_completions(
&module.specifier,
text,
&range,
resolved.as_ref(),
jsr_search_api,
Some(jsr_search_api.get_resolver()),
)
.await
{
Some(completion_list) => {
Some(lsp::CompletionResponse::List(completion_list))
}
_ => {
match get_npm_completions(&module.specifier, text, &range, npm_search_api)
.await
{
Some(completion_list) => {
Some(lsp::CompletionResponse::List(completion_list))
}
_ => {
match get_node_completions(text, &range) {
Some(completion_list) => {
Some(lsp::CompletionResponse::List(completion_list))
}
_ => {
match get_import_map_completions(
&module.specifier,
text,
&range,
maybe_import_map,
) {
Some(completion_list) => {
// completions for import map specifiers
Some(lsp::CompletionResponse::List(completion_list))
}
_ => {
match get_local_completions(
&module.specifier,
resolution_mode,
text,
&range,
resolver,
) {
Some(completion_list) => {
// completions for local relative modules
Some(lsp::CompletionResponse::List(completion_list))
}
_ => {
if !text.is_empty() {
// completion of modules from a module registry or cache
check_auto_config_registry(
text,
config.workspace_settings_for_specifier(
&module.specifier,
),
client,
module_registries,
)
.await;
let maybe_list = module_registries
.get_completions(
text,
&range,
resolved.as_ref(),
|s| {
document_modules
.specifier_exists(s, module.scope.as_deref())
},
)
.await;
let maybe_list = maybe_list.or_else(|| {
module_registries.get_origin_completions(text, &range)
});
let list =
maybe_list.unwrap_or_else(|| CompletionList {
items: get_remote_completions(
module,
text,
&range,
document_modules,
),
is_incomplete: false,
});
Some(lsp::CompletionResponse::List(list))
} else {
// the import specifier is empty, so provide all possible specifiers we are
// aware of
let mut items: Vec<lsp::CompletionItem> = LOCAL_PATHS
.iter()
.map(|s| lsp::CompletionItem {
label: s.to_string(),
kind: Some(lsp::CompletionItemKind::FOLDER),
detail: Some("(local)".to_string()),
sort_text: Some("1".to_string()),
insert_text: Some(s.to_string()),
commit_characters: Some(
IMPORT_COMMIT_CHARS
.iter()
.map(|&c| c.into())
.collect(),
),
..Default::default()
})
.collect();
let mut is_incomplete = false;
if let Some(import_map) = maybe_import_map {
items.extend(get_base_import_map_completions(
import_map,
&module.specifier,
));
}
if let Some(origin_items) =
module_registries.get_origin_completions(text, &range)
{
is_incomplete = origin_items.is_incomplete;
items.extend(origin_items.items);
}
Some(lsp::CompletionResponse::List(CompletionList {
is_incomplete,
items,
}))
}
}
}
}
}
}
}
}
}
}
}
}
/// When the specifier is an empty string, return all the keys from the import
/// map as completion items.
fn get_base_import_map_completions(
  import_map: &ImportMap,
  referrer: &ModuleSpecifier,
) -> Vec<lsp::CompletionItem> {
  let mut items = Vec::new();
  for entry in import_map.entries_for_referrer(referrer) {
    // for some strange reason, keys that start with `/` get stored in the
    // import map as `file:///`, and so when we pull the keys out, we need to
    // change the behavior
    let mut label = if entry.key.starts_with("file://") {
      FILE_PROTO_RE.replace(entry.key, "").to_string()
    } else {
      entry.key.to_string()
    };
    // Keys ending in `/` are path prefixes: present them as folders and drop
    // the trailing slash from the label.
    let kind = if entry.key.ends_with('/') {
      label.pop();
      Some(lsp::CompletionItemKind::FOLDER)
    } else {
      Some(lsp::CompletionItemKind::FILE)
    };
    items.push(lsp::CompletionItem {
      label: label.clone(),
      kind,
      detail: Some("(import map)".to_string()),
      sort_text: Some(label.clone()),
      insert_text: Some(label),
      commit_characters: Some(
        IMPORT_COMMIT_CHARS.iter().map(|&c| c.into()).collect(),
      ),
      ..Default::default()
    });
  }
  items
}
/// Given an existing specifier, return any completions that could apply derived
/// from the import map. There are two main types of import map keys: literal
/// keys, which don't end in `/` and expect a one-for-one replacement of
/// specifier to specifier, and keys that end in `/`, which indicate that the
/// path after the `/` should be appended to the resolved specifier. This
/// handles both cases, pulling any completions from the workspace completions.
fn get_import_map_completions(
  _specifier: &ModuleSpecifier,
  text: &str,
  range: &lsp::Range,
  maybe_import_map: Option<&ImportMap>,
) -> Option<CompletionList> {
  if !text.is_empty()
    && let Some(import_map) = maybe_import_map
  {
    let mut specifiers = IndexSet::new();
    for key in import_map.imports().keys() {
      // for some reason, the import_map stores keys that begin with `/` as
      // `file:///` in its index, so we have to reverse that here
      let key = if key.starts_with("file://") {
        FILE_PROTO_RE.replace(key, "").to_string()
      } else {
        key.to_string()
      };
      // only offer keys that extend what has been typed, not exact matches
      if key.starts_with(text) && key != text {
        specifiers.insert(key.trim_end_matches('/').to_string());
      }
    }
    if !specifiers.is_empty() {
      let items = specifiers
        .into_iter()
        .map(|specifier| lsp::CompletionItem {
          label: specifier.clone(),
          kind: Some(lsp::CompletionItemKind::FILE),
          detail: Some("(import map)".to_string()),
          sort_text: Some("1".to_string()),
          text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
            range: *range,
            new_text: specifier,
          })),
          commit_characters: Some(
            IMPORT_COMMIT_CHARS.iter().map(|&c| c.into()).collect(),
          ),
          ..Default::default()
        })
        .collect();
      return Some(CompletionList {
        items,
        is_incomplete: false,
      });
    }
  }
  None
}
/// Return local completions that are relative to the base specifier.
fn get_local_completions(
  referrer: &ModuleSpecifier,
  resolution_mode: ResolutionMode,
  text: &str,
  range: &lsp::Range,
  resolver: &LspResolver,
) -> Option<CompletionList> {
  if referrer.scheme() != "file" {
    return None;
  }
  // The directory portion of what has been typed so far: everything up to
  // and including the last `/` (bails out if there is no `/` yet).
  let parent = &text[..text.char_indices().rfind(|(_, c)| *c == '/')?.0 + 1];
  let scoped_resolver = resolver.get_scoped_resolver(Some(referrer));
  let resolved_parent = scoped_resolver
    .as_cli_resolver()
    .resolve(
      parent,
      referrer,
      deno_graph::Position::zeroed(),
      resolution_mode,
      NodeResolutionKind::Execution,
    )
    .ok()?;
  let resolved_parent_path = url_to_file_path(&resolved_parent).ok()?;
  if resolved_parent_path.is_dir() {
    let cwd = std::env::current_dir().ok()?;
    let entries = std::fs::read_dir(resolved_parent_path).ok()?;
    let items = entries
      .filter_map(|de| {
        let de = de.ok()?;
        let label = de.path().file_name()?.to_string_lossy().into_owned();
        let entry_specifier = resolve_path(de.path().to_str()?, &cwd).ok()?;
        // don't suggest importing the referrer into itself
        if entry_specifier == *referrer {
          return None;
        }
        let full_text = format!("{parent}{label}");
        let text_edit = Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
          range: *range,
          new_text: full_text.clone(),
        }));
        let filter_text = Some(full_text);
        match de.file_type() {
          // directories are always offered as folders
          Ok(file_type) if file_type.is_dir() => Some(lsp::CompletionItem {
            label,
            kind: Some(lsp::CompletionItemKind::FOLDER),
            detail: Some("(local)".to_string()),
            filter_text,
            sort_text: Some("1".to_string()),
            text_edit,
            commit_characters: Some(
              IMPORT_COMMIT_CHARS.iter().map(|&c| c.into()).collect(),
            ),
            ..Default::default()
          }),
          // files are only offered when they have an importable extension
          Ok(file_type) if file_type.is_file() => {
            if is_importable_ext(&de.path()) {
              Some(lsp::CompletionItem {
                label,
                kind: Some(lsp::CompletionItemKind::FILE),
                detail: Some("(local)".to_string()),
                filter_text,
                sort_text: Some("1".to_string()),
                text_edit,
                commit_characters: Some(
                  IMPORT_COMMIT_CHARS.iter().map(|&c| c.into()).collect(),
                ),
                ..Default::default()
              })
            } else {
              None
            }
          }
          _ => None,
        }
      })
      .collect();
    Some(CompletionList {
      is_incomplete: false,
      items,
    })
  } else {
    None
  }
}
/// Find the index of the '@' delimiting the package name and version, if any.
///
/// Handles scoped (`@scope/name@version`) and unscoped (`name@version`)
/// specifiers. Returns `None` when there is no version delimiter, the name
/// portion is empty, or the name portion contains a path separator.
fn parse_bare_specifier_version_index(bare_specifier: &str) -> Option<usize> {
  if bare_specifier.starts_with('@') {
    // Scoped package: the version `@` must come after the `/` separating the
    // scope from the package name. The `[1..]`/`[idx..][1..]` slices skip the
    // leading `@` and `/` when checking the scope and name are non-empty.
    bare_specifier
      .find('/')
      .filter(|idx| !bare_specifier[1..*idx].is_empty())
      .and_then(|idx| {
        bare_specifier[idx..]
          .find('@')
          .filter(|idx2| !bare_specifier[idx..][1..*idx2].is_empty())
          .filter(|idx2| !bare_specifier[idx..][1..*idx2].contains('/'))
          .map(|idx2| idx + idx2)
      })
  } else {
    // Unscoped package: the name is everything before the first `@`. The
    // original sliced from index 1 here, which wrongly rejected
    // single-character package names like `q@1`.
    bare_specifier
      .find('@')
      .filter(|idx| !bare_specifier[..*idx].is_empty())
      .filter(|idx| !bare_specifier[..*idx].contains('/'))
  }
}
/// Get completions for `jsr:` specifiers: package exports, then versions,
/// then package names, depending on how much has been typed.
async fn get_jsr_completions(
  referrer: &ModuleSpecifier,
  specifier: &str,
  range: &lsp::Range,
  resolved: Option<&ModuleSpecifier>,
  jsr_search_api: &impl PackageSearchApi,
  jsr_resolver: Option<&JsrFetchResolver>,
) -> Option<CompletionList> {
  // First try to match `jsr:some-package@some-version/<export-to-complete>`.
  let req_ref = resolved
    .and_then(|s| JsrPackageReqReference::from_specifier(s).ok())
    .or_else(|| JsrPackageReqReference::from_str(specifier).ok());
  if let Some(req_ref) = req_ref {
    let sub_path = req_ref.sub_path();
    if sub_path.is_some() || specifier.ends_with('/') {
      let export_prefix = sub_path.unwrap_or("");
      let req = req_ref.req();
      // Resolve the version requirement to a concrete name/version so the
      // package's exports can be looked up.
      let nv = match jsr_resolver {
        Some(jsr_resolver) => jsr_resolver.req_to_nv(req).await.ok().flatten(),
        None => None,
      };
      let nv = nv.or_else(|| PackageNv::from_str(&req.to_string()).ok())?;
      let exports = jsr_search_api.exports(&nv).await.ok()?;
      let items = exports
        .iter()
        .enumerate()
        .filter_map(|(idx, export)| {
          // the root export is what the bare specifier already refers to
          if export == "." {
            return None;
          }
          let export = export.strip_prefix("./").unwrap_or(export.as_str());
          if !export.starts_with(export_prefix) {
            return None;
          }
          let specifier = format!(
            "{}/{export}",
            specifier.strip_suffix(export_prefix)?.trim_end_matches('/')
          );
          // Ask the client to cache the module when the completion is chosen.
          let command = Some(lsp::Command {
            title: "".to_string(),
            command: "deno.cache".to_string(),
            arguments: Some(vec![
              json!([&specifier]),
              json!(referrer),
              json!({ "forceGlobalCache": true }),
            ]),
          });
          let text_edit = Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
            range: *range,
            new_text: specifier.clone(),
          }));
          Some(lsp::CompletionItem {
            label: specifier,
            kind: Some(lsp::CompletionItemKind::FILE),
            detail: Some("(jsr)".to_string()),
            // zero-padded so lexicographic sorting preserves original order
            sort_text: Some(format!("{:0>10}", idx + 1)),
            text_edit,
            command,
            commit_characters: Some(
              IMPORT_COMMIT_CHARS.iter().map(|&c| c.into()).collect(),
            ),
            ..Default::default()
          })
        })
        .collect();
      return Some(CompletionList {
        is_incomplete: false,
        items,
      });
    }
  }
  // Then try to match `jsr:some-package@<version-to-complete>`.
  let bare_specifier = specifier.strip_prefix("jsr:")?;
  if let Some(v_index) = parse_bare_specifier_version_index(bare_specifier) {
    let package_name = &bare_specifier[..v_index];
    let v_prefix = &bare_specifier[(v_index + 1)..];
    let versions = jsr_search_api.versions(package_name).await.ok()?;
    let items = versions
      .iter()
      .enumerate()
      .filter_map(|(idx, version)| {
        let version = version.to_string();
        // only versions extending the typed version prefix
        if !version.starts_with(v_prefix) {
          return None;
        }
        let specifier = format!("jsr:{}@{}", package_name, version);
        let command = Some(lsp::Command {
          title: "".to_string(),
          command: "deno.cache".to_string(),
          arguments: Some(vec![
            json!([&specifier]),
            json!(referrer),
            json!({ "forceGlobalCache": true }),
          ]),
        });
        let text_edit = Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
          range: *range,
          new_text: specifier.clone(),
        }));
        Some(lsp::CompletionItem {
          label: specifier,
          kind: Some(lsp::CompletionItemKind::FILE),
          detail: Some("(jsr)".to_string()),
          sort_text: Some(format!("{:0>10}", idx + 1)),
          text_edit,
          command,
          commit_characters: Some(
            IMPORT_COMMIT_CHARS.iter().map(|&c| c.into()).collect(),
          ),
          ..Default::default()
        })
      })
      .collect();
    return Some(CompletionList {
      is_incomplete: false,
      items,
    });
  }
  // Otherwise match `jsr:<package-to-complete>`.
  let names = jsr_search_api.search(bare_specifier).await.ok()?;
  let items = names
    .iter()
    .enumerate()
    .map(|(idx, name)| {
      let specifier = format!("jsr:{}", name);
      let command = Some(lsp::Command {
        title: "".to_string(),
        command: "deno.cache".to_string(),
        arguments: Some(vec![
          json!([&specifier]),
          json!(referrer),
          json!({ "forceGlobalCache": true }),
        ]),
      });
      let text_edit = Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
        range: *range,
        new_text: specifier.clone(),
      }));
      lsp::CompletionItem {
        label: specifier,
        kind: Some(lsp::CompletionItemKind::FILE),
        detail: Some("(jsr)".to_string()),
        sort_text: Some(format!("{:0>10}", idx + 1)),
        text_edit,
        command,
        commit_characters: Some(
          IMPORT_COMMIT_CHARS.iter().map(|&c| c.into()).collect(),
        ),
        ..Default::default()
      }
    })
    .collect();
  // marked incomplete so the client re-requests as the user keeps typing
  Some(CompletionList {
    is_incomplete: true,
    items,
  })
}
/// Get completions for `npm:` specifiers: versions when an `@` delimiter is
/// present, otherwise package-name search results.
async fn get_npm_completions(
  referrer: &ModuleSpecifier,
  specifier: &str,
  range: &lsp::Range,
  npm_search_api: &impl PackageSearchApi,
) -> Option<CompletionList> {
  // First try to match `npm:some-package@<version-to-complete>`.
  let bare_specifier = specifier.strip_prefix("npm:")?;
  if let Some(v_index) = parse_bare_specifier_version_index(bare_specifier) {
    let package_name = &bare_specifier[..v_index];
    let v_prefix = &bare_specifier[(v_index + 1)..];
    let versions = npm_search_api.versions(package_name).await.ok()?;
    let items = versions
      .iter()
      .enumerate()
      .filter_map(|(idx, version)| {
        let version = version.to_string();
        // only versions extending the typed version prefix
        if !version.starts_with(v_prefix) {
          return None;
        }
        let specifier = format!("npm:{}@{}", package_name, version);
        // Ask the client to cache the package when the completion is chosen.
        let command = Some(lsp::Command {
          title: "".to_string(),
          command: "deno.cache".to_string(),
          arguments: Some(vec![
            json!([&specifier]),
            json!(referrer),
            json!({ "forceGlobalCache": true }),
          ]),
        });
        let text_edit = Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
          range: *range,
          new_text: specifier.clone(),
        }));
        Some(lsp::CompletionItem {
          label: specifier,
          kind: Some(lsp::CompletionItemKind::FILE),
          detail: Some("(npm)".to_string()),
          // zero-padded so lexicographic sorting preserves original order
          sort_text: Some(format!("{:0>10}", idx + 1)),
          text_edit,
          command,
          commit_characters: Some(
            IMPORT_COMMIT_CHARS.iter().map(|&c| c.into()).collect(),
          ),
          ..Default::default()
        })
      })
      .collect();
    return Some(CompletionList {
      is_incomplete: false,
      items,
    });
  }
  // Otherwise match `npm:<package-to-complete>`.
  let names = npm_search_api.search(bare_specifier).await.ok()?;
  let items = names
    .iter()
    .enumerate()
    .map(|(idx, name)| {
      let specifier = format!("npm:{}", name);
      let command = Some(lsp::Command {
        title: "".to_string(),
        command: "deno.cache".to_string(),
        arguments: Some(vec![
          json!([&specifier]),
          json!(referrer),
          json!({ "forceGlobalCache": true }),
        ]),
      });
      let text_edit = Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
        range: *range,
        new_text: specifier.clone(),
      }));
      lsp::CompletionItem {
        label: specifier,
        kind: Some(lsp::CompletionItemKind::FILE),
        detail: Some("(npm)".to_string()),
        sort_text: Some(format!("{:0>10}", idx + 1)),
        text_edit,
        command,
        commit_characters: Some(
          IMPORT_COMMIT_CHARS.iter().map(|&c| c.into()).collect(),
        ),
        ..Default::default()
      }
    })
    .collect();
  // marked incomplete so the client re-requests as the user keeps typing
  Some(CompletionList {
    is_incomplete: true,
    items,
  })
}
/// Get completions for `node:` specifiers.
fn get_node_completions(
  specifier: &str,
  range: &lsp::Range,
) -> Option<CompletionList> {
  if !specifier.starts_with("node:") {
    return None;
  }
  let mut items = Vec::new();
  for name in SUPPORTED_BUILTIN_NODE_MODULES.iter() {
    // underscore-prefixed module names are skipped
    if name.starts_with('_') {
      continue;
    }
    let module_specifier = format!("node:{}", name);
    let text_edit = Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
      range: *range,
      new_text: module_specifier.clone(),
    }));
    items.push(lsp::CompletionItem {
      label: module_specifier,
      kind: Some(lsp::CompletionItemKind::FILE),
      detail: Some("(node)".to_string()),
      text_edit,
      commit_characters: Some(
        IMPORT_COMMIT_CHARS.iter().map(|&c| c.into()).collect(),
      ),
      ..Default::default()
    });
  }
  Some(CompletionList {
    is_incomplete: false,
    items,
  })
}
/// Get remote completions that include modules in the Deno cache which match
/// the current specifier string.
fn get_remote_completions(
  module: &DocumentModule,
  current: &str,
  range: &lsp::Range,
  document_modules: &DocumentModules,
) -> Vec<lsp::CompletionItem> {
  let mut items = Vec::new();
  for doc in document_modules.documents.server_docs() {
    // only cached remote URLs are candidates
    let ServerDocumentKind::RemoteUrl { url, .. } = &doc.kind else {
      continue;
    };
    // never suggest the module itself
    if *url == module.specifier {
      continue;
    }
    // prefer a relative specifier when one can be computed
    let label = relative_specifier(&module.specifier, url)
      .unwrap_or_else(|| url.to_string());
    if !label.starts_with(current) {
      continue;
    }
    let text_edit = Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
      range: *range,
      new_text: label.clone(),
    }));
    items.push(lsp::CompletionItem {
      label,
      kind: Some(lsp::CompletionItemKind::FILE),
      detail: Some("(remote)".to_string()),
      sort_text: Some("1".to_string()),
      text_edit,
      commit_characters: Some(
        IMPORT_COMMIT_CHARS.iter().map(|&c| c.into()).collect(),
      ),
      ..Default::default()
    });
  }
  items
}
#[cfg(test)]
mod tests {
use std::collections::HashMap;
use deno_core::resolve_url;
use deno_resolver::deno_json::CompilerOptionsKey;
use pretty_assertions::assert_eq;
use test_util::TempDir;
use super::*;
use crate::cache::HttpCache;
use crate::lsp::cache::LspCache;
use crate::lsp::documents::LanguageId;
use crate::lsp::search::tests::TestPackageSearchApi;
use crate::lsp::urls::url_to_uri;
  /// Builds a `DocumentModules` for tests: opens `open_sources` as in-editor
  /// documents and seeds `fs_sources` into a temporary global cache.
  fn setup(
    open_sources: &[(&str, &str, i32, LanguageId)],
    fs_sources: &[(&str, &str)],
  ) -> DocumentModules {
    let temp_dir = TempDir::new();
    let cache = LspCache::new(Some(temp_dir.url().join(".deno_dir").unwrap()));
    let mut document_modules = DocumentModules::default();
    document_modules.update_config(
      &Default::default(),
      &Default::default(),
      &Default::default(),
      &cache,
      &Default::default(),
    );
    // open documents as if the editor had them open
    for (specifier, source, version, language_id) in open_sources {
      let specifier =
        resolve_url(specifier).expect("failed to create specifier");
      let uri = url_to_uri(&specifier).unwrap();
      document_modules.open_document(
        uri,
        *version,
        *language_id,
        (*source).into(),
        None,
      );
    }
    // seed the global cache with "remote" sources, verifying a module can be
    // produced for each one
    for (specifier, source) in fs_sources {
      let specifier =
        resolve_url(specifier).expect("failed to create specifier");
      cache
        .global()
        .set(&specifier, HashMap::default(), source.as_bytes())
        .expect("could not cache file");
      let module = document_modules.module_for_specifier(
        &specifier,
        None,
        Some(&CompilerOptionsKey::WorkspaceConfig(None)),
      );
      assert!(module.is_some(), "source could not be setup");
    }
    document_modules
  }
#[test]
fn test_get_local_completions() {
let temp_dir = TempDir::new();
let fixtures = temp_dir.path().join("fixtures");
std::fs::create_dir(&fixtures).expect("could not create");
let dir_a = fixtures.join("a");
std::fs::create_dir(&dir_a).expect("could not create");
let dir_b = dir_a.join("b");
std::fs::create_dir(&dir_b).expect("could not create");
let file_c = dir_a.join("c.ts");
std::fs::write(&file_c, b"").expect("could not create");
let file_d = dir_b.join("d.ts");
std::fs::write(file_d, b"").expect("could not create");
let file_e = dir_a.join("e.txt");
std::fs::write(file_e, b"").expect("could not create");
let file_f = dir_a.join("f.mjs");
std::fs::write(file_f, b"").expect("could not create");
let file_g = dir_a.join("g.json");
std::fs::write(file_g, b"").expect("could not create");
let specifier =
ModuleSpecifier::from_file_path(file_c).expect("could not create");
let actual = get_local_completions(
&specifier,
ResolutionMode::Import,
"./",
&lsp::Range {
start: lsp::Position {
line: 0,
character: 20,
},
end: lsp::Position {
line: 0,
character: 22,
},
},
&Default::default(),
)
.unwrap();
assert!(!actual.is_incomplete);
assert_eq!(actual.items.len(), 3);
for item in actual.items {
match item.text_edit {
Some(lsp::CompletionTextEdit::Edit(text_edit)) => {
assert!(
["./b", "./f.mjs", "./g.json"]
.contains(&text_edit.new_text.as_str())
);
}
_ => unreachable!(),
}
}
}
#[tokio::test]
async fn test_get_remote_completions() {
let specifier = resolve_url("file:///a/b/c.ts").unwrap();
let range = lsp::Range {
start: lsp::Position {
line: 0,
character: 20,
},
end: lsp::Position {
line: 0,
character: 21,
},
};
let document_modules = setup(
&[
(
"file:///a/b/c.ts",
"import * as d from \"h\"",
1,
LanguageId::TypeScript,
),
("file:///a/c.ts", r#""#, 1, LanguageId::TypeScript),
],
&[("https://deno.land/x/a/b/c.ts", "console.log(1);\n")],
);
let module = document_modules
.module_for_specifier(&specifier, None, None)
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | true |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/lsp/language_server.rs | cli/lsp/language_server.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::cell::RefCell;
use std::collections::BTreeMap;
use std::collections::BTreeSet;
use std::collections::HashMap;
use std::collections::HashSet;
use std::collections::VecDeque;
use std::env;
use std::fmt::Write as _;
use std::path::PathBuf;
use std::rc::Rc;
use std::str::FromStr;
use std::sync::Arc;
use dashmap::DashMap;
use deno_ast::MediaType;
use deno_cache_dir::GlobalOrLocalHttpCache;
use deno_cache_dir::file_fetcher::CacheSetting;
use deno_core::ModuleSpecifier;
use deno_core::anyhow::anyhow;
use deno_core::error::AnyError;
use deno_core::parking_lot::Mutex;
use deno_core::serde_json;
use deno_core::serde_json::Value;
use deno_core::serde_json::json;
use deno_core::unsync::spawn;
use deno_core::url;
use deno_core::url::Url;
use deno_graph::CheckJsOption;
use deno_graph::GraphKind;
use deno_graph::Resolution;
use deno_lib::args::CaData;
use deno_lib::args::get_root_cert_store;
use deno_lib::version::DENO_VERSION_INFO;
use deno_npm::resolution::NpmVersionResolver;
use deno_npm_installer::graph::NpmCachingStrategy;
use deno_path_util::url_to_file_path;
use deno_resolver::deno_json::CompilerOptionsKey;
use deno_resolver::loader::MemoryFilesRc;
use deno_runtime::deno_tls::RootCertStoreProvider;
use deno_runtime::deno_tls::rustls::RootCertStore;
use deno_semver::jsr::JsrPackageReqReference;
use indexmap::IndexMap;
use indexmap::IndexSet;
use log::error;
use node_resolver::NodeResolutionKind;
use node_resolver::ResolutionMode;
use serde::Deserialize;
use serde_json::from_value;
use tokio::sync::OnceCell;
use tokio::sync::mpsc::UnboundedReceiver;
use tokio::sync::mpsc::UnboundedSender;
use tokio::sync::mpsc::unbounded_channel;
use tokio_util::sync::CancellationToken;
use tower_lsp::jsonrpc::Error as LspError;
use tower_lsp::jsonrpc::Result as LspResult;
use tower_lsp::lsp_types::request::*;
use tower_lsp::lsp_types::*;
use super::analysis::CodeActionCollection;
use super::analysis::CodeActionData;
use super::analysis::TsResponseImportMapper;
use super::analysis::fix_ts_import_changes;
use super::analysis::ts_changes_to_edit;
use super::cache::LspCache;
use super::capabilities;
use super::capabilities::semantic_tokens_registration_options;
use super::client::Client;
use super::code_lens;
use super::completions;
use super::config::Config;
use super::config::SETTINGS_SECTION;
use super::config::UpdateImportsOnFileMoveEnabled;
use super::config::WorkspaceSettings;
use super::diagnostics;
use super::diagnostics::DiagnosticDataSpecifier;
use super::diagnostics::DiagnosticsServer;
use super::diagnostics::DiagnosticsUpdateMessage;
use super::documents::Document;
use super::documents::DocumentModule;
use super::documents::DocumentModules;
use super::documents::LanguageId;
use super::jsr::CliJsrSearchApi;
use super::logging::lsp_log;
use super::logging::lsp_warn;
use super::lsp_custom;
use super::lsp_custom::TaskDefinition;
use super::npm::CliNpmSearchApi;
use super::parent_process_checker;
use super::performance::Performance;
use super::refactor;
use super::registries::ModuleRegistry;
use super::resolver::LspResolver;
use super::testing;
use super::text;
use super::tsc;
use super::tsc::ChangeKind;
use super::tsc::TsServer;
use super::urls::uri_to_file_path;
use super::urls::uri_to_url;
use super::urls::url_to_uri;
use crate::args::Flags;
use crate::args::InternalFlags;
use crate::args::UnstableFmtOptions;
use crate::factory::CliFactory;
use crate::file_fetcher::CreateCliFileFetcherOptions;
use crate::file_fetcher::create_cli_file_fetcher;
use crate::graph_util;
use crate::http_util::HttpClientProvider;
use crate::lsp::analysis::fix_ts_import_changes_for_file_rename;
use crate::lsp::compiler_options::LspCompilerOptionsResolver;
use crate::lsp::config::ConfigWatchedFileType;
use crate::lsp::diagnostics::generate_module_diagnostics;
use crate::lsp::lint::LspLinterResolver;
use crate::lsp::logging::init_log_file;
use crate::lsp::tsc::file_text_changes_to_workspace_edit;
use crate::sys::CliSys;
use crate::tools::fmt::format_file;
use crate::tools::fmt::format_parsed_source;
use crate::tools::upgrade::check_for_upgrades_for_lsp;
use crate::tools::upgrade::upgrade_check_enabled;
use crate::util::fs::remove_dir_all_if_exists;
use crate::util::path::to_percent_decoded_str;
use crate::util::sync::AsyncFlag;
/// Wraps an eagerly-built `RootCertStore` so it can be handed to
/// `HttpClientProvider::new`, which expects a `RootCertStoreProvider`.
struct LspRootCertStoreProvider(RootCertStore);
impl RootCertStoreProvider for LspRootCertStoreProvider {
  // The store is constructed up front, so this lookup is infallible.
  fn get_or_try_init(&self) -> Result<&RootCertStore, deno_error::JsErrorBox> {
    Ok(&self.0)
  }
}
/// Used as a filtering option for `Inner::get_document()`.
///
/// Controls whether documents that are not enabled per the workspace config
/// (`Config::uri_enabled()`) are filtered out.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Enabled {
  /// Return Ok(None) if not enabled.
  Filter,
  /// Allow even if not enabled.
  Ignore,
}
/// Used as a filtering option for `Inner::get_document()`.
///
/// Controls how a missing (unknown to `DocumentModules`) document is treated.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Exists {
  /// Return Err(LspError::InvalidParams(...)) if non-existent.
  /// (`deno:` scheme URIs are exempted at the use site and yield Ok(None).)
  Enforce,
  /// Return Ok(None) if non-existent.
  Filter,
}
/// Used as a filtering option for `Inner::get_document()`.
///
/// Controls whether documents for which `Document::is_diagnosable()` is
/// false are filtered out.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Diagnosable {
  /// Return Ok(None) if not diagnosable.
  Filter,
  /// Allow even if not diagnosable.
  Ignore,
}
/// Converts a `deno_graph` source range into an LSP protocol range,
/// widening the line/character offsets to `u32` as the protocol requires.
pub fn to_lsp_range(referrer: &deno_graph::Range) -> lsp_types::Range {
  let start = &referrer.range.start;
  let end = &referrer.range.end;
  lsp_types::Range {
    start: lsp_types::Position {
      line: start.line as u32,
      character: start.character as u32,
    },
    end: lsp_types::Position {
      line: end.line as u32,
      character: end.character as u32,
    },
  }
}
/// A single buffered `textDocument/didChange` payload: the document version
/// it produces together with its content change events.
#[derive(Debug)]
struct DidChangeBatchQueueEntry {
  version: i32,
  content_changes: Vec<TextDocumentContentChangeEvent>,
}
/// FIFO of buffered change entries for a single document (`uri`). Each entry
/// is paired with a token that is cancelled once the entry is dequeued or the
/// queue is cleared, so waiters can observe completion.
#[derive(Debug)]
struct DidChangeBatchQueue {
  uri: Uri,
  entries: Mutex<VecDeque<(DidChangeBatchQueueEntry, CancellationToken)>>,
}
impl DidChangeBatchQueue {
  /// Creates an empty queue for the document identified by `uri`.
  fn new(uri: Uri) -> Self {
    Self {
      uri,
      entries: Default::default(),
    }
  }
  /// Appends `entry` and returns a token that will be cancelled when the
  /// entry has been consumed (dequeued) or the queue is cleared.
  fn enqueue(&self, entry: DidChangeBatchQueueEntry) -> CancellationToken {
    let token = CancellationToken::new();
    let mut entries = self.entries.lock();
    entries.push_back((entry, token.clone()));
    token
  }
  /// Pops the oldest entry, cancelling its token to signal completion.
  fn dequeue(&self) -> Option<DidChangeBatchQueueEntry> {
    let front = self.entries.lock().pop_front();
    front.map(|(entry, token)| {
      token.cancel();
      entry
    })
  }
  /// Drops every pending entry, cancelling each associated token.
  fn clear(&self) {
    let drained = std::mem::take(&mut *self.entries.lock());
    for (_, token) in drained {
      token.cancel();
    }
  }
}
/// The outer, cloneable language server handle; almost all mutable state
/// lives behind the `inner` async `RwLock`.
#[derive(Debug, Clone)]
pub struct LanguageServer {
  client: Client,
  pub inner: Rc<tokio::sync::RwLock<Inner>>,
  /// This is used to block out standard request handling until the complete
  /// user configuration has been fetched. This is done in the `initialized`
  /// handler which normally may occur concurrently with those other requests.
  /// TODO(nayeemrmn): This wouldn't be necessary if LSP allowed
  /// `workspace/configuration` requests in the `initialize` handler. See:
  /// https://github.com/Microsoft/language-server-protocol/issues/567#issuecomment-2085131917
  init_flag: AsyncFlag,
  /// Pending batched `didChange` work for one document, if any. Setting this
  /// to `None` discards the handle before taking the inner write lock.
  /// NOTE(review): semantics inferred from usage in `cache()` and
  /// `refresh_configuration()`; confirm against the didChange handler.
  did_change_batch_queue: RefCell<Option<Arc<DidChangeBatchQueue>>>,
  /// Performance instrumentation shared with `Inner`.
  performance: Arc<Performance>,
}
/// Snapshot of the state used by TSC. Built by `Inner::snapshot()` so the
/// TS server can operate without holding the language server lock.
#[derive(Clone, Debug, Default)]
pub struct StateSnapshot {
  /// Mirrors `Inner::project_version` at the time the snapshot was taken.
  pub project_version: usize,
  pub config: Arc<Config>,
  pub compiler_options_resolver: Arc<LspCompilerOptionsResolver>,
  pub linter_resolver: Arc<LspLinterResolver>,
  pub document_modules: DocumentModules,
  pub resolver: Arc<LspResolver>,
  pub cache: Arc<LspCache>,
}
/// Describes what kind of change occurred to the set of project scopes.
/// NOTE(review): variant semantics inferred from names; confirm at use sites.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
enum ProjectScopesChange {
  None,
  OpenNotebooks,
  Config,
}
/// A one-shot task that is handed a clone of the `LanguageServer` handle.
type LanguageServerTaskFn = Box<dyn FnOnce(LanguageServer) + Send + Sync>;
/// Used to queue tasks from inside of the language server lock that must be
/// commenced from outside of it. For example, queue a request to cache a module
/// after having loaded a config file which references it.
#[derive(Debug)]
struct LanguageServerTaskQueue {
  task_tx: UnboundedSender<LanguageServerTaskFn>,
  /// This is moved out to its own task after initializing.
  task_rx: Option<UnboundedReceiver<LanguageServerTaskFn>>,
}
impl Default for LanguageServerTaskQueue {
  /// Creates the channel pair; the receiver is held until `start()` moves it
  /// into its draining task.
  fn default() -> Self {
    let (task_tx, task_rx) = unbounded_channel();
    let task_rx = Some(task_rx);
    Self { task_tx, task_rx }
  }
}
impl LanguageServerTaskQueue {
  /// Enqueues a task to run outside the language server lock. Returns
  /// `false` when the draining task's receiver has been dropped.
  pub fn queue_task(&self, task_fn: LanguageServerTaskFn) -> bool {
    match self.task_tx.send(task_fn) {
      Ok(()) => true,
      Err(_) => false,
    }
  }
  /// Spawns the task-draining loop. Panics if called more than once.
  fn start(&mut self, ls: LanguageServer) {
    let mut task_rx = self.task_rx.take().unwrap();
    spawn(async move {
      loop {
        match task_rx.recv().await {
          Some(task_fn) => task_fn(ls.clone()),
          None => break,
        }
      }
    });
  }
}
/// A concurrent map whose values are initialized at most once per key.
pub type OnceCellMap<K, V> = DashMap<K, Arc<OnceCell<V>>>;
/// The state of the language server, guarded by `LanguageServer::inner`.
pub struct Inner {
  /// (_, notebook_uri) -> _
  ambient_modules_regex_cache:
    OnceCellMap<(CompilerOptionsKey, Option<Arc<Uri>>), Option<regex::Regex>>,
  pub cache: LspCache,
  /// The LSP client that this LSP server is connected to.
  pub client: Client,
  compiler_options_resolver: Arc<LspCompilerOptionsResolver>,
  /// Configuration information.
  pub config: Config,
  diagnostics_cache: OnceCellMap<Arc<Uri>, Arc<Vec<Diagnostic>>>,
  /// Push-diagnostics server; only created when the client is not
  /// pull-diagnostics capable (see `initialize`).
  diagnostics_server: Option<diagnostics::DiagnosticsServer>,
  /// The collection of documents that the server is currently handling, either
  /// on disk or "open" within the client.
  pub document_modules: DocumentModules,
  http_client_provider: Arc<HttpClientProvider>,
  /// The process cwd captured at startup; resolved once in `Inner::new`.
  initial_cwd: PathBuf,
  jsr_search_api: CliJsrSearchApi,
  linter_resolver: Arc<LspLinterResolver>,
  /// Handles module registries, which allow discovery of modules
  module_registry: ModuleRegistry,
  /// A lazily created "server" for handling test run requests.
  maybe_testing_server: Option<testing::TestServer>,
  pub npm_search_api: CliNpmSearchApi,
  project_version: usize,
  /// A collection of measurements which instrument that performance of the LSP.
  performance: Arc<Performance>,
  registered_semantic_tokens_capabilities: bool,
  pub resolver: Arc<LspResolver>,
  task_queue: LanguageServerTaskQueue,
  /// Diagnostic codes that TS reports as fixable; fetched once on demand.
  ts_fixable_diagnostics: tokio::sync::OnceCell<Vec<String>>,
  pub ts_server: Arc<TsServer>,
  workspace_files: Arc<IndexSet<PathBuf>>,
  /// Set to `self.config.settings.enable_settings_hash()` after
  /// refreshing `self.workspace_files`.
  workspace_files_hash: u64,
  _tracing: Option<super::trace::TracingGuard>,
}
impl std::fmt::Debug for Inner {
  /// Manual `Debug` impl: only a diagnostically useful subset of fields is
  /// printed (e.g. the regex/diagnostics caches are omitted).
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    let mut s = f.debug_struct("Inner");
    s.field("cache", &self.cache);
    s.field("client", &self.client);
    s.field("config", &self.config);
    s.field("diagnostics_server", &self.diagnostics_server);
    s.field("document_modules", &self.document_modules);
    s.field("http_client_provider", &self.http_client_provider);
    s.field("initial_cwd", &self.initial_cwd);
    s.field("jsr_search_api", &self.jsr_search_api);
    s.field("module_registry", &self.module_registry);
    s.field("maybe_testing_server", &self.maybe_testing_server);
    s.field("npm_search_api", &self.npm_search_api);
    s.field("project_version", &self.project_version);
    s.field("performance", &self.performance);
    s.field(
      "registered_semantic_tokens_capabilities",
      &self.registered_semantic_tokens_capabilities,
    );
    s.field("resolver", &self.resolver);
    s.field("task_queue", &self.task_queue);
    s.field("ts_fixable_diagnostics", &self.ts_fixable_diagnostics);
    s.field("ts_server", &self.ts_server);
    s.field("workspace_files", &self.workspace_files);
    s.field("workspace_files_hash", &self.workspace_files_hash);
    s.field("_tracing", &self._tracing);
    s.finish()
  }
}
impl LanguageServer {
  /// Creates the language server bound to `client`, sharing a single
  /// `Performance` recorder between this outer handle and `Inner`.
  pub fn new(client: Client) -> Self {
    let performance = Arc::new(Performance::default());
    Self {
      client: client.clone(),
      inner: Rc::new(tokio::sync::RwLock::new(Inner::new(
        client,
        performance.clone(),
      ))),
      init_flag: Default::default(),
      did_change_batch_queue: Default::default(),
      performance,
    }
  }
  /// Similar to `deno install --entrypoint` on the command line, where modules will be cached
  /// in the Deno cache, including any of their dependencies.
  ///
  /// Graph-creation errors are reported to the client as warnings and the
  /// request still succeeds; only a failure to *prepare* caching yields an
  /// LSP error.
  #[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))]
  pub async fn cache(
    &self,
    specifiers: Vec<ModuleSpecifier>,
    referrer: ModuleSpecifier,
    force_global_cache: bool,
  ) -> LspResult<Option<Value>> {
    // Builds a full module graph for `roots`, preferring the in-memory
    // contents of open modules over what is on disk, then validates it.
    async fn create_graph_for_caching(
      factory: CliFactory,
      roots: Vec<ModuleSpecifier>,
      open_modules: Vec<Arc<DocumentModule>>,
    ) -> Result<(), AnyError> {
      let open_modules = open_modules
        .into_iter()
        .map(|m| (m.specifier.clone(), m))
        .collect::<HashMap<_, _>>();
      let module_graph_builder = factory.module_graph_builder().await?;
      let module_graph_creator = factory.module_graph_creator().await?;
      let mut inner_loader =
        module_graph_builder.create_graph_loader_with_root_permissions();
      let mut loader = crate::lsp::documents::OpenDocumentsGraphLoader {
        inner_loader: &mut inner_loader,
        open_modules: &open_modules,
      };
      let graph = module_graph_creator
        .create_graph_with_loader(
          GraphKind::All,
          roots.clone(),
          &mut loader,
          NpmCachingStrategy::Eager,
        )
        .await?;
      graph_util::graph_valid(
        &graph,
        &CliSys::default(),
        &roots,
        graph_util::GraphValidOptions {
          kind: GraphKind::All,
          will_type_check: true,
          check_js: CheckJsOption::False,
          exit_integrity_errors: false,
          allow_unknown_media_types: true,
          allow_unknown_jsr_exports: false,
        },
      )?;
      Ok(())
    }
    self.init_flag.wait_raised().await;
    // prepare the cache inside the lock
    let mark = self
      .performance
      .mark_with_args("lsp.cache", (&specifiers, &referrer));
    // discard any pending didChange batch handle before taking the write lock
    *self.did_change_batch_queue.borrow_mut() = None;
    let prepare_cache_result = self.inner.write().await.prepare_cache(
      specifiers,
      referrer,
      force_global_cache,
    );
    match prepare_cache_result {
      Ok(result) => {
        // cache outside the lock
        let cli_factory = result.cli_factory;
        let roots = result.roots;
        let open_modules = result.open_modules;
        let handle = spawn(async move {
          create_graph_for_caching(cli_factory, roots, open_modules).await
        });
        if let Err(err) = handle.await.unwrap() {
          lsp_warn!("Error caching: {:#}", err);
          self.client.show_message(MessageType::WARNING, err);
        }
        // now get the lock back to update with the new information
        *self.did_change_batch_queue.borrow_mut() = None;
        self.inner.write().await.post_cache().await;
        self.performance.measure(mark);
      }
      Err(err) => {
        lsp_warn!("Error preparing caching: {:#}", err);
        self.client.show_message(MessageType::WARNING, err);
        return Err(LspError::internal_error());
      }
    }
    Ok(Some(json!(true)))
  }
  /// Returns the recorded performance measurements as JSON.
  pub async fn performance_request(
    &self,
    _token: CancellationToken,
  ) -> LspResult<Option<Value>> {
    self.init_flag.wait_raised().await;
    Ok(Some(self.inner.read().await.get_performance()))
  }
  /// Returns the available task definitions (see `lsp_custom::TaskDefinition`).
  pub async fn task_definitions(
    &self,
    _token: CancellationToken,
  ) -> LspResult<Vec<TaskDefinition>> {
    self.init_flag.wait_raised().await;
    self.inner.read().await.task_definitions()
  }
  /// Forwards a test run request to the inner server.
  pub async fn test_run_request(
    &self,
    params: Option<Value>,
    _token: CancellationToken,
  ) -> LspResult<Option<Value>> {
    self.init_flag.wait_raised().await;
    self.inner.read().await.test_run_request(params).await
  }
  /// Forwards a test run cancellation request to the inner server.
  pub async fn test_run_cancel_request(
    &self,
    params: Option<Value>,
    _token: CancellationToken,
  ) -> LspResult<Option<Value>> {
    self.init_flag.wait_raised().await;
    self.inner.read().await.test_run_cancel_request(params)
  }
  /// Returns the text of a virtual (e.g. `deno:`) document as JSON.
  /// Missing or malformed params yield `invalid_params` errors.
  pub async fn virtual_text_document(
    &self,
    params: Option<Value>,
    _token: CancellationToken,
  ) -> LspResult<Option<Value>> {
    self.init_flag.wait_raised().await;
    match params.map(serde_json::from_value) {
      Some(Ok(params)) => Ok(Some(
        serde_json::to_value(
          self.inner.read().await.virtual_text_document(params)?,
        )
        .map_err(|err| {
          error!(
            "Failed to serialize virtual_text_document response: {:#}",
            err
          );
          LspError::internal_error()
        })?,
      )),
      Some(Err(err)) => Err(LspError::invalid_params(err.to_string())),
      None => Err(LspError::invalid_params("Missing parameters")),
    }
  }
  /// Requests `workspace/configuration` from the client for the unscoped
  /// settings (`None`) plus one scope per workspace folder, then applies the
  /// returned settings. A response with the wrong number of entries is
  /// logged and discarded.
  #[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))]
  pub async fn refresh_configuration(&self) {
    let (folders, capable) = {
      let inner = self.inner.read().await;
      (
        inner.config.workspace_folders.clone(),
        inner.config.workspace_configuration_capable(),
      )
    };
    if capable {
      let mut scopes = Vec::with_capacity(folders.len() + 1);
      scopes.push(None);
      for (_, folder) in folders.as_ref() {
        scopes.push(Some(folder.uri.clone()));
      }
      let configs = self
        .client
        .when_outside_lsp_lock()
        .workspace_configuration(scopes)
        .await;
      if let Ok(configs) = configs {
        if configs.len() != folders.len() + 1 {
          lsp_warn!("Incorrect number of configurations received.");
          return;
        }
        // first entry corresponds to the unscoped `None` entry pushed above
        let mut configs = configs.into_iter();
        let unscoped = configs.next().unwrap();
        let mut folder_settings = Vec::with_capacity(folders.len());
        for (folder_uri, _) in folders.as_ref() {
          folder_settings.push((folder_uri.clone(), configs.next().unwrap()));
        }
        *self.did_change_batch_queue.borrow_mut() = None;
        self
          .inner
          .write()
          .await
          .config
          .set_workspace_settings(unscoped, folder_settings);
      }
    }
  }
}
impl Inner {
  /// Builds the initial state with default config and caches. Much of this is
  /// rebuilt once the client's settings arrive (see `update_global_cache`).
  fn new(client: Client, performance: Arc<Performance>) -> Self {
    let cache = LspCache::default();
    let http_client_provider = Arc::new(HttpClientProvider::new(None, None));
    let module_registry = ModuleRegistry::new(
      cache.deno_dir().registries_folder_path(),
      http_client_provider.clone(),
    );
    let jsr_search_api =
      CliJsrSearchApi::new(module_registry.file_fetcher.clone());
    let npm_search_api = CliNpmSearchApi::new(
      module_registry.file_fetcher.clone(),
      Arc::new(NpmVersionResolver {
        types_node_version_req: None,
        link_packages: Default::default(),
        newest_dependency_date_options: Default::default(),
      }),
    );
    let config = Config::default();
    let ts_server = Arc::new(TsServer::new(performance.clone()));
    let initial_cwd = std::env::current_dir().unwrap_or_else(|_| {
      panic!("Could not resolve current working directory")
    });
    Self {
      ambient_modules_regex_cache: Default::default(),
      cache,
      client,
      compiler_options_resolver: Default::default(),
      config,
      diagnostics_cache: Default::default(),
      diagnostics_server: None,
      document_modules: Default::default(),
      http_client_provider,
      // the local is not used again, so move it rather than clone it
      initial_cwd,
      jsr_search_api,
      linter_resolver: Default::default(),
      project_version: 0,
      task_queue: Default::default(),
      maybe_testing_server: None,
      module_registry,
      npm_search_api,
      performance,
      registered_semantic_tokens_capabilities: false,
      resolver: Default::default(),
      ts_fixable_diagnostics: Default::default(),
      ts_server,
      workspace_files: Default::default(),
      workspace_files_hash: 0,
      _tracing: Default::default(),
    }
  }
  /// Looks up the document for `uri`, applying the given filters.
  ///
  /// Returns `Ok(None)` when a filter rejects the document; returns an
  /// `invalid_params` error only for `Exists::Enforce` on a missing
  /// document whose scheme isn't `deno`.
  #[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))]
  pub fn get_document(
    &self,
    uri: &Uri,
    enabled: Enabled,
    exists: Exists,
    diagnosable: Diagnosable,
  ) -> LspResult<Option<Document>> {
    match enabled {
      Enabled::Filter => {
        if !self.config.uri_enabled(uri) {
          return Ok(None);
        }
      }
      Enabled::Ignore => {}
    }
    let Some(document) = self.document_modules.documents.get(uri) else {
      match exists {
        Exists::Enforce
          if !uri.scheme().is_some_and(|s| s.eq_lowercase("deno")) =>
        {
          return Err(LspError::invalid_params(format!(
            "Unable to find document for: {}",
            uri.as_str()
          )));
        }
        Exists::Filter => {
          return Ok(None);
        }
        // `Exists::Enforce` with a `deno:` scheme also filters to `None`
        _ => return Ok(None),
      }
    };
    match diagnosable {
      Diagnosable::Filter => {
        if !document.is_diagnosable() {
          return Ok(None);
        }
      }
      Diagnosable::Ignore => {}
    }
    Ok(Some(document))
  }
  /// Resolves the primary module for `document`. A missing module is
  /// tolerated (`Ok(None)`) for non-file, `node_modules`, or cache-directory
  /// specifiers; otherwise it is logged and treated as an internal error.
  #[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))]
  pub fn get_primary_module(
    &self,
    document: &Document,
  ) -> LspResult<Option<Arc<DocumentModule>>> {
    let Some(module) = self.document_modules.primary_module(document) else {
      let url = uri_to_url(document.uri());
      if url.scheme() != "file"
        || self.resolver.in_node_modules(&url)
        || self.cache.in_cache_directory(&url)
      {
        return Ok(None);
      }
      lsp_warn!(
        "Unable to get module for document: {}",
        document.uri().as_str(),
      );
      return Err(LspError::internal_error());
    };
    Ok(Some(module))
  }
  /// Returns the TS navigation tree for `module`, computing it at most once
  /// per module (cached in `module.navigation_tree`). A cancelled token maps
  /// to `request_cancelled`; other TS failures map to `internal_error`.
  #[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))]
  pub async fn get_navigation_tree(
    &self,
    module: &DocumentModule,
    token: &CancellationToken,
  ) -> LspResult<Arc<tsc::NavigationTree>> {
    let mark = self.performance.mark_with_args(
      "lsp.get_navigation_tree",
      json!({ "specifier": &module.specifier }),
    );
    let result = module
      .navigation_tree
      .get_or_try_init(|| async {
        self
          .ts_server
          .get_navigation_tree(self.snapshot(), module, token)
          .await
          .map(Arc::new)
          .map_err(|err| {
            if token.is_cancelled() {
              LspError::request_cancelled()
            } else {
              error!(
                "Error getting navigation tree for \"{}\": {:#}",
                &module.specifier, err,
              );
              LspError::internal_error()
            }
          })
      })
      .await
      .map(Clone::clone);
    self.performance.measure(mark);
    result
  }
  /// Clones the TSC-relevant state into an immutable snapshot that can be
  /// used without holding the language server lock.
  #[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))]
  pub fn snapshot(&self) -> Arc<StateSnapshot> {
    Arc::new(StateSnapshot {
      project_version: self.project_version,
      config: Arc::new(self.config.clone()),
      compiler_options_resolver: self.compiler_options_resolver.clone(),
      linter_resolver: self.linter_resolver.clone(),
      document_modules: self.document_modules.clone(),
      resolver: self.resolver.snapshot(),
      cache: Arc::new(self.cache.clone()),
    })
  }
  /// Lazily fetches (once) the diagnostic codes that TS reports as fixable.
  /// NOTE(review): unwraps the TS response; a failure here panics.
  pub async fn ts_fixable_diagnostics(&self) -> &Vec<String> {
    self
      .ts_fixable_diagnostics
      .get_or_init(|| async {
        self
          .ts_server
          .get_supported_code_fixes(self.snapshot())
          .await
          .unwrap()
      })
      .await
  }
  /// Applies the tracing configuration from workspace settings, falling back
  /// to an enabled default config when `DENO_LSP_TRACE` is set. Holds the
  /// resulting subscriber guard in `_tracing`.
  pub fn update_tracing(&mut self) {
    let tracing =
      self
        .config
        .workspace_settings()
        .tracing
        .clone()
        .or_else(|| {
          std::env::var("DENO_LSP_TRACE").ok().map(|_| {
            super::trace::TracingConfig {
              enable: true,
              ..Default::default()
            }
            .into()
          })
        });
    self
      .ts_server
      .set_tracing_enabled(tracing.as_ref().is_some_and(|t| t.enabled()));
    self._tracing = tracing.and_then(|conf| {
      if !conf.enabled() {
        return None;
      }
      lsp_log!("Initializing tracing subscriber: {:#?}", conf);
      let config = conf.into();
      super::trace::init_tracing_subscriber(&config)
        .inspect_err(|e| {
          lsp_warn!("Error initializing tracing subscriber: {e:#}");
        })
        .ok()
    });
  }
  /// Rebuilds the cache, HTTP client, module registry, and registry search
  /// APIs from the current workspace settings (custom cache path, TLS
  /// certificates, import-suggestion hosts).
  #[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))]
  pub async fn update_global_cache(&mut self) {
    let mark = self.performance.mark("lsp.update_global_cache");
    let maybe_cache = self.config.workspace_settings().cache.as_ref();
    // a cache setting may be an absolute path or relative to the root url
    let global_cache_url = maybe_cache.and_then(|cache_str| {
      if let Ok(url) = Url::from_file_path(cache_str) {
        Some(url)
      } else if let Some(root_url) = self.config.root_url() {
        root_url.join(cache_str).inspect_err(|err| lsp_warn!("Failed to resolve custom cache path: {err}")).ok()
      } else {
        lsp_warn!(
          "The configured cache path \"{cache_str}\" is not resolvable outside of a workspace.",
        );
        None
      }
    });
    self.cache = LspCache::new(global_cache_url);
    let deno_dir = self.cache.deno_dir();
    let workspace_settings = self.config.workspace_settings();
    let maybe_root_path = self
      .config
      .root_url()
      .and_then(|url| url_to_file_path(url).ok());
    // failures loading certs degrade to an empty store rather than erroring
    let root_cert_store = get_root_cert_store(
      maybe_root_path,
      workspace_settings.certificate_stores.clone(),
      workspace_settings.tls_certificate.clone().map(CaData::File),
    )
    .inspect_err(|err| lsp_warn!("Failed to load root cert store: {err}"))
    .unwrap_or_else(|_| RootCertStore::empty());
    let root_cert_store_provider =
      Arc::new(LspRootCertStoreProvider(root_cert_store));
    self.http_client_provider = Arc::new(HttpClientProvider::new(
      Some(root_cert_store_provider),
      workspace_settings
        .unsafely_ignore_certificate_errors
        .clone(),
    ));
    self.module_registry = ModuleRegistry::new(
      deno_dir.registries_folder_path(),
      self.http_client_provider.clone(),
    );
    let workspace_settings = self.config.workspace_settings();
    for (registry, enabled) in workspace_settings.suggest.imports.hosts.iter() {
      if *enabled {
        lsp_log!("Enabling import suggestions for: {}", registry);
        self.module_registry.enable(registry).await;
      } else {
        self.module_registry.disable(registry);
      }
    }
    self.jsr_search_api =
      CliJsrSearchApi::new(self.module_registry.file_fetcher.clone());
    self.npm_search_api = CliNpmSearchApi::new(
      self.module_registry.file_fetcher.clone(),
      Arc::new(NpmVersionResolver {
        types_node_version_req: None,
        // todo(dsherret): the npm_search_api should probably be specific
        // to each workspace so that the link packages can be properly
        // hooked up
        link_packages: Default::default(),
        newest_dependency_date_options: Default::default(),
      }),
    );
    self.performance.measure(mark);
  }
  /// Propagates the current config into the cache.
  #[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))]
  pub fn update_cache(&mut self) {
    let mark = self.performance.mark("lsp.update_cache");
    self.cache.update_config(&self.config);
    self.performance.measure(mark);
  }
  /// Syncs the global LSP debug-logging flag with workspace settings.
  pub fn update_debug_flag(&self) {
    let internal_debug = self.config.workspace_settings().internal_debug;
    super::logging::set_lsp_debug_flag(internal_debug)
  }
  /// Dynamically registers the semantic tokens capability once there is at
  /// least one enabled, diagnosable open document. No-ops when already
  /// registered or when the client doesn't support dynamic registration.
  pub fn check_semantic_tokens_capabilities(&mut self) {
    if self.registered_semantic_tokens_capabilities {
      return;
    }
    if !self
      .config
      .client_capabilities
      .text_document
      .as_ref()
      .and_then(|t| t.semantic_tokens.as_ref())
      .and_then(|s| s.dynamic_registration)
      .unwrap_or_default()
    {
      return;
    }
    let exists_enabled_document = self
      .document_modules
      .documents
      .open_docs()
      .any(|doc| doc.is_diagnosable() && self.config.uri_enabled(&doc.uri));
    if !exists_enabled_document {
      return;
    }
    // registration must happen outside the lock, so queue it as a task
    self.task_queue.queue_task(Box::new(|ls| {
      spawn(async move {
        let register_options =
          serde_json::to_value(semantic_tokens_registration_options()).unwrap();
        ls.client.when_outside_lsp_lock().register_capability(vec![Registration {
          id: "textDocument/semanticTokens".to_string(),
          method: "textDocument/semanticTokens".to_string(),
          // moved instead of cloned: this is the only use of the value
          register_options: Some(register_options),
        }]).await.inspect_err(|err| {
          lsp_warn!("Couldn't register capability for \"textDocument/semanticTokens\": {err}");
        }).ok();
      });
    }));
    self.registered_semantic_tokens_capabilities = true;
  }
}
// lspower::LanguageServer methods. This file's LanguageServer delegates to us.
impl Inner {
  /// Handles the LSP `initialize` request: starts the parent-process watcher,
  /// computes server capabilities, normalizes workspace folders (merging the
  /// deprecated `rootUri`), applies initialization options, and starts the
  /// push-diagnostics server when needed.
  fn initialize(
    &mut self,
    params: InitializeParams,
  ) -> LspResult<InitializeResult> {
    lsp_log!("Starting Deno language server...");
    let mark = self.performance.mark_with_args("lsp.initialize", &params);
    // exit this process when the parent is lost
    if let Some(parent_pid) = params.process_id {
      parent_process_checker::start(parent_pid)
    }
    let capabilities = capabilities::server_capabilities(&params.capabilities);
    let version = format!(
      "{} ({}, {})",
      DENO_VERSION_INFO.deno,
      env!("PROFILE"),
      env!("TARGET")
    );
    lsp_log!(" version: {}", version);
    if let Ok(path) = std::env::current_exe() {
      lsp_log!(" executable: {}", path.to_string_lossy());
    }
    let server_info = ServerInfo {
      name: "deno-language-server".to_string(),
      version: Some(version),
    };
    if let Some(client_info) = params.client_info {
      lsp_log!(
        "Connected to \"{}\" {}",
        client_info.name,
        client_info.version.unwrap_or_default(),
      );
    }
    {
      let mut workspace_folders = vec![];
      if let Some(folders) = params.workspace_folders {
        workspace_folders = folders
          .into_iter()
          .map(|folder| {
            let mut url = uri_to_url(&folder.uri);
            // ensure folder urls end with a slash so joins treat them as
            // directories
            if !url.path().ends_with('/')
              && let Ok(mut path_segments) = url.path_segments_mut()
            {
              path_segments.push("");
            }
            (Arc::new(url), folder)
          })
          .collect();
      }
      // rootUri is deprecated by the LSP spec. If it's specified, merge it into
      // workspace_folders.
      #[allow(deprecated)]
      if let Some(root_uri) = params.root_uri
        && !workspace_folders.iter().any(|(_, f)| f.uri == root_uri)
      {
        let mut root_url = uri_to_url(&root_uri);
        // use the last path segment as the folder's display name
        let name = root_url
          .path_segments()
          .and_then(|mut s| s.next_back())
          .unwrap_or_default()
          .to_string();
        if !root_url.path().ends_with('/')
          && let Ok(mut path_segments) = root_url.path_segments_mut()
        {
          path_segments.push("");
        }
        workspace_folders.insert(
          0,
          (
            Arc::new(root_url),
            WorkspaceFolder {
              uri: root_uri,
              name,
            },
          ),
        );
      }
      self.config.set_workspace_folders(workspace_folders);
      if let Some(options) = params.initialization_options {
        self.config.set_workspace_settings(
          WorkspaceSettings::from_initialization_options(options),
          vec![],
        );
      }
      self.config.set_client_capabilities(params.capabilities);
    }
    // only start the push-diagnostics server when `diagnostic_capable()` is
    // false -- presumably: the client can't pull diagnostics; confirm
    if !self.config.diagnostic_capable() {
      let mut diagnostics_server = DiagnosticsServer::new(
        self.client.clone(),
        self.performance.clone(),
        self.ts_server.clone(),
      );
      diagnostics_server.start();
      self.diagnostics_server = Some(diagnostics_server);
    }
    self
      .ts_server
      .set_inspector_server_addr(self.config.internal_inspect().to_address());
    self.update_tracing();
    self.update_debug_flag();
    // statically advertised semantic tokens means no dynamic registration
    if capabilities.semantic_tokens_provider.is_some() {
      self.registered_semantic_tokens_capabilities = true;
    }
    self.performance.measure(mark);
    Ok(InitializeResult {
      capabilities,
      server_info: Some(server_info),
      offset_encoding: None,
    })
  }
#[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))]
fn walk_workspace(config: &Config) -> (IndexSet<PathBuf>, bool) {
if !config.workspace_capable() {
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | true |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/lsp/resolver.rs | cli/lsp/resolver.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::collections::BTreeMap;
use std::collections::BTreeSet;
use std::collections::HashMap;
use std::collections::HashSet;
use std::sync::Arc;
use dashmap::DashMap;
use deno_ast::MediaType;
use deno_cache_dir::HttpCache;
use deno_cache_dir::npm::NpmCacheDir;
use deno_core::parking_lot::Mutex;
use deno_core::url::Url;
use deno_error::JsErrorBox;
use deno_graph::ModuleSpecifier;
use deno_graph::Range;
use deno_npm::NpmSystemInfo;
use deno_npm::resolution::NpmVersionResolver;
use deno_npm_cache::TarballCache;
use deno_npm_installer::LifecycleScriptsConfig;
use deno_npm_installer::initializer::NpmResolutionInitializer;
use deno_npm_installer::initializer::NpmResolverManagedSnapshotOption;
use deno_npm_installer::lifecycle_scripts::NullLifecycleScriptsExecutor;
use deno_npm_installer::package_json::NpmInstallDepsProvider;
use deno_npm_installer::resolution::NpmResolutionInstaller;
use deno_path_util::url_to_file_path;
use deno_resolver::DenoResolverOptions;
use deno_resolver::NodeAndNpmResolvers;
use deno_resolver::cjs::IsCjsResolutionMode;
use deno_resolver::deno_json::CompilerOptionsResolver;
use deno_resolver::deno_json::JsxImportSourceConfig;
use deno_resolver::graph::FoundPackageJsonDepFlag;
use deno_resolver::npm::CreateInNpmPkgCheckerOptions;
use deno_resolver::npm::DenoInNpmPackageChecker;
use deno_resolver::npm::NpmReqResolverOptions;
use deno_resolver::npm::managed::ManagedInNpmPkgCheckerCreateOptions;
use deno_resolver::npm::managed::ManagedNpmResolverCreateOptions;
use deno_resolver::npm::managed::NpmResolutionCell;
use deno_resolver::npmrc::create_default_npmrc;
use deno_resolver::workspace::CreateResolverOptions;
use deno_resolver::workspace::FsCacheOptions;
use deno_resolver::workspace::PackageJsonDepResolution;
use deno_resolver::workspace::SloppyImportsOptions;
use deno_resolver::workspace::WorkspaceNpmLinkPackagesRc;
use deno_resolver::workspace::WorkspaceResolver;
use deno_runtime::tokio_util::create_basic_runtime;
use deno_semver::jsr::JsrPackageReqReference;
use deno_semver::npm::NpmPackageReqReference;
use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq;
use indexmap::IndexMap;
use node_resolver::DenoIsBuiltInNodeModuleChecker;
use node_resolver::NodeResolutionKind;
use node_resolver::NodeResolverOptions;
use node_resolver::PackageJson;
use node_resolver::PackageJsonThreadLocalCache;
use node_resolver::ResolutionMode;
use node_resolver::cache::NodeResolutionSys;
use node_resolver::cache::NodeResolutionThreadLocalCache;
use once_cell::sync::Lazy;
use super::cache::LspCache;
use super::documents::DocumentModule;
use super::jsr::JsrCacheResolver;
use crate::args::CliLockfile;
use crate::factory::Deferred;
use crate::http_util::HttpClientProvider;
use crate::lsp::config::Config;
use crate::lsp::config::ConfigData;
use crate::lsp::logging::lsp_warn;
use crate::node::CliNodeResolver;
use crate::node::CliPackageJsonResolver;
use crate::npm::CliByonmNpmResolverCreateOptions;
use crate::npm::CliManagedNpmResolver;
use crate::npm::CliNpmCache;
use crate::npm::CliNpmCacheHttpClient;
use crate::npm::CliNpmInstaller;
use crate::npm::CliNpmRegistryInfoProvider;
use crate::npm::CliNpmResolver;
use crate::npm::CliNpmResolverCreateOptions;
use crate::resolver::CliIsCjsResolver;
use crate::resolver::CliNpmReqResolver;
use crate::resolver::CliResolver;
use crate::resolver::on_resolve_diagnostic;
use crate::sys::CliSys;
use crate::tsc::into_specifier_and_media_type;
use crate::util::progress_bar::ProgressBar;
use crate::util::progress_bar::ProgressBarStyle;
/// Resolver services associated with an optional `ConfigData` scope;
/// `Default` provides the instance used when there is no scope.
#[derive(Debug, Clone)]
pub struct LspScopedResolver {
  resolver: Arc<CliResolver>,
  workspace_resolver: Arc<WorkspaceResolver<CliSys>>,
  in_npm_pkg_checker: DenoInNpmPackageChecker,
  is_cjs_resolver: Arc<CliIsCjsResolver>,
  /// `None` for the default (unscoped) instance.
  jsr_resolver: Option<Arc<JsrCacheResolver>>,
  npm_installer: Option<Arc<CliNpmInstaller>>,
  npm_installer_reqs: Arc<Mutex<BTreeSet<PackageReq>>>,
  npm_resolution: Arc<NpmResolutionCell>,
  npm_resolver: Option<CliNpmResolver>,
  node_resolver: Option<Arc<CliNodeResolver>>,
  npm_pkg_req_resolver: Option<Arc<CliNpmReqResolver>>,
  pkg_json_resolver: Arc<CliPackageJsonResolver>,
  /// `None` for the default (unscoped) instance.
  redirect_resolver: Option<Arc<RedirectResolver>>,
  dep_info: Arc<Mutex<Arc<ScopeDepInfo>>>,
  configured_dep_resolutions: Arc<ConfiguredDepResolutions>,
  /// `None` for the unscoped default resolver.
  config_data: Option<Arc<ConfigData>>,
}
impl Default for LspScopedResolver {
  /// The unscoped fallback resolver: built from a factory with no config
  /// data, with no jsr/npm/node resolution support.
  fn default() -> Self {
    let factory = ResolverFactory::new(None);
    Self {
      resolver: factory.cli_resolver().clone(),
      workspace_resolver: factory.workspace_resolver().clone(),
      in_npm_pkg_checker: factory.in_npm_pkg_checker().clone(),
      is_cjs_resolver: factory.is_cjs_resolver().clone(),
      jsr_resolver: None,
      npm_installer: None,
      npm_installer_reqs: Default::default(),
      npm_resolver: None,
      node_resolver: None,
      npm_resolution: factory.services.npm_resolution.clone(),
      npm_pkg_req_resolver: None,
      pkg_json_resolver: factory.pkg_json_resolver().clone(),
      redirect_resolver: None,
      dep_info: Default::default(),
      configured_dep_resolutions: Default::default(),
      config_data: None,
    }
  }
}
impl LspScopedResolver {
  /// Creates a resolver for the scope described by `config_data` (or the
  /// unscoped defaults when `None`). When `http_client_provider` is given,
  /// the factory's npm resolver is initialized as well.
  async fn from_config_data(
    config_data: Option<&Arc<ConfigData>>,
    cache: &LspCache,
    http_client_provider: Option<&Arc<HttpClientProvider>>,
  ) -> Self {
    let mut factory = ResolverFactory::new(config_data);
    let workspace_resolver = factory.workspace_resolver().clone();
    if let Some(http_client_provider) = http_client_provider {
      factory.init_npm_resolver(http_client_provider, cache).await;
    }
    // these are `None` unless the npm resolver was initialized above
    let in_npm_pkg_checker = factory.in_npm_pkg_checker().clone();
    let npm_resolver = factory.npm_resolver().cloned();
    let npm_installer = factory.npm_installer().cloned();
    let node_resolver = factory.node_resolver().cloned();
    let npm_pkg_req_resolver = factory.npm_pkg_req_resolver().cloned();
    let cli_resolver = factory.cli_resolver().clone();
    let pkg_json_resolver = factory.pkg_json_resolver().clone();
    let jsr_resolver = Some(Arc::new(JsrCacheResolver::new(
      cache.for_specifier(config_data.map(|d| d.scope.as_ref())),
      config_data.map(|d| d.as_ref()),
      &workspace_resolver,
    )));
    let redirect_resolver = Some(Arc::new(RedirectResolver::new(
      cache.for_specifier(config_data.map(|d| d.scope.as_ref())),
      config_data.and_then(|d| d.lockfile.clone()),
    )));
    // immediately-invoked closure so `?` can bail out to `None` when there
    // is no npm req resolver; falls back to the default resolutions
    let configured_dep_resolutions = (|| {
      let npm_pkg_req_resolver = npm_pkg_req_resolver.as_ref()?;
      Some(Arc::new(ConfiguredDepResolutions::new(
        workspace_resolver.clone(),
        config_data.and_then(|d| d.maybe_pkg_json().map(|p| p.as_ref())),
        npm_pkg_req_resolver,
        &pkg_json_resolver,
      )))
    })()
    .unwrap_or_default();
    Self {
      resolver: cli_resolver,
      workspace_resolver,
      in_npm_pkg_checker,
      is_cjs_resolver: factory.is_cjs_resolver().clone(),
      jsr_resolver,
      npm_pkg_req_resolver,
      npm_resolver,
      npm_installer,
      npm_installer_reqs: Default::default(),
      npm_resolution: factory.services.npm_resolution.clone(),
      node_resolver,
      pkg_json_resolver,
      redirect_resolver,
      dep_info: Default::default(),
      configured_dep_resolutions,
      config_data: config_data.cloned(),
    }
  }
  /// Creates an independent copy of this resolver with the current npm
  /// resolution snapshot baked into a fresh factory, so the copy can be
  /// used without racing ongoing resolution changes.
  fn snapshot(&self) -> Arc<Self> {
    // create a copy of the resolution and then re-initialize the npm resolver from that
    // todo(dsherret): this is pretty terrible... we should improve this. It should
    // be possible to just change the npm_resolution on the new factory then access
    // another method to create a new npm resolver
    let mut factory = ResolverFactory::new(self.config_data.as_ref());
    factory
      .services
      .npm_resolution
      .set_snapshot(self.npm_resolution.snapshot());
    let npm_resolver = self.npm_resolver.as_ref();
    if let Some(npm_resolver) = &npm_resolver {
      // Rebuild the npm resolver against the new factory's resolution cell,
      // mirroring the byonm/managed configuration of the existing one.
      factory.set_npm_resolver(CliNpmResolver::new::<CliSys>(
        match npm_resolver {
          CliNpmResolver::Byonm(byonm_npm_resolver) => {
            CliNpmResolverCreateOptions::Byonm(
              CliByonmNpmResolverCreateOptions {
                root_node_modules_dir: byonm_npm_resolver
                  .root_node_modules_path()
                  .map(|p| p.to_path_buf()),
                sys: factory.node_resolution_sys.clone(),
                pkg_json_resolver: self.pkg_json_resolver.clone(),
              },
            )
          }
          CliNpmResolver::Managed(managed_npm_resolver) => {
            CliNpmResolverCreateOptions::Managed({
              let sys = CliSys::default();
              let npmrc = self
                .config_data
                .as_ref()
                .and_then(|d| d.npmrc.clone())
                .unwrap_or_else(|| Arc::new(create_default_npmrc(&sys)));
              let npm_cache_dir = Arc::new(NpmCacheDir::new(
                &sys,
                managed_npm_resolver.global_cache_root_path().to_path_buf(),
                npmrc.get_all_known_registries_urls(),
              ));
              ManagedNpmResolverCreateOptions {
                sys,
                npm_cache_dir,
                maybe_node_modules_path: managed_npm_resolver
                  .root_node_modules_path()
                  .map(|p| p.to_path_buf()),
                npmrc,
                npm_resolution: factory.services.npm_resolution.clone(),
                npm_system_info: NpmSystemInfo::default(),
              }
            })
          }
        },
      ));
    }
    // Factory-derived services come from the new factory; caches that are
    // safe to share (jsr, redirects, dep info) are cloned from `self`.
    Arc::new(Self {
      resolver: factory.cli_resolver().clone(),
      workspace_resolver: factory.workspace_resolver().clone(),
      in_npm_pkg_checker: factory.in_npm_pkg_checker().clone(),
      is_cjs_resolver: factory.is_cjs_resolver().clone(),
      jsr_resolver: self.jsr_resolver.clone(),
      npm_installer: self.npm_installer.clone(),
      npm_installer_reqs: self.npm_installer_reqs.clone(),
      npm_pkg_req_resolver: factory.npm_pkg_req_resolver().cloned(),
      npm_resolution: factory.services.npm_resolution.clone(),
      npm_resolver: factory.npm_resolver().cloned(),
      node_resolver: factory.node_resolver().cloned(),
      redirect_resolver: self.redirect_resolver.clone(),
      pkg_json_resolver: factory.pkg_json_resolver().clone(),
      dep_info: self.dep_info.clone(),
      configured_dep_resolutions: self.configured_dep_resolutions.clone(),
      config_data: self.config_data.clone(),
    })
  }
  pub fn as_in_npm_pkg_checker(&self) -> &DenoInNpmPackageChecker {
    &self.in_npm_pkg_checker
  }
  pub fn as_cli_resolver(&self) -> &CliResolver {
    self.resolver.as_ref()
  }
  pub fn as_workspace_resolver(&self) -> &Arc<WorkspaceResolver<CliSys>> {
    &self.workspace_resolver
  }
  pub fn as_is_cjs_resolver(&self) -> &CliIsCjsResolver {
    self.is_cjs_resolver.as_ref()
  }
  /// `None` when npm support was not initialized for this scope.
  pub fn as_node_resolver(&self) -> Option<&Arc<CliNodeResolver>> {
    self.node_resolver.as_ref()
  }
  /// Returns the managed npm resolver, if npm is initialized and the
  /// resolver is not the byonm variant.
  pub fn as_maybe_managed_npm_resolver(
    &self,
  ) -> Option<&Arc<CliManagedNpmResolver>> {
    self.npm_resolver.as_ref().and_then(|r| r.as_managed())
  }
  pub fn as_pkg_json_resolver(&self) -> &Arc<CliPackageJsonResolver> {
    &self.pkg_json_resolver
  }
  /// Returns `None` when no JSR resolver exists for this scope.
  pub fn jsr_to_resource_url(
    &self,
    req_ref: &JsrPackageReqReference,
  ) -> Option<ModuleSpecifier> {
    self.jsr_resolver.as_ref()?.jsr_to_resource_url(req_ref)
  }
  pub fn jsr_lookup_bare_specifier_for_workspace_file(
    &self,
    specifier: &Url,
  ) -> Option<String> {
    self
      .jsr_resolver
      .as_ref()?
      .lookup_bare_specifier_for_workspace_file(specifier)
  }
  pub fn jsr_lookup_export_for_path(
    &self,
    nv: &PackageNv,
    path: &str,
  ) -> Option<String> {
    self.jsr_resolver.as_ref()?.lookup_export_for_path(nv, path)
  }
  pub fn jsr_lookup_req_for_nv(&self, nv: &PackageNv) -> Option<PackageReq> {
    self.jsr_resolver.as_ref()?.lookup_req_for_nv(nv)
  }
  /// Resolves an `npm:` req reference to its file URL and media type, also
  /// recording the package requirement so it can be installed.
  pub fn npm_to_file_url(
    &self,
    req_ref: &NpmPackageReqReference,
    referrer: &ModuleSpecifier,
    resolution_kind: NodeResolutionKind,
    resolution_mode: ResolutionMode,
  ) -> Option<(ModuleSpecifier, MediaType)> {
    let npm_pkg_req_resolver = self.npm_pkg_req_resolver.as_ref()?;
    // Record the req even if the resolution below fails, so a subsequent
    // install can make it resolvable.
    self.add_npm_reqs(vec![req_ref.req().clone()]);
    Some(into_specifier_and_media_type(Some(
      npm_pkg_req_resolver
        .resolve_req_reference(
          req_ref,
          referrer,
          resolution_mode,
          resolution_kind,
        )
        .ok()?
        .into_url()
        .ok()?,
    )))
  }
  /// Maps a resolved file URL back to the configured dependency key that
  /// produced it (see `ConfiguredDepResolutions::dep_key_from_resolution`).
  pub fn resource_url_to_configured_dep_key(
    &self,
    specifier: &Url,
    referrer: &Url,
  ) -> Option<String> {
    self
      .configured_dep_resolutions
      .dep_key_from_resolution(specifier, referrer)
  }
  /// Returns a copy of the npm package requirements recorded so far.
  pub fn npm_reqs(&self) -> BTreeSet<PackageReq> {
    self.npm_installer_reqs.lock().clone()
  }
  pub fn deno_types_to_code_resolution(
    &self,
    specifier: &ModuleSpecifier,
  ) -> Option<ModuleSpecifier> {
    let dep_info = self.dep_info.lock();
    dep_info
      .deno_types_to_code_resolutions
      .get(specifier)
      .cloned()
  }
  /// Whether `specifier` is inside an npm package or any `node_modules`
  /// directory.
  pub fn in_node_modules(&self, specifier: &ModuleSpecifier) -> bool {
    fn has_node_modules_dir(specifier: &ModuleSpecifier) -> bool {
      // consider any /node_modules/ directory as being in the node_modules
      // folder for the LSP because it's pretty complicated to deal with multiple scopes
      specifier.scheme() == "file"
        && specifier
          .path()
          .to_ascii_lowercase()
          .contains("/node_modules/")
    }
    if let Some(node_resolver) = &self.node_resolver
      && node_resolver.in_npm_package(specifier)
    {
      return true;
    }
    has_node_modules_dir(specifier)
  }
  /// Resolves `specifier` through any recorded HTTP redirects. When there is
  /// no redirect resolver, the input is returned unchanged.
  pub fn resolve_redirects(
    &self,
    specifier: &ModuleSpecifier,
  ) -> Option<ModuleSpecifier> {
    let Some(redirect_resolver) = self.redirect_resolver.as_ref() else {
      return Some(specifier.clone());
    };
    redirect_resolver.resolve(specifier)
  }
  /// Returns the redirect chain for `specifier` along with each hop's
  /// headers (empty when no redirect resolver is present).
  pub fn redirect_chain_headers(
    &self,
    specifier: &ModuleSpecifier,
  ) -> Vec<(ModuleSpecifier, Arc<HashMap<String, String>>)> {
    let Some(redirect_resolver) = self.redirect_resolver.as_ref() else {
      return vec![];
    };
    redirect_resolver
      .chain(specifier)
      .into_iter()
      .map(|(s, e)| (s, e.headers.clone()))
      .collect()
  }
  /// Re-sends all recorded npm requirements to the installer thread.
  pub fn refresh_npm_reqs(&self) {
    let Some(npm_installer) = self.npm_installer.as_ref().cloned() else {
      return;
    };
    // NOTE(review): the guard stays held across the `add_npm_reqs` call
    // below — presumably intentional to serialize with `add_npm_reqs`;
    // confirm there is no re-entrancy.
    let npm_installer_reqs = self.npm_installer_reqs.lock();
    let reqs = npm_installer_reqs.iter().cloned().collect::<Vec<_>>();
    if let Err(err) = ADD_NPM_REQS_THREAD.add_npm_reqs(npm_installer, reqs) {
      lsp_warn!("Could not refresh npm package requirements: {:#}", err);
    }
  }
  /// Records `reqs` and, when any of them are new, forwards them to the
  /// installer thread.
  pub fn add_npm_reqs(&self, reqs: Vec<PackageReq>) {
    let Some(npm_installer) = self.npm_installer.as_ref().cloned() else {
      return;
    };
    let mut npm_installer_reqs = self.npm_installer_reqs.lock();
    let old_reqs_count = npm_installer_reqs.len();
    npm_installer_reqs.extend(reqs.clone());
    // Skip the send entirely when nothing new was added.
    if npm_installer_reqs.len() == old_reqs_count {
      return;
    }
    if let Err(err) = ADD_NPM_REQS_THREAD.add_npm_reqs(npm_installer, reqs) {
      lsp_warn!("Could not add npm package requirements: {:#}", err);
    }
  }
}
#[derive(Debug, Default, Clone)]
pub struct LspResolver {
  // Resolver used for documents that fall outside every workspace scope.
  unscoped: Arc<LspScopedResolver>,
  // One resolver per workspace scope, keyed by the scope's root URL.
  by_scope: BTreeMap<Arc<Url>, Arc<LspScopedResolver>>,
}
impl LspResolver {
  /// Builds a resolver for every scope in `config`, plus an unscoped
  /// fallback, then seeds each resolver's npm requirements from the npm
  /// packages already pinned in its lockfile.
  pub async fn from_config(
    config: &Config,
    cache: &LspCache,
    http_client_provider: Option<&Arc<HttpClientProvider>>,
  ) -> Self {
    let mut by_scope = BTreeMap::new();
    for (scope, config_data) in config.tree.data_by_scope().as_ref() {
      by_scope.insert(
        scope.clone(),
        Arc::new(
          LspScopedResolver::from_config_data(
            Some(config_data),
            cache,
            http_client_provider,
          )
          .await,
        ),
      );
    }
    let unscoped = Arc::new(
      LspScopedResolver::from_config_data(None, cache, http_client_provider)
        .await,
    );
    for resolver in std::iter::once(&unscoped).chain(by_scope.values()) {
      if resolver.npm_installer.is_none() {
        continue;
      }
      let Some(lockfile) = resolver
        .config_data
        .as_ref()
        .and_then(|d| d.lockfile.as_ref())
      else {
        continue;
      };
      // Pre-register npm reqs from the lockfile so they are installable
      // before any document references them.
      let npm_reqs = lockfile
        .lock()
        .content
        .packages
        .specifiers
        .keys()
        .filter(|r| r.kind == deno_semver::package::PackageKind::Npm)
        .map(|r| r.req.clone())
        .collect::<Vec<_>>();
      resolver.add_npm_reqs(npm_reqs);
    }
    Self { unscoped, by_scope }
  }
  /// Propagates the compiler options resolver to every scope's workspace
  /// resolver (including the unscoped one).
  pub fn set_compiler_options_resolver(
    &self,
    value: &Arc<CompilerOptionsResolver>,
  ) {
    for resolver in
      std::iter::once(&self.unscoped).chain(self.by_scope.values())
    {
      resolver
        .workspace_resolver
        .set_compiler_options_resolver(value.clone());
    }
  }
  /// Snapshots every scoped resolver (see `LspScopedResolver::snapshot`).
  pub fn snapshot(&self) -> Arc<Self> {
    Arc::new(Self {
      unscoped: self.unscoped.snapshot(),
      by_scope: self
        .by_scope
        .iter()
        .map(|(s, r)| (s.clone(), r.snapshot()))
        .collect(),
    })
  }
  /// Notifies sub-resolvers that a cache operation completed so they can
  /// refresh their cached state.
  pub fn did_cache(&self) {
    for resolver in
      std::iter::once(&self.unscoped).chain(self.by_scope.values())
    {
      resolver.jsr_resolver.as_ref().inspect(|r| r.did_cache());
      resolver
        .redirect_resolver
        .as_ref()
        .inspect(|r| r.did_cache());
      resolver.refresh_npm_reqs();
    }
  }
  /// Collects `npm:` specifiers from a newly created document module's
  /// dependencies (code, type, and `@ts-types` deps) and registers them with
  /// the module's scoped resolver.
  pub fn did_create_module(&self, module: &DocumentModule) {
    let resolver = self.get_scoped_resolver(module.scope.as_deref());
    let npm_reqs = module
      .dependencies
      .values()
      .flat_map(|d| [d.get_code(), d.get_type()])
      .flatten()
      .chain(
        module
          .types_dependency
          .iter()
          .flat_map(|d| d.dependency.maybe_specifier()),
      )
      .flat_map(|s| NpmPackageReqReference::from_specifier(s).ok())
      .map(|r| r.into_inner().req)
      .collect::<Vec<_>>();
    resolver.add_npm_reqs(npm_reqs);
  }
  /// Replaces each scope's dependency info; scopes missing from the map get
  /// a default (empty) value.
  pub fn set_dep_info_by_scope(
    &self,
    dep_info_by_scope: &Arc<BTreeMap<Option<Arc<Url>>, Arc<ScopeDepInfo>>>,
  ) {
    for (scope, resolver) in [(None, &self.unscoped)]
      .into_iter()
      .chain(self.by_scope.iter().map(|(s, r)| (Some(s), r)))
    {
      let dep_info = dep_info_by_scope
        .get(&scope.cloned())
        .cloned()
        .unwrap_or_default();
      {
        // Scope the lock guard tightly.
        let mut resolver_dep_info = resolver.dep_info.lock();
        *resolver_dep_info = dep_info.clone();
      }
    }
  }
  pub fn in_node_modules(&self, specifier: &ModuleSpecifier) -> bool {
    self
      .get_scoped_resolver(Some(specifier))
      .in_node_modules(specifier)
  }
  /// Returns the resolver for the last-sorted scope containing
  /// `file_referrer`, falling back to the unscoped resolver.
  pub fn get_scoped_resolver(
    &self,
    file_referrer: Option<&ModuleSpecifier>,
  ) -> &LspScopedResolver {
    let Some(file_referrer) = file_referrer else {
      return self.unscoped.as_ref();
    };
    self
      .by_scope
      .values()
      .rfind(|r| {
        r.config_data
          .as_ref()
          .map(|d| d.scope_contains_specifier(file_referrer))
          .unwrap_or(false)
      })
      .map(|r| r.as_ref())
      .unwrap_or(self.unscoped.as_ref())
  }
}
/// Per-scope dependency info collected from documents.
#[derive(Debug, Default, Clone)]
pub struct ScopeDepInfo {
  pub deno_types_to_code_resolutions: HashMap<ModuleSpecifier, ModuleSpecifier>,
  pub has_node_specifier: bool,
}
/// Where a configured dependency mapping came from.
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
enum ConfiguredDepKind {
  ImportMap { key: String, value: Url },
  PackageJson,
}
/// Reverse index from resolved file URLs to the configured dependency
/// (import map entry or package.json dependency) that produced them.
#[derive(Debug, Default)]
struct ConfiguredDepResolutions {
  workspace_resolver: Option<Arc<WorkspaceResolver<CliSys>>>,
  deps_by_resolution: IndexMap<ModuleSpecifier, (String, ConfiguredDepKind)>,
}
impl ConfiguredDepResolutions {
fn new(
workspace_resolver: Arc<WorkspaceResolver<CliSys>>,
package_json: Option<&PackageJson>,
npm_pkg_req_resolver: &CliNpmReqResolver,
pkg_json_resolver: &CliPackageJsonResolver,
) -> Self {
let mut result = Self::default();
let insert_export_resolutions =
|key_prefix: &str,
dep_req_str: &str,
dep_package_json: &PackageJson,
referrer,
dep_kind: &ConfiguredDepKind,
result: &mut Self| {
let export_keys = dep_package_json
.exports
.as_ref()
.into_iter()
.flat_map(|e| e.keys());
for export_key in export_keys {
let Some(export_name) = export_key.strip_prefix("./") else {
continue;
};
// Wildcards are not supported here.
if export_name.chars().filter(|c| *c == '*').count() == 1 {
continue;
}
let Some(req_ref) = NpmPackageReqReference::from_str(&format!(
"npm:{dep_req_str}/{export_name}"
))
.ok() else {
continue;
};
for kind in [NodeResolutionKind::Types, NodeResolutionKind::Execution]
{
let Some(url_or_path) = npm_pkg_req_resolver
.resolve_req_reference(
&req_ref,
referrer,
// todo(dsherret): this is wrong because it doesn't consider CJS referrers
ResolutionMode::Import,
kind,
)
.ok()
else {
continue;
};
let Some(file_url) = url_or_path.into_url().ok() else {
continue;
};
result.deps_by_resolution.insert(
file_url,
(format!("{key_prefix}/{export_name}"), dep_kind.clone()),
);
}
}
};
if let Some(import_map) = workspace_resolver.maybe_import_map() {
let referrer = import_map.base_url();
for entry in import_map.imports().entries().chain(
import_map
.scopes()
.flat_map(|scope| scope.imports.entries()),
) {
let Some(value) = entry.value else {
continue;
};
let Ok(req_ref) = NpmPackageReqReference::from_specifier(value) else {
continue;
};
let dep_kind = ConfiguredDepKind::ImportMap {
key: entry.key.to_string(),
value: value.clone(),
};
let mut dep_package_json = None;
for kind in [NodeResolutionKind::Types, NodeResolutionKind::Execution] {
let Some(file_url) = npm_pkg_req_resolver
.resolve_req_reference(
&req_ref,
referrer,
// todo(dsherret): this is wrong because it doesn't consider CJS referrers
ResolutionMode::Import,
kind,
)
.ok()
.and_then(|u| u.into_url().ok())
else {
continue;
};
if dep_package_json.is_none() {
dep_package_json = (|| {
let path = url_to_file_path(&file_url).ok()?;
pkg_json_resolver.get_closest_package_json(&path).ok()?
})();
}
if !entry.key.ends_with('/') {
result.deps_by_resolution.insert(
file_url,
(
entry.key.to_string(),
ConfiguredDepKind::ImportMap {
key: entry.key.to_string(),
value: value.clone(),
},
),
);
}
}
if let Some(key_prefix) = entry.key.strip_suffix('/')
&& req_ref.sub_path().is_none()
&& let Some(dep_package_json) = &dep_package_json
{
insert_export_resolutions(
key_prefix,
&req_ref.req().to_string(),
dep_package_json,
referrer,
&dep_kind,
&mut result,
);
}
}
}
if let Some(package_json) = package_json {
let referrer = package_json.specifier();
let Some(dependencies) = package_json.dependencies.as_ref() else {
return Self::default();
};
for name in dependencies.keys() {
let Some(req_ref) =
NpmPackageReqReference::from_str(&format!("npm:{name}")).ok()
else {
continue;
};
let mut dep_package_json = None;
for kind in [NodeResolutionKind::Types, NodeResolutionKind::Execution] {
let Ok(req) = npm_pkg_req_resolver.resolve_req_reference(
&req_ref,
&referrer,
// todo(dsherret): this is wrong because it doesn't consider CJS referrers
ResolutionMode::Import,
kind,
) else {
continue;
};
let Some(file_url) = req.into_url().ok() else {
continue;
};
if dep_package_json.is_none() {
dep_package_json = (|| {
let path = url_to_file_path(&file_url).ok()?;
pkg_json_resolver.get_closest_package_json(&path).ok()?
})();
}
result
.deps_by_resolution
.insert(file_url, (name.clone(), ConfiguredDepKind::PackageJson));
}
if let Some(dep_package_json) = &dep_package_json {
insert_export_resolutions(
name,
name,
dep_package_json,
&referrer,
&ConfiguredDepKind::PackageJson,
&mut result,
);
}
}
}
result.workspace_resolver = Some(workspace_resolver);
result
}
  /// Maps a resolved file URL back to the dependency key (import map key or
  /// package.json dependency name) that produced it, if that mapping is
  /// still valid for `referrer`.
  fn dep_key_from_resolution(
    &self,
    resolution: &Url,
    referrer: &Url,
  ) -> Option<String> {
    self
      .deps_by_resolution
      .get(resolution)
      .and_then(|(dep_key, kind)| match kind {
        // Ensure the mapping this entry came from is valid for this referrer.
        ConfiguredDepKind::ImportMap { key, value } => self
          .workspace_resolver
          .as_ref()?
          .maybe_import_map()?
          .resolve(key, referrer)
          .is_ok_and(|s| &s == value)
          .then(|| dep_key.clone()),
        ConfiguredDepKind::PackageJson => Some(dep_key.clone()),
      })
  }
}
/// Lazily-created services shared by a `ResolverFactory` instance.
#[derive(Default)]
struct ResolverFactoryServices {
  cli_resolver: Deferred<Arc<CliResolver>>,
  workspace_resolver: Deferred<Arc<WorkspaceResolver<CliSys>>>,
  found_pkg_json_dep_flag: Arc<FoundPackageJsonDepFlag>,
  in_npm_pkg_checker: Deferred<DenoInNpmPackageChecker>,
  is_cjs_resolver: Deferred<Arc<CliIsCjsResolver>>,
  node_resolver: Deferred<Option<Arc<CliNodeResolver>>>,
  npm_installer: Option<Arc<CliNpmInstaller>>,
  npm_pkg_req_resolver: Deferred<Option<Arc<CliNpmReqResolver>>>,
  npm_resolver: Option<CliNpmResolver>,
  npm_resolution: Arc<NpmResolutionCell>,
}
/// Builds the various resolvers for a single config scope.
struct ResolverFactory<'a> {
  config_data: Option<&'a Arc<ConfigData>>,
  pkg_json_resolver: Arc<CliPackageJsonResolver>,
  node_resolution_sys: NodeResolutionSys<CliSys>,
  sys: CliSys,
  services: ResolverFactoryServices,
}
impl<'a> ResolverFactory<'a> {
  /// Creates a factory for the given scope's config data (`None` for the
  /// unscoped resolver). Uses thread-local caches which the LSP clears
  /// frequently.
  pub fn new(config_data: Option<&'a Arc<ConfigData>>) -> Self {
    let sys = CliSys::default();
    let pkg_json_resolver = Arc::new(CliPackageJsonResolver::new(
      sys.clone(),
      // this should be ok because we handle clearing this cache often in the LSP
      Some(Arc::new(PackageJsonThreadLocalCache)),
    ));
    Self {
      config_data,
      pkg_json_resolver,
      node_resolution_sys: NodeResolutionSys::new(
        sys.clone(),
        Some(Arc::new(NodeResolutionThreadLocalCache)),
      ),
      sys,
      services: Default::default(),
    }
  }
// todo(dsherret): probably this method could be removed in the future
// and instead just `npm_resolution_initializer.ensure_initialized()` could
// be called. The reason this exists is because creating the npm resolvers
// used to be async.
async fn init_npm_resolver(
&mut self,
http_client_provider: &Arc<HttpClientProvider>,
cache: &LspCache,
) {
let enable_byonm = self.config_data.map(|d| d.byonm).unwrap_or(false);
let sys = CliSys::default();
let options = if enable_byonm {
CliNpmResolverCreateOptions::Byonm(CliByonmNpmResolverCreateOptions {
sys: self.node_resolution_sys.clone(),
pkg_json_resolver: self.pkg_json_resolver.clone(),
root_node_modules_dir: self.config_data.and_then(|config_data| {
config_data.node_modules_dir.clone().or_else(|| {
url_to_file_path(&config_data.scope)
.ok()
.map(|p| p.join("node_modules/"))
})
}),
})
} else {
let npmrc = self
.config_data
.and_then(|d| d.npmrc.clone())
.unwrap_or_else(|| Arc::new(create_default_npmrc(&sys)));
let npm_cache_dir = Arc::new(NpmCacheDir::new(
&sys,
cache.deno_dir().npm_folder_path(),
npmrc.get_all_known_registries_urls(),
));
let npm_cache = Arc::new(CliNpmCache::new(
npm_cache_dir.clone(),
sys.clone(),
// Use an "only" cache setting in order to make the
// user do an explicit "cache" command and prevent
// the cache from being filled with lots of packages while
// the user is typing.
deno_npm_cache::NpmCacheSetting::Only,
npmrc.clone(),
));
let pb = ProgressBar::new(ProgressBarStyle::TextOnly);
let npm_client = Arc::new(CliNpmCacheHttpClient::new(
http_client_provider.clone(),
pb.clone(),
));
let registry_info_provider = Arc::new(CliNpmRegistryInfoProvider::new(
npm_cache.clone(),
npm_client.clone(),
npmrc.clone(),
));
let link_packages: WorkspaceNpmLinkPackagesRc = self
.config_data
.as_ref()
.filter(|c| c.node_modules_dir.is_some()) // requires a node_modules dir
.map(|d| {
WorkspaceNpmLinkPackagesRc::from_workspace(&d.member_dir.workspace)
})
.unwrap_or_default();
let npm_resolution_initializer = Arc::new(NpmResolutionInitializer::new(
self.services.npm_resolution.clone(),
link_packages.clone(),
match self.config_data.and_then(|d| d.lockfile.as_ref()) {
Some(lockfile) => {
NpmResolverManagedSnapshotOption::ResolveFromLockfile(
lockfile.clone(),
)
}
None => NpmResolverManagedSnapshotOption::Specified(None),
},
));
// Don't provide the lockfile. We don't want these resolvers
// updating it. Only the cache request should update the lockfile.
let maybe_lockfile: Option<Arc<CliLockfile>> = None;
let maybe_node_modules_path =
self.config_data.and_then(|d| d.node_modules_dir.clone());
let tarball_cache = Arc::new(TarballCache::new(
npm_cache.clone(),
npm_client.clone(),
sys.clone(),
npmrc.clone(),
None,
));
let npm_version_resolver = Arc::new(NpmVersionResolver {
types_node_version_req: None,
link_packages: link_packages.0.clone(),
newest_dependency_date_options: Default::default(),
});
let npm_resolution_installer = Arc::new(NpmResolutionInstaller::new(
Default::default(),
npm_version_resolver,
registry_info_provider.clone(),
None,
self.services.npm_resolution.clone(),
maybe_lockfile.clone(),
));
let npm_installer = Arc::new(CliNpmInstaller::new(
None,
Arc::new(NullLifecycleScriptsExecutor),
npm_cache.clone(),
Arc::new(NpmInstallDepsProvider::empty()),
registry_info_provider.clone(),
self.services.npm_resolution.clone(),
npm_resolution_initializer.clone(),
npm_resolution_installer,
&pb,
sys.clone(),
tarball_cache.clone(),
deno_npm_installer::NpmInstallerOptions {
maybe_lockfile,
maybe_node_modules_path: maybe_node_modules_path.clone(),
lifecycle_scripts: Arc::new(LifecycleScriptsConfig::default()),
system_info: NpmSystemInfo::default(),
workspace_link_packages: link_packages,
},
));
self.set_npm_installer(npm_installer);
if let Err(err) = npm_resolution_initializer.ensure_initialized().await {
log::warn!("failed to initialize npm resolution: {}", err);
}
CliNpmResolverCreateOptions::Managed(ManagedNpmResolverCreateOptions {
sys: CliSys::default(),
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | true |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/lsp/urls.rs | cli/lsp/urls.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::path::Component;
use std::path::Path;
use std::path::PathBuf;
use std::str::FromStr;
use deno_config::UrlToFilePathError;
use deno_core::error::AnyError;
use deno_core::url::Url;
use deno_path_util::url_to_file_path;
use lsp_types::Uri;
use super::logging::lsp_warn;
/// Matches the `encodeURIComponent()` encoding from JavaScript, which matches
/// the component percent encoding set.
///
/// See: <https://url.spec.whatwg.org/#component-percent-encode-set>
pub const COMPONENT: &percent_encoding::AsciiSet = &percent_encoding::CONTROLS
.add(b' ')
.add(b'"')
.add(b'#')
.add(b'<')
.add(b'>')
.add(b'?')
.add(b'`')
.add(b'{')
.add(b'}')
.add(b'/')
.add(b':')
.add(b';')
.add(b'=')
.add(b'@')
.add(b'[')
.add(b'\\')
.add(b']')
.add(b'^')
.add(b'|')
.add(b'$')
.add(b'%')
.add(b'&')
.add(b'+')
.add(b',');
/// Characters that are left unencoded in a `Url` path but will be encoded in a
/// VSCode URI.
const URL_TO_URI_PATH: &percent_encoding::AsciiSet =
&percent_encoding::CONTROLS
.add(b' ')
.add(b'!')
.add(b'$')
.add(b'&')
.add(b'\'')
.add(b'(')
.add(b')')
.add(b'*')
.add(b'+')
.add(b',')
.add(b':')
.add(b';')
.add(b'=')
.add(b'@')
.add(b'[')
.add(b']')
.add(b'^')
.add(b'|');
/// Characters that may be left unencoded in a `Url` query but not valid in a
/// `Uri` query.
const URL_TO_URI_QUERY: &percent_encoding::AsciiSet =
&URL_TO_URI_PATH.add(b'\\').add(b'`').add(b'{').add(b'}');
/// Characters that may be left unencoded in a `Url` fragment but not valid in
/// a `Uri` fragment.
const URL_TO_URI_FRAGMENT: &percent_encoding::AsciiSet =
&URL_TO_URI_PATH.add(b'#').add(b'\\').add(b'{').add(b'}');
/// Parses `s` as a URL and converts it to an LSP `Uri`, percent-encoding
/// any characters that are valid in a `Url` but not in a `Uri`.
pub fn uri_parse_unencoded(s: &str) -> Result<Uri, AnyError> {
  url_to_uri(&Url::parse(s)?)
}
/// Normalizes `file:` URIs (path normalization, drive-letter casing) via
/// `normalize_url`; non-file URIs are returned unchanged.
pub fn normalize_uri(uri: &Uri) -> Cow<'_, Uri> {
  if !uri.scheme().is_some_and(|s| s.eq_lowercase("file")) {
    return Cow::Borrowed(uri);
  }
  // NOTE(review): assumes any file-scheme `Uri` string parses as a `Url`;
  // the unwrap would panic otherwise — confirm this invariant holds.
  let url = normalize_url(Url::parse(uri.as_str()).unwrap());
  let Ok(normalized_uri) = url_to_uri(&url) else {
    return Cow::Borrowed(uri);
  };
  Cow::Owned(normalized_uri)
}
/// Converts a `Url` to an LSP `Uri`, percent-encoding characters that a
/// `Url` leaves bare but a `Uri` requires encoded, and lowercasing Windows
/// drive letters.
pub fn url_to_uri(url: &Url) -> Result<Uri, AnyError> {
  let components = deno_core::url::quirks::internal_components(url);
  let mut input = String::with_capacity(url.as_str().len());
  // Copy scheme/authority verbatim; only path/query/fragment are re-encoded.
  input.push_str(&url.as_str()[..components.path_start as usize]);
  let path = url.path();
  let mut chars = path.chars();
  // Detects paths of the form `/c:` or `/c:/...`.
  let has_drive_letter = chars.next().is_some_and(|c| c == '/')
    && chars.next().is_some_and(|c| c.is_ascii_alphabetic())
    && chars.next().is_some_and(|c| c == ':')
    && chars.next().is_none_or(|c| c == '/');
  if has_drive_letter {
    // Keep the drive letter unencoded but lowercase it; encode the rest
    // (including the colon) like any other path.
    let (dl_part, rest) = path.split_at(2);
    input.push_str(&dl_part.to_ascii_lowercase());
    input.push_str(
      &percent_encoding::utf8_percent_encode(rest, URL_TO_URI_PATH).to_string(),
    );
  } else {
    input.push_str(
      &percent_encoding::utf8_percent_encode(path, URL_TO_URI_PATH).to_string(),
    );
  }
  if let Some(query) = url.query() {
    input.push('?');
    input.push_str(
      &percent_encoding::utf8_percent_encode(query, URL_TO_URI_QUERY)
        .to_string(),
    );
  }
  if let Some(fragment) = url.fragment() {
    input.push('#');
    input.push_str(
      &percent_encoding::utf8_percent_encode(fragment, URL_TO_URI_FRAGMENT)
        .to_string(),
    );
  }
  Ok(Uri::from_str(&input).inspect_err(|err| {
    lsp_warn!("Could not convert URL \"{url}\" to URI: {err}")
  })?)
}
/// Converts an LSP `Uri` to a `Url`, mapping editor-virtual schemes
/// (`untitled`, notebook cells, `vscode-userdata`) onto `file:` URLs so the
/// rest of the LSP can treat them uniformly.
pub fn uri_to_url(uri: &Uri) -> Url {
  (|| {
    let scheme = uri.scheme()?;
    if !scheme.eq_lowercase("untitled")
      && !scheme.eq_lowercase("vscode-notebook-cell")
      && !scheme.eq_lowercase("deno-notebook-cell")
      && !scheme.eq_lowercase("vscode-userdata")
    {
      return None;
    }
    // Re-root the URI's path portion under the `file:` scheme.
    Url::parse(&format!(
      "file:///{}",
      &uri.as_str()[uri.path_bounds.0 as usize..].trim_start_matches('/'),
    ))
    .ok()
    .map(normalize_url)
  })()
  .unwrap_or_else(|| normalize_url(Url::parse(uri.as_str()).unwrap()))
}
/// Converts an LSP `Uri` to a filesystem path via `uri_to_url`.
pub fn uri_to_file_path(uri: &Uri) -> Result<PathBuf, UrlToFilePathError> {
  url_to_file_path(&uri_to_url(uri))
}
/// Whether this URI's scheme refers to a file-like document the LSP can
/// treat as a local file (real files, untitled buffers, notebook cells,
/// and VSCode user-data documents).
pub fn uri_is_file_like(uri: &Uri) -> bool {
  const FILE_LIKE_SCHEMES: [&str; 5] = [
    "file",
    "untitled",
    "vscode-notebook-cell",
    "deno-notebook-cell",
    "vscode-userdata",
  ];
  match uri.scheme() {
    Some(scheme) => FILE_LIKE_SCHEMES
      .iter()
      .any(|expected| scheme.eq_lowercase(expected)),
    None => false,
  }
}
/// Normalizes a `file:` URL's path (resolving `.`/`..`, uppercasing drive
/// letters via `normalize_path`) while preserving its query and fragment.
/// URLs that don't map to a file path are returned unchanged.
fn normalize_url(url: Url) -> Url {
  let Ok(path) = url_to_file_path(&url) else {
    return url;
  };
  let normalized_path = normalize_path(&path);
  let Ok(mut normalized_url) = Url::from_file_path(&normalized_path) else {
    return url;
  };
  // Rebuilding from the file path loses the query/fragment, so restore them.
  if let Some(query) = url.query() {
    normalized_url.set_query(Some(query));
  }
  if let Some(fragment) = url.fragment() {
    normalized_url.set_fragment(Some(fragment));
  }
  normalized_url
}
// TODO(nayeemrmn): Change the version of this in deno_path_util to force
// uppercase on drive letters. Then remove this.
fn normalize_path<P: AsRef<Path>>(path: P) -> PathBuf {
  /// Lexically normalizes `path`: drops `.` components, resolves `..` by
  /// popping, and uppercases a bare two-byte drive-letter prefix (`c:`).
  fn inner(path: &Path) -> PathBuf {
    let mut parts = path.components().peekable();
    // Seed the result with the (possibly uppercased) Windows prefix, if any.
    let mut normalized = match parts.peek().copied() {
      Some(first @ Component::Prefix(..)) => {
        parts.next();
        let prefix = first.as_os_str();
        if prefix.len() == 2 {
          // A two-byte prefix is a bare drive letter like `c:`.
          PathBuf::from(prefix.to_ascii_uppercase())
        } else {
          PathBuf::from(prefix)
        }
      }
      _ => PathBuf::new(),
    };
    for part in parts {
      match part {
        // The prefix, if present, was consumed above.
        Component::Prefix(..) => unreachable!(),
        Component::RootDir => normalized.push(part.as_os_str()),
        Component::CurDir => {}
        Component::ParentDir => {
          normalized.pop();
        }
        Component::Normal(name) => normalized.push(name),
      }
    }
    normalized
  }
  inner(path.as_ref())
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/lsp/compiler_options.rs | cli/lsp/compiler_options.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::collections::BTreeMap;
use std::collections::HashSet;
use std::sync::Arc;
use deno_config::deno_json::CompilerOptions;
use deno_config::workspace::TsTypeLib;
use deno_core::url::Url;
use deno_resolver::deno_json::CompilerOptionsKey;
use deno_resolver::deno_json::CompilerOptionsResolver;
use deno_resolver::deno_json::CompilerOptionsType;
use deno_resolver::deno_json::JsxImportSourceConfig;
use deno_resolver::deno_json::TsConfigFile;
use deno_resolver::deno_json::get_base_compiler_options_for_emit;
use crate::lsp::config::Config;
use crate::lsp::logging::lsp_warn;
use crate::lsp::resolver::LspResolver;
use crate::sys::CliSys;
use crate::util::fs::canonicalize_path_maybe_not_exists;
/// Pre-computed, LSP-friendly view of one compiler options scope.
#[derive(Debug, Clone)]
pub struct LspCompilerOptionsData {
  pub workspace_dir_or_source_url: Option<Arc<Url>>,
  pub compiler_options: Arc<CompilerOptions>,
  pub compiler_options_types: Arc<Vec<(Url, Vec<String>)>>,
  pub skip_lib_check: bool,
  pub jsx_import_source_config: Option<Arc<JsxImportSourceConfig>>,
  pub ts_config_files: Option<(Arc<Url>, Vec<TsConfigFile>)>,
  // Source files (and their canonicalized file URLs) whose changes should
  // invalidate this data; see `is_watched_file`.
  watched_files: HashSet<Arc<Url>>,
}
/// Wraps a `CompilerOptionsResolver` together with per-key data pre-computed
/// for LSP usage.
#[derive(Debug)]
pub struct LspCompilerOptionsResolver {
  pub inner: Arc<CompilerOptionsResolver>,
  data: BTreeMap<CompilerOptionsKey, LspCompilerOptionsData>,
}
impl Default for LspCompilerOptionsResolver {
  fn default() -> Self {
    Self::from_inner(Default::default())
  }
}
impl LspCompilerOptionsResolver {
  /// Resolves compiler options for every workspace directory in `config`,
  /// using each scope's node resolver for lookups.
  pub fn new(config: &Config, resolver: &LspResolver) -> Self {
    Self::from_inner(CompilerOptionsResolver::new_for_dirs_by_scope(
      &CliSys::default(),
      config
        .tree
        .data_by_scope()
        .iter()
        .map(|(s, d)| (s, d.member_dir.as_ref()))
        .collect(),
      Box::new(|s| {
        resolver
          .get_scoped_resolver(Some(s))
          .as_node_resolver()
          .map(|r| r.as_ref())
      }),
    ))
  }
  /// Eagerly computes `LspCompilerOptionsData` for every entry in `inner`,
  /// logging (but otherwise tolerating) option-parsing errors.
  fn from_inner(inner: CompilerOptionsResolver) -> Self {
    let data = inner
      .entries()
      .map(|(k, d, f)| {
        (
          k,
          LspCompilerOptionsData {
            workspace_dir_or_source_url: d
              .workspace_dir_or_source_url()
              .cloned(),
            compiler_options: d
              .compiler_options_for_lib(TsTypeLib::DenoWindow)
              .inspect_err(|err| {
                lsp_warn!("{err:#}");
              })
              .ok()
              .cloned()
              // Fall back to the base check options when the scope's own
              // options fail to resolve.
              .unwrap_or_else(|| {
                Arc::new(get_base_compiler_options_for_emit(
                  CompilerOptionsType::Check {
                    lib: TsTypeLib::DenoWindow,
                  },
                  d.source_kind,
                ))
              }),
            compiler_options_types: d.compiler_options_types().clone(),
            skip_lib_check: d.skip_lib_check(),
            jsx_import_source_config: d
              .jsx_import_source_config()
              .inspect_err(|err| {
                lsp_warn!("{err:#}");
              })
              .ok()
              .flatten()
              .cloned(),
            ts_config_files: f.map(|(r, f)| (r.clone(), f.clone())),
            // Watch both the raw source specifier and its canonicalized
            // file URL (symlinks etc.).
            watched_files: d
              .sources
              .iter()
              .flat_map(|s| {
                std::iter::once(s.specifier.clone()).chain(
                  s.specifier
                    .to_file_path()
                    .ok()
                    .and_then(|p| canonicalize_path_maybe_not_exists(&p).ok())
                    .and_then(|p| Url::from_file_path(p).ok().map(Arc::new)),
                )
              })
              .collect(),
          },
        )
      })
      .collect();
    Self {
      inner: Arc::new(inner),
      data,
    }
  }
  /// Data for the compiler options scope containing `specifier`.
  pub fn for_specifier(&self, specifier: &Url) -> &LspCompilerOptionsData {
    self
      .data
      .get(&self.inner.entry_for_specifier(specifier).0)
      .expect("Stored key should be mapped.")
  }
  /// Key and data for the scope containing `specifier`.
  pub fn entry_for_specifier(
    &self,
    specifier: &Url,
  ) -> (&CompilerOptionsKey, &LspCompilerOptionsData) {
    self
      .data
      .get_key_value(&self.inner.entry_for_specifier(specifier).0)
      .expect("Stored key should be mapped.")
  }
  pub fn for_key(
    &self,
    key: &CompilerOptionsKey,
  ) -> Option<&LspCompilerOptionsData> {
    self.data.get(key)
  }
  pub fn entries(
    &self,
  ) -> impl Iterator<Item = (&CompilerOptionsKey, &LspCompilerOptionsData)> {
    self.data.iter()
  }
  /// Whether `specifier` is one of the files whose changes should
  /// invalidate compiler options.
  pub fn is_watched_file(&self, specifier: &Url) -> bool {
    self
      .data
      .values()
      .any(|d| d.watched_files.contains(specifier))
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/lsp/logging.rs | cli/lsp/logging.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::fs;
use std::io::prelude::*;
use std::path::Path;
use std::sync::atomic::AtomicBool;
use std::sync::atomic::AtomicUsize;
use std::sync::atomic::Ordering;
use std::thread;
use std::time::SystemTime;
use chrono::DateTime;
use chrono::Utc;
use deno_core::parking_lot::Mutex;
static LSP_DEBUG_FLAG: AtomicBool = AtomicBool::new(false);
static LSP_LOG_LEVEL: AtomicUsize = AtomicUsize::new(log::Level::Info as usize);
static LSP_WARN_LEVEL: AtomicUsize =
AtomicUsize::new(log::Level::Warn as usize);
static LOG_FILE: LogFile = LogFile {
enabled: AtomicBool::new(true),
buffer: Mutex::new(String::new()),
};
/// In-memory buffer for the LSP log file, flushed to disk periodically by
/// the thread spawned in `init_log_file`.
pub struct LogFile {
  enabled: AtomicBool,
  buffer: Mutex<String>,
}
impl LogFile {
pub fn write_line(&self, s: &str) {
if LOG_FILE.enabled.load(Ordering::Relaxed) {
let mut buffer = self.buffer.lock();
buffer.push_str(s);
buffer.push('\n');
}
}
fn commit(&self, path: &Path) {
let unbuffered = {
let mut buffer = self.buffer.lock();
if buffer.is_empty() {
return;
}
// We clone here rather than take so the buffer can retain its capacity.
let unbuffered = buffer.clone();
buffer.clear();
unbuffered
};
if let Ok(file) = fs::OpenOptions::new().append(true).open(path) {
write!(&file, "{}", unbuffered).ok();
}
}
}
/// Enables or disables on-disk logging. When enabled, creates
/// `.deno_lsp/log_<timestamp>.txt` under the current directory and spawns a
/// background thread that flushes the in-memory buffer to it once a second.
/// When disabled (or setup fails), the sink is turned off and anything
/// already buffered is dropped.
pub fn init_log_file(enabled: bool) {
  let prepare_path = || {
    if !enabled {
      return None;
    }
    let cwd = std::env::current_dir().ok()?;
    let now = SystemTime::now();
    let now: DateTime<Utc> = now.into();
    // Replace ':' so the RFC 3339 timestamp is safe to use in a file name.
    let now = now.to_rfc3339().replace(':', "_");
    let path = cwd.join(format!(".deno_lsp/log_{}.txt", now));
    fs::create_dir_all(path.parent()?).ok()?;
    // Create/truncate the file up front so the appends in `commit` succeed.
    fs::write(&path, "").ok()?;
    Some(path)
  };
  let Some(path) = prepare_path() else {
    LOG_FILE.enabled.store(false, Ordering::Relaxed);
    LOG_FILE.buffer.lock().clear();
    return;
  };
  // Flush the buffer to disk once a second for the life of the process.
  thread::spawn(move || {
    loop {
      LOG_FILE.commit(&path);
      thread::sleep(std::time::Duration::from_secs(1));
    }
  });
}
/// Appends `s` to the global log-file buffer (no-op if file logging is off).
pub fn write_line_to_log_file(s: &str) {
  LOG_FILE.write_line(s);
}
/// Enables or disables forwarding of `lsp_debug!` output to the `log` crate.
pub fn set_lsp_debug_flag(value: bool) {
  LSP_DEBUG_FLAG.store(value, Ordering::SeqCst)
}
/// Whether `lsp_debug!` output is forwarded to the `log` crate.
pub fn lsp_debug_enabled() -> bool {
  LSP_DEBUG_FLAG.load(Ordering::SeqCst)
}
/// Change the lsp to log at the provided level.
/// The level is stored as `log::Level as usize` and read by `lsp_log_level`.
pub fn set_lsp_log_level(level: log::Level) {
  LSP_LOG_LEVEL.store(level as usize, Ordering::SeqCst)
}
pub fn lsp_log_level() -> log::Level {
let level = LSP_LOG_LEVEL.load(Ordering::SeqCst);
// TODO(bartlomieju):
#[allow(clippy::undocumented_unsafe_blocks)]
unsafe {
std::mem::transmute(level)
}
}
/// Change the lsp to warn at the provided level.
/// The level is stored as `log::Level as usize` and read by `lsp_warn_level`.
pub fn set_lsp_warn_level(level: log::Level) {
  LSP_WARN_LEVEL.store(level as usize, Ordering::SeqCst)
}
pub fn lsp_warn_level() -> log::Level {
let level = LSP_LOG_LEVEL.load(Ordering::SeqCst);
// TODO(bartlomieju):
#[allow(clippy::undocumented_unsafe_blocks)]
unsafe {
std::mem::transmute(level)
}
}
/// Use this macro to do "info" logs in the lsp code. This allows
/// for downgrading these logs to another log level in the REPL.
macro_rules! lsp_log {
  ($($arg:tt)+) => (
    // Wrapped in a block for consistency with `lsp_warn!`: without it the
    // expansion leaked the `let lsp_log_level` binding into the caller's
    // scope (risking name collisions) and could not be used in expression
    // position.
    {
      let lsp_log_level = $crate::lsp::logging::lsp_log_level();
      if lsp_log_level == log::Level::Debug {
        $crate::lsp::logging::lsp_debug!($($arg)+)
      } else {
        let s = std::format!($($arg)+);
        $crate::lsp::logging::write_line_to_log_file(&s);
        log::log!(lsp_log_level, "{}", s)
      }
    }
  )
}
/// Use this macro to do "warn" logs in the lsp code. This allows
/// for downgrading these logs to another log level in the REPL.
///
/// If the warn level has been downgraded all the way to `Debug`, the message
/// is routed through `lsp_debug!`; otherwise it is written to the log file
/// and emitted via the `log` crate at the configured level.
macro_rules! lsp_warn {
  ($($arg:tt)+) => (
    {
      let lsp_log_level = $crate::lsp::logging::lsp_warn_level();
      if lsp_log_level == log::Level::Debug {
        $crate::lsp::logging::lsp_debug!($($arg)+)
      } else {
        let s = std::format!($($arg)+);
        $crate::lsp::logging::write_line_to_log_file(&s);
        log::log!(lsp_log_level, "{}", s)
      }
    }
  )
}
/// Writes a debug message to the log file unconditionally, and also to the
/// `log` crate when debug output was enabled via `set_lsp_debug_flag`.
macro_rules! lsp_debug {
  ($($arg:tt)+) => (
    {
      let s = std::format!($($arg)+);
      $crate::lsp::logging::write_line_to_log_file(&s);
      if $crate::lsp::logging::lsp_debug_enabled() {
        log::debug!("{}", s)
      }
    }
  )
}
/// Creates a `tracing` info span when the `lsp-tracing` feature is enabled;
/// otherwise yields a no-op `Span` so call sites compile unchanged.
macro_rules! lsp_tracing_info_span {
  ($($arg:tt)*) => {{
    #[cfg(feature = "lsp-tracing")]
    {
      ::tracing::info_span!($($arg)*)
    }
    #[cfg(not(feature = "lsp-tracing"))]
    {
      $crate::lsp::trace::Span {}
    }
  }};
}
/// Emits a `tracing` info event when the `lsp-tracing` feature is enabled;
/// otherwise expands to nothing.
macro_rules! lsp_tracing_info {
  ($($arg:tt)*) => {
    #[cfg(feature = "lsp-tracing")]
    {
      ::tracing::info!($($arg)*);
    }
    #[cfg(not(feature = "lsp-tracing"))]
    {}
  };
}
pub(super) use lsp_debug;
pub(super) use lsp_log;
pub(super) use lsp_tracing_info;
pub(super) use lsp_tracing_info_span;
pub(super) use lsp_warn;
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/lsp/npm.rs | cli/lsp/npm.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::sync::Arc;
use dashmap::DashMap;
use deno_core::anyhow::anyhow;
use deno_core::error::AnyError;
use deno_core::serde_json;
use deno_core::url::Url;
use deno_npm::npm_rc::NpmRc;
use deno_npm::resolution::NpmVersionResolver;
use deno_semver::Version;
use deno_semver::package::PackageNv;
use once_cell::sync::Lazy;
use serde::Deserialize;
use super::search::PackageSearchApi;
use crate::file_fetcher::CliFileFetcher;
use crate::file_fetcher::TextDecodedFile;
use crate::npm::NpmFetchResolver;
use crate::sys::CliSys;
/// Implements package search against the npm registry for the LSP,
/// with in-memory caching of search results and version lists.
#[derive(Debug)]
pub struct CliNpmSearchApi {
  file_fetcher: Arc<CliFileFetcher>,
  resolver: NpmFetchResolver,
  // Search results keyed by the raw query string.
  search_cache: DashMap<String, Arc<Vec<String>>>,
  // Version lists keyed by package name (sorted newest first).
  versions_cache: DashMap<String, Arc<Vec<Version>>>,
}
impl CliNpmSearchApi {
  pub fn new(
    file_fetcher: Arc<CliFileFetcher>,
    npm_version_resolver: Arc<NpmVersionResolver>,
  ) -> Self {
    // Resolves against the default registry URL (see `npm_registry_url`).
    let resolver = NpmFetchResolver::new(
      file_fetcher.clone(),
      Arc::new(NpmRc::default().as_resolved(npm_registry_url()).unwrap()),
      npm_version_resolver,
    );
    Self {
      file_fetcher,
      resolver,
      search_cache: Default::default(),
      versions_cache: Default::default(),
    }
  }
  /// Drops all cached search results, version lists, and fetched files.
  pub fn clear_cache(&self) {
    self.file_fetcher.clear_memory_files();
    self.search_cache.clear();
    self.versions_cache.clear();
  }
}
#[async_trait::async_trait(?Send)]
impl PackageSearchApi for CliNpmSearchApi {
  /// Searches the npm registry for packages matching `query`,
  /// caching results per query string.
  async fn search(&self, query: &str) -> Result<Arc<Vec<String>>, AnyError> {
    if let Some(names) = self.search_cache.get(query) {
      return Ok(names.clone());
    }
    let mut search_url = npm_registry_url().join("-/v1/search")?;
    search_url
      .query_pairs_mut()
      .append_pair("text", &format!("{} boost-exact:false", query));
    let file_fetcher = self.file_fetcher.clone();
    let file = deno_core::unsync::spawn(async move {
      let file = file_fetcher.fetch_bypass_permissions(&search_url).await?;
      TextDecodedFile::decode(file)
    })
    .await??;
    let names = Arc::new(parse_npm_search_response(&file.source)?);
    self.search_cache.insert(query.to_string(), names.clone());
    Ok(names)
  }
  /// Returns the applicable versions of `name`, newest first, caching per
  /// package name.
  async fn versions(&self, name: &str) -> Result<Arc<Vec<Version>>, AnyError> {
    if let Some(versions) = self.versions_cache.get(name) {
      return Ok(versions.clone());
    }
    let info = self
      .resolver
      .package_info(name)
      .await
      .ok_or_else(|| anyhow!("npm package info not found: {}", name))?;
    let mut versions = self
      .resolver
      .applicable_version_infos(&info)
      .map(|vi| &vi.version)
      .cloned()
      .collect::<Vec<_>>();
    // Newest first.
    versions.sort();
    versions.reverse();
    let versions = Arc::new(versions);
    self
      .versions_cache
      .insert(name.to_string(), versions.clone());
    Ok(versions)
  }
  /// Export completion is not implemented for npm packages; always empty.
  async fn exports(
    &self,
    _nv: &PackageNv,
  ) -> Result<Arc<Vec<String>>, AnyError> {
    Ok(Default::default())
  }
}
/// Extracts the package names from an npm `-/v1/search` JSON response.
fn parse_npm_search_response(source: &str) -> Result<Vec<String>, AnyError> {
  // Only the fields we actually consume are modeled here.
  #[derive(Debug, Deserialize)]
  struct PackageEntry {
    name: String,
  }
  #[derive(Debug, Deserialize)]
  struct SearchObject {
    package: PackageEntry,
  }
  #[derive(Debug, Deserialize)]
  struct SearchResponse {
    objects: Vec<SearchObject>,
  }
  let response: SearchResponse = serde_json::from_str(source)?;
  let mut names = Vec::with_capacity(response.objects.len());
  for object in response.objects {
    names.push(object.package.name);
  }
  Ok(names)
}
// this is buried here because generally you want to use the ResolvedNpmRc instead of this.
fn npm_registry_url() -> &'static Url {
  // Resolved once and cached for the lifetime of the process.
  static NPM_REGISTRY_DEFAULT_URL: Lazy<Url> =
    Lazy::new(|| deno_resolver::npmrc::npm_registry_url(&CliSys::default()));
  &NPM_REGISTRY_DEFAULT_URL
}
#[cfg(test)]
mod tests {
  use super::*;
  // Regression test for the npm search response parser.
  #[test]
  fn test_parse_npm_search_response() {
    // This is a subset of a realistic response only containing data currently
    // used by our parser. It's enough to catch regressions.
    let names = parse_npm_search_response(r#"{"objects":[{"package":{"name":"puppeteer"}},{"package":{"name":"puppeteer-core"}},{"package":{"name":"puppeteer-extra-plugin-stealth"}},{"package":{"name":"puppeteer-extra-plugin"}}]}"#).unwrap();
    assert_eq!(
      names,
      vec![
        "puppeteer".to_string(),
        "puppeteer-core".to_string(),
        "puppeteer-extra-plugin-stealth".to_string(),
        "puppeteer-extra-plugin".to_string()
      ]
    );
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/lsp/performance.rs | cli/lsp/performance.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::cmp;
use std::collections::HashMap;
use std::collections::VecDeque;
use std::fmt;
use std::sync::Arc;
use std::time::Duration;
use std::time::Instant;
use deno_core::parking_lot::Mutex;
use deno_core::serde::Deserialize;
use deno_core::serde::Serialize;
use deno_core::serde_json::json;
use super::logging::lsp_debug;
/// A serializable summary of all measures recorded under one name.
#[derive(Debug, Deserialize, Serialize, PartialEq, Eq)]
#[serde(rename_all = "camelCase")]
pub struct PerformanceAverage {
  pub name: String,
  /// How many measures contributed to the average.
  pub count: u32,
  /// Average duration in whole milliseconds.
  pub average_duration: u32,
}
impl PartialOrd for PerformanceAverage {
  fn partial_cmp(&self, other: &Self) -> Option<cmp::Ordering> {
    Some(self.cmp(other))
  }
}
impl Ord for PerformanceAverage {
  // Averages order by name only; count and duration are ignored.
  fn cmp(&self, other: &Self) -> cmp::Ordering {
    self.name.cmp(&other.name)
  }
}
/// A structure which serves as a start of a measurement span.
#[derive(Debug)]
pub struct PerformanceMark {
  name: String,
  // The running occurrence number of this mark's name (see `mark_inner`).
  count: u32,
  // Captured at mark time; the measure's duration is `start.elapsed()`.
  start: Instant,
}
/// A structure which holds the information about the measured span.
#[derive(Debug, Clone)]
pub struct PerformanceMeasure {
  pub name: String,
  // Copied from the originating `PerformanceMark`.
  pub count: u32,
  pub duration: Duration,
}
impl fmt::Display for PerformanceMeasure {
  /// Formats as `name (X.XXXms)` with fractional milliseconds.
  fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
    let duration_ms = self.duration.as_micros() as f64 / 1000.0;
    write!(f, "{} ({}ms)", self.name, duration_ms)
  }
}
impl From<PerformanceMark> for PerformanceMeasure {
  // Finalizes a mark: the duration is the elapsed time since the mark.
  fn from(value: PerformanceMark) -> Self {
    Self {
      name: value.name,
      count: value.count,
      duration: value.start.elapsed(),
    }
  }
}
/// A guard that finalizes its performance mark when dropped; created by
/// `Performance::measure_scope`.
#[derive(Debug)]
pub struct PerformanceScopeMark {
  performance: Arc<Performance>,
  // Always `Some` until `drop`, where it is taken exactly once.
  inner: Option<PerformanceMark>,
}
impl Drop for PerformanceScopeMark {
  fn drop(&mut self) {
    self
      .performance
      .0
      .lock()
      .measure(self.inner.take().unwrap());
  }
}
#[derive(Debug)]
struct PerformanceInner {
  // Times each mark name has been used; drives `PerformanceMark::count`.
  counts: HashMap<String, u32>,
  // Running (count, total duration in ms) per mark name.
  measurements_by_type: HashMap<String, (/* count */ u32, /* duration */ f64)>,
  // Upper bound on how many completed measures are retained.
  max_size: usize,
  // Most recent completed measures, newest at the front.
  measures: VecDeque<PerformanceMeasure>,
}
impl PerformanceInner {
  // Converts a mark into a completed measure, logs it, accumulates its
  // duration into `measurements_by_type` (the count was already bumped by
  // `mark_inner`), and stores it in the bounded `measures` buffer.
  fn measure(&mut self, mark: PerformanceMark) -> Duration {
    let measure = PerformanceMeasure::from(mark);
    lsp_debug!(
      "{},",
      json!({
        "type": "measure",
        "name": measure.name,
        "count": measure.count,
        "duration": measure.duration.as_micros() as f64 / 1000.0,
      })
    );
    let duration = measure.duration;
    let measurement = self
      .measurements_by_type
      .entry(measure.name.to_string())
      .or_insert((0, 0.0));
    measurement.1 += duration.as_micros() as f64 / 1000.0;
    self.measures.push_front(measure);
    // Evict the oldest measures once the buffer exceeds `max_size`.
    while self.measures.len() > self.max_size {
      self.measures.pop_back();
    }
    duration
  }
}
impl Default for PerformanceInner {
  fn default() -> Self {
    Self {
      counts: Default::default(),
      measurements_by_type: Default::default(),
      // Keep at most the 3,000 most recent measures.
      max_size: 3_000,
      measures: Default::default(),
    }
  }
}
/// A simple structure for marking a start of something to measure the duration
/// of and measuring that duration. Each measurement is identified by a string
/// name and a counter is incremented each time a new measurement is marked.
///
/// The structure will limit the size of measurements to the most recent 3,000
/// (see `PerformanceInner::default`), and will roll off when that limit is
/// reached.
#[derive(Debug, Default)]
pub struct Performance(Mutex<PerformanceInner>);
impl Performance {
  /// Return the count and average duration of a measurement identified by name.
  #[cfg(test)]
  pub fn average(&self, name: &str) -> Option<(usize, Duration)> {
    let mut items = Vec::new();
    for measure in self.0.lock().measures.iter() {
      if measure.name == name {
        items.push(measure.duration);
      }
    }
    let len = items.len();
    if len > 0 {
      let average = items.into_iter().sum::<Duration>() / len as u32;
      Some((len, average))
    } else {
      None
    }
  }
  /// Return an iterator which provides the names, count, and average duration
  /// of each measurement.
  pub fn averages(&self) -> Vec<PerformanceAverage> {
    // Group retained measures by name, then average each group.
    let mut averages: HashMap<String, Vec<Duration>> = HashMap::new();
    for measure in self.0.lock().measures.iter() {
      averages
        .entry(measure.name.clone())
        .or_default()
        .push(measure.duration);
    }
    averages
      .into_iter()
      .map(|(k, d)| {
        let count = d.len() as u32;
        let a = d.into_iter().sum::<Duration>() / count;
        PerformanceAverage {
          name: k,
          count,
          average_duration: a.as_millis() as u32,
        }
      })
      .collect()
  }
  /// Returns `(name, count, total duration in ms)` for every mark name ever
  /// used, independent of the rolling `measures` buffer.
  pub fn measurements_by_type(&self) -> Vec<(String, u32, f64)> {
    self
      .0
      .lock()
      .measurements_by_type
      .iter()
      .map(|(name, (count, duration))| (name.to_string(), *count, *duration))
      .collect::<Vec<_>>()
  }
  /// Like `averages`, but returns `(name, count, average in fractional ms)`
  /// tuples instead of `PerformanceAverage` values.
  pub fn averages_as_f64(&self) -> Vec<(String, u32, f64)> {
    let mut averages: HashMap<String, Vec<Duration>> = HashMap::new();
    for measure in self.0.lock().measures.iter() {
      averages
        .entry(measure.name.clone())
        .or_default()
        .push(measure.duration);
    }
    averages
      .into_iter()
      .map(|(k, d)| {
        let count = d.len() as u32;
        let a = d.into_iter().sum::<Duration>() / count;
        (k, count, a.as_micros() as f64 / 1000.0)
      })
      .collect()
  }
  // Shared implementation of `mark`/`mark_with_args`: bumps the per-name
  // counters, optionally logs the provided args, and returns a mark whose
  // clock starts now.
  fn mark_inner<S: AsRef<str>, V: Serialize>(
    &self,
    name: S,
    maybe_args: Option<V>,
  ) -> PerformanceMark {
    let mut inner = self.0.lock();
    let name = name.as_ref();
    let count = *inner
      .counts
      .entry(name.to_string())
      .and_modify(|c| *c += 1)
      .or_insert(1);
    inner
      .measurements_by_type
      .entry(name.to_string())
      .and_modify(|(c, _)| *c += 1)
      .or_insert((1, 0.0));
    let msg = if let Some(args) = maybe_args {
      json!({
        "type": "mark",
        "name": name,
        "count": count,
        "args": args,
      })
    } else {
      json!({
        "type": "mark",
        "name": name,
      })
    };
    lsp_debug!("{},", msg);
    PerformanceMark {
      name: name.to_string(),
      count,
      start: Instant::now(),
    }
  }
  /// Marks the start of a measurement which returns a performance mark
  /// structure, which is then passed to `.measure()` to finalize the duration
  /// and add it to the internal buffer.
  pub fn mark<S: AsRef<str>>(&self, name: S) -> PerformanceMark {
    self.mark_inner(name, None::<()>)
  }
  /// Marks the start of a measurement which returns a performance mark
  /// structure, which is then passed to `.measure()` to finalize the duration
  /// and add it to the internal buffer.
  pub fn mark_with_args<S: AsRef<str>, V: Serialize>(
    &self,
    name: S,
    args: V,
  ) -> PerformanceMark {
    self.mark_inner(name, Some(args))
  }
  /// Creates a performance mark which will be measured against on drop. Use
  /// like this:
  /// ```rust
  /// let _mark = self.performance.measure_scope("foo");
  /// ```
  /// Don't use like this:
  /// ```rust
  /// // ❌
  /// let _ = self.performance.measure_scope("foo");
  /// ```
  pub fn measure_scope<S: AsRef<str>>(
    self: &Arc<Self>,
    name: S,
  ) -> PerformanceScopeMark {
    PerformanceScopeMark {
      performance: self.clone(),
      inner: Some(self.mark(name)),
    }
  }
  /// A function which accepts a previously created performance mark which will
  /// be used to finalize the duration of the span being measured, and add the
  /// measurement to the internal buffer.
  pub fn measure(&self, mark: PerformanceMark) -> Duration {
    self.0.lock().measure(mark)
  }
  /// Snapshot of the retained measures, newest first.
  pub fn to_vec(&self) -> Vec<PerformanceMeasure> {
    self.0.lock().measures.iter().cloned().collect()
  }
}
#[cfg(test)]
mod tests {
  use super::*;
  // Marks may be measured in any order; counts group by name.
  #[test]
  fn test_average() {
    let performance = Performance::default();
    let mark1 = performance.mark("a");
    let mark2 = performance.mark("a");
    let mark3 = performance.mark("b");
    performance.measure(mark2);
    performance.measure(mark1);
    performance.measure(mark3);
    let (count, _) = performance.average("a").expect("should have had value");
    assert_eq!(count, 2);
    let (count, _) = performance.average("b").expect("should have had value");
    assert_eq!(count, 1);
    assert!(performance.average("c").is_none());
  }
  #[test]
  fn test_averages() {
    let performance = Performance::default();
    let mark1 = performance.mark("a");
    let mark2 = performance.mark("a");
    performance.measure(mark2);
    performance.measure(mark1);
    let averages = performance.averages();
    assert_eq!(averages.len(), 1);
    assert_eq!(averages[0].count, 2);
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/lsp/cache.rs | cli/lsp/cache.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::collections::BTreeMap;
use std::fs;
use std::path::Path;
use std::sync::Arc;
use std::time::SystemTime;
use deno_core::ModuleSpecifier;
use deno_core::url::Url;
use deno_path_util::url_to_file_path;
use crate::cache::DenoDir;
use crate::cache::GlobalHttpCache;
use crate::cache::HttpCache;
use crate::cache::LocalLspHttpCache;
use crate::lsp::config::Config;
use crate::lsp::logging::lsp_log;
use crate::lsp::logging::lsp_warn;
use crate::sys::CliSys;
/// Calculate a version for a given path.
///
/// Returns `None` when the path's metadata can't be read (e.g. it doesn't
/// exist); otherwise the modification time in milliseconds since the Unix
/// epoch, falling back to `"1"` when the mtime is unavailable or precedes
/// the epoch.
pub fn calculate_fs_version_at_path(path: impl AsRef<Path>) -> Option<String> {
  let metadata = fs::metadata(path).ok()?;
  let version = metadata
    .modified()
    .ok()
    .and_then(|modified| modified.duration_since(SystemTime::UNIX_EPOCH).ok())
    .map(|elapsed| elapsed.as_millis().to_string())
    .unwrap_or_else(|| "1".to_string());
  Some(version)
}
/// The LSP's view of the module cache: a global HTTP cache plus optional
/// per-scope local vendor caches.
#[derive(Debug, Clone)]
pub struct LspCache {
  deno_dir: DenoDir,
  global: Arc<GlobalHttpCache>,
  // Per workspace scope: the local vendor cache, if that scope vendors.
  vendors_by_scope: BTreeMap<Arc<Url>, Option<Arc<LocalLspHttpCache>>>,
}
impl Default for LspCache {
  // No custom global cache URL: resolve the default deno dir.
  fn default() -> Self {
    Self::new(None)
  }
}
impl LspCache {
  /// Creates a cache rooted at `global_cache_url` (a `file:` URL) when
  /// given and resolvable, otherwise at the default deno dir.
  pub fn new(global_cache_url: Option<Url>) -> Self {
    let global_cache_path = global_cache_url.and_then(|s| {
      url_to_file_path(&s)
        .inspect(|p| {
          lsp_log!("Resolved global cache path: \"{}\"", p.to_string_lossy());
        })
        .inspect_err(|err| {
          lsp_warn!("Failed to resolve custom cache path: {err}");
        })
        .ok()
    });
    let sys = CliSys::default();
    let deno_dir_root =
      deno_cache_dir::resolve_deno_dir(&sys, global_cache_path)
        .expect("should be infallible with absolute custom root");
    let deno_dir = DenoDir::new(sys.clone(), deno_dir_root);
    let global =
      Arc::new(GlobalHttpCache::new(sys, deno_dir.remote_folder_path()));
    Self {
      deno_dir,
      global,
      vendors_by_scope: Default::default(),
    }
  }
  /// Rebuilds the per-scope vendor cache map from the current config tree.
  pub fn update_config(&mut self, config: &Config) {
    self.vendors_by_scope = config
      .tree
      .data_by_scope()
      .iter()
      .map(|(scope, config_data)| {
        (
          scope.clone(),
          config_data.vendor_dir.as_ref().map(|v| {
            Arc::new(LocalLspHttpCache::new(v.clone(), self.global.clone()))
          }),
        )
      })
      .collect();
  }
  pub fn deno_dir(&self) -> &DenoDir {
    &self.deno_dir
  }
  pub fn global(&self) -> &Arc<GlobalHttpCache> {
    &self.global
  }
  /// Picks the cache for a file referrer: the vendor cache of the most
  /// specific matching scope, or the global cache when there is none.
  pub fn for_specifier(
    &self,
    file_referrer: Option<&ModuleSpecifier>,
  ) -> Arc<dyn HttpCache> {
    let Some(file_referrer) = file_referrer else {
      return self.global.clone();
    };
    self
      .vendors_by_scope
      .iter()
      .rfind(|(s, _)| file_referrer.as_str().starts_with(s.as_str()))
      .and_then(|(_, v)| v.clone().map(|v| v as _))
      .unwrap_or(self.global.clone() as _)
  }
  /// Maps a remote `http(s)` specifier to its vendored `file:` URL within
  /// the referrer's scope, if that scope vendors.
  pub fn vendored_specifier(
    &self,
    specifier: &ModuleSpecifier,
    file_referrer: Option<&ModuleSpecifier>,
  ) -> Option<ModuleSpecifier> {
    let file_referrer = file_referrer?;
    if !matches!(specifier.scheme(), "http" | "https") {
      return None;
    }
    let vendor = self
      .vendors_by_scope
      .iter()
      .rfind(|(s, _)| file_referrer.as_str().starts_with(s.as_str()))?
      .1
      .as_ref()?;
    vendor.get_file_url(specifier)
  }
  /// Maps a vendored `file:` specifier back to its original remote URL.
  pub fn unvendored_specifier(
    &self,
    specifier: &ModuleSpecifier,
  ) -> Option<ModuleSpecifier> {
    let path = url_to_file_path(specifier).ok()?;
    let vendor = self
      .vendors_by_scope
      .iter()
      .rfind(|(s, _)| specifier.as_str().starts_with(s.as_str()))?
      .1
      .as_ref()?;
    vendor.get_remote_url(&path)
  }
  /// Whether `specifier` points into the global deno dir or into a scope's
  /// vendor directory.
  pub fn in_cache_directory(&self, specifier: &Url) -> bool {
    let Ok(path) = url_to_file_path(specifier) else {
      return false;
    };
    if path.starts_with(&self.deno_dir().root) {
      return true;
    }
    let Some(vendor) = self
      .vendors_by_scope
      .iter()
      .rfind(|(s, _)| specifier.as_str().starts_with(s.as_str()))
      .and_then(|(_, c)| c.as_ref())
    else {
      return false;
    };
    vendor.get_remote_url(&path).is_some()
  }
  /// Whether `specifier` points into the global deno dir (vendor
  /// directories are not considered).
  pub fn in_global_cache_directory(&self, specifier: &Url) -> bool {
    let Ok(path) = url_to_file_path(specifier) else {
      return false;
    };
    if path.starts_with(&self.deno_dir().root) {
      return true;
    }
    false
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/lsp/jsr.rs | cli/lsp/jsr.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::collections::HashMap;
use std::sync::Arc;
use dashmap::DashMap;
use deno_cache_dir::HttpCache;
use deno_core::anyhow::anyhow;
use deno_core::error::AnyError;
use deno_core::serde_json;
use deno_core::url::Url;
use deno_graph::ModuleSpecifier;
use deno_graph::packages::JsrPackageInfo;
use deno_graph::packages::JsrPackageInfoVersion;
use deno_graph::packages::JsrPackageVersionInfo;
use deno_graph::packages::JsrVersionResolver;
use deno_resolver::workspace::WorkspaceResolver;
use deno_semver::StackString;
use deno_semver::Version;
use deno_semver::jsr::JsrPackageReqReference;
use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq;
use serde::Deserialize;
use super::config::ConfigData;
use super::search::PackageSearchApi;
use crate::args::jsr_api_url;
use crate::args::jsr_url;
use crate::file_fetcher::CliFileFetcher;
use crate::file_fetcher::TextDecodedFile;
use crate::jsr::JsrFetchResolver;
use crate::jsr::partial_jsr_package_version_info_from_slice;
use crate::sys::CliSys;
/// A JSR package declared in the local workspace, with synthesized version
/// info built from its exports.
#[derive(Debug)]
struct WorkspacePackage {
  // Base directory URL of the package within the workspace.
  dir_url: Url,
  version_info: Arc<JsrPackageVersionInfo>,
}
/// Keep in sync with `JsrFetchResolver`!
///
/// Resolves JSR requirements entirely from local state (workspace packages,
/// the lockfile, and the HTTP cache) — it never hits the network. `None`
/// cache values record lookups that failed, until `did_cache` clears them.
#[derive(Debug)]
pub struct JsrCacheResolver {
  nv_by_req: DashMap<PackageReq, Option<PackageNv>>,
  /// The `module_graph` fields of the version infos should be forcibly absent.
  /// It can be large and we don't want to store it.
  info_by_nv: DashMap<PackageNv, Option<Arc<JsrPackageVersionInfo>>>,
  info_by_name: DashMap<StackString, Option<Arc<JsrPackageInfo>>>,
  workspace_packages_by_name: HashMap<StackString, WorkspacePackage>,
  cache: Arc<dyn HttpCache>,
}
impl JsrCacheResolver {
  /// Builds a resolver primed with the workspace's own JSR packages and the
  /// req -> version mappings recorded in the lockfile, if present.
  pub fn new(
    cache: Arc<dyn HttpCache>,
    config_data: Option<&ConfigData>,
    workspace_resolver: &WorkspaceResolver<CliSys>,
  ) -> Self {
    let nv_by_req = DashMap::new();
    let info_by_nv = DashMap::new();
    let info_by_name = DashMap::new();
    let mut workspace_packages_by_name = HashMap::new();
    // Seed caches with workspace packages so they resolve without the
    // registry.
    for jsr_package in workspace_resolver.jsr_packages().iter() {
      let exports = deno_core::serde_json::json!(&jsr_package.exports);
      let version_info = Arc::new(JsrPackageVersionInfo {
        exports: exports.clone(),
        module_graph_1: None,
        module_graph_2: None,
        manifest: Default::default(),
        lockfile_checksum: None,
      });
      let name = StackString::from_str(&jsr_package.name);
      workspace_packages_by_name.insert(
        name.clone(),
        WorkspacePackage {
          dir_url: jsr_package.base.clone(),
          version_info: version_info.clone(),
        },
      );
      // Unversioned workspace packages can't be keyed by name+version.
      let Some(version) = &jsr_package.version else {
        continue;
      };
      let nv = PackageNv {
        name,
        version: version.clone(),
      };
      info_by_name.insert(
        nv.name.clone(),
        Some(Arc::new(JsrPackageInfo {
          versions: [(
            nv.version.clone(),
            JsrPackageInfoVersion {
              yanked: false,
              created_at: None,
            },
          )]
          .into_iter()
          .collect(),
        })),
      );
      info_by_nv.insert(nv.clone(), Some(version_info));
    }
    // Seed req -> nv mappings from the lockfile's jsr specifiers.
    if let Some(lockfile) = config_data.and_then(|d| d.lockfile.as_ref()) {
      for (dep_req, version) in &lockfile.lock().content.packages.specifiers {
        let req = match dep_req.kind {
          deno_semver::package::PackageKind::Jsr => &dep_req.req,
          deno_semver::package::PackageKind::Npm => {
            continue;
          }
        };
        let Ok(version) = Version::parse_standard(version) else {
          continue;
        };
        nv_by_req.insert(
          req.clone(),
          Some(PackageNv {
            name: req.name.clone(),
            version,
          }),
        );
      }
    }
    Self {
      nv_by_req,
      info_by_nv,
      info_by_name,
      workspace_packages_by_name,
      cache: cache.clone(),
    }
  }
  /// Resolves a version requirement to a concrete name + version, using the
  /// newest cached version that matches. Negative results are cached too.
  pub fn req_to_nv(&self, req: &PackageReq) -> Option<PackageNv> {
    if let Some(nv) = self.nv_by_req.get(req) {
      return nv.value().clone();
    }
    let maybe_get_nv = || {
      let name = &req.name;
      let package_info = self.package_info(name)?;
      // Find the first matching version of the package which is cached.
      let mut versions = package_info.versions.keys().collect::<Vec<_>>();
      versions.sort();
      let version = versions
        .into_iter()
        .rev()
        .find(|v| {
          if req.version_req.tag().is_some() || !req.version_req.matches(v) {
            return false;
          }
          let nv = PackageNv {
            name: name.clone(),
            version: (*v).clone(),
          };
          self.package_version_info(&nv).is_some()
        })
        .cloned()?;
      Some(PackageNv {
        name: name.clone(),
        version,
      })
    };
    let nv = maybe_get_nv();
    self.nv_by_req.insert(req.clone(), nv.clone());
    nv
  }
  /// Converts a `jsr:` req reference to the URL of the exported module:
  /// a workspace-relative URL for workspace packages, else a registry URL.
  pub fn jsr_to_resource_url(
    &self,
    req_ref: &JsrPackageReqReference,
  ) -> Option<ModuleSpecifier> {
    let req = req_ref.req().clone();
    let maybe_nv = self.req_to_nv(&req);
    let nv = maybe_nv.as_ref()?;
    let info = self.package_version_info(nv)?;
    let path = info.export(&req_ref.export_name())?;
    if let Some(workspace_package) =
      self.workspace_packages_by_name.get(&nv.name)
    {
      workspace_package.dir_url.join(path).ok()
    } else {
      jsr_url()
        .join(&format!("{}/{}/{}", &nv.name, &nv.version, &path))
        .ok()
    }
  }
  /// For a file inside a workspace package, returns the bare specifier
  /// (`name` or `name/export`) that maps to it via the package's exports.
  pub fn lookup_bare_specifier_for_workspace_file(
    &self,
    specifier: &Url,
  ) -> Option<String> {
    if specifier.scheme() != "file" {
      return None;
    }
    // Prefer the most deeply nested package containing the file.
    let (name, workspace_package) = self
      .workspace_packages_by_name
      .iter()
      .filter(|(_, p)| specifier.as_str().starts_with(p.dir_url.as_str()))
      .max_by_key(|(_, p)| p.dir_url.as_str().len())?;
    let path = specifier
      .as_str()
      .strip_prefix(workspace_package.dir_url.as_str())?;
    let export = Self::lookup_export_for_version_info(
      &workspace_package.version_info,
      path,
    )?;
    if export == "." {
      Some(name.to_string())
    } else {
      Some(format!("{name}/{export}"))
    }
  }
  /// Returns the export name whose path matches `path` for the given
  /// package version, if cached.
  pub fn lookup_export_for_path(
    &self,
    nv: &PackageNv,
    path: &str,
  ) -> Option<String> {
    let info = self.package_version_info(nv)?;
    Self::lookup_export_for_version_info(&info, path)
  }
  // Matches `path` against the version's exports, first exactly, then with
  // a "sloppy" extension-agnostic fallback.
  fn lookup_export_for_version_info(
    info: &JsrPackageVersionInfo,
    path: &str,
  ) -> Option<String> {
    let path = path.strip_prefix("./").unwrap_or(path);
    let mut sloppy_fallback = None;
    for (export, path_) in info.exports() {
      let path_ = path_.strip_prefix("./").unwrap_or(path_);
      if path_ == path {
        return Some(export.strip_prefix("./").unwrap_or(export).to_string());
      }
      // TSC in some cases will suggest a `.js` import path for a `.d.ts` source
      // file.
      if sloppy_fallback.is_none() {
        let path = path
          .strip_suffix(".js")
          .or_else(|| path.strip_suffix(".mjs"))
          .or_else(|| path.strip_suffix(".cjs"))
          .unwrap_or(path);
        let path_ = path_
          .strip_suffix(".d.ts")
          .or_else(|| path_.strip_suffix(".d.mts"))
          .or_else(|| path_.strip_suffix(".d.cts"))
          .unwrap_or(path_);
        if path_ == path {
          sloppy_fallback =
            Some(export.strip_prefix("./").unwrap_or(export).to_string());
        }
      }
    }
    sloppy_fallback
  }
  /// Reverse lookup: finds a previously resolved requirement that mapped to
  /// `nv`, if any (linear scan over the req cache).
  pub fn lookup_req_for_nv(&self, nv: &PackageNv) -> Option<PackageReq> {
    for entry in self.nv_by_req.iter() {
      let Some(nv_) = entry.value() else {
        continue;
      };
      if nv_ == nv {
        return Some(entry.key().clone());
      }
    }
    None
  }
  /// Reads (and memoizes) a package's `meta.json` from the HTTP cache.
  pub fn package_info(
    &self,
    name: &StackString,
  ) -> Option<Arc<JsrPackageInfo>> {
    if let Some(info) = self.info_by_name.get(name) {
      return info.value().clone();
    }
    let read_cached_package_info = || {
      let meta_url = jsr_url().join(&format!("{}/meta.json", name)).ok()?;
      let meta_bytes = read_cached_url(&meta_url, &self.cache)?;
      serde_json::from_slice::<JsrPackageInfo>(&meta_bytes).ok()
    };
    let info = read_cached_package_info().map(Arc::new);
    self.info_by_name.insert(name.clone(), info.clone());
    info
  }
  /// Reads (and memoizes) a version's `<version>_meta.json` from the HTTP
  /// cache, with the large `module_graph` fields stripped.
  pub fn package_version_info(
    &self,
    nv: &PackageNv,
  ) -> Option<Arc<JsrPackageVersionInfo>> {
    if let Some(info) = self.info_by_nv.get(nv) {
      return info.value().clone();
    }
    let read_cached_package_version_info = || {
      let meta_url = jsr_url()
        .join(&format!("{}/{}_meta.json", &nv.name, &nv.version))
        .ok()?;
      let meta_bytes = read_cached_url(&meta_url, &self.cache)?;
      partial_jsr_package_version_info_from_slice(&meta_bytes).ok()
    };
    let info = read_cached_package_version_info().map(Arc::new);
    self.info_by_nv.insert(nv.clone(), info.clone());
    info
  }
  /// Drops negative (`None`) cache entries so earlier misses are retried
  /// after new data has been cached.
  pub fn did_cache(&self) {
    self.nv_by_req.retain(|_, nv| nv.is_some());
    self.info_by_nv.retain(|_, info| info.is_some());
    self.info_by_name.retain(|_, info| info.is_some());
  }
}
/// Reads the cached response body for `url` from the HTTP cache, if present.
fn read_cached_url(
  url: &ModuleSpecifier,
  cache: &Arc<dyn HttpCache>,
) -> Option<Vec<u8>> {
  cache
    .get(&cache.cache_item_key(url).ok()?, None)
    .ok()?
    .map(|f| f.content.into_owned())
}
/// Implements package search against the JSR registry for the LSP, with
/// in-memory caching of search results, version lists, and export lists.
#[derive(Debug)]
pub struct CliJsrSearchApi {
  file_fetcher: Arc<CliFileFetcher>,
  resolver: JsrFetchResolver,
  // Search results keyed by the raw query string.
  search_cache: DashMap<String, Arc<Vec<String>>>,
  // Version lists keyed by package name (sorted newest first).
  versions_cache: DashMap<String, Arc<Vec<Version>>>,
  // Sorted export names keyed by package name + version.
  exports_cache: DashMap<PackageNv, Arc<Vec<String>>>,
}
impl CliJsrSearchApi {
  pub fn new(file_fetcher: Arc<CliFileFetcher>) -> Self {
    let resolver = JsrFetchResolver::new(
      file_fetcher.clone(),
      Arc::new(JsrVersionResolver {
        // not currently supported in the lsp
        newest_dependency_date_options: Default::default(),
      }),
    );
    Self {
      file_fetcher,
      resolver,
      search_cache: Default::default(),
      versions_cache: Default::default(),
      exports_cache: Default::default(),
    }
  }
  pub fn get_resolver(&self) -> &JsrFetchResolver {
    &self.resolver
  }
  /// Drops all cached search results, versions, exports, and fetched files.
  pub fn clear_cache(&self) {
    self.file_fetcher.clear_memory_files();
    self.search_cache.clear();
    self.versions_cache.clear();
    self.exports_cache.clear();
  }
}
#[async_trait::async_trait(?Send)]
impl PackageSearchApi for CliJsrSearchApi {
  /// Searches the JSR registry for packages matching `query`,
  /// caching results per query string.
  async fn search(&self, query: &str) -> Result<Arc<Vec<String>>, AnyError> {
    if let Some(names) = self.search_cache.get(query) {
      return Ok(names.clone());
    }
    let mut search_url = jsr_api_url().join("packages")?;
    search_url.query_pairs_mut().append_pair("query", query);
    let file_fetcher = self.file_fetcher.clone();
    let file = {
      let file = file_fetcher.fetch_bypass_permissions(&search_url).await?;
      TextDecodedFile::decode(file)?
    };
    let names = Arc::new(parse_jsr_search_response(&file.source)?);
    self.search_cache.insert(query.to_string(), names.clone());
    Ok(names)
  }
  /// Returns all published versions of `name`, newest first, caching per
  /// package name.
  async fn versions(&self, name: &str) -> Result<Arc<Vec<Version>>, AnyError> {
    if let Some(versions) = self.versions_cache.get(name) {
      return Ok(versions.clone());
    }
    let info = self
      .resolver
      .package_info(name)
      .await
      .ok_or_else(|| anyhow!("JSR package info not found: {}", name))?;
    let mut versions = info.versions.keys().cloned().collect::<Vec<_>>();
    // Newest first.
    versions.sort();
    versions.reverse();
    let versions = Arc::new(versions);
    self
      .versions_cache
      .insert(name.to_string(), versions.clone());
    Ok(versions)
  }
  /// Returns the sorted export names of a package version, caching per
  /// name + version.
  async fn exports(
    &self,
    nv: &PackageNv,
  ) -> Result<Arc<Vec<String>>, AnyError> {
    if let Some(exports) = self.exports_cache.get(nv) {
      return Ok(exports.clone());
    }
    let info = self
      .resolver
      .package_version_info(nv)
      .await
      .ok_or_else(|| anyhow!("JSR package version info not found: {}", nv))?;
    let mut exports = info
      .exports()
      .map(|(n, _)| n.to_string())
      .collect::<Vec<_>>();
    exports.sort();
    let exports = Arc::new(exports);
    self.exports_cache.insert(nv.clone(), exports.clone());
    Ok(exports)
  }
}
/// Extracts `@scope/name` package identifiers from a JSR search API JSON
/// response, skipping packages with no published versions.
fn parse_jsr_search_response(source: &str) -> Result<Vec<String>, AnyError> {
  // Only the fields we actually consume are modeled here.
  #[derive(Debug, Deserialize)]
  #[serde(rename_all = "camelCase")]
  struct Item {
    scope: String,
    name: String,
    version_count: usize,
  }
  #[derive(Debug, Deserialize)]
  #[serde(rename_all = "camelCase")]
  struct Response {
    items: Vec<Item>,
  }
  let response: Response = serde_json::from_str(source)?;
  let mut names = Vec::new();
  for item in response.items {
    // A package with zero versions can't be imported; skip it.
    if item.version_count == 0 {
      continue;
    }
    names.push(format!("@{}/{}", item.scope, item.name));
  }
  Ok(names)
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/lsp/tsc.rs | cli/lsp/tsc.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::cell::RefCell;
use std::cmp;
use std::collections::BTreeMap;
use std::collections::HashMap;
use std::collections::HashSet;
use std::convert::Infallible;
use std::ffi::c_void;
use std::net::SocketAddr;
use std::ops::Range;
use std::path::Path;
use std::rc::Rc;
use std::sync::Arc;
use std::sync::atomic::AtomicBool;
use std::thread;
use dashmap::DashMap;
use deno_ast::MediaType;
use deno_config::deno_json::CompilerOptions;
use deno_core::JsRuntime;
use deno_core::ModuleSpecifier;
use deno_core::OpState;
use deno_core::PollEventLoopOptions;
use deno_core::RuntimeOptions;
use deno_core::anyhow::anyhow;
use deno_core::convert::Smi;
use deno_core::convert::ToV8;
use deno_core::error::AnyError;
use deno_core::futures::FutureExt;
use deno_core::op2;
use deno_core::parking_lot::Mutex;
use deno_core::resolve_url;
use deno_core::serde::Deserialize;
use deno_core::serde::Serialize;
use deno_core::serde::de;
use deno_core::serde_json;
use deno_core::serde_json::Value;
use deno_core::serde_json::json;
use deno_core::serde_v8;
use deno_core::url::Url;
use deno_core::v8;
use deno_graph::source::Resolver;
use deno_lib::util::result::InfallibleResultExt;
use deno_lib::worker::create_isolate_create_params;
use deno_path_util::url_to_file_path;
use deno_resolver::deno_json::CompilerOptionsKey;
use deno_runtime::deno_node::SUPPORTED_BUILTIN_NODE_MODULES;
use deno_runtime::inspector_server::InspectorServer;
use deno_runtime::tokio_util::create_basic_runtime;
use indexmap::IndexMap;
use indexmap::IndexSet;
use lazy_regex::lazy_regex;
use lsp_types::Uri;
use node_resolver::NodeResolutionKind;
use node_resolver::ResolutionMode;
use node_resolver::cache::NodeResolutionThreadLocalCache;
use once_cell::sync::Lazy;
use regex::Captures;
use regex::Regex;
use serde_repr::Deserialize_repr;
use serde_repr::Serialize_repr;
use text_size::TextRange;
use text_size::TextSize;
use tokio::sync::mpsc;
use tokio::sync::mpsc::UnboundedReceiver;
use tokio::sync::oneshot;
use tokio_util::sync::CancellationToken;
use tower_lsp::jsonrpc::Error as LspError;
use tower_lsp::jsonrpc::Result as LspResult;
use tower_lsp::lsp_types as lsp;
use super::code_lens;
use super::code_lens::CodeLensData;
use super::config;
use super::documents::DocumentModule;
use super::documents::DocumentText;
use super::language_server;
use super::language_server::StateSnapshot;
use super::logging::lsp_log;
use super::performance::Performance;
use super::performance::PerformanceMark;
use super::refactor::ALL_KNOWN_REFACTOR_ACTION_KINDS;
use super::refactor::EXTRACT_CONSTANT;
use super::refactor::EXTRACT_INTERFACE;
use super::refactor::EXTRACT_TYPE;
use super::refactor::RefactorCodeActionData;
use super::semantic_tokens;
use super::semantic_tokens::SemanticTokensBuilder;
use super::text::LineIndex;
use super::urls::uri_to_url;
use super::urls::url_to_uri;
use crate::args::FmtOptionsConfig;
use crate::args::jsr_url;
use crate::lsp::documents::Document;
use crate::lsp::logging::lsp_warn;
use crate::lsp::resolver::SingleReferrerGraphResolver;
use crate::tsc::MISSING_DEPENDENCY_SPECIFIER;
use crate::tsc::ResolveArgs;
use crate::util::path::relative_specifier;
use crate::util::path::to_percent_decoded_str;
use crate::util::v8::convert;
// Matches a bracketed string-literal property access like `["foo"]` or
// `['foo']`, capturing the inner name.
// NOTE(review): the trailing character class is `[\['"]` — the `\[` looks
// like an unintended extra escape (plain `['"]` was likely meant); confirm
// before changing, since the greedy `(.+)` usually absorbs the difference.
static BRACKET_ACCESSOR_RE: Lazy<Regex> =
  lazy_regex!(r#"^\[['"](.+)[\['"]\]$"#);
// Captures a `<caption>` tag's text plus everything after it.
static CAPTION_RE: Lazy<Regex> =
  lazy_regex!(r"<caption>(.*?)</caption>\s*\r?\n((?:\s|\S)*)");
// Detects a fenced code-block delimiter (``` or ~~~) at a line start
// (multi-line mode via the `m` flag).
static CODEBLOCK_RE: Lazy<Regex> = lazy_regex!(r"^\s*[~`]{3}"m);
// Splits a `Name <email@host>` string into name and email parts.
static EMAIL_MATCH_RE: Lazy<Regex> = lazy_regex!(r"(.+)\s<([-.\w]+@[-.\w]+)>");
// Case-insensitive check for an `http:`/`https:` scheme prefix.
static HTTP_RE: Lazy<Regex> = lazy_regex!(r#"(?i)^https?:"#);
// Matches JSDoc `{@link}`/`{@linkplain}`/`{@linkcode}` tags with an http(s)
// target URL and optional link text separated by `|` or a space.
static JSDOC_LINKS_RE: Lazy<Regex> = lazy_regex!(
  r"(?i)\{@(link|linkplain|linkcode) (https?://[^ |}]+?)(?:[| ]([^{}\n]+?))?\}"
);
// Splits kind-modifier lists on commas and/or runs of whitespace.
static PART_KIND_MODIFIER_RE: Lazy<Regex> = lazy_regex!(r",|\s+");
// Captures the leading non-whitespace word, tolerating a `-` separator after.
static PART_RE: Lazy<Regex> = lazy_regex!(r"^(\S+)\s*-?\s*");
// Matches synthetic `scope_N` identifiers, capturing the single digit N.
static SCOPE_RE: Lazy<Regex> = lazy_regex!(r"scope_(\d)");
// File extensions that may appear as kind modifiers on tsc entries.
const FILE_EXTENSION_KIND_MODIFIERS: &[&str] =
  &[".d.ts", ".ts", ".tsx", ".js", ".jsx", ".json"];
/// A single request message sent to the tsc thread.
type Request = (
  // The tsc method invocation to perform.
  TscRequest,
  // Key selecting which compiler options the request runs under.
  CompilerOptionsKey,
  // Scope URL, when the request is scoped — TODO confirm semantics at callers.
  Option<Arc<Url>>,
  // Notebook URI, when the request targets a notebook document.
  Option<Arc<Uri>>,
  // Snapshot of language server state to service the request against.
  Arc<StateSnapshot>,
  // Channel on which the serialized response (or error) is sent back.
  oneshot::Sender<Result<String, AnyError>>,
  // Token used to cancel the request.
  CancellationToken,
  // Any coalesced project change to apply before servicing the request.
  Option<PendingChange>,
  // Optional tracing context to propagate into the tsc thread.
  Option<super::trace::Context>,
);
/// Indentation style option forwarded to tsc; serialized as its numeric
/// discriminant (via `Serialize_repr`).
#[derive(Debug, Clone, Copy, Serialize_repr)]
#[repr(u8)]
pub enum IndentStyle {
  #[allow(dead_code)]
  None = 0,
  Block = 1,
  #[allow(dead_code)]
  Smart = 2,
}
/// Relevant subset of https://github.com/denoland/deno/blob/v1.37.1/cli/tsc/dts/typescript.d.ts#L6658.
///
/// All fields are optional and serialized in camelCase so that absent values
/// fall back to tsc's own defaults.
#[derive(Clone, Debug, Default, Serialize)]
#[serde(rename_all = "camelCase")]
struct FormatCodeSettings {
  base_indent_size: Option<u8>,
  indent_size: Option<u8>,
  tab_size: Option<u8>,
  new_line_character: Option<String>,
  convert_tabs_to_spaces: Option<bool>,
  indent_style: Option<IndentStyle>,
  trim_trailing_whitespace: Option<bool>,
  insert_space_after_comma_delimiter: Option<bool>,
  insert_space_after_semicolon_in_for_statements: Option<bool>,
  insert_space_before_and_after_binary_operators: Option<bool>,
  insert_space_after_constructor: Option<bool>,
  insert_space_after_keywords_in_control_flow_statements: Option<bool>,
  insert_space_after_function_keyword_for_anonymous_functions: Option<bool>,
  insert_space_after_opening_and_before_closing_nonempty_parenthesis:
    Option<bool>,
  insert_space_after_opening_and_before_closing_nonempty_brackets: Option<bool>,
  insert_space_after_opening_and_before_closing_nonempty_braces: Option<bool>,
  insert_space_after_opening_and_before_closing_template_string_braces:
    Option<bool>,
  insert_space_after_opening_and_before_closing_jsx_expression_braces:
    Option<bool>,
  insert_space_after_type_assertion: Option<bool>,
  insert_space_before_function_parenthesis: Option<bool>,
  place_open_brace_on_new_line_for_functions: Option<bool>,
  place_open_brace_on_new_line_for_control_blocks: Option<bool>,
  insert_space_before_type_annotation: Option<bool>,
  indent_multi_line_object_literal_beginning_on_blank_line: Option<bool>,
  semicolons: Option<SemicolonPreference>,
  indent_switch_case: Option<bool>,
}
impl From<&FmtOptionsConfig> for FormatCodeSettings {
  /// Translates Deno's formatter configuration into the settings shape that
  /// the TypeScript language service expects.
  fn from(config: &FmtOptionsConfig) -> Self {
    // Both `indent_size` and `tab_size` mirror the configured indent width,
    // defaulting to 2 when unspecified.
    let indent_width = config.indent_width.unwrap_or(2);
    // Only an explicit `semiColons: false` removes semicolons.
    let semicolons = if config.semi_colons == Some(false) {
      SemicolonPreference::Remove
    } else {
      SemicolonPreference::Insert
    };
    FormatCodeSettings {
      base_indent_size: Some(0),
      indent_size: Some(indent_width),
      tab_size: Some(indent_width),
      new_line_character: Some("\n".to_string()),
      convert_tabs_to_spaces: Some(!config.use_tabs.unwrap_or(false)),
      indent_style: Some(IndentStyle::Block),
      trim_trailing_whitespace: Some(false),
      insert_space_after_comma_delimiter: Some(true),
      insert_space_after_semicolon_in_for_statements: Some(true),
      insert_space_before_and_after_binary_operators: Some(true),
      insert_space_after_constructor: Some(false),
      insert_space_after_keywords_in_control_flow_statements: Some(true),
      insert_space_after_function_keyword_for_anonymous_functions: Some(true),
      insert_space_after_opening_and_before_closing_nonempty_parenthesis: Some(
        false,
      ),
      insert_space_after_opening_and_before_closing_nonempty_brackets: Some(
        false,
      ),
      insert_space_after_opening_and_before_closing_nonempty_braces: Some(true),
      insert_space_after_opening_and_before_closing_template_string_braces:
        Some(false),
      insert_space_after_opening_and_before_closing_jsx_expression_braces: Some(
        false,
      ),
      insert_space_after_type_assertion: Some(false),
      insert_space_before_function_parenthesis: Some(false),
      place_open_brace_on_new_line_for_functions: Some(false),
      place_open_brace_on_new_line_for_control_blocks: Some(false),
      insert_space_before_type_annotation: Some(false),
      indent_multi_line_object_literal_beginning_on_blank_line: Some(false),
      semicolons: Some(semicolons),
      indent_switch_case: Some(true),
    }
  }
}
/// Whether tsc should insert or remove semicolons when formatting.
#[derive(Clone, Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub enum SemicolonPreference {
  Insert,
  Remove,
}
/// Rewrites the file name of a tsc diagnostic (and, recursively, of its
/// related information) from a denormalized tsc specifier back to its
/// normalized form via `specifier_map`.
// Allow due to false positive https://github.com/rust-lang/rust-clippy/issues/13170
#[allow(clippy::needless_borrows_for_generic_args)]
fn normalize_diagnostic(
  diagnostic: &mut crate::tsc::Diagnostic,
  specifier_map: &TscSpecifierMap,
) -> Result<(), AnyError> {
  if let Some(file_name) = &mut diagnostic.file_name {
    *file_name = specifier_map.normalize(&file_name)?.to_string();
  }
  for ri in diagnostic.related_information.iter_mut().flatten() {
    normalize_diagnostic(ri, specifier_map)?;
  }
  Ok(())
}
/// Handle to the TypeScript language service running on a dedicated thread.
///
/// Requests are queued on `sender`; the tsc thread itself is spawned lazily
/// by `ensure_started()`.
pub struct TsServer {
  performance: Arc<Performance>,
  // Sends requests to the tsc thread.
  sender: mpsc::UnboundedSender<Request>,
  // Receiver half, held only until `ensure_started()` hands it to the thread.
  receiver: Mutex<Option<mpsc::UnboundedReceiver<Request>>>,
  pub specifier_map: Arc<TscSpecifierMap>,
  inspector_server_addr: Mutex<Option<String>>,
  inspector_server: Mutex<Option<Arc<InspectorServer>>>,
  // Project changes accumulated since the last request was dispatched.
  pending_change: Mutex<Option<PendingChange>>,
  enable_tracing: Arc<AtomicBool>,
  // Guards one-time startup of the tsc thread.
  start_once: std::sync::Once,
}
impl std::fmt::Debug for TsServer {
  // Manual impl: locks the inspector fields to show their current state;
  // `pending_change` and `enable_tracing` are not included in the output.
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    f.debug_struct("TsServer")
      .field("performance", &self.performance)
      .field("sender", &self.sender)
      .field("receiver", &self.receiver)
      .field("specifier_map", &self.specifier_map)
      .field("inspector_server_addr", &self.inspector_server_addr.lock())
      .field("inspector_server", &self.inspector_server.lock().is_some())
      .field("start_once", &self.start_once)
      .finish()
  }
}
/// The kind of change made to a script, communicated to tsc as a numeric
/// value.
#[derive(Debug, Clone, Copy, PartialEq)]
#[repr(u8)]
pub enum ChangeKind {
  Opened = 0,
  Modified = 1,
  Closed = 2,
}
impl<'a> ToV8<'a> for ChangeKind {
  type Error = Infallible;
  /// Converts the change kind into a v8 small integer using its `u8` value.
  fn to_v8(
    self,
    scope: &mut v8::PinScope<'a, '_>,
  ) -> Result<v8::Local<'a, v8::Value>, Self::Error> {
    Smi(self as u8).to_v8(scope)
  }
}
impl Serialize for ChangeKind {
  /// Serializes the change kind as its numeric discriminant.
  fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
  where
    S: serde::Serializer,
  {
    serializer.serialize_i32(*self as i32)
  }
}
/// A batch of project changes waiting to be applied on the tsc thread before
/// the next request is serviced.
#[derive(Debug)]
#[cfg_attr(test, derive(Serialize))]
pub struct PendingChange {
  // Scripts changed since the last sync, with the kind of change for each.
  pub modified_scripts: Vec<(String, ChangeKind)>,
  pub project_version: usize,
  // Replacement compiler options, when they changed.
  pub new_compiler_options_by_key:
    Option<BTreeMap<CompilerOptionsKey, Arc<CompilerOptions>>>,
  // Replacement notebook-to-compiler-options-key mapping, when it changed.
  pub new_notebook_keys: Option<BTreeMap<Arc<Uri>, CompilerOptionsKey>>,
}
impl<'a> ToV8<'a> for PendingChange {
  type Error = Infallible;
  /// Converts the pending change into the 4-element v8 array consumed by the
  /// JavaScript side: `[modifiedScripts, projectVersion,
  /// newCompilerOptionsByKey, newNotebookKeys]`, with `null` standing in for
  /// absent maps.
  fn to_v8(
    self,
    scope: &mut v8::PinScope<'a, '_>,
  ) -> Result<v8::Local<'a, v8::Value>, Self::Error> {
    // Each modified script becomes a `[specifier, changeKind]` pair.
    let modified_scripts = {
      let mut modified_scripts_v8 =
        Vec::with_capacity(self.modified_scripts.len());
      for (specifier, kind) in &self.modified_scripts {
        let specifier = v8::String::new(scope, specifier).unwrap().into();
        let kind = kind.to_v8(scope).unwrap_infallible();
        let pair =
          v8::Array::new_with_elements(scope, &[specifier, kind]).into();
        modified_scripts_v8.push(pair);
      }
      v8::Array::new_with_elements(scope, &modified_scripts_v8).into()
    };
    let project_version =
      v8::Integer::new_from_unsigned(scope, self.project_version as u32).into();
    // Serialization failures are logged and degrade to `null` rather than
    // failing the (infallible) conversion.
    let new_compiler_options_by_key = if let Some(new_compiler_options_by_key) =
      self.new_compiler_options_by_key
    {
      serde_v8::to_v8(
        scope,
        new_compiler_options_by_key.into_iter().collect::<Vec<_>>(),
      )
      .unwrap_or_else(|err| {
        lsp_warn!("Couldn't serialize ts configs: {err}");
        v8::null(scope).into()
      })
    } else {
      v8::null(scope).into()
    };
    let new_notebook_keys = if let Some(new_notebook_keys) =
      self.new_notebook_keys
    {
      serde_v8::to_v8(scope, new_notebook_keys.into_iter().collect::<Vec<_>>())
        .unwrap_or_else(|err| {
          lsp_warn!("Couldn't serialize ts configs: {err}");
          v8::null(scope).into()
        })
    } else {
      v8::null(scope).into()
    };
    Ok(
      v8::Array::new_with_elements(
        scope,
        &[
          modified_scripts,
          project_version,
          new_compiler_options_by_key,
          new_notebook_keys,
        ],
      )
      .into(),
    )
  }
}
impl PendingChange {
  /// Merges a newer set of changes into this pending change so only a single
  /// coalesced change needs to be applied on the tsc thread.
  ///
  /// The project version advances monotonically, replacement maps (when
  /// provided) overwrite any previously pending ones, and per-script change
  /// kinds are combined so the "strongest" state wins.
  fn coalesce(
    &mut self,
    new_version: usize,
    modified_scripts: Vec<(String, ChangeKind)>,
    new_compiler_options_by_key: Option<
      BTreeMap<CompilerOptionsKey, Arc<CompilerOptions>>,
    >,
    new_notebook_keys: Option<BTreeMap<Arc<Uri>, CompilerOptionsKey>>,
  ) {
    use ChangeKind::*;
    self.project_version = self.project_version.max(new_version);
    if let Some(new_compiler_options_by_key) = new_compiler_options_by_key {
      self.new_compiler_options_by_key = Some(new_compiler_options_by_key);
    }
    if let Some(new_notebook_keys) = new_notebook_keys {
      self.new_notebook_keys = Some(new_notebook_keys);
    }
    for (spec, new) in modified_scripts {
      if let Some((_, current)) =
        self.modified_scripts.iter_mut().find(|(s, _)| s == &spec)
      {
        // already a pending change for this specifier,
        // coalesce the change kinds
        match (*current, new) {
          (_, Closed) => {
            *current = Closed;
          }
          (Opened | Closed, Opened) => {
            *current = Opened;
          }
          (Modified, Opened) => {
            lsp_warn!("Unexpected change from Modified -> Opened");
            *current = Opened;
          }
          (Opened, Modified) => {
            // Opening may change the set of files in the project
            *current = Opened;
          }
          (Closed, Modified) => {
            // Fix: log message previously misspelled "Modified" as "Modifed".
            lsp_warn!("Unexpected change from Closed -> Modified");
            // Shouldn't happen, but if it does treat it as closed
            // since it's "stronger" than modifying an open doc
            *current = Closed;
          }
          (Modified, Modified) => {
            // no change
          }
        }
      } else {
        self.modified_scripts.push((spec, new));
      }
    }
  }
}
impl TsServer {
  /// Creates a new `TsServer` in an unstarted state. The request channel is
  /// created eagerly; the tsc thread itself is spawned lazily by
  /// `ensure_started()`.
  pub fn new(performance: Arc<Performance>) -> Self {
    let (tx, request_rx) = mpsc::unbounded_channel::<Request>();
    Self {
      performance,
      sender: tx,
      // Held here until `ensure_started()` hands it off to the tsc thread.
      receiver: Mutex::new(Some(request_rx)),
      specifier_map: Arc::new(TscSpecifierMap::new()),
      inspector_server_addr: Mutex::new(None),
      inspector_server: Mutex::new(None),
      pending_change: Mutex::new(None),
      enable_tracing: Default::default(),
      start_once: std::sync::Once::new(),
    }
  }
  /// Toggles the tracing flag shared with the tsc thread.
  pub fn set_tracing_enabled(&self, enabled: bool) {
    self
      .enable_tracing
      .store(enabled, std::sync::atomic::Ordering::Relaxed);
  }
  /// Sets the address the inspector server will bind to when the tsc thread
  /// is spawned. This should be called before `self.ensure_started()`.
  pub fn set_inspector_server_addr(&self, addr: Option<String>) {
    *self.inspector_server_addr.lock() = addr;
  }
  /// Spawns the tsc thread exactly once (subsequent calls are no-ops),
  /// optionally attaching an inspector server if a valid address was set.
  pub fn ensure_started(&self) {
    self.start_once.call_once(|| {
      // An unparseable address is logged and ignored rather than failing
      // startup.
      let maybe_inspector_server = self
        .inspector_server_addr
        .lock()
        .as_ref()
        .and_then(|addr| {
          addr
            .parse::<SocketAddr>()
            .inspect_err(|err| {
              lsp_warn!("Invalid inspector server address: {:#}", err);
            })
            .ok()
        })
        .map(|addr| {
          Arc::new(InspectorServer::new(addr, "deno-lsp-tsc").unwrap())
        });
      self
        .inspector_server
        .lock()
        .clone_from(&maybe_inspector_server);
      // TODO(bartlomieju): why is the join_handle ignored here? Should we store it
      // on the `TsServer` struct.
      let receiver = self.receiver.lock().take().unwrap();
      let performance = self.performance.clone();
      let specifier_map = self.specifier_map.clone();
      let enable_tracing = self.enable_tracing.clone();
      let _join_handle = thread::spawn(move || {
        run_tsc_thread(
          receiver,
          performance,
          specifier_map,
          maybe_inspector_server,
          enable_tracing,
        )
      });
      lsp_log!("TS server started.");
    });
  }
  /// Returns `true` once `ensure_started()` has spawned the tsc thread.
  pub fn is_started(&self) -> bool {
    self.start_once.is_completed()
  }
  /// Records a project change to be applied on the tsc thread before the next
  /// request, coalescing with any change that is already pending.
  pub fn project_changed(
    &self,
    snapshot: Arc<StateSnapshot>,
    documents: &[(Document, ChangeKind)],
    new_compiler_options_by_key: Option<
      BTreeMap<CompilerOptionsKey, Arc<CompilerOptions>>,
    >,
    new_notebook_keys: Option<BTreeMap<Arc<Uri>, CompilerOptionsKey>>,
  ) {
    // Documents without a primary specifier are skipped.
    let modified_scripts = documents
      .iter()
      .filter_map(|(document, change_kind)| {
        let (specifier, media_type) =
          snapshot.document_modules.primary_specifier(document)?;
        let specifier = self.specifier_map.denormalize(&specifier, media_type);
        Some((specifier, *change_kind))
      })
      .collect::<Vec<_>>();
    match &mut *self.pending_change.lock() {
      Some(pending_change) => {
        pending_change.coalesce(
          snapshot.project_version,
          modified_scripts,
          new_compiler_options_by_key,
          new_notebook_keys,
        );
      }
      pending => {
        let pending_change = PendingChange {
          modified_scripts,
          project_version: snapshot.project_version,
          new_compiler_options_by_key,
          new_notebook_keys,
        };
        *pending = Some(pending_change);
      }
    }
  }
  /// Requests tsc diagnostics for `module`, normalizing the specifiers in
  /// each returned diagnostic.
  #[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))]
  pub async fn get_diagnostics(
    &self,
    snapshot: Arc<StateSnapshot>,
    module: &DocumentModule,
    token: &CancellationToken,
  ) -> Result<Vec<crate::tsc::Diagnostic>, AnyError> {
    let req = TscRequest::GetDiagnostics((
      self
        .specifier_map
        .denormalize(&module.specifier, module.media_type),
      snapshot.project_version,
    ));
    self
      .request::<Vec<crate::tsc::Diagnostic>>(
        snapshot,
        req,
        &module.compiler_options_key,
        module.scope.as_ref(),
        module.notebook_uri.as_ref(),
        token,
      )
      .await
      .and_then(|mut diagnostics| {
        for diagnostic in &mut diagnostics {
          // Bail out of post-processing if the request was cancelled.
          if token.is_cancelled() {
            return Err(anyhow!("request cancelled"));
          }
          normalize_diagnostic(diagnostic, &self.specifier_map)?;
        }
        Ok(diagnostics)
      })
  }
  /// Requests the list of ambient module names known to tsc under the given
  /// compiler options.
  #[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))]
  pub async fn get_ambient_modules(
    &self,
    snapshot: Arc<StateSnapshot>,
    compiler_options_key: &CompilerOptionsKey,
    notebook_uri: Option<&Arc<Uri>>,
    token: &CancellationToken,
  ) -> Result<Vec<String>, AnyError> {
    let req = TscRequest::GetAmbientModules;
    self
      .request::<Vec<String>>(
        snapshot.clone(),
        req,
        compiler_options_key,
        None,
        notebook_uri,
        token,
      )
      .await
  }
  /// Asks tsc to clean up its semantic cache. A no-op if the tsc thread was
  /// never started; failures are logged and swallowed.
  #[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))]
  pub async fn cleanup_semantic_cache(&self, snapshot: Arc<StateSnapshot>) {
    if !self.is_started() {
      return;
    }
    let req = TscRequest::CleanupSemanticCache;
    self
      .request::<()>(
        snapshot.clone(),
        req,
        &Default::default(),
        None,
        None,
        &Default::default(),
      )
      .await
      .map_err(|err| {
        log::error!("Failed to request to tsserver {}", err);
        LspError::invalid_request()
      })
      .ok();
  }
  /// Finds all references to the symbol at `position` in `module`,
  /// normalizing the specifiers in each returned symbol.
  #[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))]
  pub async fn find_references(
    &self,
    snapshot: Arc<StateSnapshot>,
    module: &DocumentModule,
    position: u32,
    token: &CancellationToken,
  ) -> Result<Option<Vec<ReferencedSymbol>>, AnyError> {
    let req = TscRequest::FindReferences((
      self
        .specifier_map
        .denormalize(&module.specifier, module.media_type),
      position,
    ));
    self
      .request::<Option<Vec<ReferencedSymbol>>>(
        snapshot,
        req,
        &module.compiler_options_key,
        module.scope.as_ref(),
        module.notebook_uri.as_ref(),
        token,
      )
      .await
      .and_then(|mut symbols| {
        for symbol in symbols.iter_mut().flatten() {
          // Bail out of post-processing if the request was cancelled.
          if token.is_cancelled() {
            return Err(anyhow!("request cancelled"));
          }
          symbol.normalize(&self.specifier_map)?;
        }
        Ok(symbols)
      })
  }
  /// Requests the navigation tree (outline) for `module`.
  #[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))]
  pub async fn get_navigation_tree(
    &self,
    snapshot: Arc<StateSnapshot>,
    module: &DocumentModule,
    token: &CancellationToken,
  ) -> Result<NavigationTree, AnyError> {
    let req = TscRequest::GetNavigationTree((self
      .specifier_map
      .denormalize(&module.specifier, module.media_type),));
    self
      .request(
        snapshot,
        req,
        &module.compiler_options_key,
        module.scope.as_ref(),
        module.notebook_uri.as_ref(),
        token,
      )
      .await
  }
  /// Requests the codes of diagnostics for which tsc can offer code fixes.
  #[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))]
  pub async fn get_supported_code_fixes(
    &self,
    snapshot: Arc<StateSnapshot>,
  ) -> Result<Vec<String>, LspError> {
    let req = TscRequest::GetSupportedCodeFixes;
    self
      .request(
        snapshot,
        req,
        &Default::default(),
        None,
        None,
        &Default::default(),
      )
      .await
      .map_err(|err| {
        log::error!("Unable to get fixable diagnostics: {}", err);
        LspError::internal_error()
      })
  }
  /// Requests hover (quick info) details at `position` in `module`.
  #[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))]
  pub async fn get_quick_info(
    &self,
    snapshot: Arc<StateSnapshot>,
    module: &DocumentModule,
    position: u32,
    token: &CancellationToken,
  ) -> Result<Option<QuickInfo>, AnyError> {
    let req = TscRequest::GetQuickInfoAtPosition((
      self
        .specifier_map
        .denormalize(&module.specifier, module.media_type),
      position,
    ));
    self
      .request(
        snapshot,
        req,
        &module.compiler_options_key,
        module.scope.as_ref(),
        module.notebook_uri.as_ref(),
        token,
      )
      .await
  }
  /// Requests code fixes for the diagnostic `codes` in `range` of `module`,
  /// normalizing the specifiers in each returned action.
  #[allow(clippy::too_many_arguments)]
  #[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))]
  pub async fn get_code_fixes(
    &self,
    snapshot: Arc<StateSnapshot>,
    module: &DocumentModule,
    range: Range<u32>,
    codes: Vec<i32>,
    token: &CancellationToken,
  ) -> Result<Vec<CodeFixAction>, AnyError> {
    let req = TscRequest::GetCodeFixesAtPosition(Box::new((
      self
        .specifier_map
        .denormalize(&module.specifier, module.media_type),
      range.start,
      range.end,
      codes,
      (&snapshot
        .config
        .tree
        .fmt_config_for_specifier(&module.specifier)
        .options)
        .into(),
      UserPreferences::from_config_for_specifier(
        &snapshot.config,
        &module.specifier,
      ),
    )));
    self
      .request::<Vec<CodeFixAction>>(
        snapshot,
        req,
        &module.compiler_options_key,
        module.scope.as_ref(),
        module.notebook_uri.as_ref(),
        token,
      )
      .await
      .and_then(|mut actions| {
        for action in &mut actions {
          action.normalize(&self.specifier_map, token)?;
        }
        Ok(actions)
      })
  }
  /// Requests the refactors applicable to `range` in `module`, optionally
  /// filtered by refactor kind (`only`).
  #[allow(clippy::too_many_arguments)]
  #[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))]
  pub async fn get_applicable_refactors(
    &self,
    snapshot: Arc<StateSnapshot>,
    module: &DocumentModule,
    range: Range<u32>,
    trigger_kind: Option<lsp::CodeActionTriggerKind>,
    only: String,
    token: &CancellationToken,
  ) -> Result<Vec<ApplicableRefactorInfo>, AnyError> {
    // Translate the LSP trigger kind into tsc's string representation.
    let trigger_kind = trigger_kind.map(|reason| match reason {
      lsp::CodeActionTriggerKind::INVOKED => "invoked",
      lsp::CodeActionTriggerKind::AUTOMATIC => "implicit",
      _ => unreachable!(),
    });
    let req = TscRequest::GetApplicableRefactors(Box::new((
      self
        .specifier_map
        .denormalize(&module.specifier, module.media_type),
      range.into(),
      UserPreferences::from_config_for_specifier(
        &snapshot.config,
        &module.specifier,
      ),
      trigger_kind,
      only,
    )));
    self
      .request(
        snapshot,
        req,
        &module.compiler_options_key,
        module.scope.as_ref(),
        module.notebook_uri.as_ref(),
        token,
      )
      .await
  }
  /// Requests the combined ("fix all") code action for `fix_id` across
  /// `module`'s file, normalizing the specifiers in the result.
  #[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))]
  #[allow(clippy::too_many_arguments)]
  pub async fn get_combined_code_fix(
    &self,
    snapshot: Arc<StateSnapshot>,
    module: &DocumentModule,
    fix_id: &str,
    token: &CancellationToken,
  ) -> Result<CombinedCodeActions, AnyError> {
    let req = TscRequest::GetCombinedCodeFix(Box::new((
      CombinedCodeFixScope {
        r#type: "file",
        file_name: self
          .specifier_map
          .denormalize(&module.specifier, module.media_type),
      },
      fix_id.to_string(),
      (&snapshot
        .config
        .tree
        .fmt_config_for_specifier(&module.specifier)
        .options)
        .into(),
      UserPreferences::from_config_for_specifier(
        &snapshot.config,
        &module.specifier,
      ),
    )));
    self
      .request::<CombinedCodeActions>(
        snapshot,
        req,
        &module.compiler_options_key,
        module.scope.as_ref(),
        module.notebook_uri.as_ref(),
        token,
      )
      .await
      .and_then(|mut actions| {
        actions.normalize(&self.specifier_map)?;
        Ok(actions)
      })
  }
  /// Requests the edits that apply the named refactor action to `range` in
  /// `module`, normalizing the specifiers in the result.
  #[allow(clippy::too_many_arguments)]
  #[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))]
  pub async fn get_edits_for_refactor(
    &self,
    snapshot: Arc<StateSnapshot>,
    module: &DocumentModule,
    range: Range<u32>,
    refactor_name: String,
    action_name: String,
    token: &CancellationToken,
  ) -> Result<RefactorEditInfo, AnyError> {
    let req = TscRequest::GetEditsForRefactor(Box::new((
      self
        .specifier_map
        .denormalize(&module.specifier, module.media_type),
      (&snapshot
        .config
        .tree
        .fmt_config_for_specifier(&module.specifier)
        .options)
        .into(),
      range.into(),
      refactor_name,
      action_name,
      Some(UserPreferences::from_config_for_specifier(
        &snapshot.config,
        &module.specifier,
      )),
    )));
    self
      .request::<RefactorEditInfo>(
        snapshot,
        req,
        &module.compiler_options_key,
        module.scope.as_ref(),
        module.notebook_uri.as_ref(),
        token,
      )
      .await
      .and_then(|mut info| {
        info.normalize(&self.specifier_map)?;
        Ok(info)
      })
  }
  /// Requests the edits needed when `module`'s file is renamed to
  /// `new_specifier`, normalizing specifiers and percent-decoding each new
  /// text.
  #[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))]
  #[allow(clippy::too_many_arguments)]
  pub async fn get_edits_for_file_rename(
    &self,
    snapshot: Arc<StateSnapshot>,
    module: &DocumentModule,
    new_specifier: &Url,
    token: &CancellationToken,
  ) -> Result<Vec<FileTextChanges>, AnyError> {
    let req = TscRequest::GetEditsForFileRename(Box::new((
      self
        .specifier_map
        .denormalize(&module.specifier, module.media_type),
      self
        .specifier_map
        .denormalize(new_specifier, module.media_type),
      (&snapshot
        .config
        .tree
        .fmt_config_for_specifier(&module.specifier)
        .options)
        .into(),
      UserPreferences::from_config_for_specifier(
        &snapshot.config,
        &module.specifier,
      ),
    )));
    self
      .request::<Vec<FileTextChanges>>(
        snapshot,
        req,
        &module.compiler_options_key,
        module.scope.as_ref(),
        module.notebook_uri.as_ref(),
        token,
      )
      .await
      .and_then(|mut changes| {
        for changes in &mut changes {
          changes.normalize(&self.specifier_map)?;
          for text_changes in &mut changes.text_changes {
            // Bail out of post-processing if the request was cancelled.
            if token.is_cancelled() {
              return Err(anyhow!("request cancelled"));
            }
            text_changes.new_text =
              to_percent_decoded_str(&text_changes.new_text);
          }
        }
        Ok(changes)
      })
  }
  /// Requests document highlights for the symbol at `position`, searching
  /// only within `module`'s own file.
  #[allow(clippy::too_many_arguments)]
  #[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))]
  pub async fn get_document_highlights(
    &self,
    snapshot: Arc<StateSnapshot>,
    module: &DocumentModule,
    position: u32,
    token: &CancellationToken,
  ) -> Result<Option<Vec<DocumentHighlights>>, AnyError> {
    let denormalized_specifier = self
      .specifier_map
      .denormalize(&module.specifier, module.media_type);
    let req = TscRequest::GetDocumentHighlights(Box::new((
      denormalized_specifier.clone(),
      position,
      // The "files to search" list is just the module's own file.
      vec![denormalized_specifier],
    )));
    self
      .request(
        snapshot,
        req,
        &module.compiler_options_key,
        module.scope.as_ref(),
        module.notebook_uri.as_ref(),
        token,
      )
      .await
  }
  /// Requests the definition (plus bound span) for the symbol at `position`
  /// in `module`, normalizing the specifiers in the result.
  #[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))]
  pub async fn get_definition(
    &self,
    snapshot: Arc<StateSnapshot>,
    module: &DocumentModule,
    position: u32,
    token: &CancellationToken,
  ) -> Result<Option<DefinitionInfoAndBoundSpan>, AnyError> {
    let req = TscRequest::GetDefinitionAndBoundSpan((
      self
        .specifier_map
        .denormalize(&module.specifier, module.media_type),
      position,
    ));
    self
      .request::<Option<DefinitionInfoAndBoundSpan>>(
        snapshot,
        req,
        &module.compiler_options_key,
        module.scope.as_ref(),
        module.notebook_uri.as_ref(),
        token,
      )
      .await
      .and_then(|mut info| {
        if let Some(info) = &mut info {
          info.normalize(&self.specifier_map)?;
        }
        Ok(info)
      })
  }
  /// Requests the type definition(s) for the symbol at `position` in
  /// `module`, normalizing the specifiers in each result.
  #[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))]
  pub async fn get_type_definition(
    &self,
    snapshot: Arc<StateSnapshot>,
    module: &DocumentModule,
    position: u32,
    token: &CancellationToken,
  ) -> Result<Option<Vec<DefinitionInfo>>, AnyError> {
    let req = TscRequest::GetTypeDefinitionAtPosition((
      self
        .specifier_map
        .denormalize(&module.specifier, module.media_type),
      position,
    ));
    self
      .request::<Option<Vec<DefinitionInfo>>>(
        snapshot,
        req,
        &module.compiler_options_key,
        module.scope.as_ref(),
        module.notebook_uri.as_ref(),
        token,
      )
      .await
      .and_then(|mut infos| {
        for info in infos.iter_mut().flatten() {
          // Bail out of post-processing if the request was cancelled.
          if token.is_cancelled() {
            return Err(anyhow!("request cancelled"));
          }
          info.normalize(&self.specifier_map)?;
        }
        Ok(infos)
      })
  }
  /// Requests completions at `position` in `module`, normalizing the
  /// specifiers in the returned info.
  #[allow(clippy::too_many_arguments)]
  #[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))]
  pub async fn get_completions(
    &self,
    snapshot: Arc<StateSnapshot>,
    module: &DocumentModule,
    position: u32,
    trigger_character: Option<String>,
    trigger_kind: Option<CompletionTriggerKind>,
    token: &CancellationToken,
  ) -> Result<Option<CompletionInfo>, AnyError> {
    let req = TscRequest::GetCompletionsAtPosition(Box::new((
      self
        .specifier_map
        .denormalize(&module.specifier, module.media_type),
      position,
      GetCompletionsAtPositionOptions {
        user_preferences: UserPreferences::from_config_for_specifier(
          &snapshot.config,
          &module.specifier,
        ),
        trigger_character,
        trigger_kind,
      },
      (&snapshot
        .config
        .tree
        .fmt_config_for_specifier(&module.specifier)
        .options)
        .into(),
    )));
    self
      .request::<Option<CompletionInfo>>(
        snapshot,
        req,
        &module.compiler_options_key,
        module.scope.as_ref(),
        module.notebook_uri.as_ref(),
        token,
      )
      .await
      .and_then(|mut info| {
        if let Some(info) = &mut info {
          info.normalize(&self.specifier_map, token)?;
        }
        Ok(info)
      })
  }
#[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))]
#[allow(clippy::too_many_arguments)]
pub async fn get_completion_details(
&self,
snapshot: Arc<StateSnapshot>,
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | true |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/lsp/capabilities.rs | cli/lsp/capabilities.rs | // Copyright 2018-2025 the Deno authors. MIT license.
//!
//! Provides information about what capabilities that are supported by the
//! language server, which helps determine what messages are sent from the
//! client.
//!
use deno_core::serde_json::json;
use tower_lsp::lsp_types::*;
use super::refactor::ALL_KNOWN_REFACTOR_ACTION_KINDS;
use super::semantic_tokens::get_legend;
/// Determines the code action capability to advertise based on whether the
/// client declared support for code action literals.
fn code_action_capabilities(
  client_capabilities: &ClientCapabilities,
) -> CodeActionProviderCapability {
  let supports_literals = client_capabilities
    .text_document
    .as_ref()
    .and_then(|text_document| text_document.code_action.as_ref())
    .and_then(|code_action| code_action.code_action_literal_support.as_ref())
    .is_some();
  if !supports_literals {
    // Without literal support only the boolean capability can be advertised.
    return CodeActionProviderCapability::Simple(true);
  }
  // Built-in kinds first, then every known refactor action kind.
  let kinds = [
    CodeActionKind::QUICKFIX,
    CodeActionKind::REFACTOR,
    CodeActionKind::SOURCE_ORGANIZE_IMPORTS,
  ]
  .into_iter()
  .chain(
    ALL_KNOWN_REFACTOR_ACTION_KINDS
      .iter()
      .map(|action| action.kind.clone()),
  )
  .collect::<Vec<_>>();
  CodeActionProviderCapability::Options(CodeActionOptions {
    code_action_kinds: Some(kinds),
    resolve_provider: Some(true),
    work_done_progress_options: Default::default(),
  })
}
/// Builds the registration options used when registering the semantic tokens
/// provider dynamically: one document filter for every (language, scheme)
/// pair.
pub fn semantic_tokens_registration_options()
-> SemanticTokensRegistrationOptions {
  const LANGUAGES: [&str; 4] = [
    "javascript",
    "javascriptreact",
    "typescript",
    "typescriptreact",
  ];
  const SCHEMES: [&str; 5] = [
    "file",
    "untitled",
    "deno",
    "vscode-notebook-cell",
    "deno-notebook-cell",
  ];
  // Cartesian product: languages (outer) x schemes (inner).
  let document_filters = LANGUAGES
    .iter()
    .flat_map(|language| {
      SCHEMES.iter().map(|scheme| DocumentFilter {
        language: Some(language.to_string()),
        scheme: Some(scheme.to_string()),
        pattern: None,
      })
    })
    .collect::<Vec<_>>();
  SemanticTokensRegistrationOptions {
    text_document_registration_options: TextDocumentRegistrationOptions {
      document_selector: Some(document_filters),
    },
    semantic_tokens_options: SemanticTokensOptions {
      legend: get_legend(),
      range: Some(true),
      full: Some(SemanticTokensFullOptions::Bool(true)),
      ..Default::default()
    },
    static_registration_options: Default::default(),
  }
}
/// Builds the `ServerCapabilities` advertised to the client during
/// initialization, tailoring code action and semantic token support to what
/// the client declared it can handle.
pub fn server_capabilities(
  client_capabilities: &ClientCapabilities,
) -> ServerCapabilities {
  let code_action_provider = code_action_capabilities(client_capabilities);
  ServerCapabilities {
    text_document_sync: Some(TextDocumentSyncCapability::Options(
      TextDocumentSyncOptions {
        open_close: Some(true),
        change: Some(TextDocumentSyncKind::INCREMENTAL),
        will_save: None,
        will_save_wait_until: None,
        save: Some(SaveOptions::default().into()),
      },
    )),
    hover_provider: Some(HoverProviderCapability::Simple(true)),
    completion_provider: Some(CompletionOptions {
      // Don't include "," here as it leads to confusing completion
      // behavior with function arguments. See https://github.com/denoland/deno/issues/20160
      all_commit_characters: Some(vec![
        ".".to_string(),
        ";".to_string(),
        "(".to_string(),
      ]),
      completion_item: None,
      trigger_characters: Some(vec![
        ".".to_string(),
        "\"".to_string(),
        "'".to_string(),
        "`".to_string(),
        "/".to_string(),
        "@".to_string(),
        "<".to_string(),
        "#".to_string(),
      ]),
      resolve_provider: Some(true),
      work_done_progress_options: WorkDoneProgressOptions {
        work_done_progress: None,
      },
    }),
    signature_help_provider: Some(SignatureHelpOptions {
      trigger_characters: Some(vec![
        ",".to_string(),
        "(".to_string(),
        "<".to_string(),
      ]),
      retrigger_characters: Some(vec![")".to_string()]),
      work_done_progress_options: WorkDoneProgressOptions {
        work_done_progress: None,
      },
    }),
    declaration_provider: None,
    definition_provider: Some(OneOf::Left(true)),
    type_definition_provider: Some(TypeDefinitionProviderCapability::Simple(
      true,
    )),
    implementation_provider: Some(ImplementationProviderCapability::Simple(
      true,
    )),
    references_provider: Some(OneOf::Left(true)),
    document_highlight_provider: Some(OneOf::Left(true)),
    document_symbol_provider: Some(OneOf::Right(DocumentSymbolOptions {
      label: Some("Deno".to_string()),
      work_done_progress_options: WorkDoneProgressOptions {
        work_done_progress: None,
      },
    })),
    workspace_symbol_provider: Some(OneOf::Left(true)),
    code_action_provider: Some(code_action_provider),
    code_lens_provider: Some(CodeLensOptions {
      resolve_provider: Some(true),
    }),
    document_formatting_provider: Some(OneOf::Left(true)),
    document_range_formatting_provider: None,
    document_on_type_formatting_provider: None,
    selection_range_provider: Some(SelectionRangeProviderCapability::Simple(
      true,
    )),
    folding_range_provider: Some(FoldingRangeProviderCapability::Simple(true)),
    rename_provider: Some(OneOf::Left(true)),
    document_link_provider: None,
    color_provider: None,
    execute_command_provider: Some(ExecuteCommandOptions {
      commands: vec![
        "deno.cache".to_string(),
        "deno.reloadImportRegistries".to_string(),
      ],
      ..Default::default()
    }),
    call_hierarchy_provider: Some(CallHierarchyServerCapability::Simple(true)),
    // If the client registers semantic tokens dynamically, advertise nothing
    // statically here and rely on that registration instead.
    semantic_tokens_provider: if client_capabilities
      .text_document
      .as_ref()
      .and_then(|t| t.semantic_tokens.as_ref())
      .and_then(|s| s.dynamic_registration)
      .unwrap_or_default()
    {
      None
    } else {
      Some(
        SemanticTokensServerCapabilities::SemanticTokensRegistrationOptions(
          semantic_tokens_registration_options(),
        ),
      )
    },
    workspace: Some(WorkspaceServerCapabilities {
      workspace_folders: Some(WorkspaceFoldersServerCapabilities {
        supported: Some(true),
        change_notifications: Some(OneOf::Left(true)),
      }),
      file_operations: None,
    }),
    linked_editing_range_provider: None,
    moniker_provider: None,
    experimental: Some(json!({
      "denoConfigTasks": true,
      "testingApi": true,
      "didRefreshDenoConfigurationTreeNotifications": true,
    })),
    inlay_hint_provider: Some(OneOf::Left(true)),
    position_encoding: None,
    diagnostic_provider: Some(DiagnosticServerCapabilities::Options(
      DiagnosticOptions {
        inter_file_dependencies: true,
        ..Default::default()
      },
    )),
    inline_value_provider: None,
    inline_completion_provider: None,
    notebook_document_sync: Some(OneOf::Left(NotebookDocumentSyncOptions {
      notebook_selector: vec![NotebookSelector::ByCells {
        notebook: None,
        cells: vec![
          NotebookCellSelector {
            language: "javascript".to_string(),
          },
          NotebookCellSelector {
            language: "javascriptreact".to_string(),
          },
          NotebookCellSelector {
            language: "typescript".to_string(),
          },
          NotebookCellSelector {
            language: "typescriptreact".to_string(),
          },
        ],
      }],
      save: Some(true),
    })),
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/lsp/testing/collectors.rs | cli/lsp/testing/collectors.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::collections::HashMap;
use std::collections::HashSet;
use deno_ast::SourceRangedForSpanned;
use deno_ast::SourceTextInfo;
use deno_ast::swc::ast;
use deno_ast::swc::ecma_visit::Visit;
use deno_ast::swc::ecma_visit::VisitWith;
use deno_core::ModuleSpecifier;
use lsp::Range;
use tower_lsp::lsp_types as lsp;
use super::definitions::TestModule;
use crate::lsp::analysis::source_range_to_lsp_range;
/// Walk the body of an arrow expression used as a test/step callback,
/// collecting any test steps it registers into `test_module`.
///
/// In non-describe (Deno.test) mode the first parameter must bind the test
/// context (or destructure `step` from it); otherwise the body is skipped.
fn visit_arrow(
  arrow_expr: &ast::ArrowExpr,
  parent_id: &str,
  text_info: &SourceTextInfo,
  test_module: &mut TestModule,
  is_describe: bool,
) {
  // bdd-style `describe`/`it` callbacks take no test-context parameter.
  let context_bindings = if is_describe {
    Some((None, None))
  } else {
    parse_test_context_param(arrow_expr.params.first())
  };
  let Some((maybe_test_context, maybe_step_var)) = context_bindings else {
    return;
  };
  let mut collector = TestStepCollector::new(
    maybe_test_context,
    maybe_step_var,
    parent_id,
    text_info,
    test_module,
    is_describe,
  );
  arrow_expr.body.visit_with(&mut collector);
}
/// Walk the body of a function used as a test/step callback, collecting any
/// test steps it registers into `test_module`.
///
/// In non-describe (Deno.test) mode the first parameter must bind the test
/// context (or destructure `step` from it); otherwise the body is skipped.
fn visit_fn(
  function: &ast::Function,
  parent_id: &str,
  text_info: &SourceTextInfo,
  test_module: &mut TestModule,
  is_describe: bool,
) {
  // bdd-style `describe`/`it` callbacks take no test-context parameter.
  let context_bindings = if is_describe {
    Some((None, None))
  } else {
    parse_test_context_param(function.params.first().map(|p| &p.pat))
  };
  let Some((maybe_test_context, maybe_step_var)) = context_bindings else {
    return;
  };
  let mut collector = TestStepCollector::new(
    maybe_test_context,
    maybe_step_var,
    parent_id,
    text_info,
    test_module,
    is_describe,
  );
  function.body.visit_with(&mut collector);
}
/// Parse a param of a test function for the test context binding, or any
/// destructuring of a `steps` method from the test context.
///
/// Returns `None` when the param cannot reference the test context at all
/// (missing param or an unrecognized pattern), otherwise
/// `Some((maybe_test_context, maybe_step_var))` where at least one of the
/// two is `Some`.
fn parse_test_context_param(
  param: Option<&ast::Pat>,
) -> Option<(Option<String>, Option<String>)> {
  let mut maybe_test_context = None;
  let mut maybe_step_var = None;
  match param {
    // handles `(testContext)`
    Some(ast::Pat::Ident(binding_ident)) => {
      maybe_test_context = Some(binding_ident.id.sym.to_string());
    }
    Some(ast::Pat::Object(object_pattern)) => {
      // Scan the destructuring props for the first binding of `step`; the
      // `break`s below stop at the first match.
      for prop in &object_pattern.props {
        match prop {
          ast::ObjectPatProp::KeyValue(key_value_pat_prop) => {
            match &key_value_pat_prop.key {
              // handles `({ step: s })`
              ast::PropName::Ident(ident) => {
                if ident.sym.eq("step") {
                  if let ast::Pat::Ident(ident) =
                    key_value_pat_prop.value.as_ref()
                  {
                    maybe_step_var = Some(ident.id.sym.to_string());
                  }
                  break;
                }
              }
              // handles `({ "step": s })`
              ast::PropName::Str(string) => {
                if string.value.eq("step") {
                  if let ast::Pat::Ident(ident) =
                    key_value_pat_prop.value.as_ref()
                  {
                    maybe_step_var = Some(ident.id.sym.to_string());
                  }
                  break;
                }
              }
              _ => (),
            }
          }
          // handles `({ step = something })`
          ast::ObjectPatProp::Assign(assign_pat_prop)
            if assign_pat_prop.key.sym.eq("step") =>
          {
            maybe_step_var = Some("step".to_string());
            break;
          }
          // handles `({ ...ctx })`
          ast::ObjectPatProp::Rest(rest_pat) => {
            if let ast::Pat::Ident(ident) = rest_pat.arg.as_ref() {
              maybe_test_context = Some(ident.id.sym.to_string());
            }
            break;
          }
          _ => (),
        }
      }
    }
    _ => return None,
  }
  // A param that binds neither the context nor `step` is useless to us.
  if maybe_test_context.is_none() && maybe_step_var.is_none() {
    None
  } else {
    Some((maybe_test_context, maybe_step_var))
  }
}
/// Check a call expression of a test or test step to determine the name of the
/// test or test step as well as any sub steps.
///
/// `node` is already known to be a test-registering call (e.g. `Deno.test(...)`
/// or `t.step(...)`); this dispatches on the shape of the *first argument* to
/// extract the test name and then recurses into the test callback (unless
/// `is_describe` nesting disallows it) to find sub-steps.
///
/// `fns` maps named function declarations so `Deno.test(someFn)` can be
/// resolved; it is `None` when collecting steps inside a test body.
fn visit_call_expr(
  node: &ast::CallExpr,
  fns: Option<&HashMap<String, ast::Function>>,
  range: Range,
  parent_id: Option<&str>,
  text_info: &SourceTextInfo,
  test_module: &mut TestModule,
  is_describe: bool,
) {
  // bdd-style `describe` blocks nested inside another block register the
  // test but do not descend further here.
  let should_recurse = !(is_describe && parent_id.is_some());
  if let Some(expr) = node.args.first().map(|es| es.expr.as_ref()) {
    match expr {
      // `test({ name: "...", fn() {} })` — options-object form.
      ast::Expr::Object(obj_lit) => {
        let mut maybe_name = None;
        for prop in &obj_lit.props {
          let ast::PropOrSpread::Prop(prop) = prop else {
            continue;
          };
          let ast::Prop::KeyValue(key_value_prop) = prop.as_ref() else {
            continue;
          };
          let ast::PropName::Ident(ast::IdentName { sym, .. }) =
            &key_value_prop.key
          else {
            continue;
          };
          if sym == "name" {
            match key_value_prop.value.as_ref() {
              // matches string literals (e.g. "test name" or
              // 'test name')
              ast::Expr::Lit(ast::Lit::Str(lit_str)) => {
                maybe_name = Some(lit_str.value.to_string_lossy().to_string());
              }
              // matches template literals with only a single quasis
              // (e.g. `test name`)
              ast::Expr::Tpl(tpl) => {
                if tpl.quasis.len() == 1 {
                  maybe_name = Some(tpl.quasis[0].raw.to_string());
                }
              }
              _ => {}
            }
            break;
          }
        }
        // A dynamic/unrecognized `name` value means we can't register it.
        let name = match maybe_name {
          Some(n) => n,
          None => return,
        };
        let (id, _) = test_module.register(
          name,
          Some(range),
          false,
          parent_id.map(str::to_owned),
        );
        if !should_recurse {
          return;
        }
        // Find the `fn` property (either key/value or method shorthand) and
        // walk its body for nested steps.
        for prop in &obj_lit.props {
          let ast::PropOrSpread::Prop(prop) = prop else {
            continue;
          };
          match prop.as_ref() {
            ast::Prop::KeyValue(key_value_prop) => {
              let ast::PropName::Ident(ast::IdentName { sym, .. }) =
                &key_value_prop.key
              else {
                continue;
              };
              if sym == "fn" {
                match key_value_prop.value.as_ref() {
                  ast::Expr::Arrow(arrow_expr) => {
                    visit_arrow(
                      arrow_expr,
                      &id,
                      text_info,
                      test_module,
                      is_describe,
                    );
                  }
                  ast::Expr::Fn(fn_expr) => {
                    visit_fn(
                      &fn_expr.function,
                      &id,
                      text_info,
                      test_module,
                      is_describe,
                    );
                  }
                  _ => {}
                }
                break;
              }
            }
            ast::Prop::Method(method_prop) => {
              let ast::PropName::Ident(ast::IdentName { sym, .. }) =
                &method_prop.key
              else {
                continue;
              };
              if sym == "fn" {
                visit_fn(
                  &method_prop.function,
                  &id,
                  text_info,
                  test_module,
                  is_describe,
                );
                break;
              }
            }
            _ => {}
          }
        }
      }
      // `test(function myName() {})` — the function's name is the test name.
      ast::Expr::Fn(fn_expr) => {
        if let Some(ast::Ident { sym, .. }) = fn_expr.ident.as_ref() {
          let name = sym.to_string();
          let (id, _) = test_module.register(
            name,
            Some(range),
            false,
            parent_id.map(str::to_owned),
          );
          if !should_recurse {
            return;
          }
          visit_fn(&fn_expr.function, &id, text_info, test_module, is_describe);
        }
      }
      // `test("name", fn)` — string-literal name; callback is the 2nd arg.
      ast::Expr::Lit(ast::Lit::Str(lit_str)) => {
        let name = lit_str.value.to_string_lossy().to_string();
        let (id, _) = test_module.register(
          name,
          Some(range),
          false,
          parent_id.map(str::to_owned),
        );
        if !should_recurse {
          return;
        }
        match node.args.get(1).map(|es| es.expr.as_ref()) {
          Some(ast::Expr::Fn(fn_expr)) => {
            visit_fn(
              &fn_expr.function,
              &id,
              text_info,
              test_module,
              is_describe,
            );
          }
          Some(ast::Expr::Arrow(arrow_expr)) => {
            visit_arrow(arrow_expr, &id, text_info, test_module, is_describe);
          }
          _ => {}
        }
      }
      // `test(`name`, fn)` — template-literal name; only a single-quasis
      // (no interpolation) template is a usable static name.
      ast::Expr::Tpl(tpl) => {
        if tpl.quasis.len() == 1 {
          let name = tpl.quasis[0].raw.to_string();
          let (id, _) = test_module.register(
            name,
            Some(range),
            false,
            parent_id.map(str::to_owned),
          );
          if !should_recurse {
            return;
          }
          match node.args.get(1).map(|es| es.expr.as_ref()) {
            Some(ast::Expr::Fn(fn_expr)) => {
              visit_fn(
                &fn_expr.function,
                &id,
                text_info,
                test_module,
                is_describe,
              );
            }
            Some(ast::Expr::Arrow(arrow_expr)) => {
              visit_arrow(arrow_expr, &id, text_info, test_module, is_describe);
            }
            _ => {}
          }
        }
      }
      // `test(someFn)` — a reference to a separately declared function; only
      // resolvable when the caller supplied the `fns` map.
      ast::Expr::Ident(ident) => {
        let name = ident.sym.to_string();
        if let Some(fn_expr) = fns.and_then(|fns| fns.get(&name)) {
          let (parent_id, _) = test_module.register(
            name,
            Some(range),
            false,
            parent_id.map(str::to_owned),
          );
          if !should_recurse {
            return;
          }
          visit_fn(fn_expr, &parent_id, text_info, test_module, is_describe);
        }
      }
      // Any other argument shape: register a top-level placeholder named by
      // source position so the test still shows up in the explorer.
      _ => {
        if parent_id.is_none() {
          let node_range = node.range();
          let indexes = text_info.line_and_column_display(node_range.start);
          test_module.register(
            format!("Test {}:{}", indexes.line_number, indexes.column_number),
            Some(range),
            false,
            None,
          );
        }
      }
    }
  }
}
/// A structure which can be used to walk a branch of AST determining if the
/// branch contains any testing steps.
struct TestStepCollector<'a> {
  // Name the test context parameter is bound to, if any (e.g. `t` in
  // `async fn(t)`), so `t.step(...)` calls can be recognized.
  maybe_test_context: Option<String>,
  // Local names known to hold the step function (from destructuring or
  // rebinding of the context's `step`).
  vars: HashSet<String>,
  // Id of the enclosing test/step; collected steps are registered under it.
  parent_id: &'a str,
  text_info: &'a SourceTextInfo,
  test_module: &'a mut TestModule,
  // True when collecting inside a bdd-style `describe` block.
  is_describe: bool,
}
impl<'a> TestStepCollector<'a> {
  /// Build a collector for one test body, seeding the known step-function
  /// names with `maybe_step_var` when present.
  fn new(
    maybe_test_context: Option<String>,
    maybe_step_var: Option<String>,
    parent_id: &'a str,
    text_info: &'a SourceTextInfo,
    test_module: &'a mut TestModule,
    is_describe: bool,
  ) -> Self {
    // An `Option` iterates zero-or-one times, so this collects into a set
    // containing just the step var (if any).
    let vars: HashSet<String> = maybe_step_var.into_iter().collect();
    Self {
      maybe_test_context,
      vars,
      parent_id,
      text_info,
      test_module,
      is_describe,
    }
  }
}
// Walks a test body looking for step-registering calls and for local
// rebindings of the context's `step` function.
impl Visit for TestStepCollector<'_> {
  fn visit_call_expr(&mut self, node: &ast::CallExpr) {
    let ast::Callee::Expr(callee_expr) = &node.callee else {
      return;
    };
    // Decompose the callee into at most 2 dotted segments, right-to-left
    // (e.g. `t.step` -> ["t", "step"], bare `s` -> ["", "s"]), recording the
    // source range of the rightmost identifier for the test's range.
    let mut prop_chain = ["", ""];
    let mut current_segment = callee_expr.as_ref();
    let mut rightmost_symbol_range = None;
    for (i, name) in prop_chain.iter_mut().enumerate().rev() {
      match current_segment {
        ast::Expr::Ident(ident) => {
          *name = ident.sym.as_str();
          rightmost_symbol_range.get_or_insert_with(|| ident.range());
          break;
        }
        ast::Expr::Member(member_expr) => {
          // A member expression deeper than the chain capacity can't match.
          if i == 0 {
            return;
          }
          let ast::MemberProp::Ident(right) = &member_expr.prop else {
            return;
          };
          *name = right.sym.as_str();
          rightmost_symbol_range.get_or_insert_with(|| right.range());
          current_segment = &member_expr.obj;
        }
        _ => return,
      }
    }
    let Some(rightmost_symbol_range) = rightmost_symbol_range else {
      debug_assert!(false, "rightmost symbol range should always be defined");
      return;
    };
    // Accept only the recognized step shapes:
    // - bdd mode: `it(...)` or `it.ignore/only/skip(...)`
    // - `<ctx>.step(...)` where `<ctx>` is the tracked test context
    // - `s(...)` where `s` is a tracked step-function binding
    match (
      self.is_describe,
      self.maybe_test_context.as_deref(),
      prop_chain,
    ) {
      (true, _, ["", "it"] | ["it", "ignore" | "only" | "skip"]) => {}
      (false, Some(c), [s, "step"]) if s == c => {}
      (false, _, ["", s]) if self.vars.contains(s) => {}
      _ => return,
    }
    visit_call_expr(
      node,
      None,
      source_range_to_lsp_range(&rightmost_symbol_range, self.text_info),
      Some(self.parent_id),
      self.text_info,
      self.test_module,
      self.is_describe,
    );
  }

  // Track local variables that alias the context's `step` so later calls
  // through them are recognized as steps.
  fn visit_var_decl(&mut self, node: &ast::VarDecl) {
    if let Some(test_context) = &self.maybe_test_context {
      for decl in &node.decls {
        let Some(init) = &decl.init else {
          continue;
        };
        match init.as_ref() {
          // Identify destructured assignments of `step` from test context
          ast::Expr::Ident(ident) => {
            if ident.sym != *test_context {
              continue;
            }
            let ast::Pat::Object(object_pat) = &decl.name else {
              continue;
            };
            for prop in &object_pat.props {
              match prop {
                ast::ObjectPatProp::Assign(prop) => {
                  if prop.key.sym.eq("step") {
                    self.vars.insert(prop.key.sym.to_string());
                  }
                }
                ast::ObjectPatProp::KeyValue(prop) => {
                  if let ast::PropName::Ident(key_ident) = &prop.key
                    && key_ident.sym.eq("step")
                    && let ast::Pat::Ident(value_ident) = &prop.value.as_ref()
                  {
                    self.vars.insert(value_ident.id.sym.to_string());
                  }
                }
                _ => (),
              }
            }
          }
          // Identify variable assignments where the init is test context
          // `.step`
          ast::Expr::Member(member_expr) => {
            let ast::Expr::Ident(obj_ident) = member_expr.obj.as_ref() else {
              continue;
            };
            if obj_ident.sym != *test_context {
              continue;
            }
            let ast::MemberProp::Ident(prop_ident) = &member_expr.prop else {
              continue;
            };
            if prop_ident.sym.eq("step")
              && let ast::Pat::Ident(binding_ident) = &decl.name
            {
              self.vars.insert(binding_ident.id.sym.to_string());
            }
          }
          _ => (),
        }
      }
    }
  }
}
/// Walk an AST and determine if it contains any `Deno.test` tests.
pub struct TestCollector {
  // Accumulates every discovered test/step for the module.
  test_module: TestModule,
  // Local names known to alias `Deno.test` (via destructuring or rebinding).
  vars: HashSet<String>,
  // Named function declarations, so `Deno.test(someFn)` can be resolved.
  fns: HashMap<String, ast::Function>,
  text_info: SourceTextInfo,
}
impl TestCollector {
pub fn new(specifier: ModuleSpecifier, text_info: SourceTextInfo) -> Self {
Self {
test_module: TestModule::new(specifier),
vars: HashSet::new(),
fns: HashMap::new(),
text_info,
}
}
/// Move out the test definitions
pub fn take(self) -> TestModule {
self.test_module
}
}
// Top-level walk: recognizes `Deno.test(...)` (and bdd `describe(...)`)
// registrations, tracks aliases of `Deno.test`, and indexes function
// declarations for by-name resolution.
impl Visit for TestCollector {
  fn visit_call_expr(&mut self, node: &ast::CallExpr) {
    let ast::Callee::Expr(callee_expr) = &node.callee else {
      return;
    };
    // Decompose the callee into at most 3 dotted segments, right-to-left
    // (e.g. `Deno.test.only` -> ["Deno", "test", "only"]), recording the
    // rightmost identifier's range for the test's source range.
    let mut prop_chain = ["", "", ""];
    let mut current_segment = callee_expr.as_ref();
    let mut rightmost_symbol_range = None;
    for (i, name) in prop_chain.iter_mut().enumerate().rev() {
      match current_segment {
        ast::Expr::Ident(ident) => {
          *name = ident.sym.as_str();
          rightmost_symbol_range.get_or_insert_with(|| ident.range());
          break;
        }
        ast::Expr::Member(member_expr) => {
          // A member chain longer than 3 segments can't be a match.
          if i == 0 {
            return;
          }
          let ast::MemberProp::Ident(right) = &member_expr.prop else {
            return;
          };
          *name = right.sym.as_str();
          rightmost_symbol_range.get_or_insert_with(|| right.range());
          current_segment = &member_expr.obj;
        }
        _ => return,
      }
    }
    let Some(rightmost_symbol_range) = rightmost_symbol_range else {
      debug_assert!(false, "rightmost symbol range should always be defined");
      return;
    };
    // Classify the call: `Deno.test` family and tracked aliases are regular
    // tests; `describe` family is bdd-style.
    let is_describe = match prop_chain {
      ["", "Deno", "test"] | ["Deno", "test", "ignore" | "only"] => false,
      ["", "", "describe"] | ["", "describe", "ignore" | "only" | "skip"] => {
        true
      }
      ["", "", s] if self.vars.contains(s) => false,
      _ => return,
    };
    visit_call_expr(
      node,
      Some(&self.fns),
      source_range_to_lsp_range(&rightmost_symbol_range, &self.text_info),
      None,
      &self.text_info,
      &mut self.test_module,
      is_describe,
    );
  }

  // Track aliases of `Deno.test` created by destructuring or assignment so
  // calls through them are still recognized.
  fn visit_var_decl(&mut self, node: &ast::VarDecl) {
    for decl in &node.decls {
      let Some(init) = &decl.init else { continue };
      match init.as_ref() {
        // Identify destructured assignments of `test` from `Deno`
        ast::Expr::Ident(ident) => {
          if ident.sym != "Deno" {
            continue;
          }
          let ast::Pat::Object(object_pat) = &decl.name else {
            continue;
          };
          for prop in &object_pat.props {
            match prop {
              ast::ObjectPatProp::Assign(prop) => {
                let name = prop.key.sym.to_string();
                if name == "test" {
                  self.vars.insert(name);
                }
              }
              ast::ObjectPatProp::KeyValue(prop) => {
                let ast::PropName::Ident(key_ident) = &prop.key else {
                  continue;
                };
                if key_ident.sym == "test"
                  && let ast::Pat::Ident(value_ident) = &prop.value.as_ref()
                {
                  self.vars.insert(value_ident.id.sym.to_string());
                }
              }
              _ => (),
            }
          }
        }
        // Identify variable assignments where the init is `Deno.test`
        ast::Expr::Member(member_expr) => {
          let ast::Expr::Ident(obj_ident) = member_expr.obj.as_ref() else {
            continue;
          };
          if obj_ident.sym != "Deno" {
            continue;
          };
          let ast::MemberProp::Ident(prop_ident) = &member_expr.prop else {
            continue;
          };
          if prop_ident.sym != "test" {
            continue;
          }
          if let ast::Pat::Ident(binding_ident) = &decl.name {
            self.vars.insert(binding_ident.id.sym.to_string());
          }
        }
        _ => (),
      }
    }
  }

  // Index every named function declaration so `Deno.test(someFn)` can look
  // up the function body by name.
  fn visit_fn_decl(&mut self, n: &ast::FnDecl) {
    self
      .fns
      .insert(n.ident.sym.to_string(), *n.function.clone());
  }
}
#[cfg(test)]
pub mod tests {
use deno_core::resolve_url;
use lsp::Position;
use pretty_assertions::assert_eq;
use super::*;
use crate::lsp::testing::definitions::TestDefinition;
pub fn new_range(l1: u32, c1: u32, l2: u32, c2: u32) -> Range {
Range::new(Position::new(l1, c1), Position::new(l2, c2))
}
fn collect(source: &str) -> TestModule {
let specifier = resolve_url("file:///a/example.ts").unwrap();
let parsed_module = deno_ast::parse_module(deno_ast::ParseParams {
specifier: specifier.clone(),
text: source.into(),
media_type: deno_ast::MediaType::TypeScript,
capture_tokens: true,
scope_analysis: true,
maybe_syntax: None,
})
.unwrap();
let text_info = parsed_module.text_info_lazy().clone();
let mut collector = TestCollector::new(specifier, text_info);
parsed_module.program().visit_with(&mut collector);
collector.take()
}
#[test]
fn test_test_collector_test() {
let test_module = collect(
r#"
Deno.test("test", () => {});
"#,
);
assert_eq!(
&test_module,
&TestModule {
specifier: test_module.specifier.clone(),
defs: vec![(
"4ebb361c93f76a0f1bac300638675609f1cf481e6f3b9006c3c98604b3a184e9"
.to_string(),
TestDefinition {
id:
"4ebb361c93f76a0f1bac300638675609f1cf481e6f3b9006c3c98604b3a184e9"
.to_string(),
name: "test".to_string(),
range: Some(new_range(1, 11, 1, 15)),
is_dynamic: false,
parent_id: None,
step_ids: Default::default(),
}
),]
.into_iter()
.collect(),
}
);
}
#[test]
fn test_test_collector_test_tpl() {
let test_module = collect(
r#"
Deno.test(`test`, () => {});
"#,
);
assert_eq!(
&test_module,
&TestModule {
specifier: test_module.specifier.clone(),
defs: vec![(
"4ebb361c93f76a0f1bac300638675609f1cf481e6f3b9006c3c98604b3a184e9"
.to_string(),
TestDefinition {
id:
"4ebb361c93f76a0f1bac300638675609f1cf481e6f3b9006c3c98604b3a184e9"
.to_string(),
name: "test".to_string(),
range: Some(new_range(1, 11, 1, 15)),
is_dynamic: false,
parent_id: None,
step_ids: Default::default(),
}
),]
.into_iter()
.collect(),
}
);
}
#[test]
fn test_test_collector_a() {
let test_module = collect(
r#"
Deno.test({
name: "test",
async fn(t) {
await t.step("step", ({ step }) => {
await step({
name: "sub step",
fn() {}
})
});
}
});
"#,
);
assert_eq!(
&test_module,
&TestModule {
specifier: test_module.specifier.clone(),
defs: vec![
(
"4ebb361c93f76a0f1bac300638675609f1cf481e6f3b9006c3c98604b3a184e9".to_string(),
TestDefinition {
id: "4ebb361c93f76a0f1bac300638675609f1cf481e6f3b9006c3c98604b3a184e9".to_string(),
name: "test".to_string(),
range: Some(new_range(1, 11, 1, 15)),
is_dynamic: false,
parent_id: None,
step_ids: vec!["704d24083fd4a3e1bd204faa20827dc594334812245e5d45dda222b3edc60a0c".to_string()].into_iter().collect(),
}
),
(
"704d24083fd4a3e1bd204faa20827dc594334812245e5d45dda222b3edc60a0c".to_string(),
TestDefinition {
id: "704d24083fd4a3e1bd204faa20827dc594334812245e5d45dda222b3edc60a0c".to_string(),
name: "step".to_string(),
range: Some(new_range(4, 18, 4, 22)),
is_dynamic: false,
parent_id: Some("4ebb361c93f76a0f1bac300638675609f1cf481e6f3b9006c3c98604b3a184e9".to_string()),
step_ids: vec!["0d006a4ec0abaa9cc1d18256b1ccd2677a4c882ff5cb807123890f7528ab1e8d".to_string()].into_iter().collect(),
}
),
(
"0d006a4ec0abaa9cc1d18256b1ccd2677a4c882ff5cb807123890f7528ab1e8d".to_string(),
TestDefinition {
id: "0d006a4ec0abaa9cc1d18256b1ccd2677a4c882ff5cb807123890f7528ab1e8d".to_string(),
name: "sub step".to_string(),
range: Some(new_range(5, 18, 5, 22)),
is_dynamic: false,
parent_id: Some("704d24083fd4a3e1bd204faa20827dc594334812245e5d45dda222b3edc60a0c".to_string()),
step_ids: Default::default(),
}
),
].into_iter().collect(),
}
);
}
#[test]
fn test_test_collector_a_tpl() {
let test_module = collect(
r#"
Deno.test({
name: `test`,
async fn(t) {
await t.step(`step`, ({ step }) => {
await step({
name: `sub step`,
fn() {}
})
});
}
});
"#,
);
assert_eq!(
&test_module,
&TestModule {
specifier: test_module.specifier.clone(),
defs: vec![
(
"4ebb361c93f76a0f1bac300638675609f1cf481e6f3b9006c3c98604b3a184e9".to_string(),
TestDefinition {
id: "4ebb361c93f76a0f1bac300638675609f1cf481e6f3b9006c3c98604b3a184e9".to_string(),
name: "test".to_string(),
range: Some(new_range(1, 11, 1, 15)),
is_dynamic: false,
parent_id: None,
step_ids: vec!["704d24083fd4a3e1bd204faa20827dc594334812245e5d45dda222b3edc60a0c".to_string()].into_iter().collect(),
}
),
(
"704d24083fd4a3e1bd204faa20827dc594334812245e5d45dda222b3edc60a0c".to_string(),
TestDefinition {
id: "704d24083fd4a3e1bd204faa20827dc594334812245e5d45dda222b3edc60a0c".to_string(),
name: "step".to_string(),
range: Some(new_range(4, 18, 4, 22)),
is_dynamic: false,
parent_id: Some("4ebb361c93f76a0f1bac300638675609f1cf481e6f3b9006c3c98604b3a184e9".to_string()),
step_ids: vec!["0d006a4ec0abaa9cc1d18256b1ccd2677a4c882ff5cb807123890f7528ab1e8d".to_string()].into_iter().collect(),
}
),
(
"0d006a4ec0abaa9cc1d18256b1ccd2677a4c882ff5cb807123890f7528ab1e8d".to_string(),
TestDefinition {
id: "0d006a4ec0abaa9cc1d18256b1ccd2677a4c882ff5cb807123890f7528ab1e8d".to_string(),
name: "sub step".to_string(),
range: Some(new_range(5, 18, 5, 22)),
is_dynamic: false,
parent_id: Some("704d24083fd4a3e1bd204faa20827dc594334812245e5d45dda222b3edc60a0c".to_string()),
step_ids: Default::default(),
}
),
].into_iter().collect(),
}
);
}
#[test]
fn test_test_collector_destructure() {
let test_module = collect(
r#"
const { test } = Deno;
test("test", () => {});
"#,
);
assert_eq!(
&test_module,
&TestModule {
specifier: test_module.specifier.clone(),
defs: vec![(
"4ebb361c93f76a0f1bac300638675609f1cf481e6f3b9006c3c98604b3a184e9"
.to_string(),
TestDefinition {
id:
"4ebb361c93f76a0f1bac300638675609f1cf481e6f3b9006c3c98604b3a184e9"
.to_string(),
name: "test".to_string(),
range: Some(new_range(2, 6, 2, 10)),
is_dynamic: false,
parent_id: None,
step_ids: Default::default(),
}
),]
.into_iter()
.collect(),
}
);
}
#[test]
fn test_test_collector_destructure_rebind_step() {
let test_module = collect(
r#"
Deno.test(async function useFnName({ step: s }) {
await s("step", () => {});
});
"#,
);
assert_eq!(
&test_module,
&TestModule {
specifier: test_module.specifier.clone(),
defs: vec![
(
"86b4c821900e38fc89f24bceb0e45193608ab3f9d2a6019c7b6a5aceff5d7df2".to_string(),
TestDefinition {
id: "86b4c821900e38fc89f24bceb0e45193608ab3f9d2a6019c7b6a5aceff5d7df2".to_string(),
name: "useFnName".to_string(),
range: Some(new_range(1, 11, 1, 15)),
is_dynamic: false,
parent_id: None,
step_ids: vec!["dac8a169b8f8c6babf11122557ea545de2733bfafed594d044b22bc6863a0856".to_string()].into_iter().collect(),
}
),
(
"dac8a169b8f8c6babf11122557ea545de2733bfafed594d044b22bc6863a0856".to_string(),
TestDefinition {
id: "dac8a169b8f8c6babf11122557ea545de2733bfafed594d044b22bc6863a0856".to_string(),
name: "step".to_string(),
range: Some(new_range(2, 14, 2, 15)),
is_dynamic: false,
parent_id: Some("86b4c821900e38fc89f24bceb0e45193608ab3f9d2a6019c7b6a5aceff5d7df2".to_string()),
step_ids: Default::default(),
}
),
].into_iter().collect(),
}
);
}
#[test]
fn test_test_collector_rebind() {
let test_module = collect(
r#"
const t = Deno.test;
t("test", () => {});
"#,
);
assert_eq!(
&test_module,
&TestModule {
specifier: test_module.specifier.clone(),
defs: vec![(
"4ebb361c93f76a0f1bac300638675609f1cf481e6f3b9006c3c98604b3a184e9"
.to_string(),
TestDefinition {
id:
"4ebb361c93f76a0f1bac300638675609f1cf481e6f3b9006c3c98604b3a184e9"
.to_string(),
name: "test".to_string(),
range: Some(new_range(2, 6, 2, 7)),
is_dynamic: false,
parent_id: None,
step_ids: Default::default(),
}
),]
.into_iter()
.collect(),
}
);
}
#[test]
fn test_test_collector_separate_test_function_with_string_name() {
let test_module = collect(
r#"
function someFunction() {}
Deno.test("test", someFunction);
"#,
);
assert_eq!(
&test_module,
&TestModule {
specifier: test_module.specifier.clone(),
defs: vec![(
"4ebb361c93f76a0f1bac300638675609f1cf481e6f3b9006c3c98604b3a184e9"
.to_string(),
TestDefinition {
id:
"4ebb361c93f76a0f1bac300638675609f1cf481e6f3b9006c3c98604b3a184e9"
.to_string(),
name: "test".to_string(),
range: Some(new_range(2, 11, 2, 15)),
is_dynamic: false,
parent_id: None,
step_ids: Default::default(),
}
),]
.into_iter()
.collect(),
}
);
}
#[test]
fn test_test_collector_function_only() {
let test_module = collect(
r#"
Deno.test(async function someFunction() {});
Deno.test.ignore(function foo() {});
Deno.test.only(function bar() {});
"#,
);
assert_eq!(
&test_module,
&TestModule {
specifier: test_module.specifier.clone(),
defs: vec![
(
"87f28e06f5ddadd90a74a93b84df2e31b9edced8301b0ad4c8fbab8d806ec99d".to_string(),
TestDefinition {
id: "87f28e06f5ddadd90a74a93b84df2e31b9edced8301b0ad4c8fbab8d806ec99d".to_string(),
name: "foo".to_string(),
range: Some(new_range(2, 16, 2, 22)),
is_dynamic: false,
parent_id: None,
step_ids: Default::default(),
},
),
(
"e0f6a73647b763f82176c98a019e54200b799a32007f9859fb782aaa9e308568".to_string(),
TestDefinition {
id: "e0f6a73647b763f82176c98a019e54200b799a32007f9859fb782aaa9e308568".to_string(),
name: "someFunction".to_string(),
range: Some(new_range(1, 11, 1, 15)),
is_dynamic: false,
parent_id: None,
step_ids: Default::default(),
}
),
(
"e1bd61cdaf5e64863d3d85baffe3e43bd57cdb8dc0b5d6a9e03ade18b7f68d47".to_string(),
TestDefinition {
id: "e1bd61cdaf5e64863d3d85baffe3e43bd57cdb8dc0b5d6a9e03ade18b7f68d47".to_string(),
name: "bar".to_string(),
range: Some(new_range(3, 16, 3, 20)),
is_dynamic: false,
parent_id: None,
step_ids: Default::default(),
}
)
]
.into_iter()
.collect(),
}
);
}
#[test]
fn test_test_collector_separate_test_function() {
let test_module = collect(
r#"
async function someFunction() {}
Deno.test(someFunction);
"#,
);
assert_eq!(
&test_module,
&TestModule {
specifier: test_module.specifier.clone(),
defs: vec![(
"e0f6a73647b763f82176c98a019e54200b799a32007f9859fb782aaa9e308568"
.to_string(),
TestDefinition {
id:
"e0f6a73647b763f82176c98a019e54200b799a32007f9859fb782aaa9e308568"
.to_string(),
name: "someFunction".to_string(),
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | true |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/lsp/testing/lsp_custom.rs | cli/lsp/testing/lsp_custom.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use deno_core::serde::Deserialize;
use deno_core::serde::Serialize;
use tower_lsp::lsp_types as lsp;
/// Custom LSP method name used by the client to cancel an in-flight test run.
pub const TEST_RUN_CANCEL_REQUEST: &str = "deno/testRunCancel";
/// Custom LSP method name used by the client to start a test run.
pub const TEST_RUN_REQUEST: &str = "deno/testRun";
/// A module plus the ids of the tests within it that were enqueued as part of
/// a test run.
#[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct EnqueuedTestModule {
  /// The module the enqueued tests belong to.
  pub text_document: lsp::TextDocumentIdentifier,
  /// Ids of the enqueued tests.
  pub ids: Vec<String>,
}
/// A test (or test step) as reported to the client, including any nested
/// steps.
#[derive(Debug, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct TestData {
  /// The unique ID of the test
  pub id: String,
  /// The human readable test to display for the test.
  pub label: String,
  /// Any test steps nested under this test.
  #[serde(skip_serializing_if = "Vec::is_empty")]
  #[serde(default)]
  pub steps: Vec<TestData>,
  /// The range where the test is located.
  #[serde(skip_serializing_if = "Option::is_none")]
  pub range: Option<lsp::Range>,
}
/// How the client should merge a `deno/testModule` notification into its
/// current view of the module's tests.
#[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub enum TestModuleNotificationKind {
  /// The test module notification represents an insertion of tests, not
  /// replacement of the test children.
  Insert,
  /// The test module notification represents a replacement of any tests within
  /// the test module.
  Replace,
}
/// Payload of the `deno/testModule` notification.
#[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct TestModuleNotificationParams {
  /// The text document that the notification relates to.
  pub text_document: lsp::TextDocumentIdentifier,
  /// Indicates what kind of notification this represents.
  pub kind: TestModuleNotificationKind,
  /// The human readable text to display for the test module.
  pub label: String,
  /// The tests identified in the module.
  pub tests: Vec<TestData>,
}
/// Marker type binding `TestModuleNotificationParams` to the custom
/// `deno/testModule` notification method.
pub enum TestModuleNotification {}

impl lsp::notification::Notification for TestModuleNotification {
  type Params = TestModuleNotificationParams;
  const METHOD: &'static str = "deno/testModule";
}
/// Payload of the `deno/testModuleDelete` notification.
#[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct TestModuleDeleteNotificationParams {
  /// The text document that the notification relates to.
  pub text_document: lsp::TextDocumentIdentifier,
}
/// Marker type binding `TestModuleDeleteNotificationParams` to the custom
/// `deno/testModuleDelete` notification method.
pub enum TestModuleDeleteNotification {}

impl lsp::notification::Notification for TestModuleDeleteNotification {
  type Params = TestModuleDeleteNotificationParams;
  const METHOD: &'static str = "deno/testModuleDelete";
}
/// The profile a test run was requested with.
#[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub enum TestRunKind {
  // The run profile is just to execute the tests
  Run,
  // The tests should be run and debugged, currently not implemented
  Debug,
  // The tests should be run, collecting and reporting coverage information,
  // currently not implemented
  Coverage,
}
/// Parameters of the `deno/testRun` request.
#[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct TestRunRequestParams {
  /// Client-assigned id for this run; echoed back in progress notifications.
  pub id: u32,
  pub kind: TestRunKind,
  /// Tests to exclude from the run.
  #[serde(skip_serializing_if = "Vec::is_empty")]
  #[serde(default)]
  pub exclude: Vec<TestIdentifier>,
  /// Tests to include; `None` means run everything not excluded.
  #[serde(skip_serializing_if = "Option::is_none")]
  pub include: Option<Vec<TestIdentifier>>,
}
/// Parameters of the `deno/testRunCancel` request.
#[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct TestRunCancelParams {
  /// Id of the run to cancel (as given in the originating run request).
  pub id: u32,
}
/// Payload of the `deno/testRunProgress` notification.
#[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct TestRunProgressParams {
  /// Id of the run this progress message belongs to.
  pub id: u32,
  pub message: TestRunProgressMessage,
}
/// Identifies a module, a test within it, or a step within that test,
/// depending on which optional fields are present.
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)]
#[serde(rename_all = "camelCase")]
pub struct TestIdentifier {
  /// The module identifier which contains the test.
  pub text_document: lsp::TextDocumentIdentifier,
  /// An optional string identifying the individual test. If not present, then
  /// it identifies all the tests associated with the module.
  #[serde(skip_serializing_if = "Option::is_none")]
  pub id: Option<String>,
  /// An optional structure identifying a step of the test. If not present, then
  /// no step is identified.
  #[serde(skip_serializing_if = "Option::is_none")]
  pub step_id: Option<String>,
}
/// A single progress event within a test run, serialized with an internal
/// `type` tag.
#[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase", tag = "type")]
pub enum TestRunProgressMessage {
  /// The test was queued for execution.
  Enqueued {
    test: TestIdentifier,
  },
  /// The test began executing.
  Started {
    test: TestIdentifier,
  },
  /// The test was skipped.
  Skipped {
    test: TestIdentifier,
  },
  /// The test ran and failed.
  Failed {
    test: TestIdentifier,
    messages: Vec<TestMessage>,
    #[serde(skip_serializing_if = "Option::is_none")]
    duration: Option<u32>,
  },
  /// The test errored (distinct from an assertion failure).
  Errored {
    test: TestIdentifier,
    messages: Vec<TestMessage>,
    #[serde(skip_serializing_if = "Option::is_none")]
    duration: Option<u32>,
  },
  /// The test ran and passed.
  Passed {
    test: TestIdentifier,
    #[serde(skip_serializing_if = "Option::is_none")]
    duration: Option<u32>,
  },
  /// Output produced during the run, optionally attributed to a test and a
  /// source location.
  Output {
    value: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    test: Option<TestIdentifier>,
    #[serde(skip_serializing_if = "Option::is_none")]
    location: Option<lsp::Location>,
  },
  /// The run finished; no further messages will be sent for this run id.
  End,
}
/// A diagnostic message attached to a failed/errored test, with optional
/// expected/actual output for diffing and an optional source location.
#[derive(Clone, Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct TestMessage {
  pub message: lsp::MarkupContent,
  #[serde(skip_serializing_if = "Option::is_none")]
  pub expected_output: Option<String>,
  #[serde(skip_serializing_if = "Option::is_none")]
  pub actual_output: Option<String>,
  #[serde(skip_serializing_if = "Option::is_none")]
  pub location: Option<lsp::Location>,
}
/// Marker type binding `TestRunProgressParams` to the custom
/// `deno/testRunProgress` notification method.
pub enum TestRunProgressNotification {}

impl lsp::notification::Notification for TestRunProgressNotification {
  type Params = TestRunProgressParams;
  const METHOD: &'static str = "deno/testRunProgress";
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/lsp/testing/mod.rs | cli/lsp/testing/mod.rs | // Copyright 2018-2025 the Deno authors. MIT license.
// AST walkers that discover `Deno.test`/bdd-style registrations.
mod collectors;
// Test/step definition model keyed by stable checksum ids.
mod definitions;
// NOTE(review): not visible in this chunk — presumably test-run execution.
mod execution;
// Custom LSP protocol types for the testing API.
pub mod lsp_custom;
// NOTE(review): not visible in this chunk — presumably the testing server.
mod server;
pub use collectors::TestCollector;
pub use definitions::TestModule;
pub use lsp_custom::TEST_RUN_CANCEL_REQUEST;
pub use lsp_custom::TEST_RUN_REQUEST;
pub use server::TestServer;
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/lsp/testing/definitions.rs | cli/lsp/testing/definitions.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use deno_core::ModuleSpecifier;
use deno_core::error::AnyError;
use deno_lib::util::checksum;
use indexmap::IndexMap;
use indexmap::IndexSet;
use lsp::Range;
use tower_lsp::lsp_types as lsp;
use super::lsp_custom;
use super::lsp_custom::TestData;
use crate::lsp::client::TestingNotification;
use crate::lsp::logging::lsp_warn;
use crate::lsp::urls::url_to_uri;
use crate::tools::test::TestDescription;
use crate::tools::test::TestStepDescription;
/// A single discovered test or test step within a module.
#[derive(Debug, Clone, PartialEq)]
pub struct TestDefinition {
  // Checksum-derived id; see `TestModule::register` for how it is built.
  pub id: String,
  pub name: String,
  // Source range of the registering call's symbol; `None` for tests only
  // discovered dynamically at runtime.
  pub range: Option<Range>,
  // True when the test was registered at runtime rather than found in the
  // source text.
  pub is_dynamic: bool,
  // Id of the enclosing test, when this is a step.
  pub parent_id: Option<String>,
  // Ids of steps registered directly under this test.
  pub step_ids: IndexSet<String>,
}
/// All test definitions discovered for a single module, keyed by test id.
#[derive(Debug, Clone, PartialEq)]
pub struct TestModule {
  pub specifier: ModuleSpecifier,
  // Insertion-ordered map from test id to its definition.
  pub defs: IndexMap<String, TestDefinition>,
}
impl TestModule {
pub fn new(specifier: ModuleSpecifier) -> Self {
Self {
specifier,
defs: Default::default(),
}
}
  /// Returns `(id, is_newly_registered)`.
  ///
  /// The id is a checksum over `[specifier, ancestor names (root first),
  /// name]`, so it is stable across re-parses of the same module. Registering
  /// an id that already exists is a no-op (returns `false`). When `parent_id`
  /// is given, the new id is also appended to the parent's `step_ids`.
  pub fn register(
    &mut self,
    name: String,
    range: Option<Range>,
    is_dynamic: bool,
    parent_id: Option<String>,
  ) -> (String, bool) {
    let mut id_components = Vec::with_capacity(7);
    id_components.push(name.as_bytes());
    // Walk up the parent chain collecting ancestor names (leaf first; the
    // list is reversed below).
    let mut current_parent_id = &parent_id;
    while let Some(parent_id) = current_parent_id {
      let parent = match self.defs.get(parent_id) {
        Some(d) => d,
        None => {
          lsp_warn!(
            "Internal Error: parent_id \"{}\" of test \"{}\" was not registered.",
            parent_id,
            &name
          );
          // Keep going with a placeholder so the id is still deterministic.
          id_components.push("<unknown>".as_bytes());
          break;
        }
      };
      id_components.push(parent.name.as_bytes());
      current_parent_id = &parent.parent_id;
    }
    id_components.push(self.specifier.as_str().as_bytes());
    id_components.reverse();
    let id = checksum::r#gen(&id_components);
    if self.defs.contains_key(&id) {
      return (id, false);
    }
    if let Some(parent_id) = &parent_id {
      // Parent must exist: a missing parent was replaced by "<unknown>" in
      // the id, but `parent_id` itself is unchanged, so this unwrap relies
      // on the parent having been registered.
      let parent = self.defs.get_mut(parent_id).unwrap();
      parent.step_ids.insert(id.clone());
    }
    self.defs.insert(
      id.clone(),
      TestDefinition {
        id: id.clone(),
        name,
        range,
        is_dynamic,
        parent_id,
        step_ids: Default::default(),
      },
    );
    (id, true)
  }
/// Returns `(id, was_newly_registered)`.
pub fn register_dynamic(&mut self, desc: &TestDescription) -> (String, bool) {
self.register(desc.name.clone(), None, true, None)
}
/// Returns `(id, was_newly_registered)`.
pub fn register_step_dynamic(
&mut self,
desc: &TestStepDescription,
parent_static_id: &str,
) -> (String, bool) {
self.register(
desc.name.clone(),
None,
true,
Some(parent_static_id.to_string()),
)
}
pub fn get(&self, id: &str) -> Option<&TestDefinition> {
self.defs.get(id)
}
pub fn get_test_data(&self, id: &str) -> TestData {
fn get_test_data_inner(tm: &TestModule, id: &str) -> TestData {
let def = tm.defs.get(id).unwrap();
TestData {
id: def.id.clone(),
label: def.name.clone(),
steps: def
.step_ids
.iter()
.map(|id| get_test_data_inner(tm, id))
.collect(),
range: def.range,
}
}
let def = self.defs.get(id).unwrap();
let mut current_data = get_test_data_inner(self, &def.id);
let mut current_parent_id = &def.parent_id;
while let Some(parent_id) = current_parent_id {
let parent = self.defs.get(parent_id).unwrap();
current_data = TestData {
id: parent.id.clone(),
label: parent.name.clone(),
steps: vec![current_data],
range: None,
};
current_parent_id = &parent.parent_id;
}
current_data
}
/// Return the test definitions as a testing module notification.
pub fn as_replace_notification(
&self,
maybe_root_uri: Option<&ModuleSpecifier>,
) -> Result<TestingNotification, AnyError> {
let label = self.label(maybe_root_uri);
Ok(TestingNotification::Module(
lsp_custom::TestModuleNotificationParams {
text_document: lsp::TextDocumentIdentifier {
uri: url_to_uri(&self.specifier)?,
},
kind: lsp_custom::TestModuleNotificationKind::Replace,
label,
tests: self
.defs
.iter()
.filter(|(_, def)| def.parent_id.is_none())
.map(|(id, _)| self.get_test_data(id))
.collect(),
},
))
}
pub fn label(&self, maybe_root_uri: Option<&ModuleSpecifier>) -> String {
if let Some(root) = maybe_root_uri {
self.specifier.as_str().replace(root.as_str(), "")
} else {
self
.specifier
.path_segments()
.and_then(|mut s| s.next_back().map(|s| s.to_string()))
.unwrap_or_else(|| "<unknown>".to_string())
}
}
pub fn is_empty(&self) -> bool {
self.defs.is_empty()
}
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/lsp/testing/server.rs | cli/lsp/testing/server.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::collections::HashMap;
use std::collections::HashSet;
use std::sync::Arc;
use std::thread;
use deno_core::ModuleSpecifier;
use deno_core::error::AnyError;
use deno_core::parking_lot::Mutex;
use deno_core::serde_json::Value;
use deno_core::serde_json::json;
use deno_core::url::Url;
use deno_runtime::tokio_util::create_basic_runtime;
use tokio::sync::mpsc;
use tower_lsp::jsonrpc::Error as LspError;
use tower_lsp::jsonrpc::Result as LspResult;
use tower_lsp::lsp_types as lsp;
use super::definitions::TestModule;
use super::execution::TestRun;
use super::lsp_custom;
use crate::lsp::client::Client;
use crate::lsp::client::TestingNotification;
use crate::lsp::config;
use crate::lsp::language_server::StateSnapshot;
use crate::lsp::performance::Performance;
use crate::lsp::urls::url_to_uri;
fn as_delete_notification(
url: &ModuleSpecifier,
) -> Result<TestingNotification, AnyError> {
Ok(TestingNotification::DeleteModule(
lsp_custom::TestModuleDeleteNotificationParams {
text_document: lsp::TextDocumentIdentifier {
uri: url_to_uri(url)?,
},
},
))
}
pub type TestServerTests =
Arc<tokio::sync::Mutex<HashMap<ModuleSpecifier, (TestModule, String)>>>;
/// The main structure which handles requests and sends notifications related
/// to the Testing API.
#[derive(Debug)]
pub struct TestServer {
client: Client,
performance: Arc<Performance>,
/// A channel for handling run requests from the client
run_channel: mpsc::UnboundedSender<u32>,
/// A map of run ids to test runs
runs: Arc<Mutex<HashMap<u32, TestRun>>>,
/// Tests that are discovered from a versioned document
tests: TestServerTests,
/// A channel for requesting that changes to documents be statically analyzed
/// for tests
update_channel: mpsc::UnboundedSender<Arc<StateSnapshot>>,
}
impl TestServer {
pub fn new(
client: Client,
performance: Arc<Performance>,
maybe_root_url: Option<Arc<Url>>,
) -> Self {
let tests = Default::default();
let (update_channel, mut update_rx) =
mpsc::unbounded_channel::<Arc<StateSnapshot>>();
let (run_channel, mut run_rx) = mpsc::unbounded_channel::<u32>();
let server = Self {
client,
performance,
run_channel,
runs: Default::default(),
tests,
update_channel,
};
let tests = server.tests.clone();
let client = server.client.clone();
let performance = server.performance.clone();
let mru = maybe_root_url.clone();
let _update_join_handle = thread::spawn(move || {
let runtime = create_basic_runtime();
runtime.block_on(async {
loop {
match update_rx.recv().await {
None => break,
Some(snapshot) => {
let mark = performance.mark("lsp.testing_update");
let mut tests = tests.lock().await;
// we create a list of test modules we currently are tracking
// eliminating any we go over when iterating over the document
let mut keys: HashSet<ModuleSpecifier> =
tests.keys().cloned().collect();
for document in snapshot
.document_modules
.documents
.filtered_docs(|d| d.is_file_like() && d.is_diagnosable())
{
let Some(module) =
snapshot.document_modules.primary_module(&document)
else {
continue;
};
if module.specifier.scheme() != "file" {
continue;
}
if !snapshot
.config
.specifier_enabled_for_test(&module.specifier)
{
continue;
}
keys.remove(&module.specifier);
let script_version = document.script_version();
let valid = if let Some((_, old_script_version)) =
tests.get(&module.specifier)
{
old_script_version == &script_version
} else {
false
};
if !valid {
let was_empty = tests
.remove(&module.specifier)
.map(|(tm, _)| tm.is_empty())
.unwrap_or(true);
let test_module = module
.test_module()
.await
.map(|tm| tm.as_ref().clone())
.unwrap_or_else(|| {
TestModule::new(module.specifier.as_ref().clone())
});
if !test_module.is_empty() {
if let Ok(params) =
test_module.as_replace_notification(mru.as_deref())
{
client.send_test_notification(params);
}
} else if !was_empty
&& let Ok(params) =
as_delete_notification(&module.specifier)
{
client.send_test_notification(params);
}
tests.insert(
module.specifier.as_ref().clone(),
(test_module, script_version),
);
}
}
for key in &keys {
if let Ok(params) = as_delete_notification(key) {
client.send_test_notification(params);
}
}
performance.measure(mark);
}
}
}
})
});
let client = server.client.clone();
let runs = server.runs.clone();
let _run_join_handle = thread::spawn(move || {
let runtime = create_basic_runtime();
runtime.block_on(async {
loop {
match run_rx.recv().await {
None => break,
Some(id) => {
let maybe_run = {
let runs = runs.lock();
runs.get(&id).cloned()
};
if let Some(run) = maybe_run {
match run.exec(&client, maybe_root_url.as_deref()).await {
Ok(_) => (),
Err(err) => {
client.show_message(lsp::MessageType::ERROR, err);
}
}
client.send_test_notification(TestingNotification::Progress(
lsp_custom::TestRunProgressParams {
id,
message: lsp_custom::TestRunProgressMessage::End,
},
));
runs.lock().remove(&id);
}
}
}
}
})
});
server
}
fn enqueue_run(&self, id: u32) -> Result<(), AnyError> {
self.run_channel.send(id).map_err(|err| err.into())
}
/// A request from the client to cancel a test run.
pub fn run_cancel_request(
&self,
params: lsp_custom::TestRunCancelParams,
) -> LspResult<Option<Value>> {
if let Some(run) = self.runs.lock().get(¶ms.id) {
run.cancel();
Ok(Some(json!(true)))
} else {
Ok(Some(json!(false)))
}
}
/// A request from the client to start a test run.
pub async fn run_request(
&self,
params: lsp_custom::TestRunRequestParams,
workspace_settings: config::WorkspaceSettings,
) -> LspResult<Option<Value>> {
let test_run =
{ TestRun::init(¶ms, self.tests.clone(), workspace_settings).await };
let enqueued = test_run.as_enqueued().await;
{
let mut runs = self.runs.lock();
runs.insert(params.id, test_run);
}
self.enqueue_run(params.id).map_err(|err| {
log::error!("cannot enqueue run: {}", err);
LspError::internal_error()
})?;
Ok(Some(json!({ "enqueued": enqueued })))
}
pub(crate) fn update(
&self,
snapshot: Arc<StateSnapshot>,
) -> Result<(), AnyError> {
self.update_channel.send(snapshot).map_err(|err| err.into())
}
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/lsp/testing/execution.rs | cli/lsp/testing/execution.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::collections::HashMap;
use std::collections::HashSet;
use std::sync::Arc;
use std::time::Duration;
use std::time::Instant;
use deno_core::ModuleSpecifier;
use deno_core::anyhow::anyhow;
use deno_core::error::AnyError;
use deno_core::error::JsError;
use deno_core::futures::StreamExt;
use deno_core::futures::future;
use deno_core::futures::stream;
use deno_core::parking_lot::RwLock;
use deno_core::unsync::spawn;
use deno_core::unsync::spawn_blocking;
use deno_runtime::deno_permissions::Permissions;
use deno_runtime::deno_permissions::PermissionsContainer;
use deno_runtime::tokio_util::create_and_run_current_thread;
use indexmap::IndexMap;
use tokio_util::sync::CancellationToken;
use tower_lsp::lsp_types as lsp;
use super::definitions::TestDefinition;
use super::definitions::TestModule;
use super::lsp_custom;
use super::server::TestServerTests;
use crate::args::DenoSubcommand;
use crate::args::flags_from_vec;
use crate::args::parallelism_count;
use crate::factory::CliFactory;
use crate::lsp::client::Client;
use crate::lsp::client::TestingNotification;
use crate::lsp::config;
use crate::lsp::logging::lsp_log;
use crate::lsp::urls::uri_parse_unencoded;
use crate::lsp::urls::uri_to_url;
use crate::lsp::urls::url_to_uri;
use crate::tools::test;
use crate::tools::test::FailFastTracker;
use crate::tools::test::TestFailure;
use crate::tools::test::TestFailureFormatOptions;
use crate::tools::test::create_test_event_channel;
/// Logic to convert a test request into a set of test modules to be tested and
/// any filters to be applied to those tests
fn as_queue_and_filters(
params: &lsp_custom::TestRunRequestParams,
tests: &HashMap<ModuleSpecifier, (TestModule, String)>,
) -> (
HashSet<ModuleSpecifier>,
HashMap<ModuleSpecifier, LspTestFilter>,
) {
let mut queue: HashSet<ModuleSpecifier> = HashSet::new();
let mut filters: HashMap<ModuleSpecifier, LspTestFilter> = HashMap::new();
if let Some(include) = ¶ms.include {
for item in include {
let url = uri_to_url(&item.text_document.uri);
if let Some((test_definitions, _)) = tests.get(&url) {
queue.insert(url.clone());
if let Some(id) = &item.id
&& let Some(test) = test_definitions.get(id)
{
let filter = filters.entry(url).or_default();
if let Some(include) = filter.include.as_mut() {
include.insert(test.id.clone(), test.clone());
} else {
let mut include = HashMap::new();
include.insert(test.id.clone(), test.clone());
filter.include = Some(include);
}
}
}
}
} else {
queue.extend(tests.keys().cloned());
}
for item in ¶ms.exclude {
let url = uri_to_url(&item.text_document.uri);
if let Some((test_definitions, _)) = tests.get(&url) {
if let Some(id) = &item.id {
// there is no way to exclude a test step
if item.step_id.is_none()
&& let Some(test) = test_definitions.get(id)
{
let filter = filters.entry(url.clone()).or_default();
filter.exclude.insert(test.id.clone(), test.clone());
}
} else {
// the entire test module is excluded
queue.remove(&url);
}
}
}
queue.retain(|s| !tests.get(s).unwrap().0.is_empty());
(queue, filters)
}
fn failure_to_test_message(failure: &TestFailure) -> lsp_custom::TestMessage {
let message = lsp::MarkupContent {
kind: lsp::MarkupKind::PlainText,
value: failure
.format(&TestFailureFormatOptions::default())
.to_string(),
};
let location = failure.error_location().and_then(|v| {
let pos = lsp::Position {
line: v.line_number,
character: v.column_number,
};
// Does not have to match the test URI
// since one can write `Deno.test(importedFunction)`
let uri = uri_parse_unencoded(&v.file_name).ok()?;
Some(lsp::Location {
uri,
range: lsp::Range::new(pos, pos),
})
});
lsp_custom::TestMessage {
message,
expected_output: None,
actual_output: None,
location,
}
}
#[derive(Debug, Clone, Default, PartialEq)]
struct LspTestFilter {
include: Option<HashMap<String, TestDefinition>>,
exclude: HashMap<String, TestDefinition>,
}
impl LspTestFilter {
fn as_ids(&self, test_module: &TestModule) -> Vec<String> {
let ids: Vec<String> = if let Some(include) = &self.include {
include.keys().cloned().collect()
} else {
test_module
.defs
.iter()
.filter(|(_, d)| d.parent_id.is_none())
.map(|(k, _)| k.clone())
.collect()
};
ids
.into_iter()
.filter(|id| !self.exclude.contains_key(id))
.collect()
}
}
#[derive(Debug, Clone)]
pub struct TestRun {
id: u32,
kind: lsp_custom::TestRunKind,
filters: HashMap<ModuleSpecifier, LspTestFilter>,
queue: HashSet<ModuleSpecifier>,
tests: TestServerTests,
token: CancellationToken,
workspace_settings: config::WorkspaceSettings,
}
impl TestRun {
pub async fn init(
params: &lsp_custom::TestRunRequestParams,
tests: TestServerTests,
workspace_settings: config::WorkspaceSettings,
) -> Self {
let (queue, filters) = {
let tests = tests.lock().await;
as_queue_and_filters(params, &tests)
};
Self {
id: params.id,
kind: params.kind.clone(),
filters,
queue,
tests,
token: CancellationToken::new(),
workspace_settings,
}
}
/// Provide the tests of a test run as an enqueued module which can be sent
/// to the client to indicate tests are enqueued for testing.
pub async fn as_enqueued(&self) -> Vec<lsp_custom::EnqueuedTestModule> {
let tests = self.tests.lock().await;
self
.queue
.iter()
.filter_map(|s| {
let ids = if let Some((test_module, _)) = tests.get(s) {
if let Some(filter) = self.filters.get(s) {
filter.as_ids(test_module)
} else {
LspTestFilter::default().as_ids(test_module)
}
} else {
Vec::new()
};
Some(lsp_custom::EnqueuedTestModule {
text_document: lsp::TextDocumentIdentifier {
uri: url_to_uri(s).ok()?,
},
ids,
})
})
.collect()
}
/// If being executed, cancel the test.
pub fn cancel(&self) {
self.token.cancel();
}
/// Execute the tests, dispatching progress notifications to the client.
pub async fn exec(
&self,
client: &Client,
maybe_root_uri: Option<&ModuleSpecifier>,
) -> Result<(), AnyError> {
let args = self.get_args();
lsp_log!("Executing test run with arguments: {}", args.join(" "));
let flags = Arc::new(flags_from_vec(
args.into_iter().map(|s| From::from(s.as_ref())).collect(),
)?);
let factory = CliFactory::from_flags(flags);
let cli_options = factory.cli_options()?;
let permission_desc_parser = factory.permission_desc_parser()?;
let main_graph_container = factory.main_module_graph_container().await?;
main_graph_container
.check_specifiers(
&self.queue.iter().cloned().collect::<Vec<_>>(),
Default::default(),
)
.await?;
let (concurrent_jobs, fail_fast) =
if let DenoSubcommand::Test(test_flags) = cli_options.sub_command() {
(
parallelism_count(test_flags.parallel).into(),
test_flags.fail_fast,
)
} else {
unreachable!("Should always be Test subcommand.");
};
// TODO(mmastrac): Temporarily limit concurrency in windows testing to avoid named pipe issue:
// *** Unexpected server pipe failure '"\\\\.\\pipe\\deno_pipe_e30f45c9df61b1e4.1198.222\\0"': 3
// This is likely because we're hitting some sort of invisible resource limit
// This limit is both in cli/lsp/testing/execution.rs and cli/tools/test/mod.rs
#[cfg(windows)]
let concurrent_jobs = std::cmp::min(concurrent_jobs, 4);
let (test_event_sender_factory, mut receiver) = create_test_event_channel();
let fail_fast_tracker = FailFastTracker::new(fail_fast);
let mut queue = self.queue.iter().collect::<Vec<&ModuleSpecifier>>();
queue.sort();
let tests: Arc<RwLock<IndexMap<usize, test::TestDescription>>> =
Arc::new(RwLock::new(IndexMap::new()));
let mut test_steps = IndexMap::new();
let worker_factory =
Arc::new(factory.create_cli_main_worker_factory().await?);
let join_handles = queue.into_iter().map(move |specifier| {
let specifier = specifier.clone();
let specifier_dir =
cli_options.workspace().resolve_member_dir(&specifier);
let worker_factory = worker_factory.clone();
let cli_options = cli_options.clone();
let permission_desc_parser = permission_desc_parser.clone();
let worker_sender = test_event_sender_factory.worker();
let fail_fast_tracker = fail_fast_tracker.clone();
let lsp_filter = self.filters.get(&specifier);
let filter = test::TestFilter {
substring: None,
regex: None,
include: lsp_filter.and_then(|f| {
f.include
.as_ref()
.map(|i| i.values().map(|t| t.name.clone()).collect())
}),
exclude: lsp_filter
.map(|f| f.exclude.values().map(|t| t.name.clone()).collect())
.unwrap_or_default(),
};
let token = self.token.clone();
spawn_blocking(move || {
// Various test files should not share the same permissions in terms of
// `PermissionsContainer` - otherwise granting/revoking permissions in one
// file would have impact on other files, which is undesirable.
let permissions =
cli_options.permissions_options_for_dir(&specifier_dir)?;
let permissions_container = PermissionsContainer::new(
permission_desc_parser.clone(),
Permissions::from_options(
permission_desc_parser.as_ref(),
&permissions,
)?,
);
if fail_fast_tracker.should_stop() {
return Ok(());
}
if token.is_cancelled() {
Ok(())
} else {
// All JsErrors are handled by test_specifier and piped into the test
// channel.
create_and_run_current_thread(test::test_specifier(
worker_factory,
permissions_container,
specifier,
// Executing tests in the LSP currently doesn't support preload option
vec![],
// Executing tests in the LSP currently doesn't support require option
vec![],
worker_sender,
fail_fast_tracker,
test::TestSpecifierOptions {
filter,
shuffle: None,
trace_leaks: false,
},
))
}
})
});
let join_stream = stream::iter(join_handles)
.buffer_unordered(concurrent_jobs)
.collect::<Vec<Result<Result<(), AnyError>, tokio::task::JoinError>>>();
let mut reporter = Box::new(LspTestReporter::new(
self,
client.clone(),
maybe_root_uri,
self.tests.clone(),
));
let handler = {
spawn(async move {
let earlier = Instant::now();
let mut summary = test::TestSummary::new();
let mut tests_with_result = HashSet::new();
let mut used_only = false;
while let Some((_, event)) = receiver.recv().await {
match event {
test::TestEvent::Register(description) => {
for (_, description) in description.into_iter() {
reporter.report_register(description).await;
// TODO(mmastrac): we shouldn't need to clone here - we can re-use the descriptions
tests.write().insert(description.id, description.clone());
}
}
test::TestEvent::Plan(plan) => {
summary.total += plan.total;
summary.filtered_out += plan.filtered_out;
if plan.used_only {
used_only = true;
}
reporter.report_plan(&plan);
}
test::TestEvent::Wait(id) => {
reporter.report_wait(tests.read().get(&id).unwrap());
}
test::TestEvent::Output(output) => {
reporter.report_output(&output);
}
test::TestEvent::Slow(id, elapsed) => {
reporter.report_slow(tests.read().get(&id).unwrap(), elapsed);
}
test::TestEvent::Result(id, result, elapsed) => {
if tests_with_result.insert(id) {
let description = tests.read().get(&id).unwrap().clone();
match &result {
test::TestResult::Ok => summary.passed += 1,
test::TestResult::Ignored => summary.ignored += 1,
test::TestResult::Failed(error) => {
summary.failed += 1;
summary
.failures
.push(((&description).into(), error.clone()));
}
test::TestResult::Cancelled => {
summary.failed += 1;
}
}
reporter.report_result(&description, &result, elapsed);
}
}
test::TestEvent::UncaughtError(origin, error) => {
reporter.report_uncaught_error(&origin, &error);
summary.failed += 1;
summary.uncaught_errors.push((origin, error));
}
test::TestEvent::StepRegister(description) => {
reporter.report_step_register(&description).await;
test_steps.insert(description.id, description);
}
test::TestEvent::StepWait(id) => {
reporter.report_step_wait(test_steps.get(&id).unwrap());
}
test::TestEvent::StepResult(id, result, duration) => {
if tests_with_result.insert(id) {
match &result {
test::TestStepResult::Ok => {
summary.passed_steps += 1;
}
test::TestStepResult::Ignored => {
summary.ignored_steps += 1;
}
test::TestStepResult::Failed(_) => {
summary.failed_steps += 1;
}
}
reporter.report_step_result(
test_steps.get(&id).unwrap(),
&result,
duration,
);
}
}
test::TestEvent::Completed => {
reporter.report_completed();
}
test::TestEvent::ForceEndReport => {}
test::TestEvent::Sigint => {}
}
}
let elapsed = Instant::now().duration_since(earlier);
reporter.report_summary(&summary, &elapsed);
if used_only {
return Err(anyhow!(
"Test failed because the \"only\" option was used"
));
}
if summary.failed > 0 {
return Err(anyhow!("Test failed"));
}
Ok(())
})
};
let (join_results, result) = future::join(join_stream, handler).await;
// propagate any errors
for join_result in join_results {
join_result??;
}
result??;
Ok(())
}
fn get_args(&self) -> Vec<Cow<'_, str>> {
let mut args = vec![Cow::Borrowed("deno"), Cow::Borrowed("test")];
args.extend(
self
.workspace_settings
.testing
.args
.iter()
.map(|s| Cow::Borrowed(s.as_str())),
);
args.push(Cow::Borrowed("--trace-leaks"));
for unstable_feature in self.workspace_settings.unstable.as_deref() {
let flag = format!("--unstable-{unstable_feature}");
if !args.contains(&Cow::Borrowed(&flag)) {
args.push(Cow::Owned(flag));
}
}
if let Some(config) = &self.workspace_settings.config
&& !args.contains(&Cow::Borrowed("--config"))
&& !args.contains(&Cow::Borrowed("-c"))
{
args.push(Cow::Borrowed("--config"));
args.push(Cow::Borrowed(config.as_str()));
}
if let Some(import_map) = &self.workspace_settings.import_map
&& !args.contains(&Cow::Borrowed("--import-map"))
{
args.push(Cow::Borrowed("--import-map"));
args.push(Cow::Borrowed(import_map.as_str()));
}
if self.kind == lsp_custom::TestRunKind::Debug
&& !args.contains(&Cow::Borrowed("--inspect"))
&& !args.contains(&Cow::Borrowed("--inspect-brk"))
{
args.push(Cow::Borrowed("--inspect"));
}
args
}
}
#[derive(Debug, PartialEq)]
enum LspTestDescription {
/// `(desc, static_id)`
TestDescription(test::TestDescription, String),
/// `(desc, static_id)`
TestStepDescription(test::TestStepDescription, String),
}
impl LspTestDescription {
fn origin(&self) -> &str {
match self {
LspTestDescription::TestDescription(d, _) => d.origin.as_str(),
LspTestDescription::TestStepDescription(d, _) => d.origin.as_str(),
}
}
fn location(&self) -> &test::TestLocation {
match self {
LspTestDescription::TestDescription(d, _) => &d.location,
LspTestDescription::TestStepDescription(d, _) => &d.location,
}
}
fn parent_id(&self) -> Option<usize> {
match self {
LspTestDescription::TestDescription(_, _) => None,
LspTestDescription::TestStepDescription(d, _) => Some(d.parent_id),
}
}
fn static_id(&self) -> &str {
match self {
LspTestDescription::TestDescription(_, i) => i,
LspTestDescription::TestStepDescription(_, i) => i,
}
}
fn as_test_identifier(
&self,
tests: &IndexMap<usize, LspTestDescription>,
) -> lsp_custom::TestIdentifier {
let mut root_desc = self;
while let Some(parent_id) = root_desc.parent_id() {
root_desc = tests.get(&parent_id).unwrap();
}
let uri = uri_parse_unencoded(&root_desc.location().file_name).unwrap();
let static_id = self.static_id();
let root_static_id = root_desc.static_id();
lsp_custom::TestIdentifier {
text_document: lsp::TextDocumentIdentifier { uri },
id: Some(root_static_id.to_string()),
step_id: if static_id == root_static_id {
None
} else {
Some(static_id.to_string())
},
}
}
}
struct LspTestReporter {
client: Client,
id: u32,
maybe_root_uri: Option<ModuleSpecifier>,
files: TestServerTests,
tests: IndexMap<usize, LspTestDescription>,
current_test: Option<usize>,
}
impl LspTestReporter {
fn new(
run: &TestRun,
client: Client,
maybe_root_uri: Option<&ModuleSpecifier>,
files: TestServerTests,
) -> Self {
Self {
client,
id: run.id,
maybe_root_uri: maybe_root_uri.cloned(),
files,
tests: Default::default(),
current_test: Default::default(),
}
}
fn progress(&self, message: lsp_custom::TestRunProgressMessage) {
self
.client
.send_test_notification(TestingNotification::Progress(
lsp_custom::TestRunProgressParams {
id: self.id,
message,
},
));
}
fn report_plan(&mut self, _plan: &test::TestPlan) {}
async fn report_register(&mut self, desc: &test::TestDescription) {
let mut files = self.files.lock().await;
let specifier = ModuleSpecifier::parse(&desc.location.file_name).unwrap();
let (test_module, _) = files
.entry(specifier.clone())
.or_insert_with(|| (TestModule::new(specifier), "1".to_string()));
let Ok(uri) = url_to_uri(&test_module.specifier) else {
return;
};
let (static_id, is_new) = test_module.register_dynamic(desc);
self.tests.insert(
desc.id,
LspTestDescription::TestDescription(desc.clone(), static_id.clone()),
);
if is_new {
self
.client
.send_test_notification(TestingNotification::Module(
lsp_custom::TestModuleNotificationParams {
text_document: lsp::TextDocumentIdentifier { uri },
kind: lsp_custom::TestModuleNotificationKind::Insert,
label: test_module.label(self.maybe_root_uri.as_ref()),
tests: vec![test_module.get_test_data(&static_id)],
},
));
}
}
fn report_wait(&mut self, desc: &test::TestDescription) {
self.current_test = Some(desc.id);
let desc = self.tests.get(&desc.id).unwrap();
let test = desc.as_test_identifier(&self.tests);
self.progress(lsp_custom::TestRunProgressMessage::Started { test });
}
fn report_slow(&mut self, _desc: &test::TestDescription, _elapsed: u64) {}
fn report_output(&mut self, output: &[u8]) {
let test = self
.current_test
.as_ref()
.map(|id| self.tests.get(id).unwrap().as_test_identifier(&self.tests));
let value = String::from_utf8_lossy(output).replace('\n', "\r\n");
self.progress(lsp_custom::TestRunProgressMessage::Output {
value,
test,
// TODO(@kitsonk) test output should include a location
location: None,
})
}
fn report_result(
&mut self,
desc: &test::TestDescription,
result: &test::TestResult,
elapsed: u64,
) {
self.current_test = None;
match result {
test::TestResult::Ok => {
let desc = self.tests.get(&desc.id).unwrap();
self.progress(lsp_custom::TestRunProgressMessage::Passed {
test: desc.as_test_identifier(&self.tests),
duration: Some(elapsed as u32),
})
}
test::TestResult::Ignored => {
let desc = self.tests.get(&desc.id).unwrap();
self.progress(lsp_custom::TestRunProgressMessage::Skipped {
test: desc.as_test_identifier(&self.tests),
})
}
test::TestResult::Failed(failure) => {
let desc = self.tests.get(&desc.id).unwrap();
self.progress(lsp_custom::TestRunProgressMessage::Failed {
test: desc.as_test_identifier(&self.tests),
messages: vec![failure_to_test_message(failure)],
duration: Some(elapsed as u32),
})
}
test::TestResult::Cancelled => {
let desc = self.tests.get(&desc.id).unwrap();
self.progress(lsp_custom::TestRunProgressMessage::Failed {
test: desc.as_test_identifier(&self.tests),
messages: vec![],
duration: Some(elapsed as u32),
})
}
}
}
fn report_uncaught_error(&mut self, origin: &str, js_error: &JsError) {
self.current_test = None;
let err_string = format!(
"Uncaught error from {}: {}\nThis error was not caught from a test and caused the test runner to fail on the referenced module.\nIt most likely originated from a dangling promise, event/timeout handler or top-level code.",
origin,
test::fmt::format_test_error(
js_error,
&TestFailureFormatOptions::default()
)
);
let messages = vec![lsp_custom::TestMessage {
message: lsp::MarkupContent {
kind: lsp::MarkupKind::PlainText,
value: err_string,
},
expected_output: None,
actual_output: None,
location: None,
}];
for desc in self.tests.values().filter(|d| d.origin() == origin) {
self.progress(lsp_custom::TestRunProgressMessage::Failed {
test: desc.as_test_identifier(&self.tests),
messages: messages.clone(),
duration: None,
});
}
}
async fn report_step_register(&mut self, desc: &test::TestStepDescription) {
let mut files = self.files.lock().await;
let file_name = self
.current_test
.and_then(|i| {
let mut root_desc = self.tests.get(&i)?;
while let Some(parent_id) = root_desc.parent_id() {
root_desc = self.tests.get(&parent_id)?;
}
Some(&root_desc.location().file_name)
})
.unwrap_or(&desc.location.file_name);
let specifier = ModuleSpecifier::parse(file_name).unwrap();
let (test_module, _) = files
.entry(specifier.clone())
.or_insert_with(|| (TestModule::new(specifier), "1".to_string()));
let Ok(uri) = url_to_uri(&test_module.specifier) else {
return;
};
let (static_id, is_new) = test_module.register_step_dynamic(
desc,
self.tests.get(&desc.parent_id).unwrap().static_id(),
);
self.tests.insert(
desc.id,
LspTestDescription::TestStepDescription(desc.clone(), static_id.clone()),
);
if is_new {
self
.client
.send_test_notification(TestingNotification::Module(
lsp_custom::TestModuleNotificationParams {
text_document: lsp::TextDocumentIdentifier { uri },
kind: lsp_custom::TestModuleNotificationKind::Insert,
label: test_module.label(self.maybe_root_uri.as_ref()),
tests: vec![test_module.get_test_data(&static_id)],
},
));
}
}
fn report_step_wait(&mut self, desc: &test::TestStepDescription) {
if self.current_test == Some(desc.parent_id) {
self.current_test = Some(desc.id);
}
let desc = self.tests.get(&desc.id).unwrap();
let test = desc.as_test_identifier(&self.tests);
self.progress(lsp_custom::TestRunProgressMessage::Started { test });
}
fn report_step_result(
&mut self,
desc: &test::TestStepDescription,
result: &test::TestStepResult,
elapsed: u64,
) {
if self.current_test == Some(desc.id) {
self.current_test = Some(desc.parent_id);
}
let desc = self.tests.get(&desc.id).unwrap();
match result {
test::TestStepResult::Ok => {
self.progress(lsp_custom::TestRunProgressMessage::Passed {
test: desc.as_test_identifier(&self.tests),
duration: Some(elapsed as u32),
})
}
test::TestStepResult::Ignored => {
self.progress(lsp_custom::TestRunProgressMessage::Skipped {
test: desc.as_test_identifier(&self.tests),
})
}
test::TestStepResult::Failed(failure) => {
self.progress(lsp_custom::TestRunProgressMessage::Failed {
test: desc.as_test_identifier(&self.tests),
messages: vec![failure_to_test_message(failure)],
duration: Some(elapsed as u32),
})
}
}
}
fn report_completed(&mut self) {
// there is nothing to do on report_completed
}
fn report_summary(
&mut self,
_summary: &test::TestSummary,
_elapsed: &Duration,
) {
// there is nothing to do on report_summary
}
}
#[cfg(test)]
mod tests {
  use deno_core::serde_json::json;

  use super::*;
  use crate::lsp::testing::collectors::tests::new_range;

  #[test]
  fn test_as_queue_and_filters() {
    let specifier = ModuleSpecifier::parse("file:///a/file.ts").unwrap();
    // Regression test for https://github.com/denoland/vscode_deno/issues/890.
    let non_test_specifier =
      ModuleSpecifier::parse("file:///a/no_tests.ts").unwrap();
    // Request both modules, but exclude one specific test id ("test b")
    // from the first module.
    let params = lsp_custom::TestRunRequestParams {
      id: 1,
      kind: lsp_custom::TestRunKind::Run,
      include: Some(vec![
        lsp_custom::TestIdentifier {
          text_document: lsp::TextDocumentIdentifier {
            uri: url_to_uri(&specifier).unwrap(),
          },
          id: None,
          step_id: None,
        },
        lsp_custom::TestIdentifier {
          text_document: lsp::TextDocumentIdentifier {
            uri: url_to_uri(&non_test_specifier).unwrap(),
          },
          id: None,
          step_id: None,
        },
      ]),
      exclude: vec![lsp_custom::TestIdentifier {
        text_document: lsp::TextDocumentIdentifier {
          uri: url_to_uri(&specifier).unwrap(),
        },
        id: Some(
          "69d9fe87f64f5b66cb8b631d4fd2064e8224b8715a049be54276c42189ff8f9f"
            .to_string(),
        ),
        step_id: None,
      }],
    };
    let mut tests = HashMap::new();
    let test_def_a = TestDefinition {
      id: "0b7c6bf3cd617018d33a1bf982a08fe088c5bb54fcd5eb9e802e7c137ec1af94"
        .to_string(),
      name: "test a".to_string(),
      range: Some(new_range(1, 5, 1, 9)),
      is_dynamic: false,
      parent_id: None,
      step_ids: Default::default(),
    };
    let test_def_b = TestDefinition {
      id: "69d9fe87f64f5b66cb8b631d4fd2064e8224b8715a049be54276c42189ff8f9f"
        .to_string(),
      name: "test b".to_string(),
      range: Some(new_range(2, 5, 2, 9)),
      is_dynamic: false,
      parent_id: None,
      step_ids: Default::default(),
    };
    let test_module = TestModule {
      specifier: specifier.clone(),
      defs: vec![
        (test_def_a.id.clone(), test_def_a.clone()),
        (test_def_b.id.clone(), test_def_b.clone()),
      ]
      .into_iter()
      .collect(),
    };
    tests.insert(specifier.clone(), (test_module.clone(), "1".to_string()));
    tests.insert(
      non_test_specifier.clone(),
      (TestModule::new(non_test_specifier), "1".to_string()),
    );
    let (queue, filters) = as_queue_and_filters(&params, &tests);
    // The module without any tests must not be queued.
    assert_eq!(json!(queue), json!([specifier]));
    let mut exclude = HashMap::new();
    exclude.insert(
      "69d9fe87f64f5b66cb8b631d4fd2064e8224b8715a049be54276c42189ff8f9f"
        .to_string(),
      test_def_b,
    );
    let maybe_filter = filters.get(&specifier);
    assert!(maybe_filter.is_some());
    let filter = maybe_filter.unwrap();
    // The excluded id is carried through into the computed filter.
    assert_eq!(
      filter,
      &LspTestFilter {
        include: None,
        exclude,
      }
    );
    // Applying the filter leaves only the non-excluded test ("test a").
    assert_eq!(
      filter.as_ids(&test_module),
      vec![
        "0b7c6bf3cd617018d33a1bf982a08fe088c5bb54fcd5eb9e802e7c137ec1af94"
          .to_string()
      ]
    );
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/tools/serve.rs | cli/tools/serve.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::num::NonZeroUsize;
use std::str::FromStr;
use std::sync::Arc;
use deno_core::ModuleSpecifier;
use deno_core::error::AnyError;
use deno_core::futures::FutureExt;
use deno_core::futures::TryFutureExt;
use deno_lib::worker::LibWorkerFactoryRoots;
use deno_runtime::UnconfiguredRuntime;
use super::run::check_permission_before_script;
use super::run::maybe_npm_install;
use crate::args::Flags;
use crate::args::ServeFlags;
use crate::args::WatchFlagsWithPaths;
use crate::args::WorkspaceMainModuleResolver;
use crate::args::parallelism_count;
use crate::factory::CliFactory;
use crate::util::file_watcher::WatcherRestartMode;
use crate::worker::CliMainWorkerFactory;
/// Entry point for `deno serve`.
///
/// Delegates to the file-watcher variant when `--watch` was passed;
/// otherwise builds a worker factory, optionally opens the site in a
/// browser, and runs the serve workers until they exit.
pub async fn serve(
  flags: Arc<Flags>,
  serve_flags: ServeFlags,
  unconfigured_runtime: Option<UnconfiguredRuntime>,
  roots: LibWorkerFactoryRoots,
) -> Result<i32, AnyError> {
  check_permission_before_script(&flags);

  if let Some(watch_flags) = serve_flags.watch {
    return serve_with_watch(
      flags,
      watch_flags,
      parallelism_count(serve_flags.parallel),
    )
    .await;
  }
  let factory = CliFactory::from_flags(flags);
  let cli_options = factory.cli_options()?;
  let deno_dir = factory.deno_dir()?;
  let http_client = factory.http_client_provider();

  // Run a background task that checks for available upgrades or output
  // if an earlier run of this background task found a new version of Deno.
  #[cfg(feature = "upgrade")]
  super::upgrade::check_for_upgrades(
    http_client.clone(),
    deno_dir.upgrade_check_file_path(),
  );

  let workspace_resolver = factory.workspace_resolver().await?.clone();
  let node_resolver = factory.node_resolver().await?.clone();
  let main_module = cli_options.resolve_main_module_with_resolver(Some(
    &WorkspaceMainModuleResolver::new(workspace_resolver, node_resolver),
  ))?;

  maybe_npm_install(&factory).await?;

  let worker_factory = Arc::new(
    factory
      .create_cli_main_worker_factory_with_roots(roots)
      .await?,
  );

  if serve_flags.open_site {
    let url = resolve_serve_url(serve_flags.host, serve_flags.port);
    // Best effort: failure to open a browser is not an error.
    let _ = open::that_detached(url);
  }

  // NOTE(review): `serve_flags.watch` is always `None` on this path
  // (the `Some` case returned above), so `hmr` evaluates to `false` here.
  let hmr = serve_flags
    .watch
    .map(|watch_flags| watch_flags.hmr)
    .unwrap_or(false);
  do_serve(
    worker_factory,
    main_module.clone(),
    parallelism_count(serve_flags.parallel),
    hmr,
    unconfigured_runtime,
  )
  .await
}
/// Runs the serve entrypoint on the current thread plus
/// `parallelism_count - 1` extra worker threads.
///
/// Returns the main worker's exit code, or — when that is zero — the
/// first non-zero exit code reported by any worker thread.
async fn do_serve(
  worker_factory: Arc<CliMainWorkerFactory>,
  main_module: ModuleSpecifier,
  parallelism_count: NonZeroUsize,
  hmr: bool,
  unconfigured_runtime: Option<UnconfiguredRuntime>,
) -> Result<i32, AnyError> {
  // The main worker counts toward the requested parallelism.
  let worker_count = parallelism_count.get() - 1;
  let mut worker = worker_factory
    .create_main_worker_with_unconfigured_runtime(
      deno_runtime::WorkerExecutionMode::ServeMain { worker_count },
      main_module.clone(),
      // TODO(bartlomieju):
      vec![],
      vec![],
      unconfigured_runtime,
    )
    .await?;
  let worker_count = match worker_count {
    // No extra workers requested: just run the main worker inline.
    0 => return worker.run().await.map_err(Into::into),
    c => c,
  };
  let main = deno_core::unsync::spawn(async move { worker.run().await });
  // Each worker thread reports its exit code over a oneshot channel.
  let mut channels = Vec::with_capacity(worker_count);
  for i in 0..worker_count {
    let worker_factory = worker_factory.clone();
    let main_module = main_module.clone();
    let (tx, rx) = tokio::sync::oneshot::channel();
    channels.push(rx);
    std::thread::Builder::new()
      .name(format!("serve-worker-{}", i + 1))
      .spawn(move || {
        // Every worker gets its own current-thread tokio runtime.
        deno_runtime::tokio_util::create_and_run_current_thread(async move {
          let result = run_worker(i, worker_factory, main_module, hmr).await;
          // Receiver may already be gone if the main worker failed.
          let _ = tx.send(result);
        });
      })?;
  }
  let (main_result, worker_results) = tokio::try_join!(
    main.map_err(AnyError::from),
    deno_core::futures::future::try_join_all(
      channels.into_iter().map(|r| r.map_err(AnyError::from))
    )
  )?;
  // Main worker's code takes precedence; otherwise the first non-zero
  // worker code wins.
  let mut exit_code = main_result?;
  for res in worker_results {
    let ret = res?;
    if ret != 0 && exit_code == 0 {
      exit_code = ret;
    }
  }
  Ok(exit_code)
}
/// Runs one additional `deno serve` worker to completion, returning its
/// exit code (always 0 in HMR mode unless an error is raised).
async fn run_worker(
  worker_index: usize,
  worker_factory: Arc<CliMainWorkerFactory>,
  main_module: ModuleSpecifier,
  hmr: bool,
) -> Result<i32, AnyError> {
  let mode = deno_runtime::WorkerExecutionMode::ServeWorker { worker_index };
  let mut serve_worker: crate::worker::CliMainWorker = worker_factory
    .create_main_worker(
      mode,
      main_module,
      // TODO(bartlomieju):
      vec![],
      vec![],
    )
    .await?;
  match hmr {
    true => {
      serve_worker.run_for_watcher().await?;
      Ok(0)
    }
    false => serve_worker.run().await.map_err(Into::into),
  }
}
/// Runs `deno serve` under the file watcher, restarting the server (or
/// hot-reloading when `--watch-hmr`) whenever watched files change.
async fn serve_with_watch(
  flags: Arc<Flags>,
  watch_flags: WatchFlagsWithPaths,
  parallelism_count: NonZeroUsize,
) -> Result<i32, AnyError> {
  let hmr = watch_flags.hmr;
  crate::util::file_watcher::watch_recv(
    flags,
    crate::util::file_watcher::PrintConfig::new_with_banner(
      if watch_flags.hmr { "HMR" } else { "Watcher" },
      "Process",
      !watch_flags.no_clear_screen,
    ),
    WatcherRestartMode::Automatic,
    move |flags, watcher_communicator, changed_paths| {
      watcher_communicator.show_path_changed(changed_paths.clone());
      Ok(async move {
        // Rebuild the factory on every restart so configuration changes
        // are picked up.
        let factory = CliFactory::from_flags_for_watcher(
          flags,
          watcher_communicator.clone(),
        );
        let cli_options = factory.cli_options()?;
        let main_module = cli_options.resolve_main_module()?;

        maybe_npm_install(&factory).await?;

        // Best effort: registering watch paths may fail if the watcher
        // channel is gone.
        let _ = watcher_communicator.watch_paths(cli_options.watch_paths());

        let worker_factory =
          Arc::new(factory.create_cli_main_worker_factory().await?);

        do_serve(
          worker_factory,
          main_module.clone(),
          parallelism_count,
          hmr,
          None,
        )
        .await?;

        Ok(())
      })
    },
  )
  .boxed_local()
  .await?;
  Ok(0)
}
/// Builds the URL shown/opened for a `deno serve` instance.
///
/// Unspecified bind addresses (`0.0.0.0`, `::`) are rewritten to the
/// loopback address since they are not directly connectable; other IPv6
/// hosts are bracketed. Port 80 is omitted from the URL.
fn resolve_serve_url(host: String, port: u16) -> String {
  let is_unspecified = host == "0.0.0.0" || host == "::";
  let display_host = if is_unspecified {
    String::from("127.0.0.1")
  } else if std::net::Ipv6Addr::from_str(&host).is_ok() {
    format!("[{host}]")
  } else {
    host
  };
  if port != 80 {
    format!("http://{display_host}:{port}/")
  } else {
    format!("http://{display_host}/")
  }
}
#[cfg(test)]
mod test {
  use super::*;

  #[test]
  fn test_resolve_serve_url() {
    assert_eq!(
      resolve_serve_url("localhost".to_string(), 80),
      "http://localhost/"
    );
    // Unspecified bind addresses are rewritten to loopback.
    assert_eq!(
      resolve_serve_url("0.0.0.0".to_string(), 80),
      "http://127.0.0.1/"
    );
    assert_eq!(resolve_serve_url("::".to_string(), 80), "http://127.0.0.1/");
    // Non-default ports show up in the URL.
    assert_eq!(
      resolve_serve_url("::".to_string(), 90),
      "http://127.0.0.1:90/"
    );
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/tools/compile.rs | cli/tools/compile.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::collections::HashSet;
use std::collections::VecDeque;
use std::io::Write as _;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use deno_ast::MediaType;
use deno_ast::ModuleSpecifier;
use deno_core::anyhow::Context;
use deno_core::anyhow::anyhow;
use deno_core::anyhow::bail;
use deno_core::error::AnyError;
use deno_graph::GraphKind;
use deno_npm_installer::graph::NpmCachingStrategy;
use deno_path_util::resolve_url_or_path;
use deno_path_util::url_from_file_path;
use deno_path_util::url_to_file_path;
use deno_terminal::colors;
use rand::Rng;
use super::installer::BinNameResolver;
use crate::args::CliOptions;
use crate::args::CompileFlags;
use crate::args::Flags;
use crate::factory::CliFactory;
use crate::standalone::binary::WriteBinOptions;
use crate::standalone::binary::is_standalone_binary;
/// Entry point for `deno compile`.
///
/// Builds the module graph for the entrypoint (plus `--include` roots),
/// writes the standalone binary to a randomly-named temporary file next
/// to the output path, marks it executable on unix, then atomically
/// renames it into place. On any failure the temporary file is removed.
pub async fn compile(
  flags: Arc<Flags>,
  compile_flags: CompileFlags,
) -> Result<(), AnyError> {
  let factory = CliFactory::from_flags(flags);
  let cli_options = factory.cli_options()?;
  let module_graph_creator = factory.module_graph_creator().await?;
  let binary_writer = factory.create_compile_binary_writer().await?;
  let entrypoint = cli_options.resolve_main_module()?;
  let bin_name_resolver = factory.bin_name_resolver()?;
  let output_path = resolve_compile_executable_output_path(
    &bin_name_resolver,
    &compile_flags,
    cli_options.initial_cwd(),
  )
  .await?;
  let (module_roots, include_paths) = get_module_roots_and_include_paths(
    entrypoint,
    &compile_flags,
    cli_options,
  )?;

  let graph = Arc::try_unwrap(
    module_graph_creator
      .create_graph_and_maybe_check(module_roots.clone())
      .await?,
  )
  .unwrap();
  let graph = if cli_options.type_check_mode().is_true() {
    // In this case, the previous graph creation did type checking, which will
    // create a module graph with types information in it. We don't want to
    // store that in the binary so create a code only module graph from scratch.
    module_graph_creator
      .create_graph(
        GraphKind::CodeOnly,
        module_roots,
        NpmCachingStrategy::Eager,
      )
      .await?
  } else {
    graph
  };

  let initial_cwd =
    deno_path_util::url_from_directory_path(cli_options.initial_cwd())?;
  log::info!(
    "{} {} to {}",
    colors::green("Compile"),
    crate::util::path::relative_specifier_path_for_display(
      &initial_cwd,
      entrypoint
    ),
    {
      // Prefer a cwd-relative display path; fall back to the raw path.
      if let Ok(output_path) = deno_path_util::url_from_file_path(&output_path)
      {
        crate::util::path::relative_specifier_path_for_display(
          &initial_cwd,
          &output_path,
        )
      } else {
        output_path.display().to_string()
      }
    }
  );
  validate_output_path(&output_path)?;

  // Write to a random `.tmp-*` sibling of the output so a failed write
  // never corrupts an existing binary and the final rename is atomic.
  let mut temp_filename = output_path.file_name().unwrap().to_owned();
  temp_filename.push(format!(
    ".tmp-{}",
    faster_hex::hex_encode(
      &rand::thread_rng().r#gen::<[u8; 8]>(),
      &mut [0u8; 16]
    )
    .unwrap()
  ));
  let temp_path = output_path.with_file_name(temp_filename);

  let file = std::fs::File::create(&temp_path).with_context(|| {
    format!("Opening temporary file '{}'", temp_path.display())
  })?;

  let write_result = binary_writer
    .write_bin(WriteBinOptions {
      writer: file,
      display_output_filename: &output_path
        .file_name()
        .unwrap()
        .to_string_lossy(),
      graph: &graph,
      entrypoint,
      include_paths: &include_paths,
      // Also exclude the output binary and the temp file themselves so
      // they are never embedded into the produced executable.
      exclude_paths: compile_flags
        .exclude
        .iter()
        .map(|p| cli_options.initial_cwd().join(p))
        .chain(std::iter::once(
          cli_options.initial_cwd().join(&output_path),
        ))
        .chain(std::iter::once(cli_options.initial_cwd().join(&temp_path)))
        .collect(),
      compile_flags: &compile_flags,
    })
    .await
    .with_context(|| {
      format!(
        "Writing deno compile executable to temporary file '{}'",
        temp_path.display()
      )
    });

  // set it as executable
  #[cfg(unix)]
  let write_result = write_result.and_then(|_| {
    use std::os::unix::fs::PermissionsExt;
    let perms = std::fs::Permissions::from_mode(0o755);
    std::fs::set_permissions(&temp_path, perms).with_context(|| {
      format!(
        "Setting permissions on temporary file '{}'",
        temp_path.display()
      )
    })
  });

  let write_result = write_result.and_then(|_| {
    std::fs::rename(&temp_path, &output_path).with_context(|| {
      format!(
        "Renaming temporary file '{}' to '{}'",
        temp_path.display(),
        output_path.display()
      )
    })
  });

  if let Err(err) = write_result {
    // errored, so attempt to remove the temporary file
    let _ = std::fs::remove_file(temp_path);
    return Err(err);
  }

  Ok(())
}
/// Entry point for `deno compile --eszip`: serializes the module graph
/// (and, when configured, the import map) into an `.eszip` file instead
/// of a standalone executable.
pub async fn compile_eszip(
  flags: Arc<Flags>,
  compile_flags: CompileFlags,
) -> Result<(), AnyError> {
  let factory = CliFactory::from_flags(flags);
  let cli_options = factory.cli_options()?;
  let module_graph_creator = factory.module_graph_creator().await?;
  let parsed_source_cache = factory.parsed_source_cache()?;
  let compiler_options_resolver = factory.compiler_options_resolver()?;
  let bin_name_resolver = factory.bin_name_resolver()?;
  let entrypoint = cli_options.resolve_main_module()?;
  let mut output_path = resolve_compile_executable_output_path(
    &bin_name_resolver,
    &compile_flags,
    cli_options.initial_cwd(),
  )
  .await?;
  // The eszip output reuses the executable naming logic, swapping the
  // extension.
  output_path.set_extension("eszip");
  let maybe_import_map_specifier =
    cli_options.resolve_specified_import_map_specifier()?;
  let (module_roots, _include_paths) = get_module_roots_and_include_paths(
    entrypoint,
    &compile_flags,
    cli_options,
  )?;

  let graph = Arc::try_unwrap(
    module_graph_creator
      .create_graph_and_maybe_check(module_roots.clone())
      .await?,
  )
  .unwrap();
  let graph = if cli_options.type_check_mode().is_true() {
    // In this case, the previous graph creation did type checking, which will
    // create a module graph with types information in it. We don't want to
    // store that in the binary so create a code only module graph from scratch.
    module_graph_creator
      .create_graph(
        GraphKind::CodeOnly,
        module_roots,
        NpmCachingStrategy::Eager,
      )
      .await?
  } else {
    graph
  };

  let transpile_and_emit_options = compiler_options_resolver
    .for_specifier(cli_options.workspace().root_dir_url())
    .transpile_options()?;
  let transpile_options = transpile_and_emit_options.transpile.clone();
  let emit_options = transpile_and_emit_options.emit.clone();
  let parser = parsed_source_cache.as_capturing_parser();
  let root_dir_url = cli_options.workspace().root_dir_url();
  log::debug!("Binary root dir: {}", root_dir_url);
  // Module specifiers are stored relative to the workspace root.
  let relative_file_base = eszip::EszipRelativeFileBaseUrl::new(root_dir_url);
  let mut eszip = eszip::EszipV2::from_graph(eszip::FromGraphOptions {
    graph,
    parser,
    transpile_options,
    emit_options,
    relative_file_base: Some(relative_file_base),
    npm_packages: None,
    module_kind_resolver: Default::default(),
    npm_snapshot: Default::default(),
  })?;

  if let Some(import_map_specifier) = maybe_import_map_specifier {
    let import_map_path = import_map_specifier.to_file_path().unwrap();
    let import_map_content = std::fs::read_to_string(&import_map_path)
      .with_context(|| {
        format!("Failed to read import map: {:?}", import_map_path)
      })?;
    // Prefer a root-relative specifier for the embedded import map.
    let import_map_specifier_str = if let Some(relative_import_map_specifier) =
      root_dir_url.make_relative(&import_map_specifier)
    {
      relative_import_map_specifier
    } else {
      import_map_specifier.to_string()
    };
    eszip.add_import_map(
      eszip::ModuleKind::Json,
      import_map_specifier_str,
      import_map_content.as_bytes().to_vec().into(),
    );
  }

  log::info!(
    "{} {} to {}",
    colors::green("Compile"),
    entrypoint,
    output_path.display(),
  );
  validate_output_path(&output_path)?;

  let mut file = std::fs::File::create(&output_path).with_context(|| {
    format!("Opening ESZip file '{}'", output_path.display())
  })?;
  let write_result = {
    let r = file.write_all(&eszip.into_bytes());
    drop(file);
    r
  };
  if let Err(err) = write_result {
    // Clean up a partially-written file on failure.
    let _ = std::fs::remove_file(output_path);
    return Err(err.into());
  }

  Ok(())
}
/// Ensures `output_path` can safely receive a compiled binary.
///
/// Refuses to overwrite directories or files that were not produced by
/// `deno compile`; removes a pre-existing deno-compiled binary; creates
/// the parent directory when the path does not exist yet.
fn validate_output_path(output_path: &Path) -> Result<(), AnyError> {
  if !output_path.exists() {
    // Nothing to overwrite — just make sure the parent directory can be
    // created (and is not an existing file of the same name).
    let parent = output_path.parent().unwrap();
    if parent.is_file() {
      bail!(
        concat!(
          "Could not compile to file '{}' because its parent directory ",
          "is an existing file. You can use the `--output <file-path>` flag to ",
          "provide an alternative name.",
        ),
        parent.display(),
      );
    }
    std::fs::create_dir_all(parent)?;
    return Ok(());
  }

  // If the output is a directory, throw error
  if output_path.is_dir() {
    bail!(
      concat!(
        "Could not compile to file '{}' because a directory exists with ",
        "the same name. You can use the `--output <file-path>` flag to ",
        "provide an alternative name."
      ),
      output_path.display()
    );
  }

  // Make sure we don't overwrite any file not created by Deno compiler because
  // this filename is chosen automatically in some cases.
  if !is_standalone_binary(output_path) {
    bail!(
      concat!(
        "Could not compile to file '{}' because the file already exists ",
        "and cannot be overwritten. Please delete the existing file or ",
        "use the `--output <file-path>` flag to provide an alternative name."
      ),
      output_path.display()
    );
  }

  // Remove file if it was indeed a deno compiled binary, to avoid corruption
  // (see https://github.com/denoland/deno/issues/10310)
  std::fs::remove_file(output_path)?;
  Ok(())
}
/// Computes the module graph roots and the local include paths for a
/// compile invocation.
///
/// The entrypoint and every `--include` entry that is a module-graph
/// module become graph roots; non-module `--include` entries (e.g.
/// directories) are walked, honoring `--exclude`, and any contained
/// module-graph files are added as roots. Local (`file:`) `--include`
/// entries are additionally returned as include paths for embedding.
fn get_module_roots_and_include_paths(
  entrypoint: &ModuleSpecifier,
  compile_flags: &CompileFlags,
  cli_options: &Arc<CliOptions>,
) -> Result<(Vec<ModuleSpecifier>, Vec<ModuleSpecifier>), AnyError> {
  let initial_cwd = cli_options.initial_cwd();

  // Remote modules always go through the module graph; local files only
  // when their media type is a module-graph type.
  fn is_module_graph_module(url: &ModuleSpecifier) -> bool {
    if url.scheme() != "file" {
      return true;
    }
    is_module_graph_media_type(MediaType::from_specifier(url))
  }

  fn is_module_graph_media_type(media_type: MediaType) -> bool {
    match media_type {
      MediaType::JavaScript
      | MediaType::Jsx
      | MediaType::Mjs
      | MediaType::Cjs
      | MediaType::TypeScript
      | MediaType::Mts
      | MediaType::Cts
      | MediaType::Dts
      | MediaType::Dmts
      | MediaType::Dcts
      | MediaType::Tsx
      | MediaType::Json
      | MediaType::Wasm => true,
      MediaType::Css
      | MediaType::Html
      | MediaType::Jsonc
      | MediaType::Json5
      | MediaType::SourceMap
      | MediaType::Sql
      | MediaType::Unknown => false,
    }
  }

  // Breadth-first walk over a file or directory, skipping excluded and
  // already-visited paths, calling `add_path` for every file found.
  fn analyze_path(
    url: &ModuleSpecifier,
    excluded_paths: &HashSet<PathBuf>,
    searched_paths: &mut HashSet<PathBuf>,
    mut add_path: impl FnMut(&Path),
  ) -> Result<(), AnyError> {
    let Ok(path) = url_to_file_path(url) else {
      return Ok(());
    };
    let mut pending = VecDeque::from([path]);
    while let Some(path) = pending.pop_front() {
      if !searched_paths.insert(path.clone()) {
        continue;
      }
      if excluded_paths.contains(&path) {
        continue;
      }
      if !path.is_dir() {
        add_path(&path);
        continue;
      }
      for entry in std::fs::read_dir(&path).with_context(|| {
        format!("Failed reading directory '{}'", path.display())
      })? {
        let entry = entry.with_context(|| {
          format!("Failed reading entry in directory '{}'", path.display())
        })?;
        pending.push_back(entry.path());
      }
    }
    Ok(())
  }

  let mut searched_paths = HashSet::new();
  let mut module_roots = Vec::new();
  let mut include_paths = Vec::new();
  let exclude_set = compile_flags
    .exclude
    .iter()
    .map(|path| initial_cwd.join(path))
    .collect::<HashSet<_>>();
  module_roots.push(entrypoint.clone());
  for side_module in &compile_flags.include {
    let url = resolve_url_or_path(side_module, initial_cwd)?;
    if is_module_graph_module(&url) {
      module_roots.push(url.clone());
    } else {
      // Not a module itself — scan it for module-graph files.
      analyze_path(&url, &exclude_set, &mut searched_paths, |file_path| {
        let media_type = MediaType::from_path(file_path);
        if is_module_graph_media_type(media_type)
          && let Ok(file_url) = url_from_file_path(file_path)
        {
          module_roots.push(file_url);
        }
      })?;
    }
    if url.scheme() == "file" {
      include_paths.push(url);
    }
  }
  // Modules requested via --preload / --require must also be in the graph.
  for preload_module in cli_options.preload_modules()? {
    module_roots.push(preload_module);
  }
  for require_module in cli_options.require_modules()? {
    module_roots.push(require_module);
  }
  Ok((module_roots, include_paths))
}
/// Determines the executable path `deno compile` should write to.
///
/// Precedence:
/// 1. `--output` ending in a path separator: treated as a directory; a
///    name inferred from the entrypoint URL is appended (the directory
///    path alone is kept when inference fails).
/// 2. `--output` otherwise: used verbatim.
/// 3. No `--output`: the name is inferred from the entrypoint URL,
///    erroring when that is not possible.
///
/// The result is adjusted for the target OS (`.exe` suffix on Windows).
async fn resolve_compile_executable_output_path(
  bin_name_resolver: &BinNameResolver<'_>,
  compile_flags: &CompileFlags,
  current_dir: &Path,
) -> Result<PathBuf, AnyError> {
  let module_specifier =
    resolve_url_or_path(&compile_flags.source_file, current_dir)?;
  let output_path = match compile_flags.output.as_deref() {
    Some(out) => {
      let mut out_path = PathBuf::from(out);
      // A trailing separator means "place the inferred file name inside
      // this directory".
      if out.ends_with('/') || out.ends_with('\\') {
        if let Some(inferred_name) = bin_name_resolver
          .infer_name_from_url(&module_specifier)
          .await
          .map(PathBuf::from)
        {
          out_path = out_path.join(inferred_name);
        }
      }
      Some(out_path)
    }
    None => bin_name_resolver
      .infer_name_from_url(&module_specifier)
      .await
      .map(PathBuf::from),
  };
  output_path.ok_or_else(|| anyhow!(
    "An executable name was not provided. One could not be inferred from the URL. Aborting.",
  )).map(|output_path| {
    get_os_specific_filepath(output_path, &compile_flags.target)
  })
}
/// Appends an `.exe` extension when the (explicit or host) target is
/// Windows, leaving paths that already end in `.exe` untouched.
fn get_os_specific_filepath(
  output: PathBuf,
  target: &Option<String>,
) -> PathBuf {
  // Explicit target wins; otherwise fall back to the host OS.
  let targets_windows = target
    .as_ref()
    .map(|t| t.contains("windows"))
    .unwrap_or(cfg!(windows));
  if !targets_windows || output.extension().unwrap_or_default() == "exe" {
    return output;
  }
  match output.extension() {
    // keep version in my-exe-0.1.0 -> my-exe-0.1.0.exe
    Some(ext) => {
      output.with_extension(format!("{}.exe", ext.to_string_lossy()))
    }
    None => output.with_extension("exe"),
  }
}
#[cfg(test)]
mod test {
  use deno_npm::registry::TestNpmRegistryApi;
  use deno_npm::resolution::NpmVersionResolver;

  pub use super::*;
  use crate::http_util::HttpClientProvider;

  #[tokio::test]
  async fn resolve_compile_executable_output_path_target_linux() {
    let http_client = HttpClientProvider::new(None, None);
    let npm_api = TestNpmRegistryApi::default();
    let npm_version_resolver = NpmVersionResolver::default();
    let bin_name_resolver =
      BinNameResolver::new(&http_client, &npm_api, &npm_version_resolver);
    let path = resolve_compile_executable_output_path(
      &bin_name_resolver,
      &CompileFlags {
        source_file: "mod.ts".to_string(),
        output: Some(String::from("./file")),
        args: Vec::new(),
        target: Some("x86_64-unknown-linux-gnu".to_string()),
        no_terminal: false,
        icon: None,
        include: Default::default(),
        exclude: Default::default(),
        eszip: true,
      },
      &std::env::current_dir().unwrap(),
    )
    .await
    .unwrap();

    // no extension, no matter what the operating system is
    // because the target was specified as linux
    // https://github.com/denoland/deno/issues/9667
    assert_eq!(path.file_name().unwrap(), "file");
  }

  #[tokio::test]
  async fn resolve_compile_executable_output_path_target_windows() {
    let http_client = HttpClientProvider::new(None, None);
    let npm_api = TestNpmRegistryApi::default();
    let npm_version_resolver = NpmVersionResolver::default();
    let bin_name_resolver =
      BinNameResolver::new(&http_client, &npm_api, &npm_version_resolver);
    let path = resolve_compile_executable_output_path(
      &bin_name_resolver,
      &CompileFlags {
        source_file: "mod.ts".to_string(),
        output: Some(String::from("./file")),
        args: Vec::new(),
        target: Some("x86_64-pc-windows-msvc".to_string()),
        include: Default::default(),
        exclude: Default::default(),
        icon: None,
        no_terminal: false,
        eszip: true,
      },
      &std::env::current_dir().unwrap(),
    )
    .await
    .unwrap();
    // The windows target forces an `.exe` extension.
    assert_eq!(path.file_name().unwrap(), "file.exe");
  }

  #[test]
  fn test_os_specific_file_path() {
    fn run_test(path: &str, target: Option<&str>, expected: &str) {
      assert_eq!(
        get_os_specific_filepath(
          PathBuf::from(path),
          &target.map(|s| s.to_string())
        ),
        PathBuf::from(expected)
      );
    }

    // `None` target uses the host OS, so these cases are split by cfg.
    if cfg!(windows) {
      run_test("C:\\my-exe", None, "C:\\my-exe.exe");
      run_test("C:\\my-exe.exe", None, "C:\\my-exe.exe");
      run_test("C:\\my-exe-0.1.2", None, "C:\\my-exe-0.1.2.exe");
    } else {
      run_test("my-exe", Some("linux"), "my-exe");
      run_test("my-exe-0.1.2", Some("linux"), "my-exe-0.1.2");
    }

    run_test("C:\\my-exe", Some("windows"), "C:\\my-exe.exe");
    run_test("C:\\my-exe.exe", Some("windows"), "C:\\my-exe.exe");
    run_test("C:\\my-exe.0.1.2", Some("windows"), "C:\\my-exe.0.1.2.exe");
    run_test("my-exe-0.1.2", Some("linux"), "my-exe-0.1.2");
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/tools/doc.rs | cli/tools/doc.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::collections::BTreeMap;
use std::rc::Rc;
use std::sync::Arc;
use deno_ast::diagnostics::Diagnostic;
use deno_config::glob::FilePatterns;
use deno_config::glob::PathOrPatternSet;
use deno_core::anyhow::Context;
use deno_core::anyhow::bail;
use deno_core::error::AnyError;
use deno_core::serde_json;
use deno_doc as doc;
use deno_doc::html::UrlResolveKind;
use deno_doc::html::UsageComposer;
use deno_doc::html::UsageComposerEntry;
use deno_graph::CheckJsOption;
use deno_graph::GraphKind;
use deno_graph::ModuleSpecifier;
use deno_graph::analysis::ModuleAnalyzer;
use deno_graph::ast::EsParser;
use deno_graph::source::NullFileSystem;
use deno_lib::version::DENO_VERSION_INFO;
use deno_npm_installer::graph::NpmCachingStrategy;
use doc::DocDiagnostic;
use doc::html::ShortPath;
use indexmap::IndexMap;
use crate::args::DocFlags;
use crate::args::DocHtmlFlag;
use crate::args::DocSourceFileFlag;
use crate::args::Flags;
use crate::colors;
use crate::display;
use crate::factory::CliFactory;
use crate::graph_util::GraphWalkErrorsOptions;
use crate::graph_util::graph_exit_integrity_errors;
use crate::graph_util::graph_walk_errors;
use crate::sys::CliSys;
use crate::tsc::get_types_declaration_file_text;
use crate::util::fs::CollectSpecifiersOptions;
use crate::util::fs::collect_specifiers;
const JSON_SCHEMA_VERSION: u8 = 1;
const PRISM_CSS: &str = include_str!("./doc/prism.css");
const PRISM_JS: &str = include_str!("./doc/prism.js");
/// Produces doc nodes for the built-in `lib.deno.d.ts` declarations by
/// loading the bundled declaration text into an in-memory types-only
/// module graph and running the doc parser over it.
async fn generate_doc_nodes_for_builtin_types(
  doc_flags: DocFlags,
  parser: &dyn EsParser,
  analyzer: &dyn ModuleAnalyzer,
) -> Result<IndexMap<ModuleSpecifier, Vec<doc::DocNode>>, AnyError> {
  // Synthetic specifier for the bundled declaration file.
  let source_file_specifier =
    ModuleSpecifier::parse("file:///lib.deno.d.ts").unwrap();
  let content = get_types_declaration_file_text();
  let loader = deno_graph::source::MemoryLoader::new(
    vec![(
      source_file_specifier.to_string(),
      deno_graph::source::Source::Module {
        specifier: source_file_specifier.to_string(),
        content,
        maybe_headers: None,
      },
    )],
    Vec::new(),
  );
  let roots = vec![source_file_specifier.clone()];
  let mut graph = deno_graph::ModuleGraph::new(GraphKind::TypesOnly);
  graph
    .build(
      roots.clone(),
      Vec::new(),
      &loader,
      deno_graph::BuildOptions {
        is_dynamic: false,
        skip_dynamic_deps: false,
        passthrough_jsr_specifiers: false,
        executor: Default::default(),
        file_system: &NullFileSystem,
        jsr_metadata_store: None,
        jsr_url_provider: Default::default(),
        jsr_version_resolver: Default::default(),
        locker: None,
        module_analyzer: analyzer,
        module_info_cacher: Default::default(),
        npm_resolver: None,
        reporter: None,
        resolver: None,
        unstable_bytes_imports: false,
        unstable_text_imports: false,
      },
    )
    .await;
  let doc_parser = doc::DocParser::new(
    &graph,
    parser,
    &roots,
    doc::DocParserOptions {
      diagnostics: false,
      private: doc_flags.private,
    },
  )?;

  Ok(doc_parser.parse()?)
}
/// Entry point for `deno doc`.
///
/// Generates doc nodes for either the built-in types or the given source
/// files and then renders them as HTML, JSON, or terminal output — or,
/// with `--lint`, only checks for documentation diagnostics.
pub async fn doc(
  flags: Arc<Flags>,
  doc_flags: DocFlags,
) -> Result<(), AnyError> {
  let factory = CliFactory::from_flags(flags);
  let cli_options = factory.cli_options()?;
  let module_info_cache = factory.module_info_cache()?;
  let parsed_source_cache = factory.parsed_source_cache()?;
  let capturing_parser = parsed_source_cache.as_capturing_parser();
  let analyzer = module_info_cache.as_module_analyzer();

  let doc_nodes_by_url = match doc_flags.source_files {
    DocSourceFileFlag::Builtin => {
      generate_doc_nodes_for_builtin_types(
        doc_flags.clone(),
        &capturing_parser,
        &analyzer,
      )
      .await?
    }
    DocSourceFileFlag::Paths(ref source_files) => {
      let module_graph_creator = factory.module_graph_creator().await?;
      let sys = CliSys::default();

      let module_specifiers = collect_specifiers(
        CollectSpecifiersOptions {
          file_patterns: FilePatterns {
            base: cli_options.initial_cwd().to_path_buf(),
            include: Some(
              PathOrPatternSet::from_include_relative_path_or_patterns(
                cli_options.initial_cwd(),
                source_files,
              )?,
            ),
            exclude: Default::default(),
          },
          vendor_folder: cli_options.vendor_dir_path().map(ToOwned::to_owned),
          include_ignored_specified: false,
        },
        |_| true,
      )?;
      let graph = module_graph_creator
        .create_graph(
          GraphKind::TypesOnly,
          module_specifiers.clone(),
          NpmCachingStrategy::Eager,
        )
        .await?;

      graph_exit_integrity_errors(&graph);
      // Graph walk errors are surfaced as warnings, not failures.
      let errors = graph_walk_errors(
        &graph,
        &sys,
        &module_specifiers,
        GraphWalkErrorsOptions {
          check_js: CheckJsOption::False,
          kind: GraphKind::TypesOnly,
          will_type_check: false,
          allow_unknown_media_types: false,
          allow_unknown_jsr_exports: false,
        },
      );
      for error in errors {
        log::warn!("{} {}", colors::yellow("Warning"), error);
      }

      let doc_parser = doc::DocParser::new(
        &graph,
        &capturing_parser,
        &module_specifiers,
        doc::DocParserOptions {
          private: doc_flags.private,
          diagnostics: doc_flags.lint,
        },
      )?;

      let doc_nodes_by_url = doc_parser.parse()?;

      if doc_flags.lint {
        let diagnostics = doc_parser.take_diagnostics();
        check_diagnostics(&diagnostics)?;
      }

      doc_nodes_by_url
    }
  };

  if let Some(html_options) = &doc_flags.html {
    // For HTML output also collect the built-in `Deno.*` nodes (used for
    // global-symbol links) unless we're already documenting them.
    let deno_ns = if doc_flags.source_files != DocSourceFileFlag::Builtin {
      let deno_ns = generate_doc_nodes_for_builtin_types(
        doc_flags.clone(),
        &capturing_parser,
        &analyzer,
      )
      .await?;
      let (_, deno_ns) = deno_ns.into_iter().next().unwrap();
      Some(deno_ns)
    } else {
      None
    };

    let mut main_entrypoint = None;

    // Derive page names for each export in the deno.json exports config.
    let rewrite_map = if let Some(config_file) =
      cli_options.start_dir.member_or_root_deno_json()
    {
      let config = config_file.to_exports_config()?;

      main_entrypoint = config.get_resolved(".").ok().flatten();

      let rewrite_map = config
        .clone()
        .into_map()
        .into_keys()
        .map(|key| {
          Ok((
            config.get_resolved(&key)?.unwrap(),
            // Strip a leading "./" from export keys like "./foo".
            key
              .strip_prefix('.')
              .unwrap_or(&key)
              .strip_prefix('/')
              .unwrap_or(&key)
              .to_owned(),
          ))
        })
        .collect::<Result<IndexMap<_, _>, AnyError>>()?;

      Some(rewrite_map)
    } else {
      None
    };

    generate_docs_directory(
      doc_nodes_by_url,
      html_options,
      deno_ns,
      rewrite_map,
      main_entrypoint,
    )
  } else {
    let modules_len = doc_nodes_by_url.len();
    let doc_nodes =
      doc_nodes_by_url.into_values().flatten().collect::<Vec<_>>();

    if doc_flags.json {
      let json_output = serde_json::json!({
        "version": JSON_SCHEMA_VERSION,
        "nodes": &doc_nodes
      });
      display::write_json_to_stdout(&json_output)
    } else if doc_flags.lint {
      // don't output docs if running with only the --lint flag
      log::info!(
        "Checked {} file{}",
        modules_len,
        if modules_len == 1 { "" } else { "s" }
      );
      Ok(())
    } else {
      print_docs_to_stdout(doc_flags, doc_nodes)
    }
  }
}
/// Href resolver for the generated HTML documentation.
struct DocResolver {
  // Known built-in `Deno.*` symbols, used for linking global symbols to
  // the deno.land API docs.
  deno_ns: std::collections::HashMap<Vec<String>, Option<Rc<ShortPath>>>,
  // When true, generated links drop `index.html`/`.html` suffixes.
  strip_trailing_html: bool,
}
impl deno_doc::html::HrefResolver for DocResolver {
  fn resolve_path(
    &self,
    current: UrlResolveKind,
    target: UrlResolveKind,
  ) -> String {
    let path = deno_doc::html::href_path_resolve(current, target);
    // Optionally produce "clean" URLs without `.html` suffixes.
    if self.strip_trailing_html
      && let Some(path) = path
        .strip_suffix("index.html")
        .or_else(|| path.strip_suffix(".html"))
    {
      return path.to_owned();
    }

    path
  }

  fn resolve_global_symbol(&self, symbol: &[String]) -> Option<String> {
    // Known Deno globals link to the versioned deno.land API docs.
    if self.deno_ns.contains_key(symbol) {
      Some(format!(
        "https://deno.land/api@v{}?s={}",
        DENO_VERSION_INFO.deno,
        symbol.join(".")
      ))
    } else {
      None
    }
  }

  fn resolve_import_href(
    &self,
    symbol: &[String],
    src: &str,
  ) -> Option<String> {
    let mut url = ModuleSpecifier::parse(src).ok()?;

    // Only deno.land imports get a symbol query; other sources are not
    // linked.
    if url.domain() == Some("deno.land") {
      url.set_query(Some(&format!("s={}", symbol.join("."))));
      return Some(url.into());
    }

    None
  }

  fn resolve_source(&self, location: &deno_doc::Location) -> Option<String> {
    Some(location.filename.to_string())
  }

  fn resolve_external_jsdoc_module(
    &self,
    module: &str,
    _symbol: Option<&str>,
  ) -> Option<(String, String)> {
    // `npm:`/`jsr:` modules referenced from JSDoc link to their registry
    // pages.
    if let Ok(url) = deno_core::url::Url::parse(module) {
      match url.scheme() {
        "npm" => {
          let res =
            deno_semver::npm::NpmPackageReqReference::from_str(module).ok()?;
          let name = &res.req().name;
          Some((
            format!("https://www.npmjs.com/package/{name}"),
            name.to_string(),
          ))
        }
        "jsr" => {
          let res =
            deno_semver::jsr::JsrPackageReqReference::from_str(module).ok()?;
          let name = &res.req().name;
          Some((format!("https://jsr.io/{name}"), name.to_string()))
        }
        _ => None,
      }
    } else {
      None
    }
  }
}
/// Usage composer that renders a single usage block for the current file.
struct DocComposer;

impl UsageComposer for DocComposer {
  fn is_single_mode(&self) -> bool {
    true
  }

  fn compose(
    &self,
    current_resolve: UrlResolveKind,
    usage_to_md: deno_doc::html::UsageToMd,
  ) -> IndexMap<UsageComposerEntry, String> {
    current_resolve
      .get_file()
      .map(|current_file| {
        IndexMap::from([(
          UsageComposerEntry {
            name: "".to_string(),
            icon: None,
          },
          usage_to_md(current_file.path.as_str(), None),
        )])
      })
      // No usage block when there is no current file (e.g. index pages).
      .unwrap_or_default()
  }
}
/// Generates the static HTML documentation site into `html_options.output`.
///
/// When `built_in_types` is provided, a preliminary generation pass over a
/// synthetic `file:///lib.deno.d.ts` module computes the namespaced built-in
/// symbols so the real pass can link `Deno.*` references.
///
/// NOTE: the output directory is deleted and recreated from scratch on
/// every invocation.
fn generate_docs_directory(
  doc_nodes_by_url: IndexMap<ModuleSpecifier, Vec<doc::DocNode>>,
  html_options: &DocHtmlFlag,
  built_in_types: Option<Vec<doc::DocNode>>,
  rewrite_map: Option<IndexMap<ModuleSpecifier, String>>,
  main_entrypoint: Option<ModuleSpecifier>,
) -> Result<(), AnyError> {
  let cwd = std::env::current_dir().context("Failed to get CWD")?;
  let output_dir_resolved = cwd.join(&html_options.output);
  // The next three options are deserialized from user-supplied JSON files
  // when the corresponding CLI flags were passed; otherwise they're `None`.
  let category_docs =
    if let Some(category_docs_path) = &html_options.category_docs_path {
      let content = std::fs::read(category_docs_path)?;
      Some(serde_json::from_slice(&content)?)
    } else {
      None
    };
  let symbol_redirect_map = if let Some(symbol_redirect_map_path) =
    &html_options.symbol_redirect_map_path
  {
    let content = std::fs::read(symbol_redirect_map_path)?;
    Some(serde_json::from_slice(&content)?)
  } else {
    None
  };
  let default_symbol_map = if let Some(default_symbol_map_path) =
    &html_options.default_symbol_map_path
  {
    let content = std::fs::read(default_symbol_map_path)?;
    Some(serde_json::from_slice(&content)?)
  } else {
    None
  };
  // Options for the main generation pass. `deno_ns` starts empty and is
  // replaced below if built-in types were supplied.
  let mut options = deno_doc::html::GenerateOptions {
    package_name: html_options.name.clone(),
    main_entrypoint,
    rewrite_map,
    href_resolver: Rc::new(DocResolver {
      deno_ns: Default::default(),
      strip_trailing_html: html_options.strip_trailing_html,
    }),
    usage_composer: Rc::new(DocComposer),
    category_docs,
    disable_search: false,
    symbol_redirect_map,
    default_symbol_map,
    markdown_renderer: deno_doc::html::comrak::create_renderer(
      None, None, None,
    ),
    markdown_stripper: Rc::new(deno_doc::html::comrak::strip),
    // Injects the comrak stylesheet plus the Prism assets written below.
    head_inject: Some(Rc::new(|root| {
      format!(
        r#"<link href="{root}{}" rel="stylesheet" /><link href="{root}prism.css" rel="stylesheet" /><script src="{root}prism.js"></script>"#,
        deno_doc::html::comrak::COMRAK_STYLESHEET_FILENAME
      )
    })),
    id_prefix: None,
  };
  if let Some(built_in_types) = built_in_types {
    // Preliminary pass: generate a context for just the built-in types so
    // the set of `Deno.*` namespaced symbols can be computed.
    let ctx = deno_doc::html::GenerateCtx::create_basic(
      deno_doc::html::GenerateOptions {
        package_name: None,
        main_entrypoint: Some(
          ModuleSpecifier::parse("file:///lib.deno.d.ts").unwrap(),
        ),
        href_resolver: Rc::new(DocResolver {
          deno_ns: Default::default(),
          strip_trailing_html: false,
        }),
        usage_composer: Rc::new(DocComposer),
        rewrite_map: Default::default(),
        category_docs: Default::default(),
        disable_search: Default::default(),
        symbol_redirect_map: Default::default(),
        default_symbol_map: Default::default(),
        markdown_renderer: deno_doc::html::comrak::create_renderer(
          None, None, None,
        ),
        markdown_stripper: Rc::new(deno_doc::html::comrak::strip),
        head_inject: None,
        id_prefix: None,
      },
      IndexMap::from([(
        ModuleSpecifier::parse("file:///lib.deno.d.ts").unwrap(),
        built_in_types,
      )]),
    )?;
    let deno_ns = deno_doc::html::compute_namespaced_symbols(
      &ctx,
      Box::new(
        ctx
          .doc_nodes
          .values()
          .next()
          .unwrap()
          .iter()
          .map(std::borrow::Cow::Borrowed),
      ),
    );
    // Swap in a resolver that knows the built-in namespace symbols.
    options.href_resolver = Rc::new(DocResolver {
      deno_ns,
      strip_trailing_html: html_options.strip_trailing_html,
    });
  }
  let ctx =
    deno_doc::html::GenerateCtx::create_basic(options, doc_nodes_by_url)?;
  let mut files = deno_doc::html::generate(ctx)
    .context("Failed to generate HTML documentation")?;
  // Bundle the Prism syntax-highlighting assets referenced by `head_inject`.
  files.insert("prism.js".to_string(), PRISM_JS.to_string());
  files.insert("prism.css".to_string(), PRISM_CSS.to_string());
  let path = &output_dir_resolved;
  // Ignore removal errors (e.g. the directory not existing yet).
  let _ = std::fs::remove_dir_all(path);
  std::fs::create_dir(path)
    .with_context(|| format!("Failed to create directory {:?}", path))?;
  let no_of_files = files.len();
  for (name, content) in files {
    let this_path = path.join(name);
    let prefix = this_path.parent().with_context(|| {
      format!("Failed to get parent path for {:?}", this_path)
    })?;
    std::fs::create_dir_all(prefix)
      .with_context(|| format!("Failed to create directory {:?}", prefix))?;
    std::fs::write(&this_path, content)
      .with_context(|| format!("Failed to write file {:?}", this_path))?;
  }
  log::info!(
    "{}",
    colors::green(format!(
      "Written {} files to {:?}",
      no_of_files, html_options.output
    ))
  );
  Ok(())
}
/// Prints documentation nodes to stdout as colored terminal text.
///
/// Import nodes are always excluded. When a `--filter` was given, only the
/// matching nodes are printed and an error is raised if nothing matched.
fn print_docs_to_stdout(
  doc_flags: DocFlags,
  mut doc_nodes: Vec<deno_doc::DocNode>,
) -> Result<(), AnyError> {
  doc_nodes.retain(|doc_node| {
    !matches!(doc_node.def, doc::node::DocNodeDef::Import { .. })
  });
  let nodes_to_print = match doc_flags.filter {
    Some(filter) => {
      let found = doc::find_nodes_by_name_recursively(doc_nodes, &filter);
      if found.is_empty() {
        bail!("Node {} was not found!", filter);
      }
      found
    }
    None => doc_nodes,
  };
  let details = doc::DocPrinter::new(
    &nodes_to_print,
    colors::use_color(),
    doc_flags.private,
  )
  .to_string();
  display::write_to_stdout_ignore_sigpipe(details.as_bytes())
    .map_err(AnyError::from)
}
/// Logs all documentation lint diagnostics and fails if any exist.
///
/// Output is grouped by filename (in first-seen order) and, within a file,
/// sorted by line and then column so the report follows source order.
fn check_diagnostics(diagnostics: &[DocDiagnostic]) -> Result<(), AnyError> {
  if diagnostics.is_empty() {
    return Ok(());
  }
  // filename (insertion order) -> line (sorted) -> column (sorted) -> diags
  let mut grouped = IndexMap::new();
  for diagnostic in diagnostics {
    let by_line = grouped
      .entry(diagnostic.location.filename.clone())
      .or_insert_with(BTreeMap::new);
    let by_col = by_line
      .entry(diagnostic.location.line)
      .or_insert_with(BTreeMap::new);
    by_col
      .entry(diagnostic.location.col)
      .or_insert_with(Vec::new)
      .push(diagnostic);
  }
  for by_line in grouped.into_values() {
    for by_col in by_line.into_values() {
      for entries in by_col.into_values() {
        for diagnostic in entries {
          log::error!("{}\n", diagnostic.display());
        }
      }
    }
  }
  bail!(
    "Found {} documentation lint error{}.",
    colors::bold(diagnostics.len().to_string()),
    if diagnostics.len() == 1 { "" } else { "s" }
  );
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/tools/check.rs | cli/tools/check.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::sync::Arc;
use deno_core::error::AnyError;
use deno_terminal::colors;
use crate::args::CheckFlags;
use crate::args::Flags;
use crate::factory::CliFactory;
use crate::graph_container::CollectSpecifiersOptions;
use crate::util::extract;
/// Implements `deno check`: collects the requested specifiers and
/// type-checks them (optionally including documentation code snippets).
pub async fn check(
  flags: Arc<Flags>,
  check_flags: CheckFlags,
) -> Result<(), AnyError> {
  let factory = CliFactory::from_flags(flags);
  let main_graph_container = factory.main_module_graph_container().await?;
  let specifiers = main_graph_container.collect_specifiers(
    &check_flags.files,
    CollectSpecifiersOptions {
      include_ignored_specified: false,
    },
  )?;
  // Not an error: warn and continue (check_specifiers gets an empty list).
  if specifiers.is_empty() {
    log::warn!("{} No matching files found.", colors::yellow("Warning"));
  }
  let specifiers_for_typecheck = if check_flags.doc || check_flags.doc_only {
    let file_fetcher = factory.file_fetcher()?;
    let root_permissions = factory.root_permissions_container()?;
    // With `--doc` the original files are checked too; with `--doc-only`
    // only the extracted snippets are.
    let mut specifiers_for_typecheck = if check_flags.doc {
      specifiers.clone()
    } else {
      vec![]
    };
    for s in specifiers {
      let file = file_fetcher.fetch(&s, root_permissions).await?;
      // Extract code snippets from doc comments as in-memory files so the
      // type checker can load them by URL.
      let snippet_files = extract::extract_snippet_files(file)?;
      for snippet_file in snippet_files {
        specifiers_for_typecheck.push(snippet_file.url.clone());
        file_fetcher.insert_memory_files(snippet_file);
      }
    }
    specifiers_for_typecheck
  } else {
    specifiers
  };
  main_graph_container
    .check_specifiers(&specifiers_for_typecheck, Default::default())
    .await
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/tools/info.rs | cli/tools/info.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::collections::HashMap;
use std::collections::HashSet;
use std::fmt::Write;
use std::sync::Arc;
use deno_ast::ModuleSpecifier;
use deno_core::anyhow::bail;
use deno_core::error::AnyError;
use deno_core::serde_json;
use deno_core::url;
use deno_error::JsErrorClass;
use deno_graph::Dependency;
use deno_graph::GraphKind;
use deno_graph::Module;
use deno_graph::ModuleError;
use deno_graph::ModuleErrorKind;
use deno_graph::ModuleGraph;
use deno_graph::Resolution;
use deno_lib::util::checksum;
use deno_lib::version::DENO_VERSION_INFO;
use deno_npm::NpmPackageId;
use deno_npm::NpmResolutionPackage;
use deno_npm::npm_rc::ResolvedNpmRc;
use deno_npm::resolution::NpmResolutionSnapshot;
use deno_npm_installer::graph::NpmCachingStrategy;
use deno_path_util::resolve_url_or_path;
use deno_resolver::DenoResolveErrorKind;
use deno_resolver::display::DisplayTreeNode;
use deno_semver::npm::NpmPackageNvReference;
use deno_semver::npm::NpmPackageReqReference;
use deno_semver::package::PackageNv;
use deno_terminal::colors;
use crate::args::Flags;
use crate::args::InfoFlags;
use crate::display;
use crate::factory::CliFactory;
use crate::graph_util::graph_exit_integrity_errors;
use crate::npm::CliManagedNpmResolver;
const JSON_SCHEMA_VERSION: u8 = 1;
/// Implements `deno info`.
///
/// With a file argument, resolves it through the workspace resolver (bare
/// specifiers, workspace packages, package.json deps), builds the full
/// module graph and prints it as a tree or as JSON. Without a file
/// argument, prints the cache directory locations instead.
pub async fn info(
  flags: Arc<Flags>,
  info_flags: InfoFlags,
) -> Result<(), AnyError> {
  let factory = CliFactory::from_flags(flags);
  let cli_options = factory.cli_options()?;
  if let Some(specifier) = info_flags.file {
    let module_graph_builder = factory.module_graph_builder().await?;
    let module_graph_creator = factory.module_graph_creator().await?;
    let npm_resolver = factory.npm_resolver().await?;
    let maybe_lockfile = factory.maybe_lockfile().await?;
    let resolver = factory.workspace_resolver().await?.clone();
    let npmrc = factory.npmrc()?;
    let node_resolver = factory.node_resolver().await?;
    let cwd_url =
      url::Url::from_directory_path(cli_options.initial_cwd()).unwrap();
    // Try mapping the CLI argument through the workspace resolver first
    // (import maps, workspace members, package.json dependencies). A
    // resolution failure here is not fatal — we fall back below.
    let maybe_import_specifier = match resolver.resolve(
      &specifier,
      &cwd_url,
      deno_resolver::workspace::ResolutionKind::Execution,
    ) {
      Ok(resolved) => match resolved {
        deno_resolver::workspace::MappedResolution::Normal {
          specifier,
          ..
        }
        | deno_resolver::workspace::MappedResolution::WorkspaceJsrPackage {
          specifier,
          ..
        } => Some(specifier),
        deno_resolver::workspace::MappedResolution::WorkspaceNpmPackage {
          target_pkg_json,
          sub_path,
          ..
        } => Some(
          node_resolver
            .resolve_package_subpath_from_deno_module(
              target_pkg_json.clone().dir_path(),
              sub_path.as_deref(),
              Some(&cwd_url),
              node_resolver::ResolutionMode::Import,
              node_resolver::NodeResolutionKind::Execution,
            )?
            .into_url()?,
        ),
        deno_resolver::workspace::MappedResolution::PackageJson {
          alias,
          sub_path,
          dep_result,
          ..
        } => match dep_result.as_ref().map_err(|e| e.clone())? {
          // `file:` and JSR requirement deps in package.json aren't
          // supported by `deno info`.
          deno_package_json::PackageJsonDepValue::File(_) => {
            return Err(
              DenoResolveErrorKind::UnsupportedPackageJsonFileSpecifier
                .into_box()
                .into(),
            );
          }
          deno_package_json::PackageJsonDepValue::JsrReq(_) => {
            return Err(
              DenoResolveErrorKind::UnsupportedPackageJsonJsrReq
                .into_box()
                .into(),
            );
          }
          deno_package_json::PackageJsonDepValue::Workspace(version_req) => {
            let pkg_folder = resolver
              .resolve_workspace_pkg_json_folder_for_pkg_json_dep(
                alias,
                version_req,
              )?;
            Some(
              node_resolver
                .resolve_package_subpath_from_deno_module(
                  pkg_folder,
                  sub_path.as_deref(),
                  Some(&cwd_url),
                  node_resolver::ResolutionMode::Import,
                  node_resolver::NodeResolutionKind::Execution,
                )?
                .into_url()?,
            )
          }
          deno_package_json::PackageJsonDepValue::Req(req) => {
            Some(ModuleSpecifier::parse(&format!(
              "npm:{}{}",
              req,
              sub_path.map(|s| format!("/{}", s)).unwrap_or_default()
            ))?)
          }
        },
        deno_resolver::workspace::MappedResolution::PackageJsonImport {
          pkg_json,
        } => Some(
          node_resolver
            .resolve_package_import(
              &specifier,
              Some(&node_resolver::UrlOrPathRef::from_url(&cwd_url)),
              Some(pkg_json),
              node_resolver::ResolutionMode::Import,
              node_resolver::NodeResolutionKind::Execution,
            )?
            .into_url()?,
        ),
      },
      Err(_) => None,
    };
    // Fall back to treating the argument as a URL or filesystem path.
    let specifier = match maybe_import_specifier {
      Some(specifier) => specifier,
      None => resolve_url_or_path(&specifier, cli_options.initial_cwd())?,
    };
    let mut loader =
      module_graph_builder.create_graph_loader_with_root_permissions();
    loader.enable_loading_cache_info(); // for displaying the cache information
    let graph = module_graph_creator
      .create_graph_with_loader(
        GraphKind::All,
        vec![specifier],
        &mut loader,
        NpmCachingStrategy::Eager,
      )
      .await?;
    // write out the lockfile if there is one
    if let Some(lockfile) = &maybe_lockfile {
      graph_exit_integrity_errors(&graph);
      lockfile.write_if_changed()?;
    }
    // Managed npm resolver + its resolution snapshot, if applicable
    // (byonm has neither and gets no npm package info in the output).
    let maybe_npm_info = npm_resolver
      .as_managed()
      .map(|r| (r, r.resolution().snapshot()));
    if info_flags.json {
      let mut json_graph = serde_json::json!(graph);
      if let Some(output) = json_graph.as_object_mut() {
        // Put the schema version first in the JSON output.
        output.shift_insert(
          0,
          "version".to_string(),
          JSON_SCHEMA_VERSION.into(),
        );
      }
      add_npm_packages_to_json(
        &mut json_graph,
        maybe_npm_info.as_ref().map(|(_, s)| s),
        npmrc,
      );
      display::write_json_to_stdout(&json_graph)?;
    } else {
      let mut output = String::new();
      GraphDisplayContext::write(
        &graph,
        maybe_npm_info.as_ref().map(|(r, s)| (r.as_ref(), s)),
        &mut output,
      )?;
      display::write_to_stdout_ignore_sigpipe(output.as_bytes())?;
    }
  } else {
    // If it was just "deno info" print location of caches and exit
    print_cache_info(
      &factory,
      info_flags.json,
      cli_options.location_flag().as_ref(),
    )?;
  }
  Ok(())
}
/// Prints the locations of Deno's various caches (for bare `deno info`),
/// either as JSON or as human-readable labeled lines.
///
/// `location` is the `--location` flag value; when present, origin storage
/// is scoped to a checksum of that origin and local storage is reported.
#[allow(clippy::print_stdout)]
fn print_cache_info(
  factory: &CliFactory,
  json: bool,
  location: Option<&deno_core::url::Url>,
) -> Result<(), AnyError> {
  let deno_version = DENO_VERSION_INFO.deno;
  let dir = factory.deno_dir()?;
  #[allow(deprecated)]
  let modules_cache = factory.global_http_cache()?.dir_path();
  let npm_cache = factory.deno_dir()?.npm_folder_path();
  let typescript_cache = &dir.gen_cache.location;
  let registry_cache = dir.registries_folder_path();
  let mut origin_dir = dir.origin_data_folder_path();
  let deno_dir = dir.root_path_for_display().to_string();
  let web_cache_dir = deno_lib::worker::get_cache_storage_dir();
  if let Some(location) = &location {
    // Scope origin storage by a checksum of the location origin.
    origin_dir =
      origin_dir.join(checksum::r#gen(&[location.to_string().as_bytes()]));
  }
  let local_storage_dir = origin_dir.join("local_storage");
  if json {
    let mut json_output = serde_json::json!({
      "version": JSON_SCHEMA_VERSION,
      "denoVersion": deno_version,
      "denoDir": deno_dir,
      "modulesCache": modules_cache,
      "npmCache": npm_cache,
      "typescriptCache": typescript_cache,
      "registryCache": registry_cache,
      "originStorage": origin_dir,
      "webCacheStorage": web_cache_dir,
    });
    // localStorage is only reported when a --location is set.
    if location.is_some() {
      json_output["localStorage"] = serde_json::to_value(local_storage_dir)?;
    }
    display::write_json_to_stdout(&json_output)
  } else {
    println!("{} {}", colors::bold("Deno version:"), deno_version);
    println!("{} {}", colors::bold("DENO_DIR location:"), deno_dir);
    println!(
      "{} {}",
      colors::bold("Remote modules cache:"),
      modules_cache.display()
    );
    println!(
      "{} {}",
      colors::bold("npm modules cache:"),
      npm_cache.display()
    );
    println!(
      "{} {}",
      colors::bold("Emitted modules cache:"),
      typescript_cache.display()
    );
    println!(
      "{} {}",
      colors::bold("Language server registries cache:"),
      registry_cache.display(),
    );
    println!(
      "{} {}",
      colors::bold("Origin storage:"),
      origin_dir.display()
    );
    println!(
      "{} {}",
      colors::bold("Web cache storage:"),
      web_cache_dir.display()
    );
    if location.is_some() {
      println!(
        "{} {}",
        colors::bold("Local Storage:"),
        local_storage_dir.display(),
      );
    }
    Ok(())
  }
}
/// Augments the serialized module graph JSON with npm package information:
/// adds an `npmPackage` field to npm modules/dependencies and a top-level
/// `npmPackages` map describing every resolved package.
///
/// No-op when there is no managed npm snapshot (e.g. byonm).
fn add_npm_packages_to_json(
  json: &mut serde_json::Value,
  npm_snapshot: Option<&NpmResolutionSnapshot>,
  npmrc: &ResolvedNpmRc,
) {
  let Some(npm_snapshot) = npm_snapshot else {
    return; // does not include byonm to deno info's output
  };
  // ideally deno_graph could handle this, but for now we just modify the json here
  let json = json.as_object_mut().unwrap();
  let modules = json.get_mut("modules").and_then(|m| m.as_array_mut());
  if let Some(modules) = modules {
    for module in modules.iter_mut() {
      if matches!(module.get("kind").and_then(|k| k.as_str()), Some("npm")) {
        // If there is only one module and it's "external", then that means
        // someone provided an npm specifier as a cli argument. In this case,
        // we want to show which npm package the cli argument resolved to.
        let maybe_package = module
          .get("specifier")
          .and_then(|k| k.as_str())
          .and_then(|specifier| NpmPackageNvReference::from_str(specifier).ok())
          .and_then(|package_ref| {
            npm_snapshot
              .resolve_package_from_deno_module(package_ref.nv())
              .ok()
          });
        if let Some(pkg) = maybe_package
          && let Some(module) = module.as_object_mut()
        {
          module.insert(
            "npmPackage".to_string(),
            pkg.id.as_serialized().into_string().into(),
          );
        }
      }
      let dependencies = module
        .get_mut("dependencies")
        .and_then(|d| d.as_array_mut());
      if let Some(dependencies) = dependencies {
        for dep in dependencies.iter_mut().flat_map(|d| d.as_object_mut()) {
          // Tag npm dependencies with the package id they resolved to.
          if let Some(specifier) = dep.get("specifier").and_then(|s| s.as_str())
            && let Ok(npm_ref) = NpmPackageReqReference::from_str(specifier)
            && let Ok(pkg) =
              npm_snapshot.resolve_pkg_from_pkg_req(npm_ref.req())
          {
            dep.insert(
              "npmPackage".to_string(),
              pkg.id.as_serialized().into_string().into(),
            );
          }
          // don't show this in the output unless someone needs it
          if let Some(code) =
            dep.get_mut("code").and_then(|c| c.as_object_mut())
          {
            code.remove("resolutionMode");
          }
          if let Some(types) =
            dep.get_mut("types").and_then(|c| c.as_object_mut())
          {
            types.remove("resolutionMode");
          }
        }
      }
    }
  }
  // Emit all packages sorted by id for stable output.
  let mut sorted_packages = npm_snapshot
    .all_packages_for_every_system()
    .collect::<Vec<_>>();
  sorted_packages.sort_by(|a, b| a.id.cmp(&b.id));
  let mut json_packages = serde_json::Map::with_capacity(sorted_packages.len());
  for pkg in sorted_packages {
    let mut kv = serde_json::Map::new();
    kv.insert("name".to_string(), pkg.id.nv.name.to_string().into());
    kv.insert("version".to_string(), pkg.id.nv.version.to_string().into());
    let mut deps = pkg.dependencies.values().collect::<Vec<_>>();
    deps.sort();
    let deps = deps
      .into_iter()
      .map(|id| serde_json::Value::String(id.as_serialized().into_string()))
      .collect::<Vec<_>>();
    kv.insert("dependencies".to_string(), deps.into());
    let registry_url = npmrc.get_registry_url(&pkg.id.nv.name);
    kv.insert("registryUrl".to_string(), registry_url.to_string().into());
    json_packages.insert(pkg.id.as_serialized().into_string(), kv.into());
  }
  json.insert("npmPackages".to_string(), json_packages.into());
}
/// Precached information about npm packages that are used in deno info.
#[derive(Default)]
struct NpmInfo {
  // On-disk size per package id (filled from the package folder's dir size).
  package_sizes: HashMap<NpmPackageId, u64>,
  // Maps a package name+version to its fully resolved package id.
  resolved_ids: HashMap<PackageNv, NpmPackageId>,
  // Every package reachable from the graph, keyed by resolved id.
  packages: HashMap<NpmPackageId, NpmResolutionPackage>,
}
impl NpmInfo {
  /// Walks the graph's npm modules and precaches their resolved packages,
  /// transitive dependencies, and on-disk sizes.
  pub fn build<'a>(
    graph: &'a ModuleGraph,
    npm_resolver: &'a CliManagedNpmResolver,
    npm_snapshot: &'a NpmResolutionSnapshot,
  ) -> Self {
    let mut info = NpmInfo::default();
    if graph.npm_packages.is_empty() {
      return info; // skip going over the modules if there's no npm packages
    }
    for module in graph.modules() {
      if let Module::Npm(module) = module {
        // TODO(dsherret): ok to use for now, but we should use the req in the future
        #[allow(deprecated)]
        let nv = module.nv_reference.nv();
        if let Ok(package) = npm_snapshot.resolve_package_from_deno_module(nv) {
          info.resolved_ids.insert(nv.clone(), package.id.clone());
          if !info.packages.contains_key(&package.id) {
            info.fill_package_info(package, npm_resolver, npm_snapshot);
          }
        }
      }
    }
    info
  }

  /// Records `package` (and, recursively, its dependencies) plus its
  /// on-disk folder size when the folder can be resolved.
  fn fill_package_info<'a>(
    &mut self,
    package: &NpmResolutionPackage,
    npm_resolver: &'a CliManagedNpmResolver,
    npm_snapshot: &'a NpmResolutionSnapshot,
  ) {
    self.packages.insert(package.id.clone(), package.clone());
    // Size is best-effort: skipped if the folder or its size can't be read.
    if let Ok(folder) = npm_resolver.resolve_pkg_folder_from_pkg_id(&package.id)
      && let Ok(size) = crate::util::fs::dir_size(&folder)
    {
      self.package_sizes.insert(package.id.clone(), size);
    }
    for id in package.dependencies.values() {
      // `packages` doubles as the visited set, bounding the recursion.
      if !self.packages.contains_key(id)
        && let Some(package) = npm_snapshot.package_from_id(id)
      {
        self.fill_package_info(package, npm_resolver, npm_snapshot);
      }
    }
  }

  /// Looks up the precached package for a name+version pair.
  pub fn resolve_package(
    &self,
    nv: &PackageNv,
  ) -> Option<&NpmResolutionPackage> {
    let id = self.resolved_ids.get(nv)?;
    self.packages.get(id)
  }
}
/// State used while rendering the module graph tree for `deno info`.
struct GraphDisplayContext<'a> {
  graph: &'a ModuleGraph,
  npm_info: NpmInfo,
  // Specifiers / package ids already rendered; repeats are printed
  // abbreviated with a trailing `*`.
  seen: HashSet<String>,
}
impl<'a> GraphDisplayContext<'a> {
  /// Entry point: builds npm package info (when a managed npm resolver is
  /// in use) and writes the textual module graph to `writer`.
  pub fn write<TWrite: Write>(
    graph: &'a ModuleGraph,
    managed_npm_info: Option<(
      &'a CliManagedNpmResolver,
      &'a NpmResolutionSnapshot,
    )>,
    writer: &mut TWrite,
  ) -> Result<(), AnyError> {
    let npm_info = match managed_npm_info {
      Some((npm_resolver, npm_snapshot)) => {
        NpmInfo::build(graph, npm_resolver, npm_snapshot)
      }
      None => NpmInfo::default(),
    };
    Self {
      graph,
      npm_info,
      seen: Default::default(),
    }
    .into_writer(writer)
  }

  /// Writes the header (local path, type, dependency count, total size)
  /// followed by the dependency tree of the single root module.
  fn into_writer<TWrite: Write>(
    mut self,
    writer: &mut TWrite,
  ) -> Result<(), AnyError> {
    // Only graphs with exactly one root are supported for display.
    if self.graph.roots.is_empty() || self.graph.roots.len() > 1 {
      bail!("displaying graphs that have multiple roots is not supported.");
    }
    let root_specifier = self.graph.resolve(&self.graph.roots[0]);
    match self.graph.try_get(root_specifier) {
      Ok(Some(root)) => {
        let maybe_cache_info = match root {
          Module::Js(module) => module.maybe_cache_info.as_ref(),
          Module::Json(module) => module.maybe_cache_info.as_ref(),
          Module::Wasm(module) => module.maybe_cache_info.as_ref(),
          Module::Node(_) | Module::Npm(_) | Module::External(_) => None,
        };
        if let Some(cache_info) = maybe_cache_info
          && let Some(local) = &cache_info.local
        {
          writeln!(
            writer,
            "{} {}",
            colors::bold("local:"),
            local.to_string_lossy()
          )?;
        }
        if let Some(module) = root.js() {
          writeln!(writer, "{} {}", colors::bold("type:"), module.media_type)?;
        }
        // Total size is module sources plus npm package folder sizes.
        let total_modules_size = self
          .graph
          .modules()
          .map(|m| {
            let size = match m {
              Module::Js(module) => module.size(),
              Module::Json(module) => module.size(),
              Module::Wasm(module) => module.size(),
              Module::Node(_) | Module::Npm(_) | Module::External(_) => 0,
            };
            size as f64
          })
          .sum::<f64>();
        let total_npm_package_size = self
          .npm_info
          .package_sizes
          .values()
          .map(|s| *s as f64)
          .sum::<f64>();
        let total_size = total_modules_size + total_npm_package_size;
        // npm modules appear both as graph modules and as resolved
        // packages; subtracting `resolved_ids` avoids double counting.
        let dep_count = self.graph.modules().count() - 1 // -1 for the root module
          + self.npm_info.packages.len()
          - self.npm_info.resolved_ids.len();
        writeln!(
          writer,
          "{} {} unique",
          colors::bold("dependencies:"),
          dep_count,
        )?;
        writeln!(
          writer,
          "{} {}",
          colors::bold("size:"),
          display::human_size(total_size),
        )?;
        writeln!(writer)?;
        let root_node = self.build_module_info(root, false);
        root_node.print(writer)?;
        Ok(())
      }
      Err(err) => {
        if let ModuleErrorKind::Missing { .. } = err.as_kind() {
          bail!("module could not be found");
        } else {
          bail!("{:#}", err);
        }
      }
      Ok(None) => {
        bail!("an internal error occurred");
      }
    }
  }

  /// Builds tree nodes for a dependency's code and (italicized) type
  /// resolutions, in that order.
  fn build_dep_info(&mut self, dep: &Dependency) -> Vec<DisplayTreeNode> {
    let mut children = Vec::with_capacity(2);
    // NOTE(review): `!x.is_none()` would read more idiomatically as
    // `x.is_some()` (clippy would flag this).
    if !dep.maybe_code.is_none()
      && let Some(child) = self.build_resolved_info(&dep.maybe_code, false)
    {
      children.push(child);
    }
    if !dep.maybe_type.is_none()
      && let Some(child) = self.build_resolved_info(&dep.maybe_type, true)
    {
      children.push(child);
    }
    children
  }

  /// Builds the tree node for a module: header (specifier, size) plus
  /// children for its dependencies. Modules already printed are rendered
  /// gray with a `*` and no children.
  fn build_module_info(
    &mut self,
    module: &Module,
    type_dep: bool,
  ) -> DisplayTreeNode {
    enum PackageOrSpecifier {
      Package(Box<NpmResolutionPackage>),
      Specifier(ModuleSpecifier),
    }
    use PackageOrSpecifier::*;
    let package_or_specifier = match module.npm() {
      Some(npm) => {
        // TODO(dsherret): ok to use for now, but we should use the req in the future
        #[allow(deprecated)]
        let nv = npm.nv_reference.nv();
        match self.npm_info.resolve_package(nv) {
          Some(package) => Package(Box::new(package.clone())),
          None => Specifier(module.specifier().clone()), // should never happen
        }
      }
      None => Specifier(module.specifier().clone()),
    };
    let was_seen = !self.seen.insert(match &package_or_specifier {
      Package(package) => package.id.as_serialized().into_string(),
      Specifier(specifier) => specifier.to_string(),
    });
    let header_text = if was_seen {
      // Already printed elsewhere: dim the specifier and append `*`.
      let specifier_str = if type_dep {
        colors::italic_gray(module.specifier()).to_string()
      } else {
        colors::gray(module.specifier()).to_string()
      };
      format!("{} {}", specifier_str, colors::gray("*"))
    } else {
      let header_text = if type_dep {
        colors::italic(module.specifier()).to_string()
      } else {
        module.specifier().to_string()
      };
      let maybe_size = match &package_or_specifier {
        Package(package) => {
          self.npm_info.package_sizes.get(&package.id).copied()
        }
        Specifier(_) => match module {
          Module::Js(module) => Some(module.size() as u64),
          Module::Json(module) => Some(module.size() as u64),
          Module::Wasm(module) => Some(module.size() as u64),
          Module::Node(_) | Module::Npm(_) | Module::External(_) => None,
        },
      };
      format!("{} {}", header_text, maybe_size_to_text(maybe_size))
    };
    let mut tree_node = DisplayTreeNode::from_text(header_text);
    if !was_seen {
      match &package_or_specifier {
        Package(package) => {
          tree_node.children.extend(self.build_npm_deps(package));
        }
        Specifier(_) => match module {
          Module::Js(module) => {
            // Types dependency (e.g. @ts-types) renders before code deps.
            if let Some(types_dep) = &module.maybe_types_dependency
              && let Some(child) =
                self.build_resolved_info(&types_dep.dependency, true)
            {
              tree_node.children.push(child);
            }
            for dep in module.dependencies.values() {
              tree_node.children.extend(self.build_dep_info(dep));
            }
          }
          Module::Wasm(module) => {
            for dep in module.dependencies.values() {
              tree_node.children.extend(self.build_dep_info(dep));
            }
          }
          Module::Json(_)
          | Module::Npm(_)
          | Module::Node(_)
          | Module::External(_) => {}
        },
      }
    }
    tree_node
  }

  /// Builds tree nodes for an npm package's dependencies, sorted by id.
  /// Recurses into unseen packages; seen ones get a `*` marker.
  fn build_npm_deps(
    &mut self,
    package: &NpmResolutionPackage,
  ) -> Vec<DisplayTreeNode> {
    let mut deps = package.dependencies.values().collect::<Vec<_>>();
    deps.sort();
    let mut children = Vec::with_capacity(deps.len());
    for dep_id in deps.into_iter() {
      let maybe_size = self.npm_info.package_sizes.get(dep_id).cloned();
      let size_str = maybe_size_to_text(maybe_size);
      let mut child = DisplayTreeNode::from_text(format!(
        "npm:/{} {}",
        dep_id.as_serialized(),
        size_str
      ));
      if let Some(package) = self.npm_info.packages.get(dep_id)
        && !package.dependencies.is_empty()
      {
        let was_seen =
          !self.seen.insert(package.id.as_serialized().into_string());
        if was_seen {
          child.text = format!("{} {}", child.text, colors::gray("*"));
        } else {
          // Clone to release the borrow on `self.npm_info` before recursing.
          let package = package.clone();
          child.children.extend(self.build_npm_deps(&package));
        }
      }
      children.push(child);
    }
    children
  }

  /// Maps a module error to a red "(reason)"-annotated tree node.
  fn build_error_info(
    &mut self,
    err: &ModuleError,
    specifier: &ModuleSpecifier,
  ) -> DisplayTreeNode {
    self.seen.insert(specifier.to_string());
    match err.as_kind() {
      ModuleErrorKind::InvalidTypeAssertion { .. } => {
        self.build_error_msg(specifier, "(invalid import attribute)")
      }
      ModuleErrorKind::Load { err, .. } => {
        use deno_graph::ModuleLoadError::*;
        let message = match err {
          HttpsChecksumIntegrity(_) => "(checksum integrity error)",
          Decode(_) => "(loading decode error)",
          Loader(err) => {
            if err.get_class() == "NotCapable" {
              "(not capable, requires --allow-import)"
            } else {
              "(loading error)"
            }
          }
          Jsr(_) => "(loading error)",
          Npm(_) => "(npm loading error)",
          TooManyRedirects => "(too many redirects error)",
        };
        self.build_error_msg(specifier, message.as_ref())
      }
      ModuleErrorKind::Parse { .. } | ModuleErrorKind::WasmParse { .. } => {
        self.build_error_msg(specifier, "(parsing error)")
      }
      ModuleErrorKind::UnsupportedImportAttributeType { .. } => {
        self.build_error_msg(specifier, "(unsupported import attribute)")
      }
      ModuleErrorKind::UnsupportedModuleTypeForSourcePhaseImport { .. } => self
        .build_error_msg(
          specifier,
          "(unsupported module type for source phase import)",
        ),
      ModuleErrorKind::UnsupportedMediaType { .. } => {
        self.build_error_msg(specifier, "(unsupported)")
      }
      ModuleErrorKind::Missing { .. }
      | ModuleErrorKind::MissingDynamic { .. } => {
        self.build_error_msg(specifier, "(missing)")
      }
    }
  }

  fn build_error_msg(
    &self,
    specifier: &ModuleSpecifier,
    error_msg: &str,
  ) -> DisplayTreeNode {
    DisplayTreeNode::from_text(format!(
      "{} {}",
      colors::red(specifier),
      colors::red_bold(error_msg)
    ))
  }

  /// Builds a tree node for a resolved dependency: the module subtree on
  /// success, an error node on failure, `None` for unresolved entries.
  fn build_resolved_info(
    &mut self,
    resolution: &Resolution,
    type_dep: bool,
  ) -> Option<DisplayTreeNode> {
    match resolution {
      Resolution::Ok(resolved) => {
        let specifier = &resolved.specifier;
        let resolved_specifier = self.graph.resolve(specifier);
        Some(match self.graph.try_get(resolved_specifier) {
          Ok(Some(module)) => self.build_module_info(module, type_dep),
          Err(err) => self.build_error_info(err, resolved_specifier),
          Ok(None) => DisplayTreeNode::from_text(format!(
            "{} {}",
            colors::red(specifier),
            colors::red_bold("(missing)")
          )),
        })
      }
      Resolution::Err(err) => Some(DisplayTreeNode::from_text(format!(
        "{} {}",
        colors::italic(err.to_string()),
        colors::red_bold("(resolve error)")
      ))),
      _ => None,
    }
  }
}
/// Formats an optional byte size as gray "(1.2MB)" text, or "(unknown)"
/// when no size is available.
fn maybe_size_to_text(maybe_size: Option<u64>) -> String {
  let size_text = maybe_size
    .map(|size| display::human_size(size as f64))
    .unwrap_or_else(|| "unknown".to_string());
  colors::gray(format!("({})", size_text)).to_string()
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/tools/deploy.rs | cli/tools/deploy.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::sync::Arc;
use deno_config::deno_json::NodeModulesDirMode;
use deno_core::error::AnyError;
use deno_core::url::Url;
use deno_path_util::ResolveUrlOrPathError;
use deno_runtime::WorkerExecutionMode;
use deno_runtime::deno_permissions::PermissionsContainer;
use crate::args::DeployFlags;
use crate::args::Flags;
use crate::args::jsr_api_url;
use crate::factory::CliFactory;
use crate::ops;
use crate::registry;
/// Implements `deno deploy` by running the published `@deno/deploy` JSR
/// package (or an override specifier) in a dedicated worker with full
/// permissions plus the deploy ops extension. Returns the worker's exit
/// code.
pub async fn deploy(
  mut flags: Flags,
  deploy_flags: DeployFlags,
) -> Result<i32, AnyError> {
  // The deploy CLI runs without a local node_modules dir or lockfile.
  flags.node_modules_dir = Some(NodeModulesDirMode::None);
  flags.no_lock = true;
  if deploy_flags.sandbox {
    // SAFETY: only this subcommand is running, nothing else, so it's safe to set an env var.
    unsafe {
      std::env::set_var("DENO_DEPLOY_CLI_SANDBOX", "1");
    }
  }
  let mut factory = CliFactory::from_flags(Arc::new(flags));
  // DENO_DEPLOY_CLI_SPECIFIER overrides which deploy CLI module is run
  // (useful for development); a file: URL also changes the initial cwd.
  let maybe_specifier_override =
    if let Ok(specifier) = std::env::var("DENO_DEPLOY_CLI_SPECIFIER") {
      let specifier =
        Url::parse(&specifier).map_err(ResolveUrlOrPathError::UrlParse)?;
      if let Ok(path) = specifier.to_file_path() {
        factory.set_initial_cwd(path);
      }
      Some(specifier)
    } else {
      None
    };
  let client = factory.http_client_provider().get_or_create()?;
  let registry_api_url = jsr_api_url();
  // Query the JSR registry for the latest published @deno/deploy version.
  let response =
    registry::get_package(&client, registry_api_url, "deno", "deploy").await?;
  let res = registry::parse_response::<registry::Package>(response).await?;
  let worker_factory =
    Arc::new(factory.create_cli_main_worker_factory().await?);
  let specifier = if let Some(specifier) = maybe_specifier_override {
    specifier
  } else {
    Url::parse(&format!(
      "jsr:@deno/deploy@{}",
      res
        .latest_version
        .expect("expected @deno/deploy to be published")
    ))
    .map_err(ResolveUrlOrPathError::UrlParse)?
  };
  let mut worker = worker_factory
    .create_custom_worker(
      WorkerExecutionMode::Deploy,
      specifier,
      vec![],
      vec![],
      // The deploy CLI itself runs with all permissions.
      PermissionsContainer::allow_all(
        factory.permission_desc_parser()?.clone(),
      ),
      vec![ops::deploy::deno_deploy::init()],
      Default::default(),
      None,
    )
    .await?;
  Ok(worker.run().await?)
}
/// Returns the OS keyring entry under which the Deno Deploy token is stored.
pub fn get_token_entry() -> Result<keyring::Entry, keyring::Error> {
  keyring::Entry::new("Deno Deploy Token", "Deno Deploy")
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/tools/upgrade.rs | cli/tools/upgrade.rs | // Copyright 2018-2025 the Deno authors. MIT license.
//! This module provides feature to upgrade deno executable
use std::borrow::Cow;
use std::env;
use std::fs;
use std::io::IsTerminal;
use std::ops::Sub;
use std::path::Path;
use std::path::PathBuf;
use std::process::Command;
use std::sync::Arc;
use std::time::Duration;
use async_trait::async_trait;
use deno_core::anyhow::Context;
use deno_core::anyhow::bail;
use deno_core::error::AnyError;
use deno_core::unsync::spawn;
use deno_core::url::Url;
use deno_lib::shared::ReleaseChannel;
use deno_lib::version;
use deno_semver::SmallStackString;
use deno_semver::Version;
use once_cell::sync::Lazy;
use crate::args::Flags;
use crate::args::UPGRADE_USAGE;
use crate::args::UpgradeFlags;
use crate::colors;
use crate::factory::CliFactory;
use crate::http_util::HttpClient;
use crate::http_util::HttpClientProvider;
use crate::util::archive;
use crate::util::progress_bar::ProgressBar;
use crate::util::progress_bar::ProgressBarStyle;
// URLs that stable releases, canary builds, and release archives are
// downloaded from.
const RELEASE_URL: &str = "https://github.com/denoland/deno/releases";
const CANARY_URL: &str = "https://dl.deno.land/canary";
const DL_RELEASE_URL: &str = "https://dl.deno.land/release";

/// Release archive file name for the current build target
/// (`deno-<TARGET>.zip`, with TARGET baked in at compile time).
pub static ARCHIVE_NAME: Lazy<String> =
  Lazy::new(|| format!("deno-{}.zip", env!("TARGET")));

// How often query server for new version. In hours.
const UPGRADE_CHECK_INTERVAL: i64 = 24;

// Delay used by the background upgrade check before fetching.
const UPGRADE_CHECK_FETCH_DELAY: Duration = Duration::from_millis(500);
/// Environment necessary for doing the update checker.
/// An alternate trait implementation can be provided for testing purposes.
trait UpdateCheckerEnvironment: Clone {
  /// Contents of the cached check file; empty string when missing/unreadable.
  fn read_check_file(&self) -> String;
  /// Persists the serialized check state.
  fn write_check_file(&self, text: &str);
  /// Current wall-clock time (implementations may cache it).
  fn current_time(&self) -> chrono::DateTime<chrono::Utc>;
}

/// Production environment: stores check state in a file on disk.
#[derive(Clone)]
struct RealUpdateCheckerEnvironment {
  cache_file_path: PathBuf,
  // snapshot taken at construction so all reads within a run agree
  current_time: chrono::DateTime<chrono::Utc>,
}

impl RealUpdateCheckerEnvironment {
  pub fn new(cache_file_path: PathBuf) -> Self {
    Self {
      cache_file_path,
      // cache the current time
      current_time: chrono::Utc::now(),
    }
  }
}

impl UpdateCheckerEnvironment for RealUpdateCheckerEnvironment {
  fn read_check_file(&self) -> String {
    std::fs::read_to_string(&self.cache_file_path).unwrap_or_default()
  }

  fn write_check_file(&self, text: &str) {
    // best-effort; write failures are intentionally ignored
    let _ = std::fs::write(&self.cache_file_path, text);
  }

  fn current_time(&self) -> chrono::DateTime<chrono::Utc> {
    self.current_time
  }
}

/// Whether the upgrade check was initiated by CLI execution or by the LSP.
#[derive(Debug, Copy, Clone)]
enum UpgradeCheckKind {
  Execution,
  Lsp,
}
#[async_trait(?Send)]
trait VersionProvider: Clone {
  /// Fetch latest available version for the given release channel
  async fn latest_version(
    &self,
    release_channel: ReleaseChannel,
  ) -> Result<AvailableVersion, AnyError>;

  /// Returns either a semver or git hash. It's up to implementor to
  /// decide which one is appropriate, but in general only "stable"
  /// and "lts" versions use semver.
  fn current_version(&self) -> Cow<'_, str>;

  /// The release channel this executable was built from.
  fn get_current_exe_release_channel(&self) -> ReleaseChannel;
}

/// Production provider: queries the version server over HTTP.
#[derive(Clone)]
struct RealVersionProvider {
  http_client_provider: Arc<HttpClientProvider>,
  check_kind: UpgradeCheckKind,
}

impl RealVersionProvider {
  pub fn new(
    http_client_provider: Arc<HttpClientProvider>,
    check_kind: UpgradeCheckKind,
  ) -> Self {
    Self {
      http_client_provider,
      check_kind,
    }
  }
}

#[async_trait(?Send)]
impl VersionProvider for RealVersionProvider {
  async fn latest_version(
    &self,
    release_channel: ReleaseChannel,
  ) -> Result<AvailableVersion, AnyError> {
    fetch_latest_version(
      &self.http_client_provider.get_or_create()?,
      release_channel,
      self.check_kind,
    )
    .await
  }

  fn current_version(&self) -> Cow<'_, str> {
    Cow::Borrowed(version::DENO_VERSION_INFO.version_or_git_hash())
  }

  fn get_current_exe_release_channel(&self) -> ReleaseChannel {
    version::DENO_VERSION_INFO.release_channel
  }
}
/// Combines the cached check-file state with a version provider to decide
/// when to re-query the server and when to prompt the user.
struct UpdateChecker<
  TEnvironment: UpdateCheckerEnvironment,
  TVersionProvider: VersionProvider,
> {
  env: TEnvironment,
  version_provider: TVersionProvider,
  // parsed contents of the check file, if present and well-formed
  maybe_file: Option<CheckVersionFile>,
}
impl<TEnvironment: UpdateCheckerEnvironment, TVersionProvider: VersionProvider>
  UpdateChecker<TEnvironment, TVersionProvider>
{
  pub fn new(env: TEnvironment, version_provider: TVersionProvider) -> Self {
    let maybe_file = CheckVersionFile::parse(env.read_check_file());
    Self {
      env,
      version_provider,
      maybe_file,
    }
  }

  /// Whether enough time has elapsed since the last server query
  /// (or no check has ever been recorded).
  pub fn should_check_for_new_version(&self) -> bool {
    let Some(file) = &self.maybe_file else {
      return true;
    };
    let last_check_age = self
      .env
      .current_time()
      .signed_duration_since(file.last_checked);
    last_check_age > chrono::Duration::hours(UPGRADE_CHECK_INTERVAL)
  }

  /// Returns the current exe release channel and a version if a new one is available and it should be prompted about.
  pub fn should_prompt(&self) -> Option<(ReleaseChannel, String)> {
    let file = self.maybe_file.as_ref()?;
    // If the current version saved is not the actually current version of the binary
    // It means
    // - We already check for a new version today
    // - The user have probably upgraded today
    // So we should not prompt and wait for tomorrow for the latest version to be updated again
    let current_version = self.version_provider.current_version();
    if file.current_version != current_version {
      return None;
    }
    if file.latest_version == current_version {
      return None;
    }
    // For semver channels, don't prompt when the local build is already
    // at or beyond the advertised latest version.
    if let Ok(current) = Version::parse_standard(&current_version)
      && let Ok(latest) = Version::parse_standard(&file.latest_version)
      && current >= latest
    {
      return None;
    }
    let last_prompt_age = self
      .env
      .current_time()
      .signed_duration_since(file.last_prompt);
    if last_prompt_age > chrono::Duration::hours(UPGRADE_CHECK_INTERVAL) {
      Some((file.current_release_channel, file.latest_version.clone()))
    } else {
      None
    }
  }

  /// Store that we showed the update message to the user.
  pub fn store_prompted(self) {
    if let Some(file) = self.maybe_file {
      self.env.write_check_file(
        &file.with_last_prompt(self.env.current_time()).serialize(),
      );
    }
  }
}
/// Builds the deno.com blog post URL for a minor release (e.g. `v2.1`).
fn get_minor_version_blog_post_url(semver: &Version) -> String {
  let mut url = String::from("https://deno.com/blog/v");
  url.push_str(&semver.major.to_string());
  url.push('.');
  url.push_str(&semver.minor.to_string());
  url
}
/// Builds the deno.com blog post URL for a release candidate, e.g.
/// `v2.0-rc-1` (the RC number is the second pre-release segment).
fn get_rc_version_blog_post_url(semver: &Version) -> String {
  format!(
    "https://deno.com/blog/v{major}.{minor}-rc-{rc}",
    major = semver.major,
    minor = semver.minor,
    rc = semver.pre[1]
  )
}
/// Prints migration guide / release notes / blog post links when upgrading
/// across a minor or major version boundary. Best-effort: returns silently
/// when either version string isn't valid semver.
async fn print_release_notes(
  current_version: &str,
  new_version: &str,
  client: &HttpClient,
) {
  let Ok(current_semver) = Version::parse_standard(current_version) else {
    return;
  };
  let Ok(new_semver) = Version::parse_standard(new_version) else {
    return;
  };
  let is_switching_from_deno1_to_deno2 =
    new_semver.major == 2 && current_semver.major == 1;
  // a 2.0.0 pre-release whose first pre-release segment is "rc"
  let is_deno_2_rc = new_semver.major == 2
    && new_semver.minor == 0
    && new_semver.patch == 0
    && new_semver.pre.first().map(|s| s.as_str()) == Some("rc");
  if is_deno_2_rc || is_switching_from_deno1_to_deno2 {
    log::info!(
      "{}\n\n {}\n",
      colors::gray("Migration guide:"),
      colors::bold(
        "https://docs.deno.com/runtime/manual/advanced/migrate_deprecations"
      )
    );
  }
  if is_deno_2_rc {
    log::info!(
      "{}\n\n {}\n",
      colors::gray("If you find a bug, please report to:"),
      colors::bold("https://github.com/denoland/deno/issues/new")
    );
    // Check if there's blog post entry for this release
    let blog_url_str = get_rc_version_blog_post_url(&new_semver);
    let blog_url = Url::parse(&blog_url_str).unwrap();
    if client.download(blog_url).await.is_ok() {
      log::info!(
        "{}\n\n {}\n",
        colors::gray("Blog post:"),
        colors::bold(blog_url_str)
      );
    }
    return;
  }
  // only announce notes when crossing a major/minor boundary
  let should_print = current_semver.major != new_semver.major
    || current_semver.minor != new_semver.minor;
  if !should_print {
    return;
  }
  log::info!(
    "{}\n\n {}\n",
    colors::gray("Release notes:"),
    colors::bold(format!(
      "https://github.com/denoland/deno/releases/tag/v{}",
      &new_version,
    ))
  );
  log::info!(
    "{}\n\n {}\n",
    colors::gray("Blog post:"),
    colors::bold(get_minor_version_blog_post_url(&new_semver))
  );
}
/// Update checking is enabled unless the `DENO_NO_UPDATE_CHECK` environment
/// variable is set (to any value, including invalid unicode).
pub fn upgrade_check_enabled() -> bool {
  match env::var("DENO_NO_UPDATE_CHECK") {
    Err(env::VarError::NotPresent) => true,
    _ => false,
  }
}
/// Called at CLI startup: possibly kicks off a background fetch of the
/// latest version, and prints an "upgrade available" message when the cached
/// state says a newer release exists. No-op when `DENO_NO_UPDATE_CHECK` is set.
pub fn check_for_upgrades(
  http_client_provider: Arc<HttpClientProvider>,
  cache_file_path: PathBuf,
) {
  if !upgrade_check_enabled() {
    return;
  }
  let env = RealUpdateCheckerEnvironment::new(cache_file_path);
  let version_provider = RealVersionProvider::new(
    http_client_provider.clone(),
    UpgradeCheckKind::Execution,
  );
  let update_checker = UpdateChecker::new(env, version_provider);
  if update_checker.should_check_for_new_version() {
    let env = update_checker.env.clone();
    let version_provider = update_checker.version_provider.clone();
    // do this asynchronously on a separate task
    spawn(async move {
      // Sleep for a small amount of time to not unnecessarily impact startup
      // time.
      tokio::time::sleep(UPGRADE_CHECK_FETCH_DELAY).await;
      fetch_and_store_latest_version(&env, &version_provider).await;
      // text is used by the test suite
      log::debug!("Finished upgrade checker.")
    });
  }
  // Don't bother doing any more computation if we're not in TTY environment.
  let should_prompt =
    log::log_enabled!(log::Level::Info) && std::io::stderr().is_terminal();
  if !should_prompt {
    return;
  }
  // Print a message if an update is available
  if let Some((release_channel, upgrade_version)) =
    update_checker.should_prompt()
  {
    match release_channel {
      ReleaseChannel::Stable => {
        log::info!(
          "{} {} → {} {}",
          colors::green("A new release of Deno is available:"),
          colors::cyan(version::DENO_VERSION_INFO.deno),
          colors::cyan(&upgrade_version),
          colors::italic_gray("Run `deno upgrade` to install it.")
        );
      }
      ReleaseChannel::Canary => {
        log::info!(
          "{} {}",
          colors::green("A new canary release of Deno is available."),
          colors::italic_gray("Run `deno upgrade canary` to install it.")
        );
      }
      ReleaseChannel::Rc => {
        log::info!(
          "{} {}",
          colors::green("A new release candidate of Deno is available."),
          colors::italic_gray("Run `deno upgrade rc` to install it.")
        );
      }
      ReleaseChannel::Lts => {
        log::info!(
          "{} {} → {} {}",
          colors::green("A new LTS release of Deno is available:"),
          colors::cyan(version::DENO_VERSION_INFO.deno),
          colors::cyan(&upgrade_version),
          colors::italic_gray("Run `deno upgrade lts` to install it.")
        );
      }
    }
    // record the prompt time so we don't nag on every run
    update_checker.store_prompted();
  }
}
/// Result of an LSP-initiated upgrade check.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct LspVersionUpgradeInfo {
  // newest version (semver) or git hash (canary) available on the channel
  pub latest_version: String,
  // true when `latest_version` is a git hash rather than a semver
  pub is_canary: bool,
}

/// LSP entrypoint for the upgrade check; returns `Ok(None)` when checking
/// is disabled via `DENO_NO_UPDATE_CHECK` or no newer version exists.
pub async fn check_for_upgrades_for_lsp(
  http_client_provider: Arc<HttpClientProvider>,
) -> Result<Option<LspVersionUpgradeInfo>, AnyError> {
  if !upgrade_check_enabled() {
    return Ok(None);
  }
  let version_provider =
    RealVersionProvider::new(http_client_provider, UpgradeCheckKind::Lsp);
  check_for_upgrades_for_lsp_with_provider(&version_provider).await
}
/// Queries the version server for the current exe's release channel and
/// reports whether a newer build is available (`None` when up to date).
async fn check_for_upgrades_for_lsp_with_provider(
  version_provider: &impl VersionProvider,
) -> Result<Option<LspVersionUpgradeInfo>, AnyError> {
  let release_channel = version_provider.get_current_exe_release_channel();
  let latest_version = version_provider.latest_version(release_channel).await?;
  let current_version = version_provider.current_version();
  // Nothing to upgrade
  if current_version == latest_version.version_or_hash {
    return Ok(None);
  }
  match release_channel {
    ReleaseChannel::Stable | ReleaseChannel::Rc | ReleaseChannel::Lts => {
      // Semver channels: an equal-or-newer local build is also "current".
      if let Ok(current) = Version::parse_standard(&current_version)
        && let Ok(latest) =
          Version::parse_standard(&latest_version.version_or_hash)
        && current >= latest
      {
        return Ok(None); // nothing to upgrade
      }
      Ok(Some(LspVersionUpgradeInfo {
        latest_version: latest_version.version_or_hash,
        is_canary: false,
      }))
    }
    // Canary versions are git hashes with no ordering, so any difference
    // is reported as an available upgrade.
    ReleaseChannel::Canary => Ok(Some(LspVersionUpgradeInfo {
      latest_version: latest_version.version_or_hash,
      is_canary: true,
    })),
  }
}
/// Background task body: fetches the latest version for the current channel
/// and writes the result to the check file. Fetch errors are swallowed —
/// the check simply retries on a later run.
async fn fetch_and_store_latest_version<
  TEnvironment: UpdateCheckerEnvironment,
  TVersionProvider: VersionProvider,
>(
  env: &TEnvironment,
  version_provider: &TVersionProvider,
) {
  let release_channel = version_provider.get_current_exe_release_channel();
  let Ok(latest_version) =
    version_provider.latest_version(release_channel).await
  else {
    return;
  };
  let version_file = CheckVersionFile {
    // put a date in the past here so that prompt can be shown on next run
    last_prompt: env
      .current_time()
      .sub(chrono::Duration::hours(UPGRADE_CHECK_INTERVAL + 1)),
    last_checked: env.current_time(),
    current_version: version_provider.current_version().to_string(),
    latest_version: latest_version.version_or_hash,
    current_release_channel: release_channel,
  };
  env.write_check_file(&version_file.serialize());
}
pub async fn upgrade(
flags: Arc<Flags>,
upgrade_flags: UpgradeFlags,
) -> Result<(), AnyError> {
let factory = CliFactory::from_flags(flags);
let http_client_provider = factory.http_client_provider();
let client = http_client_provider.get_or_create()?;
let current_exe_path = std::env::current_exe()?;
let full_path_output_flag = match &upgrade_flags.output {
Some(output) => Some(
std::env::current_dir()
.context("failed getting cwd")?
.join(output),
),
None => None,
};
let output_exe_path =
full_path_output_flag.as_ref().unwrap_or(¤t_exe_path);
let permissions = set_exe_permissions(¤t_exe_path, output_exe_path)?;
let force_selection_of_new_version =
upgrade_flags.force || full_path_output_flag.is_some();
let requested_version =
RequestedVersion::from_upgrade_flags(upgrade_flags.clone())?;
log::info!("Current Deno version: v{}", version::DENO_VERSION_INFO.deno);
let maybe_selected_version_to_upgrade = match &requested_version {
RequestedVersion::Latest(channel) => {
find_latest_version_to_upgrade(
http_client_provider.clone(),
*channel,
force_selection_of_new_version,
)
.await?
}
RequestedVersion::SpecificVersion(channel, version) => {
select_specific_version_for_upgrade(
*channel,
version.clone(),
force_selection_of_new_version,
)?
}
};
let Some(selected_version_to_upgrade) = maybe_selected_version_to_upgrade
else {
return Ok(());
};
let banner_handle = spawn_banner_task(
&selected_version_to_upgrade.version_or_hash,
selected_version_to_upgrade.release_channel,
http_client_provider.get_or_create()?,
);
let download_url = get_download_url(
&selected_version_to_upgrade.version_or_hash,
requested_version.release_channel(),
)?;
log::info!("{}", colors::gray(format!("Downloading {}", &download_url)));
let Some(archive_data) = download_package(&client, download_url).await?
else {
log::error!("Download could not be found, aborting");
deno_runtime::exit(1)
};
log::info!(
"{}",
colors::gray(format!(
"Deno is upgrading to version {}",
&selected_version_to_upgrade.version_or_hash
))
);
let temp_dir = tempfile::TempDir::new()?;
let new_exe_path = archive::unpack_into_dir(archive::UnpackArgs {
exe_name: "deno",
archive_name: &ARCHIVE_NAME,
archive_data: &archive_data,
is_windows: cfg!(windows),
dest_path: temp_dir.path(),
})?;
fs::set_permissions(&new_exe_path, permissions)?;
check_exe(&new_exe_path)?;
if upgrade_flags.dry_run {
fs::remove_file(&new_exe_path)?;
log::info!("Upgraded successfully (dry run)");
if requested_version.release_channel() == ReleaseChannel::Stable {
print_release_notes(
version::DENO_VERSION_INFO.deno,
&selected_version_to_upgrade.version_or_hash,
&client,
)
.await;
}
drop(temp_dir);
return Ok(());
}
let output_exe_path =
full_path_output_flag.as_ref().unwrap_or(¤t_exe_path);
#[cfg(windows)]
kill_running_deno_lsp_processes();
let output_result = if *output_exe_path == current_exe_path {
replace_exe(&new_exe_path, output_exe_path)
} else {
fs::rename(&new_exe_path, output_exe_path)
.or_else(|_| fs::copy(&new_exe_path, output_exe_path).map(|_| ()))
};
check_windows_access_denied_error(output_result, output_exe_path)?;
log::info!(
"\nUpgraded successfully to Deno {} {}\n",
colors::green(selected_version_to_upgrade.display()),
colors::gray(&format!(
"({})",
selected_version_to_upgrade.release_channel.name()
))
);
if requested_version.release_channel() == ReleaseChannel::Stable {
print_release_notes(
version::DENO_VERSION_INFO.deno,
&selected_version_to_upgrade.version_or_hash,
&client,
)
.await;
}
if let Ok(Some(text)) = banner_handle.await {
log::info!("\n{}\n", text);
}
drop(temp_dir); // delete the temp dir
Ok(())
}
/// What the user asked `deno upgrade` to install: the newest build of a
/// channel, or an exact semver / canary commit hash.
#[derive(Debug, PartialEq)]
enum RequestedVersion {
  Latest(ReleaseChannel),
  SpecificVersion(ReleaseChannel, String),
}

impl RequestedVersion {
  /// Derives the requested version from the CLI flags. The positional value
  /// may be a channel name, a 40-char git hash (canary), or a semver; the
  /// `--canary`/`--rc` flags select the channel when no positional is given.
  fn from_upgrade_flags(upgrade_flags: UpgradeFlags) -> Result<Self, AnyError> {
    let is_canary = upgrade_flags.canary;
    // full 40-character lowercase git commit hash
    let re_hash = lazy_regex::regex!("^[0-9a-f]{40}$");
    let channel = if is_canary {
      ReleaseChannel::Canary
    } else if upgrade_flags.release_candidate {
      ReleaseChannel::Rc
    } else {
      ReleaseChannel::Stable
    };
    let mut maybe_passed_version = upgrade_flags.version.clone();
    // TODO(bartlomieju): prefer flags first? This whole logic could be cleaned up...
    if let Some(val) = &upgrade_flags.version_or_hash_or_channel {
      if let Ok(channel) = ReleaseChannel::deserialize(&val.to_lowercase()) {
        // TODO(bartlomieju): print error if any other flags passed?
        return Ok(Self::Latest(channel));
      } else if re_hash.is_match(val) {
        return Ok(Self::SpecificVersion(
          ReleaseChannel::Canary,
          val.to_string(),
        ));
      } else {
        maybe_passed_version = Some(val.to_string());
      }
    }
    let Some(passed_version) = maybe_passed_version else {
      return Ok(Self::Latest(channel));
    };
    // accept a leading `v` prefix (e.g. "v1.46.0")
    let passed_version = passed_version
      .strip_prefix('v')
      .unwrap_or(&passed_version)
      .to_string();
    let (channel, passed_version) = if is_canary {
      if !re_hash.is_match(&passed_version) {
        bail!(
          "Invalid commit hash passed ({})\n\nPass a semver, or a full 40 character git commit hash, or a release channel name.\n\nUsage:\n{}",
          colors::gray(passed_version),
          UPGRADE_USAGE
        );
      }
      (ReleaseChannel::Canary, passed_version)
    } else {
      let Ok(semver) = Version::parse_standard(&passed_version) else {
        bail!(
          "Invalid version passed ({})\n\nPass a semver, or a full 40 character git commit hash, or a release channel name.\n\nUsage:\n{}",
          colors::gray(passed_version),
          UPGRADE_USAGE
        );
      };
      // versions with an `rc` pre-release segment belong to the RC channel
      if semver.pre.contains(&SmallStackString::from_static("rc")) {
        (ReleaseChannel::Rc, passed_version)
      } else {
        (ReleaseChannel::Stable, passed_version)
      }
    };
    Ok(RequestedVersion::SpecificVersion(channel, passed_version))
  }

  /// Channels that use Git hashes as versions are considered canary.
  pub fn release_channel(&self) -> ReleaseChannel {
    match self {
      Self::Latest(channel) => *channel,
      Self::SpecificVersion(channel, _) => *channel,
    }
  }
}
/// Validates an explicitly requested version; returns `None` (a no-op
/// upgrade) when that exact version is already installed and `force` is off.
fn select_specific_version_for_upgrade(
  release_channel: ReleaseChannel,
  version: String,
  force: bool,
) -> Result<Option<AvailableVersion>, AnyError> {
  // compare the semver for release channels, the git hash for canary
  let current_is_passed = match release_channel {
    ReleaseChannel::Stable | ReleaseChannel::Rc | ReleaseChannel::Lts => {
      version::DENO_VERSION_INFO.release_channel == release_channel
        && version::DENO_VERSION_INFO.deno == version
    }
    ReleaseChannel::Canary => version::DENO_VERSION_INFO.git_hash == version,
  };
  if !force && current_is_passed {
    log::info!(
      "Version {} is already installed",
      version::DENO_VERSION_INFO.deno
    );
    return Ok(None);
  }
  Ok(Some(AvailableVersion {
    version_or_hash: version,
    release_channel,
  }))
}

/// Queries the server for the newest version on `release_channel`; returns
/// `None` when the local binary is already that version (unless `force`).
async fn find_latest_version_to_upgrade(
  http_client_provider: Arc<HttpClientProvider>,
  release_channel: ReleaseChannel,
  force: bool,
) -> Result<Option<AvailableVersion>, AnyError> {
  log::info!(
    "{}",
    colors::gray(&format!("Looking up {} version", release_channel.name()))
  );
  let client = http_client_provider.get_or_create()?;
  let latest_version_found = match fetch_latest_version(
    &client,
    release_channel,
    UpgradeCheckKind::Execution,
  )
  .await
  {
    Ok(v) => v,
    Err(err) => {
      // a "Not found" means the channel has no published release yet
      if err.to_string().contains("Not found") {
        bail!(
          "No {} release available at the moment.",
          release_channel.name()
        );
      } else {
        return Err(err);
      }
    }
  };
  let current_version = match release_channel {
    ReleaseChannel::Canary => version::DENO_VERSION_INFO.git_hash,
    ReleaseChannel::Stable | ReleaseChannel::Lts | ReleaseChannel::Rc => {
      version::DENO_VERSION_INFO.deno
    }
  };
  // switching channels always counts as an upgrade
  let should_upgrade = force
    || current_version != latest_version_found.version_or_hash
    || version::DENO_VERSION_INFO.release_channel != release_channel;
  log::info!("");
  if should_upgrade {
    log::info!(
      "Found latest {} version {}",
      latest_version_found.release_channel.name(),
      color_print::cformat!("<g>{}</>", latest_version_found.display())
    );
  } else {
    log::info!(
      "Local deno version {} is the most recent release",
      color_print::cformat!("<g>{}</>", current_version)
    );
  }
  log::info!("");
  Ok(should_upgrade.then_some(latest_version_found))
}
/// A version (or canary git hash) available on some release channel.
#[derive(Debug, Clone, PartialEq)]
struct AvailableVersion {
  version_or_hash: String,
  release_channel: ReleaseChannel,
}

impl AvailableVersion {
  /// Format display version, appending `v` before version number
  /// for non-canary releases.
  fn display(&self) -> Cow<'_, str> {
    if matches!(self.release_channel, ReleaseChannel::Canary) {
      Cow::Borrowed(&self.version_or_hash)
    } else {
      Cow::Owned(format!("v{}", self.version_or_hash))
    }
  }
}
/// Downloads and normalizes the latest-version text file for a channel.
async fn fetch_latest_version(
  client: &HttpClient,
  release_channel: ReleaseChannel,
  check_kind: UpgradeCheckKind,
) -> Result<AvailableVersion, AnyError> {
  // TARGET is the compile-time target triple (only used in canary URLs)
  let url = get_latest_version_url(release_channel, env!("TARGET"), check_kind);
  let text = client.download_text(url.parse()?).await?;
  let version = normalize_version_from_server(release_channel, &text)?;
  Ok(version)
}
/// Normalizes the raw text returned by the version server into an
/// `AvailableVersion` for the given channel. Semver channels may carry a
/// leading `v` which is stripped; canary responses are git hashes and used
/// verbatim. The `Result` return type is kept for interface compatibility
/// even though normalization itself cannot fail.
fn normalize_version_from_server(
  release_channel: ReleaseChannel,
  text: &str,
) -> Result<AvailableVersion, AnyError> {
  let text = text.trim();
  let version_or_hash = match release_channel {
    ReleaseChannel::Stable | ReleaseChannel::Rc | ReleaseChannel::Lts => {
      // strip an optional leading `v` (e.g. "v1.2.3" -> "1.2.3")
      text.trim_start_matches('v').to_string()
    }
    ReleaseChannel::Canary => text.to_string(),
  };
  Ok(AvailableVersion {
    version_or_hash,
    release_channel,
  })
}
fn get_latest_version_url(
release_channel: ReleaseChannel,
target_tuple: &str,
check_kind: UpgradeCheckKind,
) -> String {
let file_name = match release_channel {
ReleaseChannel::Stable => Cow::Borrowed("release-latest.txt"),
ReleaseChannel::Canary => {
Cow::Owned(format!("canary-{target_tuple}-latest.txt"))
}
ReleaseChannel::Rc => Cow::Borrowed("release-rc-latest.txt"),
ReleaseChannel::Lts => Cow::Borrowed("release-lts-latest.txt"),
};
let query_param = match check_kind {
UpgradeCheckKind::Execution => "",
UpgradeCheckKind::Lsp => "?lsp",
};
format!("{}/{}{}", base_upgrade_url(), file_name, query_param)
}
/// Base URL for version lookups; overridable via the
/// `DENO_DONT_USE_INTERNAL_BASE_UPGRADE_URL` env var (used by the test suite).
fn base_upgrade_url() -> Cow<'static, str> {
  match env::var("DENO_DONT_USE_INTERNAL_BASE_UPGRADE_URL") {
    Ok(url) => Cow::Owned(url),
    Err(_) => Cow::Borrowed("https://dl.deno.land"),
  }
}
/// Builds the archive download URL for a version: GitHub releases for
/// stable, dl.deno.land for canary/rc/lts.
fn get_download_url(
  version: &str,
  release_channel: ReleaseChannel,
) -> Result<Url, AnyError> {
  let download_url = match release_channel {
    ReleaseChannel::Stable => {
      format!("{}/download/v{}/{}", RELEASE_URL, version, *ARCHIVE_NAME)
    }
    ReleaseChannel::Rc => {
      format!("{}/v{}/{}", DL_RELEASE_URL, version, *ARCHIVE_NAME)
    }
    ReleaseChannel::Canary => {
      format!("{}/{}/{}", CANARY_URL, version, *ARCHIVE_NAME)
    }
    ReleaseChannel::Lts => {
      format!("{}/v{}/{}", DL_RELEASE_URL, version, *ARCHIVE_NAME)
    }
  };
  Url::parse(&download_url).with_context(|| {
    format!(
      "Failed to parse URL to download new release: {}",
      download_url
    )
  })
}

/// Starts fetching the release banner in the background so it can be shown
/// after the upgrade completes; gives up after 5 seconds or on any error.
fn spawn_banner_task(
  version: &str,
  release_channel: ReleaseChannel,
  client: HttpClient,
) -> deno_core::unsync::JoinHandle<Option<String>> {
  let banner_url = get_banner_url(version, release_channel);
  deno_core::unsync::spawn(async move {
    // `?` on Option: channels without a banner URL resolve to None
    let banner_url = banner_url?;
    tokio::select! {
      result = client.download_text(banner_url) => {
        result.ok()
      }
      _ = tokio::time::sleep(Duration::from_secs(5)) => {
        None
      }
    }
  })
}
/// Returns the URL of the release banner text, which is only published for
/// stable releases; other channels have no banner.
fn get_banner_url(
  version: &str,
  release_channel: ReleaseChannel,
) -> Option<Url> {
  if !matches!(release_channel, ReleaseChannel::Stable) {
    return None;
  }
  let download_url = format!("{}/v{}/banner.txt", DL_RELEASE_URL, version);
  Url::parse(&download_url).ok()
}
/// Downloads the release archive with a progress bar and retries.
/// Returns `Ok(None)` when the server has no bytes for the URL
/// (e.g. the version was never built for this architecture).
async fn download_package(
  client: &HttpClient,
  download_url: Url,
) -> Result<Option<Vec<u8>>, AnyError> {
  let progress_bar = ProgressBar::new(ProgressBarStyle::DownloadBars);
  // provide an empty string here in order to prefer the downloading
  // text above which will stay alive after the progress bars are complete
  let progress = progress_bar.update("");
  let response = client
    .download_with_progress_and_retries(download_url.clone(), &Default::default(), &progress)
    .await
    .with_context(|| format!("Failed downloading {download_url}. The version you requested may not have been built for the current architecture."))?;
  Ok(response.into_maybe_bytes()?)
}
/// Replaces the executable at `to` (which may be the currently running
/// binary) with the file at `from`.
fn replace_exe(from: &Path, to: &Path) -> Result<(), std::io::Error> {
  if cfg!(windows) {
    // On windows you cannot replace the currently running executable.
    // so first we rename it to deno.old.exe
    fs::rename(to, to.with_extension("old.exe"))?;
  } else {
    fs::remove_file(to)?;
  }
  // Windows cannot rename files across device boundaries, so if rename fails,
  // we try again with copy.
  fs::rename(from, to).or_else(|_| fs::copy(from, to).map(|_| ()))?;
  Ok(())
}

/// On Windows, converts an "access denied" error from replacing the exe into
/// a friendlier message (usually caused by another running deno process).
/// All other errors — and all non-Windows errors — pass through unchanged.
fn check_windows_access_denied_error(
  output_result: Result<(), std::io::Error>,
  output_exe_path: &Path,
) -> Result<(), AnyError> {
  let Err(err) = output_result else {
    return Ok(());
  };
  if !cfg!(windows) {
    return Err(err.into());
  }
  const WIN_ERROR_ACCESS_DENIED: i32 = 5;
  if err.raw_os_error() != Some(WIN_ERROR_ACCESS_DENIED) {
    return Err(err.into());
  };
  Err(err).with_context(|| {
    format!(
      concat!(
        "Could not replace the deno executable. This may be because an ",
        "existing deno process is running. Please ensure there are no ",
        "running deno processes (ex. Stop-Process -Name deno ; deno {}), ",
        "close any editors before upgrading, and ensure you have ",
        "sufficient permission to '{}'."
      ),
      // skip the first argument, which is the executable path
      std::env::args().skip(1).collect::<Vec<_>>().join(" "),
      output_exe_path.display(),
    )
  })
}
#[cfg(windows)]
fn kill_running_deno_lsp_processes() {
  // limit this to `deno lsp` invocations to avoid killing important programs someone might be running
  let is_debug = log::log_enabled!(log::Level::Debug);
  let get_pipe = || {
    if is_debug {
      std::process::Stdio::inherit()
    } else {
      std::process::Stdio::null()
    }
  };
  // best-effort: any powershell failure is ignored
  let _ = Command::new("powershell.exe")
    .args([
      "-Command",
      r#"Get-WmiObject Win32_Process | Where-Object {
$_.Name -eq 'deno.exe' -and
$_.CommandLine -match '^(?:\"[^\"]+\"|\S+)\s+lsp\b'
} | ForEach-Object {
if ($_.Terminate()) {
Write-Host 'Terminated:' $_.ProcessId
}
}"#,
    ])
    .stdout(get_pipe())
    .stderr(get_pipe())
    .output();
}

/// Determines the permissions to apply to the new executable and verifies
/// the destination is writable (including the root-owned case on unix).
fn set_exe_permissions(
  current_exe_path: &Path,
  output_exe_path: &Path,
) -> Result<std::fs::Permissions, AnyError> {
  // output doesn't exist yet -> inherit the current exe's permissions
  let Ok(metadata) = fs::metadata(output_exe_path) else {
    let metadata = fs::metadata(current_exe_path)?;
    return Ok(metadata.permissions());
  };
  let permissions = metadata.permissions();
  if permissions.readonly() {
    bail!(
      "You do not have write permission to {}",
      output_exe_path.display()
    );
  }
  #[cfg(unix)]
  if std::os::unix::fs::MetadataExt::uid(&metadata) == 0
    && !nix::unistd::Uid::effective().is_root()
  {
    bail!(
      concat!(
        "You don't have write permission to {} because it's owned by root.\n",
        "Consider updating deno through your package manager if its installed from it.\n",
        "Otherwise run `deno upgrade` as root.",
      ),
      output_exe_path.display()
    );
  }
  Ok(permissions)
}

/// Sanity-checks the freshly unpacked binary by running `deno -V`.
fn check_exe(exe_path: &Path) -> Result<(), AnyError> {
  let output = Command::new(exe_path)
    .arg("-V")
    .stderr(std::process::Stdio::inherit())
    .output()?;
  if !output.status.success() {
    bail!(
      "Failed to validate Deno executable. This may be because your OS is unsupported or the executable is corrupted"
    )
  } else {
    Ok(())
  }
}
/// State persisted between runs in the update-check cache file
/// (serialized as five `!`-separated fields).
#[derive(Debug)]
struct CheckVersionFile {
  pub last_prompt: chrono::DateTime<chrono::Utc>,
  pub last_checked: chrono::DateTime<chrono::Utc>,
  pub current_version: String,
  pub latest_version: String,
  pub current_release_channel: ReleaseChannel,
}
impl CheckVersionFile {
  /// Parses the `!`-delimited cache file:
  /// `last_prompt!last_checked!latest_version!current_version!channel`.
  /// Returns `None` for any malformed or incomplete content.
  pub fn parse(content: String) -> Option<Self> {
    let split_content = content.split('!').collect::<Vec<_>>();
    if split_content.len() != 5 {
      return None;
    }
    let latest_version = split_content[2].trim().to_owned();
    if latest_version.is_empty() {
      return None;
    }
    let current_version = split_content[3].trim().to_owned();
    if current_version.is_empty() {
      return None;
    }
    let current_release_channel = split_content[4].trim().to_owned();
    if current_release_channel.is_empty() {
      return None;
    }
    let Ok(current_release_channel) =
      ReleaseChannel::deserialize(&current_release_channel)
    else {
      return None;
    };
    let last_prompt = chrono::DateTime::parse_from_rfc3339(split_content[0])
      .map(|dt| dt.with_timezone(&chrono::Utc))
      .ok()?;
    let last_checked = chrono::DateTime::parse_from_rfc3339(split_content[1])
      .map(|dt| dt.with_timezone(&chrono::Utc))
      .ok()?;
    Some(CheckVersionFile {
      last_prompt,
      last_checked,
      current_version,
      latest_version,
      current_release_channel,
    })
  }

  /// Inverse of `parse` — note the on-disk order puts `latest_version`
  /// *before* `current_version`.
  fn serialize(&self) -> String {
    format!(
      "{}!{}!{}!{}!{}",
      self.last_prompt.to_rfc3339(),
      self.last_checked.to_rfc3339(),
      self.latest_version,
      self.current_version,
      self.current_release_channel.serialize()
    )
  }

  /// Returns a copy with the last-prompt timestamp replaced.
  fn with_last_prompt(self, dt: chrono::DateTime<chrono::Utc>) -> Self {
    Self {
      last_prompt: dt,
      ..self
    }
  }
}
#[cfg(test)]
mod test {
use std::cell::RefCell;
use std::rc::Rc;
use test_util::assert_contains;
use super::*;
#[test]
fn test_requested_version() {
let mut upgrade_flags = UpgradeFlags {
dry_run: false,
force: false,
release_candidate: false,
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | true |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/tools/mod.rs | cli/tools/mod.rs | // Copyright 2018-2025 the Deno authors. MIT license.
pub mod bench;
pub mod bundle;
pub mod check;
pub mod clean;
pub mod compile;
pub mod coverage;
pub mod deploy;
pub mod doc;
pub mod fmt;
pub mod info;
pub mod init;
pub mod installer;
pub mod jupyter;
pub mod lint;
pub mod pm;
pub mod publish;
pub mod repl;
pub mod run;
pub mod serve;
pub mod task;
pub mod test;
pub mod upgrade;
pub mod x;
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/tools/x.rs | cli/tools/x.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::collections::BTreeMap;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use deno_cache_dir::file_fetcher::CacheSetting;
use deno_core::anyhow;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_lib::worker::LibWorkerFactoryRoots;
use deno_npm_installer::PackagesAllowedScripts;
use deno_runtime::UnconfiguredRuntime;
use deno_runtime::deno_permissions::PathQueryDescriptor;
use deno_semver::jsr::JsrPackageReqReference;
use deno_semver::npm::NpmPackageReqReference;
use deno_semver::package::PackageReq;
use node_resolver::BinValue;
use crate::args::DenoXShimName;
use crate::args::Flags;
use crate::args::XFlags;
use crate::args::XFlagsKind;
use crate::factory::CliFactory;
use crate::node::CliNodeResolver;
use crate::npm::CliManagedNpmResolver;
use crate::npm::CliNpmResolver;
use crate::tools::pm::CacheTopLevelDepsOptions;
use crate::util::console::ConfirmOptions;
use crate::util::console::confirm;
use crate::util::draw_thread::DrawThread;
/// Collects the npm binary commands available in the local project: from
/// `node_modules/.bin` for byonm, or from the top-level packages of a
/// managed resolution (caching top-level deps on demand when bins fail
/// to resolve).
async fn resolve_local_bins(
  node_resolver: &CliNodeResolver,
  npm_resolver: &CliNpmResolver,
  factory: &CliFactory,
) -> Result<BTreeMap<String, BinValue>, AnyError> {
  match &npm_resolver {
    deno_resolver::npm::NpmResolver::Byonm(npm_resolver) => {
      let node_modules_dir = npm_resolver.root_node_modules_path().unwrap();
      let bin_dir = node_modules_dir.join(".bin");
      Ok(node_resolver.resolve_npm_commands_from_bin_dir(&bin_dir))
    }
    deno_resolver::npm::NpmResolver::Managed(npm_resolver) => {
      let mut all_bins = BTreeMap::new();
      for id in npm_resolver.resolution().top_level_packages() {
        let package_folder =
          npm_resolver.resolve_pkg_folder_from_pkg_id(&id)?;
        let bins = match node_resolver
          .resolve_npm_binary_commands_for_package(&package_folder)
        {
          Ok(bins) => bins,
          Err(_) => {
            // resolution failed, likely because deps aren't cached yet;
            // cache the top-level deps and retry once
            crate::tools::pm::cache_top_level_deps(
              factory,
              None,
              CacheTopLevelDepsOptions {
                lockfile_only: false,
              },
            )
            .await?;
            node_resolver
              .resolve_npm_binary_commands_for_package(&package_folder)?
          }
        };
        for (command, bin_value) in bins {
          all_bins.insert(command.clone(), bin_value.clone());
        }
      }
      Ok(all_bins)
    }
  }
}

/// Runs a JS entrypoint in a main worker and returns its exit code.
/// `npm` selects the npm user agent (for scripts that sniff it).
async fn run_js_file(
  factory: &CliFactory,
  roots: LibWorkerFactoryRoots,
  unconfigured_runtime: Option<UnconfiguredRuntime>,
  main_module: &deno_core::url::Url,
  npm: bool,
) -> Result<i32, AnyError> {
  let cli_options = factory.cli_options()?;
  let preload_modules = cli_options.preload_modules()?;
  let require_modules = cli_options.require_modules()?;
  if npm {
    crate::tools::run::set_npm_user_agent();
  }
  // run an `npm install` if needed before executing
  crate::tools::run::maybe_npm_install(factory).await?;
  let worker_factory = factory
    .create_cli_main_worker_factory_with_roots(roots)
    .await?;
  let mut worker = worker_factory
    .create_main_worker_with_unconfigured_runtime(
      deno_runtime::WorkerExecutionMode::Run,
      main_module.clone(),
      preload_modules,
      require_modules,
      unconfigured_runtime,
    )
    .await
    .inspect_err(|e| deno_telemetry::report_event("boot_failure", e))?;
  let exit_code = worker
    .run()
    .await
    .inspect_err(|e| deno_telemetry::report_event("uncaught_exception", e))?;
  Ok(exit_code)
}
/// Tries to run `command` as a locally installed npm binary. Returns
/// `Ok(None)` when no matching bin exists; otherwise executes it (as a JS
/// file in-process, or a native executable subprocess) and returns its
/// exit code.
async fn maybe_run_local_npm_bin(
  factory: &CliFactory,
  flags: &Flags,
  roots: LibWorkerFactoryRoots,
  unconfigured_runtime: &mut Option<UnconfiguredRuntime>,
  node_resolver: &CliNodeResolver,
  npm_resolver: &CliNpmResolver,
  command: &str,
) -> Result<Option<i32>, AnyError> {
  let permissions = factory.root_permissions_container()?;
  let mut bins =
    resolve_local_bins(node_resolver, npm_resolver, factory).await?;
  let bin_value = if let Some(bin_value) = bins.remove(command) {
    bin_value
  } else if let Some(bin_value) = {
    // for scoped packages (e.g. `@scope/name`) fall back to the bare name
    let command = if command.starts_with("@") && command.contains("/") {
      command.split("/").last().unwrap()
    } else {
      command
    };
    bins.remove(command)
  } {
    bin_value
  } else {
    return Ok(None);
  };
  match bin_value {
    BinValue::JsFile(path_buf) => {
      let path = deno_path_util::url_from_file_path(path_buf.as_ref())?;
      let unconfigured_runtime = unconfigured_runtime.take();
      return run_js_file(factory, roots, unconfigured_runtime, &path, true)
        .await
        .map(Some);
    }
    BinValue::Executable(mut path_buf) => {
      if cfg!(windows) && path_buf.extension().is_none() {
        // prefer cmd shim over sh
        path_buf.set_extension("cmd");
        if !path_buf.exists() {
          // just fall back to original path
          path_buf.set_extension("");
        }
      }
      // running an arbitrary executable requires the run permission
      permissions.check_run(
        &deno_runtime::deno_permissions::RunQueryDescriptor::Path(
          PathQueryDescriptor::new(
            &factory.sys(),
            std::borrow::Cow::Borrowed(path_buf.as_ref()),
          )?,
        ),
        "entrypoint",
      )?;
      let mut child = std::process::Command::new(path_buf)
        .args(&flags.argv)
        .spawn()
        .context("Failed to spawn command")?;
      let status = child.wait()?;
      Ok(Some(status.code().unwrap_or(1)))
    }
  }
}
/// A temp directory used to cache an auto-installed package for `deno x`.
enum XTempDir {
  /// The directory already existed with a completed install (it has a
  /// `deno.lock`), so installation can be skipped.
  Existing(PathBuf),
  /// The directory was freshly created and still needs an install.
  New(PathBuf),
}

impl XTempDir {
  /// Path of the temp directory, regardless of variant.
  fn path(&self) -> &PathBuf {
    // or-pattern instead of two identical match arms
    match self {
      XTempDir::Existing(path) | XTempDir::New(path) => path,
    }
  }
}
fn create_package_temp_dir(
prefix: Option<&str>,
package_req: &PackageReq,
reload: bool,
deno_dir: &Path,
) -> Result<XTempDir, AnyError> {
let mut package_req_folder = String::from(prefix.unwrap_or(""));
package_req_folder.push_str(
&package_req
.to_string()
.replace("/", "__")
.replace(">", "gt")
.replace("<", "lt"),
);
let temp_dir = deno_dir.join("deno_x_cache").join(package_req_folder);
if temp_dir.exists() {
if reload || !temp_dir.join("deno.lock").exists() {
std::fs::remove_dir_all(&temp_dir)?;
} else {
let canonicalized_temp_dir = temp_dir
.canonicalize()
.ok()
.map(deno_path_util::strip_unc_prefix);
let temp_dir = canonicalized_temp_dir.unwrap_or(temp_dir);
return Ok(XTempDir::Existing(temp_dir));
}
}
std::fs::create_dir_all(&temp_dir)?;
let package_json_path = temp_dir.join("package.json");
std::fs::write(&package_json_path, "{}")?;
let deno_json_path = temp_dir.join("deno.json");
std::fs::write(&deno_json_path, r#"{"nodeModulesDir": "auto"}"#)?;
let canonicalized_temp_dir = temp_dir
.canonicalize()
.ok()
.map(deno_path_util::strip_unc_prefix);
let temp_dir = canonicalized_temp_dir.unwrap_or(temp_dir);
Ok(XTempDir::New(temp_dir))
}
/// Writes a shim named after `shim_name` next to the deno executable in
/// `out_dir`.
///
/// On unix, known shim names are symlinked directly to `./deno`, while
/// `DenoXShimName::Other` names get a small `sh` script that re-invokes
/// `deno x`. On windows, a `.cmd` script is written instead.
fn write_shim(
  out_dir: &Path,
  shim_name: DenoXShimName,
) -> Result<(), AnyError> {
  #[cfg(unix)]
  {
    use std::os::unix::fs::PermissionsExt;
    let out_path = out_dir.join(shim_name.name());
    if let DenoXShimName::Other(_) = shim_name {
      std::fs::write(
        &out_path,
        r##"#!/bin/sh
SCRIPT_DIR="$(dirname -- "$(readlink -f -- "$0")")"
exec "$SCRIPT_DIR/deno" x "$@"
"##
        .as_bytes(),
      )?;
      // mark the shim script as executable (rwxr-xr-x)
      let mut permissions = std::fs::metadata(&out_path)?.permissions();
      permissions.set_mode(0o755);
      std::fs::set_permissions(&out_path, permissions)?;
    } else {
      match std::os::unix::fs::symlink("./deno", &out_path) {
        Ok(_) => {}
        Err(e) => match e.kind() {
          // replace a previously written shim/symlink
          std::io::ErrorKind::AlreadyExists => {
            std::fs::remove_file(&out_path)?;
            std::os::unix::fs::symlink("./deno", &out_path)?;
          }
          _ => return Err(e.into()),
        },
      }
    }
  }
  #[cfg(windows)]
  {
    let out_path = out_dir.join(format!("{}.cmd", shim_name.name()));
    std::fs::write(
      out_path,
      r##"@echo off
"%~dp0deno.exe" x %*
exit /b %ERRORLEVEL%
"##,
    )?;
  }
  Ok(())
}
/// Entrypoint for `deno x`: runs a command provided by an npm or jsr
/// package, auto-installing it into a cache directory when needed.
pub async fn run(
  flags: Arc<Flags>,
  x_flags: XFlags,
  mut unconfigured_runtime: Option<UnconfiguredRuntime>,
  roots: LibWorkerFactoryRoots,
) -> Result<i32, AnyError> {
  let command_flags = match x_flags.kind {
    // write a shim next to the current executable and exit
    XFlagsKind::InstallAlias(shim_name) => {
      let exe = std::env::current_exe()?;
      let out_dir = exe.parent().unwrap();
      write_shim(out_dir, shim_name)?;
      return Ok(0);
    }
    XFlagsKind::Command(command) => command,
    // list the locally available bin commands and exit
    XFlagsKind::Print => {
      let factory = CliFactory::from_flags(flags.clone());
      let npm_resolver = factory.npm_resolver().await?;
      let node_resolver = factory.node_resolver().await?;
      let bins =
        resolve_local_bins(node_resolver, npm_resolver, &factory).await?;
      if bins.is_empty() {
        log::info!("No local commands found");
        return Ok(0);
      }
      log::info!("Available (local) commands:");
      for command in bins.keys() {
        log::info!("  {}", command);
      }
      return Ok(0);
    }
  };
  let factory = CliFactory::from_flags(flags.clone());
  let npm_resolver = factory.npm_resolver().await?;
  let node_resolver = factory.node_resolver().await?;
  // first try the command against bins already installed locally
  let result = maybe_run_local_npm_bin(
    &factory,
    &flags,
    roots.clone(),
    &mut unconfigured_runtime,
    node_resolver,
    npm_resolver,
    &command_flags.command,
  )
  .await?;
  if let Some(exit_code) = result {
    return Ok(exit_code);
  }
  let cli_options = factory.cli_options()?;
  // reject things that look like local file paths -- those should go
  // through `deno run` instead
  let is_file_like = command_flags.command.starts_with('.')
    || command_flags.command.starts_with('/')
    || command_flags.command.starts_with('~')
    || command_flags.command.starts_with('\\')
    || Path::new(&command_flags.command).extension().is_some();
  if is_file_like && Path::new(&command_flags.command).is_file() {
    return Err(anyhow::anyhow!(
      "Use 'deno run' to run a local file directly, 'deno x' is intended for running commands from packages."
    ));
  }
  let thing_to_run = match deno_core::url::Url::parse(&command_flags.command) {
    Ok(url) => {
      if url.scheme() == "npm" {
        let req_ref = NpmPackageReqReference::from_specifier(&url)?;
        ReqRefOrUrl::Npm(req_ref)
      } else if url.scheme() == "jsr" {
        let req_ref = JsrPackageReqReference::from_specifier(&url)?;
        ReqRefOrUrl::Jsr(req_ref)
      } else {
        ReqRefOrUrl::Url(url)
      }
    }
    // a bare command (ex. `cowsay`) is treated as an npm package
    Err(deno_core::url::ParseError::RelativeUrlWithoutBase) => {
      let new_command = format!("npm:{}", command_flags.command);
      let req_ref = NpmPackageReqReference::from_str(&new_command)?;
      ReqRefOrUrl::Npm(req_ref)
    }
    Err(e) => {
      return Err(e.into());
    }
  };
  let cache_setting = cli_options.cache_setting();
  let reload = matches!(cache_setting, CacheSetting::ReloadAll);
  match thing_to_run {
    ReqRefOrUrl::Npm(npm_package_req_reference) => {
      let (managed_flags, managed_factory) = autoinstall_package(
        ReqRef::Npm(&npm_package_req_reference),
        &flags,
        reload,
        command_flags.yes,
        &factory.deno_dir()?.root,
      )
      .await?;
      // re-resolve against the freshly installed node_modules dir
      let mut runner_flags = (*managed_flags).clone();
      runner_flags.node_modules_dir =
        Some(deno_config::deno_json::NodeModulesDirMode::Manual);
      let runner_flags = Arc::new(runner_flags);
      let runner_factory = CliFactory::from_flags(runner_flags.clone());
      let runner_node_resolver = runner_factory.node_resolver().await?;
      let runner_npm_resolver = runner_factory.npm_resolver().await?;
      // ex. `npm:package/bin-name` selects a specific bin via sub path
      let bin_name =
        if let Some(sub_path) = npm_package_req_reference.sub_path() {
          sub_path
        } else {
          npm_package_req_reference.req().name.as_str()
        };
      let res = maybe_run_local_npm_bin(
        &runner_factory,
        &runner_flags,
        roots.clone(),
        &mut unconfigured_runtime,
        runner_node_resolver,
        runner_npm_resolver,
        bin_name,
      )
      .await?;
      if let Some(exit_code) = res {
        Ok(exit_code)
      } else {
        let managed_npm_resolver =
          managed_factory.npm_resolver().await?.as_managed().unwrap();
        let bin_commands = bin_commands_for_package(
          runner_node_resolver,
          managed_npm_resolver,
          npm_package_req_reference.req(),
        )?;
        // if the package exposes exactly one bin, run it regardless of
        // its name
        let fallback_name = if bin_commands.len() == 1 {
          Some(bin_commands.keys().next().unwrap())
        } else {
          None
        };
        if let Some(fallback_name) = fallback_name
          && let Some(exit_code) = maybe_run_local_npm_bin(
            &runner_factory,
            &runner_flags,
            roots.clone(),
            &mut unconfigured_runtime,
            runner_node_resolver,
            runner_npm_resolver,
            fallback_name.as_ref(),
          )
          .await?
        {
          return Ok(exit_code);
        }
        Err(anyhow::anyhow!(
          "Unable to choose binary for {}\n  Available bins:\n{}",
          command_flags.command,
          bin_commands
            .keys()
            .map(|k| format!("    {}", k))
            .collect::<Vec<_>>()
            .join("\n")
        ))
      }
    }
    ReqRefOrUrl::Jsr(jsr_package_req_reference) => {
      let (_new_flags, new_factory) = autoinstall_package(
        ReqRef::Jsr(&jsr_package_req_reference),
        &flags,
        reload,
        command_flags.yes,
        &factory.deno_dir()?.root,
      )
      .await?;
      let url =
        deno_core::url::Url::parse(&jsr_package_req_reference.to_string())?;
      run_js_file(&new_factory, roots, None, &url, false).await
    }
    ReqRefOrUrl::Url(url) => {
      // run a plain URL directly with no node_modules setup
      let mut new_flags = (*flags).clone();
      new_flags.node_modules_dir =
        Some(deno_config::deno_json::NodeModulesDirMode::None);
      new_flags.internal.lockfile_skip_write = true;
      let new_flags = Arc::new(new_flags);
      let new_factory = CliFactory::from_flags(new_flags.clone());
      run_js_file(&new_factory, roots, None, &url, false).await
    }
  }
}
/// Looks up the bin commands provided by the installed package that the
/// managed resolver resolves `package_req` to.
fn bin_commands_for_package(
  node_resolver: &CliNodeResolver,
  managed_npm_resolver: &CliManagedNpmResolver,
  package_req: &PackageReq,
) -> Result<BTreeMap<String, BinValue>, AnyError> {
  let package_folder = managed_npm_resolver.resolve_pkg_folder_from_pkg_id(
    &managed_npm_resolver.resolve_pkg_id_from_deno_module_req(package_req)?,
  )?;
  Ok(node_resolver.resolve_npm_binary_commands_for_package(&package_folder)?)
}
/// Ensures `req_ref` is installed into a `deno x` cache directory,
/// prompting for confirmation on first install (unless `yes` is set).
///
/// Returns flags and a factory configured to resolve out of that cache
/// directory.
async fn autoinstall_package(
  req_ref: ReqRef<'_>,
  old_flags: &Flags,
  reload: bool,
  yes: bool,
  deno_dir: &Path,
) -> Result<(Arc<Flags>, CliFactory), AnyError> {
  // points the flags at the temp dir's deno.json and node_modules
  fn make_new_flags(old_flags: &Flags, temp_dir: &Path) -> Arc<Flags> {
    let mut new_flags = (*old_flags).clone();
    new_flags.node_modules_dir =
      Some(deno_config::deno_json::NodeModulesDirMode::Auto);
    let temp_node_modules = temp_dir.join("node_modules");
    new_flags.internal.root_node_modules_dir_override = Some(temp_node_modules);
    new_flags.config_flag = crate::args::ConfigFlag::Path(
      temp_dir.join("deno.json").to_string_lossy().into_owned(),
    );
    new_flags.allow_scripts = PackagesAllowedScripts::All;
    log::debug!("new_flags: {:?}", new_flags);
    Arc::new(new_flags)
  }
  let temp_dir = create_package_temp_dir(
    Some(req_ref.prefix()),
    req_ref.req(),
    reload,
    deno_dir,
  )?;
  let new_flags = make_new_flags(old_flags, temp_dir.path());
  let new_factory = CliFactory::from_flags(new_flags.clone());
  match temp_dir {
    // already installed on a previous run -- nothing more to do
    XTempDir::Existing(_) => Ok((new_flags, new_factory)),
    XTempDir::New(temp_dir) => {
      let confirmed = yes
        || match confirm(ConfirmOptions {
          default: true,
          message: format!("Install {}?", req_ref),
        }) {
          Some(true) => true,
          Some(false) => false,
          // prompting isn't possible (ex. no interactive terminal), so
          // proceed with a warning
          None if !DrawThread::is_supported() => {
            log::warn!(
              "Unable to prompt, installing {} without confirmation",
              req_ref.req()
            );
            true
          }
          None => false,
        };
      if !confirmed {
        return Err(anyhow::anyhow!("Installation rejected"));
      }
      // write a config declaring the requested package as the sole
      // dependency, then install it
      match req_ref {
        ReqRef::Npm(req_ref) => {
          let pkg_json = temp_dir.join("package.json");
          std::fs::write(
            &pkg_json,
            format!(
              "{{\"dependencies\": {{\"{}\": \"{}\"}} }}",
              req_ref.req().name,
              req_ref.req().version_req
            ),
          )?;
        }
        ReqRef::Jsr(req_ref) => {
          let deno_json = temp_dir.join("deno.json");
          std::fs::write(
            &deno_json,
            format!(
              "{{ \"nodeModulesDir\": \"manual\", \"imports\": {{ \"{}\": \"{}\" }} }}",
              req_ref.req().name,
              format_args!("jsr:{}", req_ref.req())
            ),
          )?;
        }
      }
      crate::tools::pm::cache_top_level_deps(
        &new_factory,
        None,
        CacheTopLevelDepsOptions {
          lockfile_only: false,
        },
      )
      .await?;
      if let Some(lockfile) = new_factory.maybe_lockfile().await? {
        lockfile.write_if_changed()?;
      }
      Ok((new_flags, new_factory))
    }
  }
}
/// Borrowed reference to either an npm or jsr package requirement.
#[derive(Debug, Clone, Copy)]
enum ReqRef<'a> {
  Npm(&'a NpmPackageReqReference),
  Jsr(&'a JsrPackageReqReference),
}
impl<'a> std::fmt::Display for ReqRef<'a> {
  /// Displays as the inner reference's own display representation.
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    match self {
      ReqRef::Npm(req) => std::fmt::Display::fmt(req, f),
      ReqRef::Jsr(req) => std::fmt::Display::fmt(req, f),
    }
  }
}
impl<'a> ReqRef<'a> {
  /// The underlying package requirement.
  fn req(&self) -> &PackageReq {
    match *self {
      ReqRef::Npm(req) => req.req(),
      ReqRef::Jsr(req) => req.req(),
    }
  }

  /// Cache folder name prefix for this registry kind.
  fn prefix(&self) -> &str {
    if matches!(self, ReqRef::Npm(_)) {
      "npm-"
    } else {
      "jsr-"
    }
  }
}
/// The parsed form of the `deno x` command argument.
enum ReqRefOrUrl {
  Npm(NpmPackageReqReference),
  Jsr(JsrPackageReqReference),
  Url(deno_core::url::Url),
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/tools/task.rs | cli/tools/task.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::collections::HashMap;
use std::collections::HashSet;
use std::ffi::OsString;
use std::num::NonZeroUsize;
use std::path::Path;
use std::path::PathBuf;
use std::rc::Rc;
use std::sync::Arc;
use console_static_text::ansi::strip_ansi_codes;
use deno_config::workspace::FolderConfigs;
use deno_config::workspace::TaskDefinition;
use deno_config::workspace::TaskOrScript;
use deno_config::workspace::WorkspaceDirectory;
use deno_config::workspace::WorkspaceMemberTasksConfig;
use deno_config::workspace::WorkspaceTasksConfig;
use deno_core::anyhow::Context;
use deno_core::anyhow::anyhow;
use deno_core::anyhow::bail;
use deno_core::error::AnyError;
use deno_core::futures::FutureExt;
use deno_core::futures::StreamExt;
use deno_core::futures::future::LocalBoxFuture;
use deno_core::futures::stream::futures_unordered;
use deno_core::url::Url;
use deno_npm_installer::PackageCaching;
use deno_path_util::normalize_path;
use deno_task_shell::KillSignal;
use deno_task_shell::ShellCommand;
use indexmap::IndexMap;
use indexmap::IndexSet;
use regex::Regex;
use crate::args::CliLockfile;
use crate::args::CliOptions;
use crate::args::Flags;
use crate::args::TaskFlags;
use crate::colors;
use crate::factory::CliFactory;
use crate::node::CliNodeResolver;
use crate::npm::CliNpmInstaller;
use crate::npm::CliNpmResolver;
use crate::task_runner;
use crate::task_runner::run_future_forwarding_signals;
use crate::util::fs::canonicalize_path;
use crate::util::progress_bar::ProgressBar;
/// The tasks matched within a single workspace package, along with the
/// tasks configuration they were resolved from.
#[derive(Debug)]
struct PackageTaskInfo {
  // names of the tasks/scripts that matched the user's task filter
  matched_tasks: Vec<String>,
  tasks_config: WorkspaceTasksConfig,
}
/// Entrypoint for `deno task`: resolves which tasks to run (optionally
/// filtered by package and task name), then executes them along with their
/// dependencies.
pub async fn execute_script(
  flags: Arc<Flags>,
  task_flags: TaskFlags,
) -> Result<i32, AnyError> {
  let factory = CliFactory::from_flags(flags.clone());
  let cli_options = factory.cli_options()?;
  let start_dir = &cli_options.start_dir;
  if !start_dir.has_deno_or_pkg_json() && !task_flags.eval {
    bail!(
      "deno task couldn't find deno.json(c) or package.json. See https://docs.deno.com/go/config"
    )
  }
  // presumably set when `deno task` is re-invoked from an npm lifecycle
  // script; forces package.json-only task resolution -- TODO confirm
  let force_use_pkg_json =
    std::env::var_os(crate::task_runner::USE_PKG_JSON_HIDDEN_ENV_VAR_NAME)
      .map(|v| {
        // always remove so sub processes don't inherit this env var
        #[allow(clippy::undocumented_unsafe_blocks)]
        unsafe {
          std::env::remove_var(
            crate::task_runner::USE_PKG_JSON_HIDDEN_ENV_VAR_NAME,
          )
        };
        v == "1"
      })
      .unwrap_or(false);
  // TODO(bartlomieju): this whole huge if statement should be a separate function, preferably with unit tests
  let (packages_task_configs, name) = if let Some(filter) = &task_flags.filter {
    // Filter based on package name
    let package_regex = package_filter_to_regex(filter)?;
    let Some(task_name) = &task_flags.task else {
      // no task name given -- just list the matching packages' tasks
      print_available_tasks_workspace(
        cli_options,
        &package_regex,
        filter,
        force_use_pkg_json,
        task_flags.recursive,
      )?;
      return Ok(0);
    };
    let task_regex = arg_to_task_name_filter(task_name)?;
    let mut packages_task_info: Vec<PackageTaskInfo> = vec![];
    let workspace = cli_options.workspace();
    for (folder_url, folder) in
      workspace.config_folders_sorted_by_dependencies()
    {
      if !task_flags.recursive
        && !matches_package(folder, force_use_pkg_json, &package_regex)
      {
        continue;
      }
      let member_dir = workspace.resolve_member_dir(folder_url);
      let mut tasks_config = member_dir.to_tasks_config()?;
      if force_use_pkg_json {
        tasks_config = tasks_config.with_only_pkg_json();
      }
      let matched_tasks = match_tasks(&tasks_config, &task_regex);
      if matched_tasks.is_empty() {
        continue;
      }
      packages_task_info.push(PackageTaskInfo {
        matched_tasks,
        tasks_config,
      });
    }
    // Logging every task definition would be too spammy. Pnpm only
    // logs a simple message too.
    // NOTE(review): entries with empty `matched_tasks` were skipped by the
    // `continue` above, so this appears equivalent to
    // `packages_task_info.is_empty()` -- confirm
    if packages_task_info
      .iter()
      .all(|config| config.matched_tasks.is_empty())
    {
      log::warn!(
        "{}",
        colors::red(format!(
          "No matching task or script '{}' found in selected packages.",
          task_name
        ))
      );
      return Ok(0);
    }
    (packages_task_info, task_name)
  } else {
    let mut tasks_config = start_dir.to_tasks_config()?;
    if force_use_pkg_json {
      tasks_config = tasks_config.with_only_pkg_json()
    }
    let Some(task_name) = &task_flags.task else {
      print_available_tasks(
        &mut std::io::stdout(),
        &cli_options.start_dir,
        &tasks_config,
        None,
      )?;
      return Ok(0);
    };
    let task_regex = arg_to_task_name_filter(task_name)?;
    let matched_tasks = match_tasks(&tasks_config, &task_regex);
    (
      vec![PackageTaskInfo {
        tasks_config,
        matched_tasks,
      }],
      task_name,
    )
  };
  let maybe_lockfile = factory.maybe_lockfile().await?.cloned();
  let npm_installer = factory.npm_installer_if_managed().await?;
  let npm_resolver = factory.npm_resolver().await?;
  let node_resolver = factory.node_resolver().await?;
  let progress_bar = factory.text_only_progress_bar();
  let mut env_vars = task_runner::real_env_vars();
  if flags.tunnel {
    env_vars.insert("DENO_CONNECTED".into(), "1".into());
  }
  // DENO_JOBS overrides the parallelism; otherwise use the host's
  // available parallelism, falling back to 2
  let no_of_concurrent_tasks = if let Ok(value) = std::env::var("DENO_JOBS") {
    value.parse::<NonZeroUsize>().ok()
  } else {
    std::thread::available_parallelism().ok()
  }
  .unwrap_or_else(|| NonZeroUsize::new(2).unwrap());
  let task_runner = TaskRunner {
    task_flags: &task_flags,
    npm_installer: npm_installer.map(|n| n.as_ref()),
    npm_resolver,
    node_resolver: node_resolver.as_ref(),
    progress_bar,
    env_vars,
    cli_options,
    maybe_lockfile,
    concurrency: no_of_concurrent_tasks.into(),
  };
  let kill_signal = KillSignal::default();
  run_future_forwarding_signals(kill_signal.clone(), async {
    // --eval: the "task name" holds the script text itself
    if task_flags.eval {
      return task_runner
        .run_deno_task(
          &Url::from_directory_path(cli_options.initial_cwd()).unwrap(),
          None,
          "",
          &TaskDefinition {
            command: Some(task_flags.task.as_ref().unwrap().to_string()),
            dependencies: vec![],
            description: None,
          },
          kill_signal,
          cli_options.argv(),
        )
        .await;
    }
    // run per-package, stopping at the first failing task
    for task_config in &packages_task_configs {
      let exit_code = task_runner
        .run_tasks(task_config, name, &kill_signal, cli_options.argv())
        .await?;
      if exit_code > 0 {
        return Ok(exit_code);
      }
    }
    Ok(0)
  })
  .await
}
/// Arguments for a single shell script invocation.
struct RunSingleOptions<'a> {
  task_name: &'a str,
  // package the task belongs to, when known (used for log output)
  package_name: Option<&'a str>,
  // the shell script text to execute
  script: &'a str,
  cwd: PathBuf,
  custom_commands: HashMap<String, Rc<dyn ShellCommand>>,
  kill_signal: KillSignal,
  argv: &'a [String],
}
/// Shared state for executing a set of matched tasks.
struct TaskRunner<'a> {
  task_flags: &'a TaskFlags,
  // present only when using a managed npm resolver
  npm_installer: Option<&'a CliNpmInstaller>,
  npm_resolver: &'a CliNpmResolver,
  node_resolver: &'a CliNodeResolver,
  progress_bar: &'a ProgressBar,
  env_vars: HashMap<OsString, OsString>,
  cli_options: &'a CliOptions,
  maybe_lockfile: Option<Arc<CliLockfile>>,
  // maximum number of tasks run concurrently
  concurrency: usize,
}
impl<'a> TaskRunner<'a> {
  /// Runs `task_name` (plus its dependency tasks) from the given package
  /// config, returning the exit code of the first failing task or 0 when
  /// everything succeeded. Unknown tasks and dependency cycles are
  /// reported and yield exit code 1 (or an error under `deno run`).
  pub async fn run_tasks(
    &self,
    pkg_tasks_config: &PackageTaskInfo,
    task_name: &str,
    kill_signal: &KillSignal,
    argv: &[String],
  ) -> Result<i32, deno_core::anyhow::Error> {
    match sort_tasks_topo(pkg_tasks_config, task_name) {
      Ok(sorted) => self.run_tasks_in_parallel(sorted, kill_signal, argv).await,
      Err(err) => match err {
        TaskError::NotFound(name) => {
          if self.task_flags.is_run {
            return Err(anyhow!("Task not found: {}", name));
          }
          log::error!("Task not found: {}", name);
          if log::log_enabled!(log::Level::Error) {
            self.print_available_tasks(&pkg_tasks_config.tasks_config)?;
          }
          Ok(1)
        }
        TaskError::TaskDepCycle { path } => {
          log::error!("Task cycle detected: {}", path.join(" -> "));
          Ok(1)
        }
      },
    }
  }

  /// Prints the task listing to stderr (used after "task not found").
  pub fn print_available_tasks(
    &self,
    tasks_config: &WorkspaceTasksConfig,
  ) -> Result<(), std::io::Error> {
    print_available_tasks(
      &mut std::io::stderr(),
      &self.cli_options.start_dir,
      tasks_config,
      None,
    )
  }

  /// Executes topologically sorted tasks, keeping up to
  /// `self.concurrency` of them in flight. A task only starts once all of
  /// its dependencies completed; the first non-zero exit code stops the
  /// run.
  async fn run_tasks_in_parallel(
    &self,
    tasks: Vec<ResolvedTask<'a>>,
    kill_signal: &KillSignal,
    args: &[String],
  ) -> Result<i32, deno_core::anyhow::Error> {
    // tracks which tasks have finished and which are currently running
    struct PendingTasksContext<'a> {
      completed: HashSet<usize>,
      running: HashSet<usize>,
      tasks: &'a [ResolvedTask<'a>],
    }
    impl<'a> PendingTasksContext<'a> {
      fn has_remaining_tasks(&self) -> bool {
        self.completed.len() < self.tasks.len()
      }
      fn mark_complete(&mut self, task: &ResolvedTask) {
        self.running.remove(&task.id);
        self.completed.insert(task.id);
      }
      // returns a future for the next runnable task, or None when no
      // task is currently eligible (not started, dependencies done)
      fn get_next_task<'b>(
        &mut self,
        runner: &'b TaskRunner<'b>,
        kill_signal: &KillSignal,
        argv: &'a [String],
      ) -> Option<
        LocalBoxFuture<'b, Result<(i32, &'a ResolvedTask<'a>), AnyError>>,
      >
      where
        'a: 'b,
      {
        let mut tasks_iter = self.tasks.iter().peekable();
        while let Some(task) = tasks_iter.next() {
          // only the final task in the sorted list (the one the user
          // asked for) receives the CLI args
          let args = if tasks_iter.peek().is_none() {
            argv
          } else {
            &[]
          };
          if self.completed.contains(&task.id)
            || self.running.contains(&task.id)
          {
            continue;
          }
          let should_run = task
            .dependencies
            .iter()
            .all(|dep_id| self.completed.contains(dep_id));
          if !should_run {
            continue;
          }
          self.running.insert(task.id);
          let kill_signal = kill_signal.clone();
          return Some(
            async move {
              match task.task_or_script {
                TaskOrScript::Task { task: def, .. } => {
                  runner
                    .run_deno_task(
                      task.task_or_script.folder_url(),
                      task.task_or_script.package_name(),
                      task.name,
                      def,
                      kill_signal,
                      args,
                    )
                    .await
                }
                TaskOrScript::Script { details, .. } => {
                  runner
                    .run_npm_script(
                      task.task_or_script.folder_url(),
                      task.task_or_script.package_name(),
                      task.name,
                      &details.tasks,
                      kill_signal,
                      args,
                    )
                    .await
                }
              }
              .map(|exit_code| (exit_code, task))
            }
            .boxed_local(),
          );
        }
        None
      }
    }
    let mut context = PendingTasksContext {
      completed: HashSet::with_capacity(tasks.len()),
      running: HashSet::with_capacity(self.concurrency),
      tasks: &tasks,
    };
    let mut queue = futures_unordered::FuturesUnordered::new();
    while context.has_remaining_tasks() {
      // fill the queue up to the concurrency limit
      while queue.len() < self.concurrency {
        match context.get_next_task(self, kill_signal, args) {
          Some(task) => {
            queue.push(task);
          }
          _ => {
            break;
          }
        }
      }
      // If queue is empty at this point, then there are no more tasks in the queue.
      let Some(result) = queue.next().await else {
        // NOTE(review): reaching here while tasks remain would mean none
        // were runnable; the assert below expects an empty task list,
        // which contradicts the loop condition -- confirm intent
        debug_assert_eq!(context.tasks.len(), 0);
        break;
      };
      let (exit_code, name) = result?;
      if exit_code > 0 {
        return Ok(exit_code);
      }
      context.mark_complete(name);
    }
    Ok(0)
  }

  /// Runs a single deno.json task definition.
  pub async fn run_deno_task(
    &self,
    dir_url: &Url,
    package_name: Option<&str>,
    task_name: &str,
    definition: &TaskDefinition,
    kill_signal: KillSignal,
    argv: &'a [String],
  ) -> Result<i32, deno_core::anyhow::Error> {
    // a task may exist purely to declare dependencies
    let Some(command) = &definition.command else {
      self.output_task(
        task_name,
        package_name,
        &colors::gray("(no command)").to_string(),
      );
      return Ok(0);
    };
    self.maybe_npm_install().await?;
    let cwd = match &self.task_flags.cwd {
      Some(path) => canonicalize_path(Path::new(path))
        .context("failed canonicalizing --cwd")?,
      None => {
        normalize_path(Cow::Owned(dir_url.to_file_path().unwrap())).into_owned()
      }
    };
    let custom_commands = task_runner::resolve_custom_commands(
      self.node_resolver,
      self.npm_resolver,
    )?;
    self
      .run_single(RunSingleOptions {
        task_name,
        package_name,
        script: command,
        cwd,
        custom_commands,
        kill_signal,
        argv,
      })
      .await
  }

  /// Runs a package.json script, including its `pre` and `post` variants
  /// when present.
  pub async fn run_npm_script(
    &self,
    dir_url: &Url,
    package_name: Option<&str>,
    task_name: &str,
    scripts: &IndexMap<String, String>,
    kill_signal: KillSignal,
    argv: &[String],
  ) -> Result<i32, deno_core::anyhow::Error> {
    // ensure the npm packages are installed if using a managed resolver
    self.maybe_npm_install().await?;
    let cwd = match &self.task_flags.cwd {
      Some(path) => Cow::Owned(canonicalize_path(Path::new(path))?),
      None => normalize_path(Cow::Owned(dir_url.to_file_path().unwrap())),
    };
    // At this point we already checked if the task name exists in package.json.
    // We can therefore check for "pre" and "post" scripts too, since we're only
    // dealing with package.json here and not deno.json
    let task_names = vec![
      format!("pre{}", task_name),
      task_name.to_string(),
      format!("post{}", task_name),
    ];
    let custom_commands = task_runner::resolve_custom_commands(
      self.node_resolver,
      self.npm_resolver,
    )?;
    for task_name in &task_names {
      if let Some(script) = scripts.get(task_name) {
        let exit_code = self
          .run_single(RunSingleOptions {
            task_name,
            package_name,
            script,
            cwd: cwd.to_path_buf(),
            custom_commands: custom_commands.clone(),
            kill_signal: kill_signal.clone(),
            argv,
          })
          .await?;
        if exit_code > 0 {
          return Ok(exit_code);
        }
      }
    }
    Ok(0)
  }

  /// Logs the task header and executes one script through the task shell,
  /// returning its exit code.
  async fn run_single(
    &self,
    opts: RunSingleOptions<'_>,
  ) -> Result<i32, AnyError> {
    let RunSingleOptions {
      task_name,
      package_name,
      script,
      cwd,
      custom_commands,
      kill_signal,
      argv,
    } = opts;
    self.output_task(
      task_name,
      package_name,
      &task_runner::get_script_with_args(script, argv),
    );
    Ok(
      task_runner::run_task(task_runner::RunTaskOptions {
        task_name,
        script,
        cwd,
        env_vars: self.env_vars.clone(),
        custom_commands,
        init_cwd: self.cli_options.initial_cwd(),
        argv,
        root_node_modules_dir: self.npm_resolver.root_node_modules_path(),
        stdio: None,
        kill_signal,
      })
      .await?
      .exit_code,
    )
  }

  /// When using a managed npm resolver, installs the top-level
  /// package.json dependencies and persists any lockfile changes.
  async fn maybe_npm_install(&self) -> Result<(), AnyError> {
    if let Some(npm_installer) = self.npm_installer {
      self.progress_bar.deferred_keep_initialize_alive();
      npm_installer
        .ensure_top_level_package_json_install()
        .await?;
      npm_installer.cache_packages(PackageCaching::All).await?;
      if let Some(lockfile) = &self.maybe_lockfile {
        lockfile.write_if_changed()?;
      }
    }
    Ok(())
  }

  /// Logs the `Task <name> <script>` header line. The package name is only
  /// shown when running across multiple packages (--recursive/--filter).
  fn output_task(
    &self,
    task_name: &str,
    package_name: Option<&str>,
    script: &str,
  ) {
    log::info!(
      "{} {}{} {}",
      colors::green("Task"),
      colors::cyan(task_name),
      package_name
        .filter(
          |_| self.task_flags.recursive || self.task_flags.filter.is_some()
        )
        .map(|p| format!(" ({})", colors::gray(p)))
        .unwrap_or_default(),
      script,
    );
  }
}
/// Why resolving a task and its dependencies failed.
#[derive(Debug)]
enum TaskError {
  // no task or script with this name exists
  NotFound(String),
  // the chain of task names forming the dependency cycle
  TaskDepCycle { path: Vec<String> },
}
/// One entry in the topologically sorted task list.
struct ResolvedTask<'a> {
  // index of this task in the sorted list
  id: usize,
  name: &'a str,
  task_or_script: TaskOrScript<'a>,
  // ids of tasks that must complete before this one may start
  dependencies: Vec<usize>,
}
/// Resolves every matched task plus its transitive dependencies into a
/// topologically sorted list (dependencies first), detecting unknown task
/// names and dependency cycles.
fn sort_tasks_topo<'a>(
  pkg_task_config: &'a PackageTaskInfo,
  task_name: &str,
) -> Result<Vec<ResolvedTask<'a>>, TaskError> {
  // abstraction over workspace vs member configs so dependency lookups
  // can narrow their search scope (see the root-task case below)
  trait TasksConfig {
    fn task(&self, name: &str) -> Option<(TaskOrScript<'_>, &dyn TasksConfig)>;
  }
  impl TasksConfig for WorkspaceTasksConfig {
    fn task(&self, name: &str) -> Option<(TaskOrScript<'_>, &dyn TasksConfig)> {
      if let Some(task_or_script) = self.member.task(name) {
        return Some((task_or_script, self as &dyn TasksConfig));
      }
      if let Some(task_or_script) = self.root.task(name) {
        // switch to only using the root tasks for the dependencies
        return Some((task_or_script, &self.root as &dyn TasksConfig));
      }
      None
    }
  }
  impl TasksConfig for WorkspaceMemberTasksConfig {
    fn task(&self, name: &str) -> Option<(TaskOrScript<'_>, &dyn TasksConfig)> {
      self
        .task(name)
        .map(|task_or_script| (task_or_script, self as &dyn TasksConfig))
    }
  }
  // depth-first visit returning the task's id in `sorted`; `path` is the
  // chain of (folder, task) pairs walked so far, used for cycle detection
  fn sort_visit<'a>(
    name: &'a str,
    sorted: &mut Vec<ResolvedTask<'a>>,
    mut path: Vec<(&'a Url, &'a str)>,
    tasks_config: &'a dyn TasksConfig,
  ) -> Result<usize, TaskError> {
    let Some((task_or_script, tasks_config)) = tasks_config.task(name) else {
      return Err(TaskError::NotFound(name.to_string()));
    };
    let folder_url = task_or_script.folder_url();
    if let Some(existing_task) = sorted.iter().find(|task| {
      task.name == name && task.task_or_script.folder_url() == folder_url
    }) {
      // already exists
      return Ok(existing_task.id);
    }
    if path.contains(&(folder_url, name)) {
      path.push((folder_url, name));
      return Err(TaskError::TaskDepCycle {
        path: path.iter().map(|(_, s)| s.to_string()).collect(),
      });
    }
    let mut dependencies: Vec<usize> = Vec::new();
    if let TaskOrScript::Task { task, .. } = task_or_script {
      dependencies.reserve(task.dependencies.len());
      for dep in &task.dependencies {
        let mut path = path.clone();
        path.push((folder_url, name));
        dependencies.push(sort_visit(dep, sorted, path, tasks_config)?);
      }
    }
    let id = sorted.len();
    sorted.push(ResolvedTask {
      id,
      name,
      task_or_script,
      dependencies,
    });
    Ok(id)
  }
  let mut sorted: Vec<ResolvedTask<'a>> = vec![];
  for name in &pkg_task_config.matched_tasks {
    sort_visit(name, &mut sorted, Vec::new(), &pkg_task_config.tasks_config)?;
  }
  if sorted.is_empty() {
    return Err(TaskError::NotFound(task_name.to_string()));
  }
  Ok(sorted)
}
fn matches_package(
config: &FolderConfigs,
force_use_pkg_json: bool,
regex: &Regex,
) -> bool {
if !force_use_pkg_json
&& let Some(deno_json) = &config.deno_json
&& let Some(name) = &deno_json.json.name
&& regex.is_match(name)
{
return true;
}
if let Some(package_json) = &config.pkg_json
&& let Some(name) = &package_json.name
&& regex.is_match(name)
{
return true;
}
false
}
/// Prints the available tasks for every workspace folder whose package name
/// matches `package_regex` (or for all folders when `recursive` is set).
fn print_available_tasks_workspace(
  cli_options: &Arc<CliOptions>,
  package_regex: &Regex,
  filter: &str,
  force_use_pkg_json: bool,
  recursive: bool,
) -> Result<(), AnyError> {
  let workspace = cli_options.workspace();
  let mut matched = false;
  for (folder_url, folder) in workspace.config_folders() {
    if !recursive && !matches_package(folder, force_use_pkg_json, package_regex)
    {
      continue;
    }
    matched = true;
    let member_dir = workspace.resolve_member_dir(folder_url);
    let mut tasks_config = member_dir.to_tasks_config()?;
    // prefer the deno.json name, falling back to package.json
    let mut pkg_name = folder
      .deno_json
      .as_ref()
      .and_then(|deno| deno.json.name.clone())
      .or(folder.pkg_json.as_ref().and_then(|pkg| pkg.name.clone()));
    if force_use_pkg_json {
      tasks_config = tasks_config.with_only_pkg_json();
      pkg_name = folder.pkg_json.as_ref().and_then(|pkg| pkg.name.clone());
    }
    print_available_tasks(
      &mut std::io::stdout(),
      &cli_options.start_dir,
      &tasks_config,
      pkg_name,
    )?;
  }
  if !matched {
    log::warn!(
      "{}",
      colors::red(format!(
        "No package name matched the filter '{}' in available 'deno.json' or 'package.json' files.",
        filter
      ))
    );
  }
  Ok(())
}
/// A task entry surfaced for listings and shell completions.
pub struct AvailableTaskDescription {
  // whether the task comes from the workspace root rather than the member
  pub is_root: bool,
  // true for deno.json tasks, false for package.json scripts
  pub is_deno: bool,
  pub name: String,
  pub task: TaskDefinition,
}
/// Resolves the tasks available from the current start directory, for use
/// by shell completions.
pub fn get_available_tasks_for_completion(
  flags: Arc<Flags>,
) -> Result<Vec<AvailableTaskDescription>, AnyError> {
  let factory = crate::factory::CliFactory::from_flags(flags);
  let cli_options = factory.cli_options()?;
  let start_dir = &cli_options.start_dir;
  let tasks_config = start_dir.to_tasks_config()?;
  Ok(get_available_tasks(start_dir, &tasks_config)?)
}
/// Collects the tasks visible from `workspace_dir`, member config first so
/// member tasks shadow root tasks of the same name.
fn get_available_tasks(
  workspace_dir: &Arc<WorkspaceDirectory>,
  tasks_config: &WorkspaceTasksConfig,
) -> Result<Vec<AvailableTaskDescription>, std::io::Error> {
  let is_cwd_root_dir = tasks_config.root.is_empty();
  let mut seen_task_names = HashSet::with_capacity(tasks_config.tasks_count());
  let mut task_descriptions = Vec::with_capacity(tasks_config.tasks_count());
  for config in [&tasks_config.member, &tasks_config.root] {
    if let Some(config) = config.deno_json.as_ref() {
      let is_root = !is_cwd_root_dir
        && config.folder_url
          == *workspace_dir.workspace.root_dir_url().as_ref();
      for (name, definition) in &config.tasks {
        if !seen_task_names.insert(name) {
          continue; // already seen
        }
        task_descriptions.push(AvailableTaskDescription {
          is_root,
          is_deno: true,
          name: name.to_string(),
          task: definition.clone(),
        });
      }
    }
    if let Some(config) = config.package_json.as_ref() {
      let is_root = !is_cwd_root_dir
        && config.folder_url
          == *workspace_dir.workspace.root_dir_url().as_ref();
      for (name, script) in &config.tasks {
        if !seen_task_names.insert(name) {
          continue; // already seen
        }
        // package.json scripts are adapted into plain task definitions
        task_descriptions.push(AvailableTaskDescription {
          is_root,
          is_deno: false,
          name: name.to_string(),
          task: deno_config::deno_json::TaskDefinition {
            command: Some(script.to_string()),
            dependencies: vec![],
            description: None,
          },
        });
      }
    }
  }
  Ok(task_descriptions)
}
/// Writes the "Available tasks" listing (names, descriptions, commands and
/// dependencies) to `writer`.
fn print_available_tasks(
  writer: &mut dyn std::io::Write,
  workspace_dir: &Arc<WorkspaceDirectory>,
  tasks_config: &WorkspaceTasksConfig,
  pkg_name: Option<String>,
) -> Result<(), std::io::Error> {
  let heading = if let Some(s) = pkg_name {
    format!("Available tasks ({}):", colors::cyan(s))
  } else {
    "Available tasks:".to_string()
  };
  writeln!(writer, "{}", colors::green(heading))?;
  if tasks_config.is_empty() {
    writeln!(
      writer,
      "  {}",
      colors::red("No tasks found in configuration file")
    )?;
    return Ok(());
  }
  let task_descriptions = get_available_tasks(workspace_dir, tasks_config)?;
  for desc in task_descriptions {
    writeln!(
      writer,
      "- {}{}",
      colors::cyan(desc.name),
      if desc.is_root {
        if desc.is_deno {
          format!(" {}", colors::italic_gray("(workspace)"))
        } else {
          format!(" {}", colors::italic_gray("(workspace package.json)"))
        }
      } else if desc.is_deno {
        "".to_string()
      } else {
        format!(" {}", colors::italic_gray("(package.json)"))
      }
    )?;
    // description lines are rendered as `// ...` comment lines
    if let Some(description) = &desc.task.description {
      let slash_slash = colors::italic_gray("//");
      for line in description.lines() {
        writeln!(
          writer,
          "    {slash_slash} {}",
          colors::italic_gray(strip_ansi_codes_and_escape_control_chars(line))
        )?;
      }
    }
    if let Some(command) = &desc.task.command {
      writeln!(
        writer,
        "    {}",
        strip_ansi_codes_and_escape_control_chars(command)
      )?;
    };
    if !desc.task.dependencies.is_empty() {
      let dependencies = desc
        .task
        .dependencies
        .into_iter()
        .map(|d| strip_ansi_codes_and_escape_control_chars(&d))
        .collect::<Vec<_>>()
        .join(", ");
      writeln!(
        writer,
        "    {} {}",
        colors::gray("depends on:"),
        colors::cyan(dependencies)
      )?;
    }
  }
  Ok(())
}
/// Sanitizes text for terminal display: drops ANSI escape sequences and
/// renders control characters as visible escapes (`\n`, `\r`, `\t`,
/// `\xNN` for everything else).
fn strip_ansi_codes_and_escape_control_chars(s: &str) -> String {
  let stripped = strip_ansi_codes(s);
  let mut output = String::with_capacity(stripped.len());
  for ch in stripped.chars() {
    match ch {
      '\n' => output.push_str("\\n"),
      '\r' => output.push_str("\\r"),
      '\t' => output.push_str("\\t"),
      ch if ch.is_control() => output.push_str(&format!("\\x{:02x}", ch as u8)),
      ch => output.push(ch),
    }
  }
  output
}
/// Depth-first walk over a task and its transitive dependencies, recording
/// every visited task name in `visited`. The `visited` set also acts as the
/// cycle guard, so dependency cycles terminate.
fn visit_task_and_dependencies(
  tasks_config: &WorkspaceTasksConfig,
  visited: &mut HashSet<String>,
  name: &str,
) {
  // `insert` returns false when the name was already present.
  if !visited.insert(name.to_string()) {
    return;
  }
  if let Some(TaskOrScript::Task { task, .. }) = &tasks_config.task(name) {
    for dependency in &task.dependencies {
      visit_task_and_dependencies(tasks_config, visited, dependency);
    }
  }
}
// A matched task may already be reachable as a dependency of another matched
// task; tracking `visited` ensures each task ends up scheduled only once.
fn match_tasks(
  tasks_config: &WorkspaceTasksConfig,
  task_name_filter: &TaskNameFilter,
) -> Vec<String> {
  let mut visited: HashSet<String> = HashSet::new();
  let mut matched: IndexSet<String> = IndexSet::new();
  // Match tasks in deno.json
  for name in tasks_config.task_names() {
    if !task_name_filter.matches(name) || visited.contains(name) {
      continue;
    }
    matched.insert(name.to_string());
    visit_task_and_dependencies(tasks_config, &mut visited, name);
  }
  matched.into_iter().collect()
}
fn package_filter_to_regex(input: &str) -> Result<regex::Regex, regex::Error> {
let mut regex_str = regex::escape(input);
regex_str = regex_str.replace("\\*", ".*");
Regex::new(®ex_str)
}
/// Parses a task-name CLI argument into a [`TaskNameFilter`].
///
/// Arguments without `*` become exact-match filters (no allocation);
/// arguments containing `*` become a regex anchored at the start of the
/// task name, with `*` matching any substring.
fn arg_to_task_name_filter(
  input: &str,
) -> Result<TaskNameFilter<'_>, AnyError> {
  if !input.contains("*") {
    return Ok(TaskNameFilter::Exact(input));
  }
  let mut regex_str = regex::escape(input);
  regex_str = regex_str.replace("\\*", ".*");
  regex_str = format!("^{}", regex_str);
  // was corrupted to `Regex::new(®ex_str)?` (mojibake of `&regex_str`),
  // which does not compile
  let re = Regex::new(&regex_str)?;
  Ok(TaskNameFilter::Regex(re))
}
/// Filter used to select tasks by name from the command line.
#[derive(Debug)]
enum TaskNameFilter<'s> {
  /// Matches only a task with exactly this name (borrows the CLI arg).
  Exact(&'s str),
  /// Matches task names against a compiled wildcard pattern.
  Regex(regex::Regex),
}
impl TaskNameFilter<'_> {
fn matches(&self, name: &str) -> bool {
match self {
Self::Exact(n) => *n == name,
Self::Regex(re) => re.is_match(name),
}
}
}
#[cfg(test)]
mod tests {
  use super::*;

  // Exact args stay exact; args containing `*` compile to a regex anchored
  // at the start of the name (so "test:*" must not match
  // "update:latest:deno").
  #[test]
  fn test_arg_to_task_name_filter() {
    assert!(matches!(
      arg_to_task_name_filter("test").unwrap(),
      TaskNameFilter::Exact("test")
    ));
    assert!(matches!(
      arg_to_task_name_filter("test-").unwrap(),
      TaskNameFilter::Exact("test-")
    ));
    assert!(matches!(
      arg_to_task_name_filter("test*").unwrap(),
      TaskNameFilter::Regex(_)
    ));
    let filter = arg_to_task_name_filter("test:*").unwrap();
    assert!(filter.matches("test:deno"));
    assert!(filter.matches("test:dprint"));
    assert!(!filter.matches("update:latest:deno"));
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/tools/clean.rs | cli/tools/clean.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::collections::BTreeMap;
use std::collections::HashSet;
use std::ffi::OsString;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use deno_cache_dir::GlobalOrLocalHttpCache;
use deno_core::anyhow::Context;
use deno_core::anyhow::bail;
use deno_core::error::AnyError;
use deno_core::url::Url;
use deno_graph::ModuleGraph;
use deno_graph::packages::PackageSpecifiers;
use deno_npm_installer::graph::NpmCachingStrategy;
use sys_traits::FsCanonicalize;
use sys_traits::FsCreateDirAll;
use walkdir::WalkDir;
use crate::args::CleanFlags;
use crate::args::Flags;
use crate::colors;
use crate::display;
use crate::factory::CliFactory;
use crate::graph_container::CollectSpecifiersOptions;
use crate::graph_container::ModuleGraphContainer;
use crate::graph_container::ModuleGraphUpdatePermit;
use crate::graph_util::BuildGraphRequest;
use crate::graph_util::BuildGraphWithNpmOptions;
use crate::sys::CliSys;
use crate::util::fs::FsCleaner;
use crate::util::progress_bar::ProgressBar;
use crate::util::progress_bar::ProgressBarStyle;
use crate::util::progress_bar::ProgressMessagePrompt;
/// Implements `deno clean`.
///
/// Without `--except` paths, removes the entire DENO_DIR cache directory and
/// logs how much was deleted. With `--except` paths, delegates to
/// [`clean_except`], which keeps everything reachable from those entrypoints.
pub async fn clean(
  flags: Arc<Flags>,
  clean_flags: CleanFlags,
) -> Result<(), AnyError> {
  if !clean_flags.except_paths.is_empty() {
    return clean_except(flags, &clean_flags.except_paths, clean_flags.dry_run)
      .await;
  }
  let factory = CliFactory::from_flags(flags);
  let deno_dir = factory.deno_dir()?;
  if deno_dir.root.exists() {
    // Count entries up front so the progress bar has a total to report.
    let no_of_files = walkdir::WalkDir::new(&deno_dir.root).into_iter().count();
    let progress_bar = ProgressBar::new(ProgressBarStyle::ProgressBars);
    let progress_guard =
      progress_bar.update_with_prompt(ProgressMessagePrompt::Cleaning, "");
    progress_guard.set_total_size(no_of_files.try_into().unwrap());
    let mut cleaner = FsCleaner::new(Some(progress_guard));
    cleaner.rm_rf(&deno_dir.root)?;
    // Drop the guard so that progress bar disappears.
    drop(cleaner.progress_guard);
    log::info!(
      "{} {} {}",
      colors::green("Removed"),
      deno_dir.root.display(),
      colors::gray(&format!(
        "({} files, {})",
        cleaner.files_removed + cleaner.dirs_removed,
        display::human_size(cleaner.bytes_removed as f64)
      ))
    );
  }
  Ok(())
}
/// Outcome of a [`PathTrie`] lookup.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum Found {
  /// The queried path itself was inserted.
  Match,
  /// The queried path is an ancestor of at least one inserted path.
  Prefix,
}
/// One node of [`PathTrie`]; children are indices into the trie's node arena.
#[derive(Clone, Debug, Default)]
struct PathNode {
  // true when an inserted path terminates exactly at this node
  exact: bool,
  // path component -> index into `PathTrie::nodes`
  children: BTreeMap<OsString, usize>,
}
/// A trie over path components, used to decide which cache paths must be
/// kept. Stores nodes in a flat arena (`nodes`) and supports prefix
/// rewrites (e.g. symlinked DENO_DIR -> canonical path) applied before any
/// insert or lookup.
#[derive(Debug)]
struct PathTrie {
  // index of the root node in `nodes` (always 0)
  root: usize,
  // arena of all nodes; children reference siblings by index
  nodes: Vec<PathNode>,
  // (from-prefix, to-prefix) pairs applied to paths before use
  rewrites: Vec<(PathBuf, PathBuf)>,
}
impl PathTrie {
fn new() -> Self {
Self {
root: 0,
nodes: vec![PathNode {
exact: false,
children: Default::default(),
}],
rewrites: vec![],
}
}
fn add_rewrite(&mut self, from: PathBuf, to: PathBuf) {
self.rewrites.push((from, to));
}
fn rewrite<'a>(&self, s: Cow<'a, Path>) -> Cow<'a, Path> {
let normalized = deno_path_util::normalize_path(s);
for (from, to) in &self.rewrites {
if normalized.starts_with(from) {
return Cow::Owned(to.join(normalized.strip_prefix(from).unwrap()));
}
}
normalized
}
fn insert(&mut self, s: PathBuf) {
let normalized = self.rewrite(Cow::Owned(s));
let components = normalized.components().map(|c| c.as_os_str());
let mut node = self.root;
for component in components {
if let Some(nd) = self.nodes[node].children.get(component).copied() {
node = nd;
} else {
let id = self.nodes.len();
self.nodes.push(PathNode::default());
self.nodes[node]
.children
.insert(component.to_os_string(), id);
node = id;
}
}
self.nodes[node].exact = true;
}
fn find(&self, s: &Path) -> Option<Found> {
let normalized = self.rewrite(Cow::Borrowed(s));
let components = normalized.components().map(|c| c.as_os_str());
let mut node = self.root;
for component in components {
if let Some(nd) = self.nodes[node].children.get(component).copied() {
node = nd;
} else {
return None;
}
}
Some(if self.nodes[node].exact {
Found::Match
} else {
Found::Prefix
})
}
}
/// Canonicalizes `root_dir`, creating it first (including parents) when it
/// does not exist yet. Any other I/O error is propagated unchanged.
fn try_get_canonicalized_root_dir<Sys: FsCanonicalize + FsCreateDirAll>(
  sys: &Sys,
  root_dir: &Path,
) -> Result<PathBuf, std::io::Error> {
  match sys.fs_canonicalize(root_dir) {
    Err(err) if err.kind() == std::io::ErrorKind::NotFound => {
      sys.fs_create_dir_all(root_dir)?;
      sys.fs_canonicalize(root_dir)
    }
    other => other,
  }
}
/// Implements `deno clean --except <entrypoints>`.
///
/// Builds the module graph rooted at `entrypoints`, records every cache file
/// that graph needs (remote modules, emitted JS, npm packages, JSR metadata)
/// in a [`PathTrie`], then sweeps DENO_DIR, node_modules and the vendor
/// folder, removing everything not in the trie. With `dry_run` the paths
/// that would be removed are printed instead.
async fn clean_except(
  flags: Arc<Flags>,
  entrypoints: &[String],
  dry_run: bool,
) -> Result<(), AnyError> {
  let mut state = FsCleaner::default();
  let factory = CliFactory::from_flags(flags.clone());
  let sys = factory.sys();
  let options = factory.cli_options()?;
  let main_graph_container = factory.main_module_graph_container().await?;
  let roots = main_graph_container.collect_specifiers(
    entrypoints,
    CollectSpecifiersOptions {
      include_ignored_specified: true,
    },
  )?;
  let http_cache = factory.global_http_cache()?;
  let local_or_global_http_cache = factory.http_cache()?.clone();
  let deno_dir = factory.deno_dir()?.clone();
  // Canonicalize so trie lookups work when DENO_DIR sits behind a symlink;
  // a rewrite from the raw to the canonical root is registered below.
  let deno_dir_root_canonical =
    try_get_canonicalized_root_dir(&sys, &deno_dir.root)
      .unwrap_or(deno_dir.root.clone());
  let mut permit = main_graph_container.acquire_update_permit().await;
  let graph = permit.graph_mut();
  graph.packages = PackageSpecifiers::default();
  let graph_builder = factory.module_graph_builder().await?;
  graph_builder
    .build_graph_with_npm_resolution(
      graph,
      BuildGraphWithNpmOptions {
        request: BuildGraphRequest::Roots(roots.clone()),
        loader: None,
        is_dynamic: false,
        npm_caching: NpmCachingStrategy::Manual,
      },
    )
    .await?;
  let npm_resolver = factory.npm_resolver().await?;
  // Everything reachable from the roots goes into `keep` (module specifiers)
  // and `npm_reqs` (npm package requirements); all other cache entries
  // become removal candidates.
  let mut keep = HashSet::new();
  let mut npm_reqs = Vec::new();
  let mut keep_paths_trie = PathTrie::new();
  if deno_dir_root_canonical != deno_dir.root {
    keep_paths_trie
      .add_rewrite(deno_dir.root.clone(), deno_dir_root_canonical.clone());
  }
  for (_, entry) in graph.walk(
    roots.iter(),
    deno_graph::WalkOptions {
      check_js: deno_graph::CheckJsOption::False,
      follow_dynamic: true,
      kind: graph.graph_kind(),
      prefer_fast_check_graph: false,
    },
  ) {
    match entry {
      deno_graph::ModuleEntryRef::Module(module) => match module {
        deno_graph::Module::Js(js_module) => {
          keep.insert(&js_module.specifier);
        }
        deno_graph::Module::Json(json_module) => {
          keep.insert(&json_module.specifier);
        }
        deno_graph::Module::Wasm(wasm_module) => {
          keep.insert(&wasm_module.specifier);
        }
        deno_graph::Module::Npm(npm_module) => {
          if let Some(managed) = npm_resolver.as_managed() {
            // TODO(dsherret): ok to use for now, but we should use the req in the future
            #[allow(deprecated)]
            let nv = npm_module.nv_reference.nv();
            let id = managed
              .resolution()
              .resolve_pkg_id_from_deno_module(nv)
              .unwrap();
            npm_reqs
              .extend(managed.resolution().resolve_pkg_reqs_from_pkg_id(&id));
          }
        }
        deno_graph::Module::Node(_) => {}
        deno_graph::Module::External(_) => {}
      },
      deno_graph::ModuleEntryRef::Err(_) => {}
      deno_graph::ModuleEntryRef::Redirect(_) => {}
    }
  }
  // Map each kept remote module to both its HTTP cache file and its emitted
  // (transpiled) `.js` file so both survive the sweep.
  for url in &keep {
    if (url.scheme() == "http" || url.scheme() == "https")
      && let Ok(path) = http_cache.local_path_for_url(url)
    {
      keep_paths_trie.insert(path);
    }
    if let Some(path) = deno_dir
      .gen_cache
      .get_cache_filename_with_extension(url, "js")
    {
      let path = deno_dir.gen_cache.location.join(path);
      keep_paths_trie.insert(path);
    }
  }
  let npm_cache = factory.npm_cache()?;
  let snap = npm_resolver.as_managed().unwrap().resolution().snapshot();
  // TODO(nathanwhit): remove once we don't need packuments for creating the snapshot from lockfile
  for package in snap.all_system_packages(&options.npm_system_info()) {
    keep_paths_trie.insert(
      npm_cache
        .package_name_folder(&package.id.nv.name)
        .join("registry.json"),
    );
  }
  // Narrow the snapshot down to only the npm packages the graph requires.
  let snap = snap.subset(&npm_reqs);
  let node_modules_path = npm_resolver.root_node_modules_path();
  let mut node_modules_keep = HashSet::new();
  for package in snap.all_system_packages(&options.npm_system_info()) {
    if node_modules_path.is_some() {
      node_modules_keep.insert(package.get_package_cache_folder_id());
    }
    keep_paths_trie.insert(npm_cache.package_folder_for_id(
      &deno_npm::NpmPackageCacheFolderId {
        nv: package.id.nv.clone(),
        copy_index: package.copy_index,
      },
    ));
  }
  if dry_run {
    #[allow(clippy::print_stderr)]
    {
      eprintln!("would remove:");
    }
  }
  let jsr_url = crate::args::jsr_url();
  add_jsr_meta_paths(graph, &mut keep_paths_trie, jsr_url, &|url| {
    http_cache
      .local_path_for_url(url)
      .map_err(Into::into)
      .map(Some)
  })?;
  // min_depth(2): never consider DENO_DIR itself or its immediate children
  // for removal, only entries nested below them.
  walk_removing(
    &mut state,
    walkdir::WalkDir::new(&deno_dir.root)
      .contents_first(false)
      .min_depth(2),
    &keep_paths_trie,
    &deno_dir.root,
    dry_run,
  )?;
  let mut node_modules_cleaned = FsCleaner::default();
  if let Some(dir) = node_modules_path {
    clean_node_modules(
      &mut node_modules_cleaned,
      &node_modules_keep,
      dir,
      dry_run,
    )?;
  }
  // The vendor folder only needs cleaning when a local HTTP cache is in use.
  let mut vendor_cleaned = FsCleaner::default();
  if let Some(vendor_dir) = options.vendor_dir_path()
    && let GlobalOrLocalHttpCache::Local(cache) = local_or_global_http_cache
  {
    let mut trie = PathTrie::new();
    if deno_dir_root_canonical != deno_dir.root {
      trie.add_rewrite(deno_dir.root.clone(), deno_dir_root_canonical);
    }
    let cache = cache.clone();
    add_jsr_meta_paths(graph, &mut trie, jsr_url, &|url| match cache
      .local_path_for_url(url)
    {
      Ok(path) => Ok(path),
      Err(err) => {
        // Best-effort: an unmappable meta URL just won't be kept.
        log::warn!(
          "failed to get local path for jsr meta url {}: {}",
          url,
          err
        );
        Ok(None)
      }
    })?;
    for url in keep {
      if url.scheme() == "http" || url.scheme() == "https" {
        match cache.local_path_for_url(url) {
          Ok(Some(path)) => {
            trie.insert(path);
          }
          Ok(None) => {}
          Err(err) => {
            log::warn!("failed to get local path for url {}: {}", url, err);
          }
        }
      }
    }
    walk_removing(
      &mut vendor_cleaned,
      WalkDir::new(vendor_dir).contents_first(false),
      &trie,
      vendor_dir,
      dry_run,
    )?;
  }
  if !dry_run {
    log_stats(&state, &deno_dir.root);
    if let Some(dir) = node_modules_path {
      log_stats(&node_modules_cleaned, dir);
    }
    if let Some(dir) = options.vendor_dir_path() {
      log_stats(&vendor_cleaned, dir);
    }
  }
  Ok(())
}
/// Logs a summary of what `cleaner` removed under `dir`; silent when
/// nothing at all was removed.
fn log_stats(cleaner: &FsCleaner, dir: &Path) {
  let removed_anything = cleaner.bytes_removed != 0
    || cleaner.dirs_removed != 0
    || cleaner.files_removed != 0;
  if !removed_anything {
    return;
  }
  log::info!(
    "{} {}",
    colors::green("Removed"),
    colors::gray(&format!(
      "{} files, {} from {}",
      cleaner.files_removed + cleaner.dirs_removed,
      display::human_size(cleaner.bytes_removed as f64),
      dir.display()
    ))
  );
}
/// For every JSR package in the graph, resolves the local cache paths of its
/// `meta.json` and `<version>_meta.json` registry files (via `url_to_path`)
/// and marks them as kept in `path_trie`.
fn add_jsr_meta_paths(
  graph: &ModuleGraph,
  path_trie: &mut PathTrie,
  jsr_url: &Url,
  url_to_path: &dyn Fn(&Url) -> Result<Option<PathBuf>, AnyError>,
) -> Result<(), AnyError> {
  for package in graph.packages.mappings().values() {
    let base_url = match jsr_url.join(&format!("{}/", &package.name)) {
      Ok(url) => url,
      // An unjoinable package name simply isn't kept.
      Err(_) => continue,
    };
    let meta_urls = [
      base_url.join("meta.json").unwrap(),
      base_url
        .join(&format!("{}_meta.json", package.version))
        .unwrap(),
    ];
    for meta_url in &meta_urls {
      if let Some(path) = url_to_path(meta_url)? {
        path_trie.insert(path);
      }
    }
  }
  Ok(())
}
// TODO(nathanwhit): use strategy pattern instead of branching on dry_run
/// Walks `walker` and removes (or, on `dry_run`, prints) every entry not
/// covered by `trie`. A directory that is an exact trie match is kept with
/// its entire subtree (the walker skips into it); a prefix match keeps the
/// directory but continues descending so unneeded children can still be
/// removed.
fn walk_removing(
  cleaner: &mut FsCleaner,
  walker: WalkDir,
  trie: &PathTrie,
  base: &Path,
  dry_run: bool,
) -> Result<(), AnyError> {
  let mut walker = walker.into_iter();
  while let Some(entry) = walker.next() {
    let entry = entry?;
    if let Some(found) = trie.find(entry.path()) {
      if entry.file_type().is_dir() && matches!(found, Found::Match) {
        // exact directory match: whole subtree is kept, no need to descend
        walker.skip_current_dir();
        continue;
      }
      continue;
    }
    // safety net: never delete outside the directory being cleaned
    if !entry.path().starts_with(base) {
      panic!(
        "would have removed a file outside of the base directory: base: {}, path: {}",
        base.display(),
        entry.path().display()
      );
    }
    if entry.file_type().is_dir() {
      if dry_run {
        #[allow(clippy::print_stderr)]
        {
          eprintln!("  {}", entry.path().display());
        }
      } else {
        cleaner.rm_rf(entry.path())?;
      }
      // the subtree was removed (or reported) wholesale; don't recurse
      walker.skip_current_dir();
    } else if dry_run {
      #[allow(clippy::print_stderr)]
      {
        eprintln!("  {}", entry.path().display());
      }
    } else {
      cleaner.remove_file(entry.path(), Some(entry.metadata()?))?;
    }
  }
  Ok(())
}
/// Removes unused packages from a Deno-managed `node_modules` directory.
///
/// Deletes every `node_modules/.deno/<pkg>` store folder whose cache-folder
/// id is not in `keep_pkgs`, then prunes the now-dangling symlinks at the
/// `node_modules/<pkg>` and `node_modules/.deno/node_modules/<pkg>` levels
/// and updates the local setup cache accordingly.
fn clean_node_modules(
  cleaner: &mut FsCleaner,
  keep_pkgs: &HashSet<deno_npm::NpmPackageCacheFolderId>,
  dir: &Path,
  dry_run: bool,
) -> Result<(), AnyError> {
  if !dir.ends_with("node_modules") || !dir.is_dir() {
    bail!("expected a node_modules directory, got: {}", dir.display());
  }
  // only the `.deno` store is managed by deno; nothing to do without it
  let base = dir.join(".deno");
  if !base.exists() {
    return Ok(());
  }
  let keep_names = keep_pkgs
    .iter()
    .map(deno_resolver::npm::get_package_folder_id_folder_name)
    .collect::<HashSet<_>>();
  // remove the actual packages from node_modules/.deno
  let entries = match std::fs::read_dir(&base) {
    Ok(entries) => entries,
    // the store vanished (or isn't a dir) between the check and the read —
    // treat as already clean
    Err(err)
      if matches!(
        err.kind(),
        std::io::ErrorKind::NotFound | std::io::ErrorKind::NotADirectory
      ) =>
    {
      return Ok(());
    }
    Err(err) => {
      return Err(err).with_context(|| {
        format!(
          "failed to clean node_modules directory at {}",
          dir.display()
        )
      });
    }
  };
  // TODO(nathanwhit): this probably shouldn't reach directly into this code
  let mut setup_cache = deno_npm_installer::LocalSetupCache::load(
    CliSys::default(),
    base.join(".setup-cache.bin"),
  );
  for entry in entries {
    let entry = entry?;
    if !entry.file_type()?.is_dir() {
      continue;
    }
    let file_name = entry.file_name();
    let file_name = file_name.to_string_lossy();
    if keep_names.contains(file_name.as_ref()) || file_name == "node_modules" {
      continue;
    } else if dry_run {
      #[allow(clippy::print_stderr)]
      {
        eprintln!("  {}", entry.path().display());
      }
    } else {
      cleaner.rm_rf(&entry.path())?;
    }
  }
  // remove top level symlinks from node_modules/<package> to node_modules/.deno/<package>
  // where the target doesn't exist (because it was removed above)
  clean_node_modules_symlinks(
    cleaner,
    &keep_names,
    dir,
    dry_run,
    &mut |name| {
      setup_cache.remove_root_symlink(name);
    },
  )?;
  // remove symlinks from node_modules/.deno/node_modules/<package> to node_modules/.deno/<package>
  // where the target doesn't exist (because it was removed above)
  clean_node_modules_symlinks(
    cleaner,
    &keep_names,
    &base.join("node_modules"),
    dry_run,
    &mut |name| {
      setup_cache.remove_deno_symlink(name);
    },
  )?;
  if !dry_run {
    setup_cache.save();
  }
  Ok(())
}
// node_modules/.deno/chalk@5.0.1/node_modules/chalk -> chalk@5.0.1
/// Maps a package's real directory inside the `.deno` store back to the
/// versioned store folder name two levels up (`None` if the path is too
/// shallow or has no file name).
fn node_modules_package_actual_dir_to_name(
  path: &Path,
) -> Option<Cow<'_, str>> {
  let versioned_dir = path.parent()?.parent()?;
  let folder_name = versioned_dir.file_name()?;
  Some(folder_name.to_string_lossy())
}
/// Removes symlinks in `dir` whose targets point at `.deno` store packages
/// that are no longer kept, invoking `on_remove` with the versioned folder
/// name so the caller can update its setup cache.
fn clean_node_modules_symlinks(
  cleaner: &mut FsCleaner,
  keep_names: &HashSet<String>,
  dir: &Path,
  dry_run: bool,
  on_remove: &mut dyn FnMut(&str),
) -> Result<(), AnyError> {
  for entry in std::fs::read_dir(dir)? {
    let entry = entry?;
    let ty = entry.file_type()?;
    if ty.is_symlink() {
      // resolve the link and derive the versioned store folder it points at
      let target = std::fs::read_link(entry.path())?;
      let name = node_modules_package_actual_dir_to_name(&target);
      if let Some(name) = name
        && !keep_names.contains(&*name)
      {
        if dry_run {
          #[allow(clippy::print_stderr)]
          {
            eprintln!("  {}", entry.path().display());
          }
        } else {
          on_remove(&name);
          cleaner.remove_file(&entry.path(), None)?;
        }
      }
    }
  }
  Ok(())
}
#[cfg(test)]
mod tests {
  use std::path::Path;

  use super::Found::*;

  // Exercises insert/find plus prefix rewriting on platform-appropriate
  // paths (unix vs. windows drive-letter paths).
  #[test]
  fn path_trie() {
    let mut trie = super::PathTrie::new();
    #[cfg(unix)]
    {
      trie.add_rewrite(
        Path::new("/RewriteMe").into(),
        Path::new("/Actual").into(),
      );
    }
    #[cfg(windows)]
    {
      trie.add_rewrite(
        Path::new("C:/RewriteMe").into(),
        Path::new("C:/Actual").into(),
      );
    }
    let paths = {
      #[cfg(unix)]
      {
        [
          "/foo/bar/deno",
          "/foo/bar/deno/1",
          "/foo/bar/deno/2",
          "/foo/baz",
          "/Actual/thing/quux",
        ]
      }
      #[cfg(windows)]
      {
        [
          r"C:\foo\bar\deno",
          r"C:\foo\bar\deno\1",
          r"C:\foo\bar\deno\2",
          r"C:\foo\baz",
          r"D:\thing",
          r"C:\Actual\thing\quux",
        ]
      }
    };
    // (lookup path, expected result); ancestors of inserted paths report
    // Prefix, inserted paths report Match, everything else None
    let cases = {
      #[cfg(unix)]
      {
        [
          ("/", Some(Prefix)),
          ("/foo", Some(Prefix)),
          ("/foo/", Some(Prefix)),
          ("/foo/bar", Some(Prefix)),
          ("/foo/bar/deno", Some(Match)),
          ("/foo/bar/deno/1", Some(Match)),
          ("/foo/bar/deno/2", Some(Match)),
          ("/foo/baz", Some(Match)),
          ("/fo", None),
          ("/foo/baz/deno", None),
          ("/Actual/thing/quux", Some(Match)),
          ("/RewriteMe/thing/quux", Some(Match)),
          ("/RewriteMe/thing", Some(Prefix)),
        ]
      }
      #[cfg(windows)]
      {
        [
          (r"C:\", Some(Prefix)),
          (r"C:\foo", Some(Prefix)),
          (r"C:\foo\", Some(Prefix)),
          (r"C:\foo\", Some(Prefix)),
          (r"C:\foo\bar", Some(Prefix)),
          (r"C:\foo\bar\deno\1", Some(Match)),
          (r"C:\foo\bar\deno\2", Some(Match)),
          (r"C:\foo\baz", Some(Match)),
          (r"C:\fo", None),
          (r"C:\foo\baz\deno", None),
          (r"D:\", Some(Prefix)),
          (r"E:\", None),
          (r"C:\Actual\thing\quux", Some(Match)),
          (r"C:\RewriteMe\thing\quux", Some(Match)),
          (r"C:\RewriteMe\thing", Some(Prefix)),
        ]
      }
    };
    for pth in paths {
      let path = Path::new(pth);
      trie.insert(path.into());
    }
    for (input, expect) in cases {
      let path = Path::new(input);
      assert_eq!(trie.find(path), expect, "on input: {input}");
    }
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/tools/fmt.rs | cli/tools/fmt.rs | // Copyright 2018-2025 the Deno authors. MIT license.
//! This module provides file formatting utilities using
//! [`dprint-plugin-typescript`](https://github.com/dprint/dprint-plugin-typescript).
//!
//! At the moment it is only consumed using CLI but in
//! the future it can be easily extended to provide
//! the same functions as ops available in JS runtime.
use std::borrow::Cow;
use std::fs;
use std::io::Read;
use std::io::Write;
use std::io::stdin;
use std::io::stdout;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use std::sync::atomic::AtomicUsize;
use std::sync::atomic::Ordering;
use async_trait::async_trait;
use deno_ast::ParsedSource;
use deno_config::glob::FileCollector;
use deno_config::glob::FilePatterns;
use deno_core::anyhow::Context;
use deno_core::anyhow::anyhow;
use deno_core::anyhow::bail;
use deno_core::error::AnyError;
use deno_core::futures;
use deno_core::parking_lot::Mutex;
use deno_core::unsync::spawn_blocking;
use deno_core::url::Url;
use log::debug;
use log::info;
use log::warn;
use crate::args::CliOptions;
use crate::args::Flags;
use crate::args::FmtFlags;
use crate::args::FmtOptions;
use crate::args::FmtOptionsConfig;
use crate::args::ProseWrap;
use crate::args::UnstableFmtOptions;
use crate::cache::CacheDBHash;
use crate::cache::Caches;
use crate::cache::IncrementalCache;
use crate::colors;
use crate::factory::CliFactory;
use crate::sys::CliSys;
use crate::util;
use crate::util::file_watcher;
use crate::util::fs::canonicalize_path;
use crate::util::path::get_extension;
/// Format JavaScript/TypeScript files.
///
/// Three modes, chosen from `fmt_flags`:
/// - stdin: formats piped input once (respecting `--ext`, default `ts`),
/// - watch: re-formats on file changes via the file watcher,
/// - one-shot: formats the resolved file set and exits.
pub async fn format(
  flags: Arc<Flags>,
  fmt_flags: FmtFlags,
) -> Result<(), AnyError> {
  if fmt_flags.is_stdin() {
    let factory = CliFactory::from_flags(flags);
    let cli_options = factory.cli_options()?;
    let start_dir = &cli_options.start_dir;
    let fmt_config = start_dir
      .to_fmt_config(FilePatterns::new_with_base(start_dir.dir_path()))?;
    let fmt_options = FmtOptions::resolve(
      fmt_config,
      cli_options.resolve_config_unstable_fmt_options(),
      &fmt_flags,
    );
    return format_stdin(
      &fmt_flags,
      fmt_options,
      cli_options
        .ext_flag()
        .as_ref()
        .map(|s| s.as_str())
        .unwrap_or("ts"),
    );
  }
  if let Some(watch_flags) = &fmt_flags.watch {
    file_watcher::watch_func(
      flags,
      file_watcher::PrintConfig::new("Fmt", !watch_flags.no_clear_screen),
      move |flags, watcher_communicator, changed_paths| {
        let fmt_flags = fmt_flags.clone();
        watcher_communicator.show_path_changed(changed_paths.clone());
        Ok(async move {
          let factory = CliFactory::from_flags(flags);
          let cli_options = factory.cli_options()?;
          let caches = factory.caches()?;
          let mut paths_with_options_batches =
            resolve_paths_with_options_batches(cli_options, &fmt_flags)?;
          // Narrow each batch down to only the files that changed (or, for
          // --check, all files if any changed).
          for paths_with_options in &mut paths_with_options_batches {
            let _ = watcher_communicator
              .watch_paths(paths_with_options.paths.clone());
            let files = std::mem::take(&mut paths_with_options.paths);
            paths_with_options.paths = if let Some(paths) = &changed_paths {
              if fmt_flags.check {
                // check all files on any changed (https://github.com/denoland/deno/issues/12446)
                if files.iter().any(|path| {
                  canonicalize_path(path)
                    .map(|path| paths.contains(&path))
                    .unwrap_or(false)
                }) {
                  files
                } else {
                  [].to_vec()
                }
              } else {
                files
                  .into_iter()
                  .filter(|path| {
                    canonicalize_path(path)
                      .map(|path| paths.contains(&path))
                      .unwrap_or(false)
                  })
                  .collect::<Vec<_>>()
              }
            } else {
              files
            };
          }
          format_files(
            caches,
            cli_options,
            &fmt_flags,
            paths_with_options_batches,
          )
          .await?;
          Ok(())
        })
      },
    )
    .await?;
  } else {
    let factory = CliFactory::from_flags(flags);
    let cli_options = factory.cli_options()?;
    let caches = factory.caches()?;
    let paths_with_options_batches =
      resolve_paths_with_options_batches(cli_options, &fmt_flags)?;
    return format_files(
      caches,
      cli_options,
      &fmt_flags,
      paths_with_options_batches,
    )
    .await;
  }
  Ok(())
}
/// One workspace member's set of files to format together with the
/// fmt options resolved for that member.
struct PathsWithOptions {
  // base directory the file patterns were resolved against
  base: PathBuf,
  // files collected for this member
  paths: Vec<PathBuf>,
  // fmt options resolved for this member
  options: FmtOptions,
}
/// Resolves, per workspace member, the files to format together with that
/// member's fmt options. Errors when nothing matched unless
/// `--permit-no-files` was passed.
fn resolve_paths_with_options_batches(
  cli_options: &CliOptions,
  fmt_flags: &FmtFlags,
) -> Result<Vec<PathsWithOptions>, AnyError> {
  maybe_show_format_confirmation(cli_options, fmt_flags)?;
  let members_fmt_options =
    cli_options.resolve_fmt_options_for_members(fmt_flags)?;
  let mut batches = Vec::with_capacity(members_fmt_options.len());
  for (_ctx, member_fmt_options) in members_fmt_options {
    let paths =
      collect_fmt_files(cli_options, member_fmt_options.files.clone());
    // members with no matching files contribute no batch
    if paths.is_empty() {
      continue;
    }
    batches.push(PathsWithOptions {
      base: member_fmt_options.files.base.clone(),
      paths,
      options: member_fmt_options,
    });
  }
  if batches.is_empty() && !fmt_flags.permit_no_files {
    return Err(anyhow!("No target files found."));
  }
  Ok(batches)
}
/// Prompts the user before formatting an entire directory when there is no
/// workspace config and no explicit file args; errors when the user
/// declines. No-op under `--check`, with explicit includes, or inside a
/// workspace.
fn maybe_show_format_confirmation(
  cli_options: &CliOptions,
  fmt_flags: &FmtFlags,
) -> Result<(), AnyError> {
  // Clear user intent — no confirmation needed.
  let skip_prompt = fmt_flags.check
    || !fmt_flags.files.include.is_empty()
    || cli_options.workspace().deno_jsons().next().is_some()
    || cli_options.workspace().package_jsons().next().is_some();
  if skip_prompt {
    return Ok(());
  }
  let confirmed =
    util::console::confirm(util::console::ConfirmOptions {
      default: true,
      message: format!(
        "{} It looks like you're not in a workspace. Are you sure you want to format the entire '{}' directory?",
        colors::yellow("Warning"),
        cli_options.initial_cwd().display()
      ),
    })
    .unwrap_or(false);
  if !confirmed {
    bail!(
      "Did not format non-workspace directory. Run again specifying the current directory (ex. `deno fmt .`)"
    )
  }
  Ok(())
}
/// Runs the formatter (check-only or writing) over each batch of files,
/// using an incremental cache keyed by the fmt options so unchanged files
/// are skipped on subsequent runs.
async fn format_files(
  caches: &Arc<Caches>,
  cli_options: &Arc<CliOptions>,
  fmt_flags: &FmtFlags,
  paths_with_options_batches: Vec<PathsWithOptions>,
) -> Result<(), AnyError> {
  // --check only reports diffs; otherwise files are rewritten in place
  let formatter: Box<dyn Formatter> = if fmt_flags.check {
    Box::new(CheckFormatter::default())
  } else {
    Box::new(RealFormatter::default())
  };
  for paths_with_options in paths_with_options_batches {
    log::debug!(
      "Formatting {} file(s) in {}",
      paths_with_options.paths.len(),
      paths_with_options.base.display()
    );
    let fmt_options = paths_with_options.options;
    let paths = paths_with_options.paths;
    // cache key includes the options so changed settings invalidate entries
    let incremental_cache = Arc::new(IncrementalCache::new(
      caches.fmt_incremental_cache_db(),
      CacheDBHash::from_hashable((&fmt_options.options, &fmt_options.unstable)),
      &paths,
    ));
    formatter
      .handle_files(
        paths,
        fmt_options.options,
        fmt_options.unstable,
        incremental_cache.clone(),
        cli_options.ext_flag().clone(),
      )
      .await?;
    // ensure cache writes are flushed before moving to the next batch
    incremental_cache.wait_completion().await;
  }
  formatter.finish()
}
/// Collects the files matching `files` that the formatter supports,
/// ignoring `.git`, `node_modules`, gitignored entries and the vendor
/// folder. Extension-less files are included only when `--ext` was given.
fn collect_fmt_files(
  cli_options: &CliOptions,
  files: FilePatterns,
) -> Vec<PathBuf> {
  let collector = FileCollector::new(|e| {
    let extensionless_with_ext_flag =
      e.path.extension().is_none() && cli_options.ext_flag().is_some();
    is_supported_ext_fmt(e.path) || extensionless_with_ext_flag
  })
  .ignore_git_folder()
  .ignore_node_modules()
  .use_gitignore()
  .set_vendor_folder(cli_options.vendor_dir_path().map(ToOwned::to_owned));
  collector.collect_file_patterns(&CliSys::default(), &files)
}
/// Formats markdown (using <https://github.com/dprint/dprint-plugin-markdown>) and its code blocks
/// (ts/tsx, js/jsx).
///
/// Returns `Ok(None)` when the text is already formatted. Fenced code
/// blocks are dispatched to the matching language formatter based on their
/// (case-insensitive) info-string tag; unknown tags are left untouched.
fn format_markdown(
  file_text: &str,
  fmt_options: &FmtOptionsConfig,
  unstable_options: &UnstableFmtOptions,
) -> Result<Option<String>, AnyError> {
  let markdown_config = get_resolved_markdown_config(fmt_options);
  dprint_plugin_markdown::format_text(
    file_text,
    &markdown_config,
    move |tag, text, line_width| {
      let tag = tag.to_lowercase();
      if matches!(
        tag.as_str(),
        "ts"
          | "tsx"
          | "js"
          | "jsx"
          | "cjs"
          | "cts"
          | "mjs"
          | "mts"
          | "javascript"
          | "typescript"
          | "json"
          | "jsonc"
          | "css"
          | "scss"
          | "sass"
          | "less"
          | "html"
          | "svelte"
          | "vue"
          | "astro"
          | "vto"
          | "njk"
          | "yml"
          | "yaml"
          | "sql"
      ) {
        // It's important to tell dprint proper file extension, otherwise
        // it might parse the file twice.
        let extension = match tag.as_str() {
          "javascript" => "js",
          "typescript" => "ts",
          rest => rest,
        };
        let fake_filename =
          PathBuf::from(format!("deno_fmt_stdin.{extension}"));
        match extension {
          "json" | "jsonc" => {
            // constrain the embedded snippet to the available width
            let mut json_config = get_resolved_json_config(fmt_options);
            json_config.line_width = line_width;
            dprint_plugin_json::format_text(&fake_filename, text, &json_config)
          }
          "css" | "scss" | "sass" | "less" => {
            format_css(&fake_filename, text, fmt_options)
          }
          "html" | "svg" | "xml" => {
            format_html(&fake_filename, text, fmt_options, unstable_options)
          }
          // component frameworks are gated behind the unstable flag
          "svelte" | "vue" | "astro" | "vto" | "njk" => {
            if unstable_options.component {
              format_html(&fake_filename, text, fmt_options, unstable_options)
            } else {
              Ok(None)
            }
          }
          "yml" | "yaml" => format_yaml(text, fmt_options),
          "sql" => {
            if unstable_options.sql {
              format_sql(text, fmt_options)
            } else {
              Ok(None)
            }
          }
          _ => {
            let mut codeblock_config =
              get_resolved_typescript_config(fmt_options);
            codeblock_config.line_width = line_width;
            dprint_plugin_typescript::format_text(
              dprint_plugin_typescript::FormatTextOptions {
                path: &fake_filename,
                extension: None,
                text: text.to_string(),
                config: &codeblock_config,
                external_formatter: Some(
                  &create_external_formatter_for_typescript(unstable_options),
                ),
              },
            )
          }
        }
      } else {
        Ok(None)
      }
    },
  )
}
/// Formats JSON and JSONC using the rules provided by .deno()
/// of configuration builder of <https://github.com/dprint/dprint-plugin-json>.
/// See <https://github.com/dprint/dprint-plugin-json/blob/cfa1052dbfa0b54eb3d814318034cdc514c813d7/src/configuration/builder.rs#L87> for configuration.
///
/// Returns `Ok(None)` when the input is already formatted.
pub fn format_json(
  file_path: &Path,
  file_text: &str,
  fmt_options: &FmtOptionsConfig,
) -> Result<Option<String>, AnyError> {
  let json_config = get_resolved_json_config(fmt_options);
  dprint_plugin_json::format_text(file_path, file_text, &json_config)
}
/// Formats CSS-family files (css/scss/sass/less) with malva, detecting the
/// dialect from the file path. Returns `Ok(None)` when already formatted.
pub fn format_css(
  file_path: &Path,
  file_text: &str,
  fmt_options: &FmtOptionsConfig,
) -> Result<Option<String>, AnyError> {
  let syntax = malva::detect_syntax(file_path).unwrap_or(malva::Syntax::Css);
  let malva_config = get_resolved_malva_config(fmt_options);
  let formatted_str = malva::format_text(file_text, syntax, &malva_config)
    .map_err(AnyError::from)?;
  if formatted_str == file_text {
    Ok(None)
  } else {
    Ok(Some(formatted_str))
  }
}
/// Formats YAML with pretty_yaml, honoring a leading
/// `# deno-fmt-ignore-file` comment (anywhere in the initial run of `#`
/// comment lines). Returns `Ok(None)` when ignored or already formatted.
fn format_yaml(
  file_text: &str,
  fmt_options: &FmtOptionsConfig,
) -> Result<Option<String>, AnyError> {
  let has_ignore_directive = file_text
    .lines()
    .take_while(|line| line.starts_with('#'))
    .any(|line| {
      line
        .strip_prefix('#')
        .unwrap()
        .trim()
        .starts_with("deno-fmt-ignore-file")
    });
  if has_ignore_directive {
    return Ok(None);
  }
  let yaml_config = get_resolved_yaml_config(fmt_options);
  let formatted_str = pretty_yaml::format_text(file_text, &yaml_config)
    .map_err(AnyError::from)?;
  if formatted_str == file_text {
    Ok(None)
  } else {
    Ok(Some(formatted_str))
  }
}
/// Formats HTML-family files (html/svelte/vue/astro/…) with markup_fmt,
/// delegating embedded CSS/JSON/script blocks to the matching formatter via
/// the callback. Returns `Ok(None)` when already formatted; syntax errors
/// are rewritten into a `file:line:column` message when possible.
pub fn format_html(
  file_path: &Path,
  file_text: &str,
  fmt_options: &FmtOptionsConfig,
  unstable_options: &UnstableFmtOptions,
) -> Result<Option<String>, AnyError> {
  let format_result = markup_fmt::format_text(
    file_text,
    markup_fmt::detect_language(file_path)
      .unwrap_or(markup_fmt::Language::Html),
    &get_resolved_markup_fmt_config(fmt_options),
    |text, hints| {
      // synthesize "<original name>.<ext>" so the sub-formatter can detect
      // the embedded language from the path
      let mut file_name =
        file_path.file_name().expect("missing file name").to_owned();
      file_name.push(".");
      file_name.push(hints.ext);
      let path = file_path.with_file_name(file_name);
      match hints.ext {
        "css" | "scss" | "sass" | "less" => {
          let mut malva_config = get_resolved_malva_config(fmt_options);
          malva_config.layout.print_width = hints.print_width;
          if hints.attr {
            // inside an attribute: use the opposite quote style so the
            // embedded CSS doesn't terminate the attribute value
            malva_config.language.quotes =
              if let Some(true) = fmt_options.single_quote {
                malva::config::Quotes::AlwaysDouble
              } else {
                malva::config::Quotes::AlwaysSingle
              };
            malva_config.language.single_line_top_level_declarations = true;
          }
          malva::format_text(
            text,
            malva::detect_syntax(path).unwrap_or(malva::Syntax::Css),
            &malva_config,
          )
          .map(Cow::from)
          .map_err(AnyError::from)
        }
        "json" | "jsonc" => {
          let mut json_config = get_resolved_json_config(fmt_options);
          json_config.line_width = hints.print_width as u32;
          dprint_plugin_json::format_text(&path, text, &json_config).map(
            |formatted| {
              // None from dprint means "already formatted"
              if let Some(formatted) = formatted {
                Cow::from(formatted)
              } else {
                Cow::from(text)
              }
            },
          )
        }
        _ => {
          let mut typescript_config_builder =
            get_typescript_config_builder(fmt_options);
          typescript_config_builder.file_indent_level(hints.indent_level);
          let mut typescript_config = typescript_config_builder.build();
          typescript_config.line_width = hints.print_width as u32;
          dprint_plugin_typescript::format_text(
            dprint_plugin_typescript::FormatTextOptions {
              path: &path,
              extension: None,
              text: text.to_string(),
              config: &typescript_config,
              external_formatter: Some(
                &create_external_formatter_for_typescript(unstable_options),
              ),
            },
          )
          .map(|formatted| {
            if let Some(formatted) = formatted {
              Cow::from(formatted)
            } else {
              Cow::from(text)
            }
          })
        }
      }
    },
  )
  .map_err(|error| match error {
    markup_fmt::FormatError::Syntax(error) => {
      // build a clickable file:line:column message when the path converts
      // to a URL; otherwise fall back to the raw error
      fn inner(
        error: &markup_fmt::SyntaxError,
        file_path: &Path,
      ) -> Option<String> {
        let url = Url::from_file_path(file_path).ok()?;
        let error_msg = format!(
          "Syntax error ({}) at {}:{}:{}\n",
          error.kind,
          url.as_str(),
          error.line,
          error.column
        );
        Some(error_msg)
      }
      if let Some(error_msg) = inner(&error, file_path) {
        AnyError::msg(error_msg)
      } else {
        AnyError::from(error)
      }
    }
    markup_fmt::FormatError::External(errors) => {
      // join all sub-formatter errors, blank-line separated
      let last = errors.len() - 1;
      AnyError::msg(
        errors
          .into_iter()
          .enumerate()
          .map(|(i, error)| {
            if i == last {
              format!("{error}")
            } else {
              format!("{error}\n\n")
            }
          })
          .collect::<String>(),
      )
    }
  });
  let formatted_str = format_result?;
  Ok(if formatted_str == file_text {
    None
  } else {
    Some(formatted_str)
  })
}
/// Builds the callback the TypeScript formatter uses to format embedded
/// code blocks (css/html/xml/svg/sql tagged templates and the like).
///
/// SQL formatting only happens when the unstable `sql` option is enabled;
/// any unrecognized language is left untouched (`Ok(None)`).
fn create_external_formatter_for_typescript(
  unstable_options: &UnstableFmtOptions,
) -> impl Fn(
  &str,
  String,
  &dprint_plugin_typescript::configuration::Configuration,
) -> deno_core::anyhow::Result<Option<String>>
+ use<> {
  // capture only the flag so the closure doesn't borrow the options struct
  let sql_enabled = unstable_options.sql;
  move |lang, text, config| {
    if lang == "css" {
      format_embedded_css(&text, config)
    } else if matches!(lang, "html" | "xml" | "svg") {
      format_embedded_html(lang, &text, config)
    } else if lang == "sql" && sql_enabled {
      format_embedded_sql(&text, config)
    } else {
      // unknown language or disabled feature: leave the block as-is
      Ok(None)
    }
  }
}
/// Formats embedded CSS code blocks in JavaScript and TypeScript.
///
/// This function supports properties only CSS expressions, like:
/// ```css
/// margin: 10px;
/// padding: 10px;
/// ```
///
/// To support this scenario, this function first wraps the text with `a { ... }`,
/// and then strips it off after formatting with malva.
fn format_embedded_css(
  text: &str,
  config: &dprint_plugin_typescript::configuration::Configuration,
) -> deno_core::anyhow::Result<Option<String>> {
  use malva::config;
  // Mirror the surrounding TypeScript layout settings (indent, width,
  // line endings) onto malva's options.
  let options = config::FormatOptions {
    layout: config::LayoutOptions {
      indent_width: config.indent_width as usize,
      use_tabs: config.use_tabs,
      print_width: config.line_width as usize,
      line_break: match config.new_line_kind {
        dprint_core::configuration::NewLineKind::LineFeed => {
          config::LineBreak::Lf
        }
        dprint_core::configuration::NewLineKind::CarriageReturnLineFeed => {
          config::LineBreak::Crlf
        }
        _ => config::LineBreak::Lf,
      },
    },
    language: config::LanguageOptions {
      hex_case: config::HexCase::Lower,
      hex_color_length: None,
      quotes: config::Quotes::AlwaysDouble,
      operator_linebreak: config::OperatorLineBreak::After,
      block_selector_linebreak: config::BlockSelectorLineBreak::Consistent,
      omit_number_leading_zero: false,
      trailing_comma: false,
      format_comments: false,
      align_comments: true,
      linebreak_in_pseudo_parens: false,
      declaration_order: None,
      single_line_block_threshold: None,
      keyframe_selector_notation: None,
      attr_value_quotes: config::AttrValueQuotes::Always,
      prefer_single_line: false,
      selectors_prefer_single_line: None,
      function_args_prefer_single_line: None,
      sass_content_at_rule_prefer_single_line: None,
      sass_include_at_rule_prefer_single_line: None,
      sass_map_prefer_single_line: None,
      sass_module_config_prefer_single_line: None,
      sass_params_prefer_single_line: None,
      less_import_options_prefer_single_line: None,
      less_mixin_args_prefer_single_line: None,
      less_mixin_params_prefer_single_line: None,
      single_line_top_level_declarations: false,
      selector_override_comment_directive: "malva-selector-override".into(),
      ignore_comment_directive: "malva-ignore".into(),
      ignore_file_comment_directive: "malva-ignore-file".into(),
      declaration_order_group_by:
        config::DeclarationOrderGroupBy::NonDeclaration,
    },
  };
  // Wraps the text in a css block of `a { ... ;}`
  // to make it valid css
  // Note: We choose LESS for the syntax because it allows us to use
  // @variable for both property values and mixins, which is convenient
  // for handling placeholders used as both properties and mixins.
  let text = malva::format_text(
    &format!("a{{\n{}\n;}}", text),
    malva::Syntax::Less,
    &options,
  )?;
  // indent width option is disregarded when use tabs is true since
  // only one tab will be inserted when indented once
  // https://malva.netlify.app/config/indent-width.html
  // (hoisted out of the loop below: the value is loop-invariant)
  let indent_width = if config.use_tabs {
    1
  } else {
    config.indent_width as usize
  };
  let mut buf = vec![];
  for (i, l) in text.lines().enumerate() {
    // skip the first line (a {)
    if i == 0 {
      continue;
    }
    // skip the last line (})
    if l.starts_with('}') {
      continue;
    }
    // drop the indentation that the wrapping `a { ... }` block introduced
    let mut chars = l.chars();
    for _ in 0..indent_width {
      chars.next();
    }
    buf.push(chars.as_str());
  }
  // `join` already returns an owned `String`; the previous `.to_string()`
  // here was a redundant extra allocation.
  Ok(Some(buf.join("\n")))
}
/// Formats the embedded HTML code blocks in JavaScript and TypeScript.
///
/// `lang` selects the markup dialect (`"xml"`/`"svg"` are formatted as XML,
/// anything else as HTML). Layout settings are taken from the surrounding
/// TypeScript formatter configuration.
fn format_embedded_html(
  lang: &str,
  text: &str,
  config: &dprint_plugin_typescript::configuration::Configuration,
) -> deno_core::anyhow::Result<Option<String>> {
  use markup_fmt::config;
  let language = match lang {
    "xml" | "svg" => markup_fmt::Language::Xml,
    _ => markup_fmt::Language::Html,
  };
  // Mirror the surrounding TypeScript layout settings (indent, width,
  // line endings) onto markup_fmt's options.
  let options = config::FormatOptions {
    layout: config::LayoutOptions {
      indent_width: config.indent_width as usize,
      use_tabs: config.use_tabs,
      print_width: config.line_width as usize,
      line_break: match config.new_line_kind {
        dprint_core::configuration::NewLineKind::LineFeed => {
          config::LineBreak::Lf
        }
        dprint_core::configuration::NewLineKind::CarriageReturnLineFeed => {
          config::LineBreak::Crlf
        }
        _ => config::LineBreak::Lf,
      },
    },
    language: config::LanguageOptions {
      quotes: config::Quotes::Double,
      format_comments: false,
      script_indent: false,
      html_script_indent: None,
      vue_script_indent: None,
      svelte_script_indent: None,
      astro_script_indent: None,
      style_indent: false,
      html_style_indent: None,
      vue_style_indent: None,
      svelte_style_indent: None,
      astro_style_indent: None,
      closing_bracket_same_line: false,
      closing_tag_line_break_for_empty:
        config::ClosingTagLineBreakForEmpty::Fit,
      max_attrs_per_line: None,
      prefer_attrs_single_line: false,
      single_attr_same_line: false,
      html_normal_self_closing: None,
      html_void_self_closing: None,
      component_self_closing: None,
      svg_self_closing: None,
      mathml_self_closing: None,
      whitespace_sensitivity: config::WhitespaceSensitivity::Css,
      component_whitespace_sensitivity: None,
      doctype_keyword_case: config::DoctypeKeywordCase::Upper,
      v_bind_style: None,
      v_on_style: None,
      v_for_delimiter_style: None,
      v_slot_style: None,
      component_v_slot_style: None,
      default_v_slot_style: None,
      named_v_slot_style: None,
      v_bind_same_name_short_hand: None,
      strict_svelte_attr: false,
      svelte_attr_shorthand: None,
      svelte_directive_shorthand: None,
      astro_attr_shorthand: None,
      script_formatter: None,
      ignore_comment_directive: "deno-fmt-ignore".into(),
      ignore_file_comment_directive: "deno-fmt-ignore-file".into(),
    },
  };
  // nested embedded code blocks are returned unchanged by this callback
  let text = markup_fmt::format_text(text, language, &options, |code, _| {
    Ok::<_, std::convert::Infallible>(code.into())
  })?;
  Ok(Some(text.to_string()))
}
/// Formats the embedded SQL code blocks in JavaScript and TypeScript,
/// reusing the indent settings from the surrounding TypeScript config.
fn format_embedded_sql(
  text: &str,
  config: &dprint_plugin_typescript::configuration::Configuration,
) -> deno_core::anyhow::Result<Option<String>> {
  let use_tabs = config.use_tabs;
  let indent_width = config.indent_width;
  let formatted = format_sql_text(text, use_tabs, indent_width);
  Ok(Some(formatted))
}
/// Formats SQL text with `sqlformat`, uppercasing keywords, leaving one
/// blank line between queries, and ensuring a single trailing newline.
fn format_sql_text(text: &str, use_tabs: bool, indent_width: u8) -> String {
  // choose tab- or space-based indentation for the SQL formatter
  let indent = match use_tabs {
    true => sqlformat::Indent::Tabs,
    false => sqlformat::Indent::Spaces(indent_width),
  };
  let format_options = sqlformat::FormatOptions {
    ignore_case_convert: None,
    indent,
    // leave one blank line between queries.
    lines_between_queries: 2,
    uppercase: Some(true),
  };
  let mut formatted =
    sqlformat::format(text, &sqlformat::QueryParams::None, &format_options);
  // Add single new line to the end of text.
  formatted.push('\n');
  formatted
}
/// Formats a SQL file, honoring a `-- deno-fmt-ignore-file` directive in
/// the leading comment block.
///
/// Returns `Ok(None)` when the file is ignored or already formatted.
pub fn format_sql(
  file_text: &str,
  fmt_options: &FmtOptionsConfig,
) -> Result<Option<String>, AnyError> {
  // only the leading run of `--` comment lines can carry the directive
  let has_ignore_directive = file_text
    .lines()
    .map_while(|line| line.strip_prefix("--"))
    .any(|comment| comment.trim().starts_with("deno-fmt-ignore-file"));
  if has_ignore_directive {
    return Ok(None);
  }
  let formatted = format_sql_text(
    file_text,
    fmt_options.use_tabs.unwrap_or_default(),
    fmt_options.indent_width.unwrap_or(2),
  );
  if formatted == file_text {
    Ok(None)
  } else {
    Ok(Some(formatted))
  }
}
/// Formats a single TS, TSX, JS, JSX, JSONC, JSON, MD, IPYNB or SQL file.
///
/// `ext` overrides extension detection from `file_path`; when neither is
/// available the file is treated as TypeScript. Returns `Ok(Some(text))`
/// when the contents changed (or a UTF-8 BOM should be stripped) and
/// `Ok(None)` when the file is already formatted.
pub fn format_file(
  file_path: &Path,
  file: &FileContents,
  fmt_options: &FmtOptionsConfig,
  unstable_options: &UnstableFmtOptions,
  ext: Option<String>,
) -> Result<Option<String>, AnyError> {
  let ext = ext
    .or_else(|| get_extension(file_path))
    .unwrap_or("ts".to_string());
  // dispatch to the per-language formatter based on extension
  let maybe_result = match ext.as_str() {
    "md" | "mkd" | "mkdn" | "mdwn" | "mdown" | "markdown" => {
      format_markdown(&file.text, fmt_options, unstable_options)?
    }
    "json" | "jsonc" => format_json(file_path, &file.text, fmt_options)?,
    "css" | "scss" | "sass" | "less" => {
      format_css(file_path, &file.text, fmt_options)?
    }
    "html" | "xml" | "svg" => {
      format_html(file_path, &file.text, fmt_options, unstable_options)?
    }
    "svelte" | "vue" | "astro" | "vto" | "njk" | "mustache" => {
      // component frameworks are only formatted behind the unstable flag
      if unstable_options.component {
        format_html(file_path, &file.text, fmt_options, unstable_options)?
      } else {
        None
      }
    }
    "yml" | "yaml" => format_yaml(&file.text, fmt_options)?,
    // notebooks: each cell is formatted by recursing with a pseudo path
    "ipynb" => dprint_plugin_jupyter::format_text(
      &file.text,
      |file_path: &Path, file_text: String| {
        let file = FileContents {
          had_bom: false,
          text: file_text.into(),
        };
        format_file(file_path, &file, fmt_options, unstable_options, None)
      },
    )?,
    "sql" => {
      // SQL formatting is only enabled behind the unstable flag
      if unstable_options.sql {
        format_sql(&file.text, fmt_options)?
      } else {
        None
      }
    }
    _ => {
      // everything else goes through the TypeScript formatter
      let config = get_resolved_typescript_config(fmt_options);
      dprint_plugin_typescript::format_text(
        dprint_plugin_typescript::FormatTextOptions {
          path: file_path,
          extension: Some(&ext),
          text: file.text.to_string(),
          config: &config,
          external_formatter: Some(&create_external_formatter_for_typescript(
            unstable_options,
          )),
        },
      )?
    }
  };
  Ok(match maybe_result {
    Some(result) => Some(result),
    None if file.had_bom => {
      // return back the text without the BOM
      Some(file.text.to_string())
    }
    None => None,
  })
}
/// Formats an already-parsed TS/JS source using the resolved TypeScript
/// configuration and the embedded-code-block formatter.
pub fn format_parsed_source(
  parsed_source: &ParsedSource,
  fmt_options: &FmtOptionsConfig,
  unstable_options: &UnstableFmtOptions,
) -> Result<Option<String>, AnyError> {
  // resolve the config and the embedded-language callback up front
  let typescript_config = get_resolved_typescript_config(fmt_options);
  let external_formatter =
    create_external_formatter_for_typescript(unstable_options);
  dprint_plugin_typescript::format_parsed_source(
    parsed_source,
    &typescript_config,
    Some(&external_formatter),
  )
}
/// Strategy used by `deno fmt` for handling a batch of collected files:
/// either checking that they are formatted (`CheckFormatter`) or writing
/// formatted output back to disk (`RealFormatter`).
#[async_trait]
trait Formatter {
  /// Processes all `paths` (implementations run in parallel), accumulating
  /// results internally; `ext` optionally overrides extension detection.
  async fn handle_files(
    &self,
    paths: Vec<PathBuf>,
    fmt_options: FmtOptionsConfig,
    unstable_options: UnstableFmtOptions,
    incremental_cache: Arc<IncrementalCache>,
    ext: Option<String>,
  ) -> Result<(), AnyError>;

  /// Reports the final outcome once every file has been handled.
  fn finish(&self) -> Result<(), AnyError>;
}
/// `Formatter` for `deno fmt --check`: counts unformatted files and prints
/// diffs instead of writing changes to disk.
#[derive(Default)]
struct CheckFormatter {
  // files whose current contents differ from the formatted output
  not_formatted_files_count: Arc<AtomicUsize>,
  // total number of files inspected
  checked_files_count: Arc<AtomicUsize>,
}
#[async_trait]
impl Formatter for CheckFormatter {
  /// Checks each file in parallel, printing a diff for every file that is
  /// not formatted and counting results for `finish`.
  async fn handle_files(
    &self,
    paths: Vec<PathBuf>,
    fmt_options: FmtOptionsConfig,
    unstable_options: UnstableFmtOptions,
    incremental_cache: Arc<IncrementalCache>,
    ext: Option<String>,
  ) -> Result<(), AnyError> {
    // prevent threads outputting at the same time
    let output_lock = Arc::new(Mutex::new(0));
    run_parallelized(paths, {
      let not_formatted_files_count = self.not_formatted_files_count.clone();
      let checked_files_count = self.checked_files_count.clone();
      move |file_path| {
        checked_files_count.fetch_add(1, Ordering::Relaxed);
        let file = read_file_contents(&file_path)?;
        // skip checking the file if we know it's formatted
        if !file.had_bom
          && incremental_cache.is_file_same(&file_path, &file.text)
        {
          return Ok(());
        }
        match format_file(
          &file_path,
          &file,
          &fmt_options,
          &unstable_options,
          ext.clone(),
        ) {
          // formatter produced different text -> file is not formatted;
          // print a diff under the output lock
          Ok(Some(formatted_text)) => {
            not_formatted_files_count.fetch_add(1, Ordering::Relaxed);
            let _g = output_lock.lock();
            let diff =
              deno_resolver::display::diff(&file.text, &formatted_text);
            info!("");
            info!("{} {}:", colors::bold("from"), file_path.display());
            if file.had_bom {
              info!(" {}", colors::gray("File has strippable UTF-8 BOM."));
            }
            info!("{}", diff);
          }
          Ok(None) => {
            // When checking formatting, only update the incremental cache when
            // the file is the same since we don't bother checking for stable
            // formatting here. Additionally, ensure this is done during check
            // so that CIs that cache the DENO_DIR will get the benefit of
            // incremental formatting
            incremental_cache.update_file(&file_path, &file.text);
          }
          Err(e) => {
            // formatting errors count as "not formatted"; print the error
            // indented under the file name
            not_formatted_files_count.fetch_add(1, Ordering::Relaxed);
            let _g = output_lock.lock();
            warn!("Error checking: {}", file_path.to_string_lossy());
            warn!(
              "{}",
              format!("{e}")
                .split('\n')
                .map(|l| {
                  if l.trim().is_empty() {
                    String::new()
                  } else {
                    format!(" {l}")
                  }
                })
                .collect::<Vec<_>>()
                .join("\n")
            );
          }
        }
        Ok(())
      }
    })
    .await?;
    Ok(())
  }

  /// Reports the totals; returns an error when any file was unformatted so
  /// `deno fmt --check` exits non-zero.
  fn finish(&self) -> Result<(), AnyError> {
    let not_formatted_files_count =
      self.not_formatted_files_count.load(Ordering::Relaxed);
    let checked_files_count = self.checked_files_count.load(Ordering::Relaxed);
    let checked_files_str =
      format!("{} {}", checked_files_count, files_str(checked_files_count));
    if not_formatted_files_count == 0 {
      info!("Checked {}", checked_files_str);
      Ok(())
    } else {
      let not_formatted_files_str = files_str(not_formatted_files_count);
      Err(anyhow!(
        "Found {not_formatted_files_count} not formatted {not_formatted_files_str} in {checked_files_str}",
      ))
    }
  }
}
/// `Formatter` that actually writes formatted output back to disk.
#[derive(Default)]
struct RealFormatter {
  // files whose contents were changed on disk
  formatted_files_count: Arc<AtomicUsize>,
  // files that failed to format
  failed_files_count: Arc<AtomicUsize>,
  // total number of files inspected
  checked_files_count: Arc<AtomicUsize>,
}
#[async_trait]
impl Formatter for RealFormatter {
async fn handle_files(
&self,
paths: Vec<PathBuf>,
fmt_options: FmtOptionsConfig,
unstable_options: UnstableFmtOptions,
incremental_cache: Arc<IncrementalCache>,
ext: Option<String>,
) -> Result<(), AnyError> {
let output_lock = Arc::new(Mutex::new(0)); // prevent threads outputting at the same time
run_parallelized(paths, {
let formatted_files_count = self.formatted_files_count.clone();
let failed_files_count = self.failed_files_count.clone();
let checked_files_count = self.checked_files_count.clone();
move |file_path| {
checked_files_count.fetch_add(1, Ordering::Relaxed);
let file = read_file_contents(&file_path)?;
// skip formatting the file if we know it's formatted
if !file.had_bom
&& incremental_cache.is_file_same(&file_path, &file.text)
{
return Ok(());
}
match format_ensure_stable(&file_path, &file, |file_path, file| {
format_file(
file_path,
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | true |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/tools/publish/diagnostics.rs | cli/tools/publish/diagnostics.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::path::PathBuf;
use std::sync::Arc;
use deno_ast::ParseDiagnostic;
use deno_ast::SourcePos;
use deno_ast::SourceRange;
use deno_ast::SourceRanged;
use deno_ast::SourceTextInfo;
use deno_ast::diagnostics::Diagnostic;
use deno_ast::diagnostics::DiagnosticLevel;
use deno_ast::diagnostics::DiagnosticLocation;
use deno_ast::diagnostics::DiagnosticSnippet;
use deno_ast::diagnostics::DiagnosticSnippetHighlight;
use deno_ast::diagnostics::DiagnosticSnippetHighlightStyle;
use deno_ast::diagnostics::DiagnosticSourcePos;
use deno_ast::diagnostics::DiagnosticSourceRange;
use deno_ast::swc::common::util::take::Take;
use deno_core::anyhow::anyhow;
use deno_core::error::AnyError;
use deno_core::parking_lot::Mutex;
use deno_core::url::Url;
use deno_graph::fast_check::FastCheckDiagnostic;
use deno_semver::Version;
use super::unfurl::SpecifierUnfurlerDiagnostic;
/// Thread-safe accumulator for diagnostics produced while preparing a
/// package for publishing.
#[derive(Clone, Default)]
pub struct PublishDiagnosticsCollector {
  // shared so clones of the collector all push into the same list
  diagnostics: Arc<Mutex<Vec<PublishDiagnostic>>>,
}
impl PublishDiagnosticsCollector {
  /// Prints every collected diagnostic (sorted by code, specifier and
  /// position) and returns an error when any diagnostic is error-level.
  /// Draining the list means a second call prints nothing.
  pub fn print_and_error(&self) -> Result<(), AnyError> {
    let mut diagnostics = self.diagnostics.lock().take();
    diagnostics.sort_by_cached_key(|d| d.sorting_key());
    let mut error_count = 0;
    let mut saw_slow_types_error = false;
    for diagnostic in diagnostics {
      log::error!("{}", diagnostic.display());
      if matches!(diagnostic.level(), DiagnosticLevel::Error) {
        error_count += 1;
      }
      if matches!(diagnostic, PublishDiagnostic::FastCheck(..)) {
        saw_slow_types_error = true;
      }
    }
    if error_count == 0 {
      return Ok(());
    }
    // when slow-types errors were printed, explain why fixing them is
    // worthwhile and how to opt out
    if saw_slow_types_error {
      log::error!(
        "This package contains errors for slow types. Fixing these errors will:\n"
      );
      log::error!(
        " 1. Significantly improve your package users' type checking performance."
      );
      log::error!(" 2. Improve the automatic documentation generation.");
      log::error!(" 3. Enable automatic .d.ts generation for Node.js.");
      log::error!(
        "\nDon't want to bother? You can choose to skip this step by"
      );
      log::error!("providing the --allow-slow-types flag.\n");
    }
    Err(anyhow!(
      "Found {} problem{}",
      error_count,
      if error_count == 1 { "" } else { "s" }
    ))
  }

  /// Whether any collected diagnostic is error-level.
  pub fn has_error(&self) -> bool {
    let diagnostics = self.diagnostics.lock();
    diagnostics
      .iter()
      .any(|d| matches!(d.level(), DiagnosticLevel::Error))
  }

  /// Records a diagnostic for later reporting.
  pub fn push(&self, diagnostic: PublishDiagnostic) {
    self.diagnostics.lock().push(diagnostic);
  }
}
/// Location information for the importing side of a relative import that
/// crosses a package boundary (used by
/// `PublishDiagnostic::RelativePackageImport`).
pub struct RelativePackageImportDiagnosticReferrer {
  // source text of the importing module, used to render snippets
  pub text_info: SourceTextInfo,
  // position of the offending specifier within the importing module
  pub referrer: deno_graph::Range,
}
/// A diagnostic raised while collecting and analyzing a package for
/// publishing. See the `Diagnostic` impl below for each variant's code,
/// message, hint and docs URL.
pub enum PublishDiagnostic {
  /// Slow-types diagnostic produced by fast check analysis.
  FastCheck(FastCheckDiagnostic),
  /// Problem found while unfurling (rewriting) specifiers.
  SpecifierUnfurl(SpecifierUnfurlerDiagnostic),
  /// A package path that can't be published; `message` explains why.
  InvalidPath {
    path: PathBuf,
    message: String,
  },
  /// A path that is a case-insensitive duplicate of another package path.
  DuplicatePath {
    path: PathBuf,
  },
  /// A directory entry of unsupported `kind` (not a file or directory);
  /// reported as a warning and skipped.
  UnsupportedFileType {
    specifier: Url,
    kind: String,
  },
  /// An import of a specifier `kind` that jsr does not allow.
  InvalidExternalImport {
    kind: String,
    imported: Url,
    text_info: SourceTextInfo,
    referrer: deno_graph::Range,
  },
  /// A module in the package's module graph that was excluded from
  /// publishing.
  ExcludedModule {
    specifier: Url,
  },
  /// A `jsr:`/`npm:` dependency without a version constraint.
  MissingConstraint {
    specifier: Url,
    specifier_text: String,
    resolved_version: Option<Version>,
    text_info: SourceTextInfo,
    referrer: deno_graph::Range,
  },
  /// A triple slash directive that modifies globals.
  BannedTripleSlashDirectives {
    specifier: Url,
    text_info: SourceTextInfo,
    range: SourceRange,
  },
  /// A raw import, which has not been stabilized.
  UnstableRawImport {
    text_info: SourceTextInfo,
    referrer: deno_graph::Range,
  },
  /// A relative import that crosses from one package into another.
  RelativePackageImport {
    specifier: Url,
    from_package_name: String,
    to_package_name: String,
    maybe_referrer: Option<RelativePackageImportDiagnosticReferrer>,
  },
  /// A parse error in one of the package's modules.
  SyntaxError(ParseDiagnostic),
  /// The package config has no license field and no license file was found.
  MissingLicense {
    config_specifier: Url,
  },
}
impl PublishDiagnostic {
  /// Key used to order diagnostics deterministically for display:
  /// diagnostic code first, then specifier/path, then source position.
  fn sorting_key(&self) -> (String, String, Option<SourcePos>) {
    let (specifier, source_pos) = match self.location() {
      DiagnosticLocation::Module { specifier } => (specifier.to_string(), None),
      DiagnosticLocation::Path { path } => (path.display().to_string(), None),
      DiagnosticLocation::ModulePosition {
        specifier,
        source_pos,
        text_info,
      } => {
        // normalize the three position representations to a `SourcePos`
        let pos = match source_pos {
          DiagnosticSourcePos::SourcePos(pos) => pos,
          DiagnosticSourcePos::ByteIndex(byte_index) => {
            text_info.range().start() + byte_index
          }
          DiagnosticSourcePos::LineAndCol { line, column } => {
            text_info.line_start(line) + column
          }
        };
        (specifier.to_string(), Some(pos))
      }
    };
    (self.code().to_string(), specifier, source_pos)
  }
}
impl Diagnostic for PublishDiagnostic {
  /// Severity of the diagnostic; most variants are hard errors, while
  /// unsupported file types and JS-entrypoint fast-check issues only warn.
  fn level(&self) -> DiagnosticLevel {
    use PublishDiagnostic::*;
    match self {
      FastCheck(FastCheckDiagnostic::UnsupportedJavaScriptEntrypoint {
        ..
      }) => DiagnosticLevel::Warning,
      FastCheck(_) => DiagnosticLevel::Error,
      SpecifierUnfurl(d) => d.level(),
      InvalidPath { .. } => DiagnosticLevel::Error,
      DuplicatePath { .. } => DiagnosticLevel::Error,
      UnsupportedFileType { .. } => DiagnosticLevel::Warning,
      InvalidExternalImport { .. } => DiagnosticLevel::Error,
      ExcludedModule { .. } => DiagnosticLevel::Error,
      MissingConstraint { .. } => DiagnosticLevel::Error,
      BannedTripleSlashDirectives { .. } => DiagnosticLevel::Error,
      RelativePackageImport { .. } => DiagnosticLevel::Error,
      SyntaxError { .. } => DiagnosticLevel::Error,
      MissingLicense { .. } => DiagnosticLevel::Error,
      UnstableRawImport { .. } => DiagnosticLevel::Error,
    }
  }

  /// Stable kebab-case code identifying the diagnostic kind; also used as
  /// the primary sort key in `sorting_key`.
  fn code(&self) -> Cow<'_, str> {
    use PublishDiagnostic::*;
    match &self {
      FastCheck(diagnostic) => diagnostic.code(),
      SpecifierUnfurl(diagnostic) => diagnostic.code(),
      InvalidPath { .. } => Cow::Borrowed("invalid-path"),
      DuplicatePath { .. } => Cow::Borrowed("case-insensitive-duplicate-path"),
      UnsupportedFileType { .. } => Cow::Borrowed("unsupported-file-type"),
      InvalidExternalImport { .. } => Cow::Borrowed("invalid-external-import"),
      ExcludedModule { .. } => Cow::Borrowed("excluded-module"),
      MissingConstraint { .. } => Cow::Borrowed("missing-constraint"),
      BannedTripleSlashDirectives { .. } => {
        Cow::Borrowed("banned-triple-slash-directives")
      }
      RelativePackageImport { .. } => Cow::Borrowed("relative-package-import"),
      SyntaxError { .. } => Cow::Borrowed("syntax-error"),
      MissingLicense { .. } => Cow::Borrowed("missing-license"),
      UnstableRawImport { .. } => Cow::Borrowed("unstable-raw-import"),
    }
  }

  /// Primary human-readable message for the diagnostic.
  fn message(&self) -> Cow<'_, str> {
    use PublishDiagnostic::*;
    match &self {
      FastCheck(diagnostic) => diagnostic.message(),
      SpecifierUnfurl(diagnostic) => diagnostic.message(),
      InvalidPath { message, .. } => Cow::Borrowed(message.as_str()),
      DuplicatePath { .. } => Cow::Borrowed(
        "package path is a case insensitive duplicate of another path in the package",
      ),
      UnsupportedFileType { kind, .. } => {
        Cow::Owned(format!("unsupported file type '{kind}'"))
      }
      InvalidExternalImport { kind, .. } => {
        Cow::Owned(format!("invalid import to a {kind} specifier"))
      }
      ExcludedModule { .. } => Cow::Borrowed(
        "module in package's module graph was excluded from publishing",
      ),
      MissingConstraint { specifier, .. } => Cow::Owned(format!(
        "specifier '{}' is missing a version constraint",
        specifier
      )),
      BannedTripleSlashDirectives { .. } => Cow::Borrowed(
        "triple slash directives that modify globals are not allowed",
      ),
      RelativePackageImport {
        from_package_name,
        to_package_name,
        ..
      } => Cow::Owned(format!(
        "invalid relative import from the '{}' package into the '{}' package",
        from_package_name, to_package_name,
      )),
      SyntaxError(diagnostic) => diagnostic.message(),
      MissingLicense { .. } => Cow::Borrowed("missing license field or file"),
      UnstableRawImport { .. } => {
        Cow::Borrowed("raw imports have not been stabilized")
      }
    }
  }

  /// Where the diagnostic points: a whole module, a filesystem path, or a
  /// specific position within a module.
  fn location(&self) -> DiagnosticLocation<'_> {
    // converts a referrer's line/column range into a module position
    fn from_referrer_range<'a>(
      referrer: &'a deno_graph::Range,
      text_info: &'a SourceTextInfo,
    ) -> DiagnosticLocation<'a> {
      DiagnosticLocation::ModulePosition {
        specifier: Cow::Borrowed(&referrer.specifier),
        text_info: Cow::Borrowed(text_info),
        source_pos: DiagnosticSourcePos::LineAndCol {
          line: referrer.range.start.line,
          column: referrer.range.start.character,
        },
      }
    }
    use PublishDiagnostic::*;
    match &self {
      FastCheck(diagnostic) => diagnostic.location(),
      SpecifierUnfurl(diagnostic) => diagnostic.location(),
      InvalidPath { path, .. } => {
        DiagnosticLocation::Path { path: path.clone() }
      }
      DuplicatePath { path, .. } => {
        DiagnosticLocation::Path { path: path.clone() }
      }
      UnsupportedFileType { specifier, .. } => DiagnosticLocation::Module {
        specifier: Cow::Borrowed(specifier),
      },
      InvalidExternalImport {
        referrer,
        text_info,
        ..
      }
      | UnstableRawImport {
        referrer,
        text_info,
      } => from_referrer_range(referrer, text_info),
      ExcludedModule { specifier } => DiagnosticLocation::Module {
        specifier: Cow::Borrowed(specifier),
      },
      MissingConstraint {
        referrer,
        text_info,
        ..
      } => from_referrer_range(referrer, text_info),
      BannedTripleSlashDirectives {
        specifier,
        range,
        text_info,
      } => DiagnosticLocation::ModulePosition {
        specifier: Cow::Borrowed(specifier),
        source_pos: DiagnosticSourcePos::SourcePos(range.start),
        text_info: Cow::Borrowed(text_info),
      },
      RelativePackageImport {
        specifier,
        maybe_referrer,
        ..
      } => match maybe_referrer {
        // point at the importing module when we know it, otherwise at the
        // imported module itself
        Some(referrer) => {
          from_referrer_range(&referrer.referrer, &referrer.text_info)
        }
        None => DiagnosticLocation::Module {
          specifier: Cow::Borrowed(specifier),
        },
      },
      SyntaxError(diagnostic) => diagnostic.location(),
      MissingLicense { config_specifier } => DiagnosticLocation::Module {
        specifier: Cow::Borrowed(config_specifier),
      },
    }
  }

  /// Source snippet with the offending range highlighted, when available.
  fn snippet(&self) -> Option<DiagnosticSnippet<'_>> {
    // builds a snippet highlighting the referrer's specifier range
    fn from_range<'a>(
      text_info: &'a SourceTextInfo,
      referrer: &'a deno_graph::Range,
    ) -> Option<DiagnosticSnippet<'a>> {
      if referrer.range.start.line == 0 && referrer.range.start.character == 0 {
        return None; // no range, probably a jsxImportSource import
      }
      Some(DiagnosticSnippet {
        source: Cow::Borrowed(text_info),
        highlights: vec![DiagnosticSnippetHighlight {
          style: DiagnosticSnippetHighlightStyle::Error,
          range: DiagnosticSourceRange {
            start: DiagnosticSourcePos::LineAndCol {
              line: referrer.range.start.line,
              column: referrer.range.start.character,
            },
            end: DiagnosticSourcePos::LineAndCol {
              line: referrer.range.end.line,
              column: referrer.range.end.character,
            },
          },
          description: Some("the specifier".into()),
        }],
      })
    }
    use PublishDiagnostic::*;
    match &self {
      FastCheck(d) => d.snippet(),
      SpecifierUnfurl(d) => d.snippet(),
      InvalidPath { .. }
      | DuplicatePath { .. }
      | UnsupportedFileType { .. } => None,
      InvalidExternalImport {
        referrer,
        text_info,
        ..
      }
      | UnstableRawImport {
        referrer,
        text_info,
        ..
      } => from_range(text_info, referrer),
      ExcludedModule { .. } => None,
      MissingConstraint {
        referrer,
        text_info,
        ..
      } => from_range(text_info, referrer),
      BannedTripleSlashDirectives {
        range, text_info, ..
      } => Some(DiagnosticSnippet {
        source: Cow::Borrowed(text_info),
        highlights: vec![DiagnosticSnippetHighlight {
          style: DiagnosticSnippetHighlightStyle::Error,
          range: DiagnosticSourceRange {
            start: DiagnosticSourcePos::SourcePos(range.start),
            end: DiagnosticSourcePos::SourcePos(range.end),
          },
          description: Some("the triple slash directive".into()),
        }],
      }),
      RelativePackageImport { maybe_referrer, .. } => {
        maybe_referrer.as_ref().and_then(|referrer| {
          from_range(&referrer.text_info, &referrer.referrer)
        })
      }
      SyntaxError(diagnostic) => diagnostic.snippet(),
      MissingLicense { .. } => None,
    }
  }

  /// Suggested action for resolving the diagnostic.
  fn hint(&self) -> Option<Cow<'_, str>> {
    use PublishDiagnostic::*;
    match &self {
      FastCheck(diagnostic) => diagnostic.hint(),
      SpecifierUnfurl(d) => d.hint(),
      InvalidPath { .. } => Some(Cow::Borrowed(
        "rename or remove the file, or add it to 'publish.exclude' in the config file",
      )),
      DuplicatePath { .. } => Some(Cow::Borrowed("rename or remove the file")),
      UnsupportedFileType { .. } => Some(Cow::Borrowed(
        "remove the file, or add it to 'publish.exclude' in the config file",
      )),
      InvalidExternalImport { .. } => Some(Cow::Borrowed(
        "replace this import with one from jsr or npm, or vendor the dependency into your package",
      )),
      ExcludedModule { .. } => Some(Cow::Borrowed(
        "remove the module from 'exclude' and/or 'publish.exclude' in the config file or use 'publish.exclude' with a negative glob to unexclude from gitignore",
      )),
      MissingConstraint { specifier_text, .. } => Some(Cow::Borrowed(
        if specifier_text.starts_with("jsr:")
          || specifier_text.starts_with("npm:")
        {
          "specify a version constraint for the specifier"
        } else {
          "specify a version constraint for the specifier in the import map"
        },
      )),
      BannedTripleSlashDirectives { .. } => {
        Some(Cow::Borrowed("remove the triple slash directive"))
      }
      RelativePackageImport {
        to_package_name, ..
      } => Some(Cow::Owned(format!(
        "replace the relative import with a bare specifier that imports from the other package by name and optionally an export (ex. \"{}\")",
        to_package_name
      ))),
      SyntaxError(diagnostic) => diagnostic.hint(),
      MissingLicense { .. } => Some(Cow::Borrowed(
        "add a \"license\" field. Alternatively, add a LICENSE file to the package and ensure it is not ignored from being published",
      )),
      UnstableRawImport { .. } => Some(Cow::Borrowed(
        "for the time being, embed the data directly into a JavaScript file (ex. as encoded base64 text)",
      )),
    }
  }

  /// A suggested replacement snippet; currently only produced for external
  /// imports that have a known jsr alternative.
  fn snippet_fixed(&self) -> Option<DiagnosticSnippet<'_>> {
    use PublishDiagnostic::*;
    match &self {
      InvalidExternalImport { imported, .. } => {
        match crate::registry::get_jsr_alternative(imported) {
          Some(replacement) => {
            // highlight the entire (single-line) replacement specifier
            let replacement = SourceTextInfo::new(replacement.into());
            let start = replacement.line_start(0);
            let end = replacement.line_end(0);
            Some(DiagnosticSnippet {
              source: Cow::Owned(replacement),
              highlights: vec![DiagnosticSnippetHighlight {
                style: DiagnosticSnippetHighlightStyle::Hint,
                range: DiagnosticSourceRange {
                  start: DiagnosticSourcePos::SourcePos(start),
                  end: DiagnosticSourcePos::SourcePos(end),
                },
                description: Some("try this specifier".into()),
              }],
            })
          }
          None => None,
        }
      }
      SyntaxError(d) => d.snippet_fixed(),
      SpecifierUnfurl(d) => d.snippet_fixed(),
      FastCheck(_)
      | RelativePackageImport { .. }
      | InvalidPath { .. }
      | DuplicatePath { .. }
      | UnsupportedFileType { .. }
      | ExcludedModule { .. }
      | MissingConstraint { .. }
      | BannedTripleSlashDirectives { .. }
      | MissingLicense { .. }
      | UnstableRawImport { .. } => None,
    }
  }

  /// Additional informational lines printed below the message.
  fn info(&self) -> Cow<'_, [Cow<'_, str>]> {
    use PublishDiagnostic::*;
    match &self {
      FastCheck(d) => d.info(),
      SpecifierUnfurl(d) => d.info(),
      InvalidPath { .. } => Cow::Borrowed(&[Cow::Borrowed(
        "to portably support all platforms, including windows, the allowed characters in package paths are limited",
      )]),
      DuplicatePath { .. } => Cow::Borrowed(&[Cow::Borrowed(
        "to support case insensitive file systems, no two package paths may differ only by case",
      )]),
      UnsupportedFileType { .. } => Cow::Borrowed(&[
        Cow::Borrowed("only files and directories are supported"),
        Cow::Borrowed("the file was ignored and will not be published"),
      ]),
      InvalidExternalImport { imported, .. } => Cow::Owned(vec![
        Cow::Owned(format!("the import was resolved to '{}'", imported)),
        Cow::Borrowed("this specifier is not allowed to be imported on jsr"),
        Cow::Borrowed(
          "jsr only supports importing `jsr:`, `npm:`, `data:`, `bun:`, and `node:` specifiers",
        ),
      ]),
      ExcludedModule { .. } => Cow::Owned(vec![Cow::Borrowed(
        "excluded modules referenced via a package export will error at runtime due to not existing in the package",
      )]),
      MissingConstraint {
        resolved_version, ..
      } => Cow::Owned(vec![
        Cow::Owned(format!(
          "the specifier resolved to version {} today, but will resolve to a different",
          resolved_version
            .as_ref()
            .map(|v| v.to_string())
            .unwrap_or_else(|| "<unresolved>".to_string())
        )),
        Cow::Borrowed(
          "major version if one is published in the future and potentially break",
        ),
      ]),
      BannedTripleSlashDirectives { .. } => Cow::Borrowed(&[
        Cow::Borrowed(
          "instead instruct the user of your package to specify these directives",
        ),
        Cow::Borrowed("or set their 'lib' compiler option appropriately"),
      ]),
      RelativePackageImport { .. } => Cow::Borrowed(&[Cow::Borrowed(
        "importing modules in another package using a relative import won't work once the packages are published",
      )]),
      SyntaxError(diagnostic) => diagnostic.info(),
      MissingLicense { .. } => Cow::Borrowed(&[]),
      UnstableRawImport { .. } => Cow::Borrowed(&[]),
    }
  }

  /// Link to documentation for this diagnostic code.
  fn docs_url(&self) -> Option<Cow<'_, str>> {
    use PublishDiagnostic::*;
    match &self {
      FastCheck(d) => d.docs_url(),
      SpecifierUnfurl(d) => d.docs_url(),
      InvalidPath { .. } => {
        Some(Cow::Borrowed("https://jsr.io/go/invalid-path"))
      }
      DuplicatePath { .. } => Some(Cow::Borrowed(
        "https://jsr.io/go/case-insensitive-duplicate-path",
      )),
      UnsupportedFileType { .. } => {
        Some(Cow::Borrowed("https://jsr.io/go/unsupported-file-type"))
      }
      InvalidExternalImport { .. } => {
        Some(Cow::Borrowed("https://jsr.io/go/invalid-external-import"))
      }
      ExcludedModule { .. } => {
        Some(Cow::Borrowed("https://jsr.io/go/excluded-module"))
      }
      MissingConstraint { .. } => {
        Some(Cow::Borrowed("https://jsr.io/go/missing-constraint"))
      }
      BannedTripleSlashDirectives { .. } => Some(Cow::Borrowed(
        "https://jsr.io/go/banned-triple-slash-directives",
      )),
      RelativePackageImport { .. } => {
        Some(Cow::Borrowed("https://jsr.io/go/relative-package-import"))
      }
      SyntaxError(diagnostic) => diagnostic.docs_url(),
      MissingLicense { .. } => {
        Some(Cow::Borrowed("https://jsr.io/go/missing-license"))
      }
      UnstableRawImport { .. } => Some(Cow::Borrowed(
        "https://github.com/denoland/deno/issues/29904",
      )),
    }
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/tools/publish/tar.rs | cli/tools/publish/tar.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::fmt::Write as FmtWrite;
use std::io::Write;
use bytes::Bytes;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::url::Url;
use deno_graph::ModuleGraph;
use sha2::Digest;
use tar::Header;
use super::diagnostics::PublishDiagnosticsCollector;
use super::module_content::ModuleContentProvider;
use super::paths::CollectedPublishPath;
/// A single file entry inside a publishable tarball.
#[derive(Debug, Clone, PartialEq)]
pub struct PublishableTarballFile {
  /// Slash-prefixed path of the file within the package (e.g. `/mod.ts`).
  pub path_str: String,
  /// URL identifying the file on disk.
  pub specifier: Url,
  /// `sha256-<hex>` digest of the file contents (matches the registry's
  /// checksum format).
  pub hash: String,
  /// Size of the file contents in bytes.
  pub size: usize,
}
/// A finished, gzipped tarball ready to upload to the registry.
#[derive(Debug, Clone, PartialEq)]
pub struct PublishableTarball {
  /// Entries included in the tarball, sorted by specifier.
  pub files: Vec<PublishableTarballFile>,
  /// `sha256-<hex>` digest of the gzipped tarball bytes.
  pub hash: String,
  /// The gzipped tarball contents.
  pub bytes: Bytes,
}
/// Builds the gzipped tarball for a package from the collected publish
/// paths, recording a `sha256` checksum and size per file plus a checksum
/// of the whole archive.
///
/// Paths with inline contents (`maybe_content`) are used as-is; all other
/// paths are read (and possibly unfurled) via `module_content_provider`.
pub fn create_gzipped_tarball(
  module_content_provider: &ModuleContentProvider,
  graph: &ModuleGraph,
  diagnostics_collector: &PublishDiagnosticsCollector,
  publish_paths: Vec<CollectedPublishPath>,
) -> Result<PublishableTarball, AnyError> {
  let mut tar = TarGzArchive::new();
  let mut files = vec![];
  for path in publish_paths {
    let path_str = &path.relative_path;
    let specifier = &path.specifier;
    let content = match path.maybe_content {
      // `maybe_content` is moved out of `path`, so no clone is needed here
      // (previously this cloned the just-moved vec).
      Some(content) => content,
      None => module_content_provider.resolve_content_maybe_unfurling(
        graph,
        diagnostics_collector,
        &path.path,
        specifier,
      )?,
    };
    files.push(PublishableTarballFile {
      path_str: path_str.clone(),
      specifier: specifier.clone(),
      // This hash string matches the checksum computed by registry
      hash: format!("sha256-{:x}", sha2::Sha256::digest(&content)),
      size: content.len(),
    });
    // Collected paths are always '/'-prefixed; store them as `./...`
    // entries in the tarball.
    assert!(path_str.starts_with('/'));
    tar
      .add_file(format!(".{}", path_str), &content)
      .with_context(|| {
        format!("Unable to add file to tarball '{}'", path.path.display())
      })?;
  }
  let v = tar.finish().context("Unable to finish tarball")?;
  // Whole-archive checksum, formatted the same way as the per-file hashes.
  let hash = format!("sha256-{:x}", sha2::Sha256::digest(&v));
  files.sort_by(|a, b| a.specifier.cmp(&b.specifier));
  Ok(PublishableTarball {
    files,
    hash,
    bytes: Bytes::from(v),
  })
}
/// Incrementally builds an in-memory tar archive that is gzip-compressed
/// when finished.
struct TarGzArchive {
  builder: tar::Builder<Vec<u8>>,
}
impl TarGzArchive {
  /// Creates an empty in-memory archive.
  pub fn new() -> Self {
    Self {
      builder: tar::Builder::new(Vec::new()),
    }
  }

  /// Appends one file entry with the given contents.
  pub fn add_file(
    &mut self,
    path: String,
    data: &[u8],
  ) -> Result<(), AnyError> {
    let mut header = Header::new_gnu();
    header.set_size(data.len() as u64);
    self.builder.append_data(&mut header, &path, data)?;
    Ok(())
  }

  /// Finalizes the tar stream and returns it gzip-compressed.
  fn finish(mut self) -> Result<Vec<u8>, AnyError> {
    self.builder.finish()?;
    let tar_bytes = self.builder.into_inner()?;
    let mut compressed = Vec::new();
    let mut encoder = flate2::write::GzEncoder::new(
      &mut compressed,
      flate2::Compression::default(),
    );
    encoder.write_all(&tar_bytes)?;
    encoder.finish()?;
    Ok(compressed)
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/tools/publish/publish_order.rs | cli/tools/publish/publish_order.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::collections::HashMap;
use std::collections::HashSet;
use std::collections::VecDeque;
use deno_ast::ModuleSpecifier;
use deno_config::workspace::JsrPackageConfig;
use deno_core::anyhow::bail;
use deno_core::error::AnyError;
use deno_graph::ModuleGraph;
/// Topological publish ordering over workspace packages.
pub struct PublishOrderGraph {
  // package name -> names of the packages it depends on
  packages: HashMap<String, HashSet<String>>,
  // package name -> number of its dependencies not yet published
  in_degree: HashMap<String, usize>,
  // package name -> packages that depend on it (dependents)
  reverse_map: HashMap<String, Vec<String>>,
}
impl PublishOrderGraph {
pub fn next(&mut self) -> Vec<String> {
let mut package_names_with_depth = self
.in_degree
.iter()
.filter_map(|(name, °ree)| if degree == 0 { Some(name) } else { None })
.map(|item| (item.clone(), self.compute_depth(item, HashSet::new())))
.collect::<Vec<_>>();
// sort by depth to in order to prioritize those packages
package_names_with_depth.sort_by(|a, b| match b.1.cmp(&a.1) {
std::cmp::Ordering::Equal => a.0.cmp(&b.0),
other => other,
});
let sorted_package_names = package_names_with_depth
.into_iter()
.map(|(name, _)| name)
.collect::<Vec<_>>();
for name in &sorted_package_names {
self.in_degree.remove(name);
}
sorted_package_names
}
pub fn finish_package(&mut self, name: &str) {
if let Some(package_names) = self.reverse_map.remove(name) {
for name in package_names {
*self.in_degree.get_mut(&name).unwrap() -= 1;
}
}
}
/// There could be pending packages if there's a circular dependency.
pub fn ensure_no_pending(&self) -> Result<(), AnyError> {
// this is inefficient, but that's ok because it's simple and will
// only ever happen when there's an error
fn identify_cycle<'a>(
current_name: &'a String,
mut visited: HashSet<&'a String>,
packages: &HashMap<String, HashSet<String>>,
) -> Option<Vec<String>> {
if visited.insert(current_name) {
let deps = packages.get(current_name).unwrap();
for dep in deps {
if let Some(mut cycle) =
identify_cycle(dep, visited.clone(), packages)
{
cycle.push(current_name.to_string());
return Some(cycle);
}
}
None
} else {
Some(vec![current_name.to_string()])
}
}
if self.in_degree.is_empty() {
Ok(())
} else {
let mut pkg_names = self.in_degree.keys().collect::<Vec<_>>();
pkg_names.sort(); // determinism
let mut cycle =
identify_cycle(pkg_names[0], HashSet::new(), &self.packages).unwrap();
cycle.reverse();
bail!(
"Circular package dependency detected: {}",
cycle.join(" -> ")
);
}
}
fn compute_depth(
&self,
package_name: &String,
mut visited: HashSet<String>,
) -> usize {
if visited.contains(package_name) {
return 0; // cycle
}
visited.insert(package_name.clone());
let Some(parents) = self.reverse_map.get(package_name) else {
return 0;
};
let max_depth = parents
.iter()
.map(|child| self.compute_depth(child, visited.clone()))
.max()
.unwrap_or(0);
1 + max_depth
}
}
pub fn build_publish_order_graph(
graph: &ModuleGraph,
roots: &[JsrPackageConfig],
) -> Result<PublishOrderGraph, AnyError> {
let packages = build_pkg_deps(graph, roots)?;
Ok(build_publish_order_graph_from_pkgs_deps(packages))
}
/// Computes, for every workspace package, the set of other workspace
/// packages it depends on by walking the module graph starting from each
/// package's exported modules.
fn build_pkg_deps(
  graph: &deno_graph::ModuleGraph,
  roots: &[JsrPackageConfig],
) -> Result<HashMap<String, HashSet<String>>, AnyError> {
  let mut members = HashMap::with_capacity(roots.len());
  let mut seen_modules = HashSet::with_capacity(graph.modules().count());
  // pair each package with the directory URL its modules live under
  let roots = roots
    .iter()
    .map(|r| {
      (
        ModuleSpecifier::from_directory_path(r.config_file.dir_path()).unwrap(),
        r,
      )
    })
    .collect::<Vec<_>>();
  for (root_dir_url, pkg_config) in &roots {
    let mut deps = HashSet::new();
    // breadth-first traversal seeded with the package's exports
    let mut pending = VecDeque::new();
    pending.extend(pkg_config.config_file.resolve_export_value_urls()?);
    while let Some(specifier) = pending.pop_front() {
      let Some(module) = graph.get(&specifier).and_then(|m| m.js()) else {
        continue;
      };
      let mut dep_specifiers =
        Vec::with_capacity(module.dependencies.len() + 1);
      // include the module's types dependency, if any, alongside code deps
      if let Some(types_dep) = &module.maybe_types_dependency
        && let Some(specifier) = types_dep.dependency.maybe_specifier()
      {
        dep_specifiers.push(specifier);
      }
      for (_, dep) in &module.dependencies {
        if let Some(specifier) = dep.maybe_code.maybe_specifier() {
          dep_specifiers.push(specifier);
        }
        if let Some(specifier) = dep.maybe_type.maybe_specifier() {
          dep_specifiers.push(specifier);
        }
      }
      for specifier in dep_specifiers {
        let specifier = graph.resolve(specifier);
        // only local files can belong to another workspace package
        if specifier.scheme() != "file" {
          continue;
        }
        if specifier.as_str().starts_with(root_dir_url.as_str()) {
          // same package: keep walking; each module is visited only once
          if seen_modules.insert(specifier.clone()) {
            pending.push_back(specifier.clone());
          }
        } else {
          // outside this package's directory: record a package dependency
          // when the module belongs to another workspace member
          let found_root = roots.iter().find(|(dir_url, _)| {
            specifier.as_str().starts_with(dir_url.as_str())
          });
          if let Some(root) = found_root {
            deps.insert(root.1.name.clone());
          }
        }
      }
    }
    members.insert(pkg_config.name.clone(), deps);
  }
  Ok(members)
}
/// Builds the publish-order graph from a map of package name to the set
/// of package names it depends on.
fn build_publish_order_graph_from_pkgs_deps(
  packages: HashMap<String, HashSet<String>>,
) -> PublishOrderGraph {
  let mut in_degree = HashMap::new();
  let mut reverse_map: HashMap<String, Vec<String>> = HashMap::new();
  // build the in-degree counts and the dependency -> dependents map
  for (pkg_name, deps) in &packages {
    in_degree.insert(pkg_name.clone(), deps.len());
    for dep in deps {
      reverse_map
        .entry(dep.clone())
        .or_default()
        .push(pkg_name.clone());
    }
  }
  PublishOrderGraph {
    // move the map instead of cloning it — `packages` is owned and unused
    // after this point
    packages,
    in_degree,
    reverse_map,
  }
}
#[cfg(test)]
mod test {
  use super::*;

  // Independent packages: all are returned in one alphabetical batch.
  #[test]
  fn test_graph_no_deps() {
    let mut graph = build_publish_order_graph_from_pkgs_deps(HashMap::from([
      ("a".to_string(), HashSet::new()),
      ("b".to_string(), HashSet::new()),
      ("c".to_string(), HashSet::new()),
    ]));
    assert_eq!(
      graph.next(),
      vec!["a".to_string(), "b".to_string(), "c".to_string()],
    );
    graph.finish_package("a");
    assert!(graph.next().is_empty());
    graph.finish_package("b");
    assert!(graph.next().is_empty());
    graph.finish_package("c");
    assert!(graph.next().is_empty());
    graph.ensure_no_pending().unwrap();
  }

  // Linear chain a -> b -> c: packages become available one at a time,
  // deepest dependency first.
  #[test]
  fn test_graph_single_dep() {
    let mut graph = build_publish_order_graph_from_pkgs_deps(HashMap::from([
      ("a".to_string(), HashSet::from(["b".to_string()])),
      ("b".to_string(), HashSet::from(["c".to_string()])),
      ("c".to_string(), HashSet::new()),
    ]));
    assert_eq!(graph.next(), vec!["c".to_string()]);
    graph.finish_package("c");
    assert_eq!(graph.next(), vec!["b".to_string()]);
    graph.finish_package("b");
    assert_eq!(graph.next(), vec!["a".to_string()]);
    graph.finish_package("a");
    assert!(graph.next().is_empty());
    graph.ensure_no_pending().unwrap();
  }

  // Mixed graph: ready packages with deeper dependent chains are
  // prioritized within a batch, and packages unblock as deps finish.
  #[test]
  fn test_graph_multiple_dep() {
    let mut graph = build_publish_order_graph_from_pkgs_deps(HashMap::from([
      (
        "a".to_string(),
        HashSet::from(["b".to_string(), "c".to_string()]),
      ),
      ("b".to_string(), HashSet::from(["c".to_string()])),
      ("c".to_string(), HashSet::new()),
      ("d".to_string(), HashSet::new()),
      ("e".to_string(), HashSet::from(["f".to_string()])),
      ("f".to_string(), HashSet::new()),
    ]));
    assert_eq!(
      graph.next(),
      vec!["c".to_string(), "f".to_string(), "d".to_string()]
    );
    graph.finish_package("f");
    assert_eq!(graph.next(), vec!["e".to_string()]);
    graph.finish_package("e");
    assert!(graph.next().is_empty());
    graph.finish_package("d");
    assert!(graph.next().is_empty());
    graph.finish_package("c");
    assert_eq!(graph.next(), vec!["b".to_string()]);
    graph.finish_package("b");
    assert_eq!(graph.next(), vec!["a".to_string()]);
    graph.finish_package("a");
    assert!(graph.next().is_empty());
    graph.ensure_no_pending().unwrap();
  }

  // A dependency cycle leaves packages pending and is reported with the
  // full cycle path by ensure_no_pending.
  #[test]
  fn test_graph_circular_dep() {
    let mut graph = build_publish_order_graph_from_pkgs_deps(HashMap::from([
      ("a".to_string(), HashSet::from(["b".to_string()])),
      ("b".to_string(), HashSet::from(["c".to_string()])),
      ("c".to_string(), HashSet::from(["a".to_string()])),
    ]));
    assert!(graph.next().is_empty());
    assert_eq!(
      graph.ensure_no_pending().unwrap_err().to_string(),
      "Circular package dependency detected: a -> b -> c -> a"
    );
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/tools/publish/paths.rs | cli/tools/publish/paths.rs | // Copyright 2018-2025 the Deno authors. MIT license.
// Validation logic in this file is shared with registry/api/src/ids.rs
use std::collections::HashSet;
use std::path::Path;
use std::path::PathBuf;
use deno_ast::ModuleSpecifier;
use deno_config::glob::FileCollector;
use deno_config::glob::FilePatterns;
use deno_core::error::AnyError;
use thiserror::Error;
use super::diagnostics::PublishDiagnostic;
use super::diagnostics::PublishDiagnosticsCollector;
use crate::args::CliOptions;
use crate::sys::CliSys;
/// A package path, like '/foo' or '/foo/bar'. The path is prefixed with a slash
/// and does not end with a slash.
///
/// The path must not contain any double slashes, dot segments, or dot dot
/// segments.
///
/// The path must be at most 160 characters long, including the slash prefix.
///
/// The path must not contain any windows reserved characters, like CON, PRN,
/// AUX, NUL, or COM1.
///
/// The path must not contain any windows path separators, like backslash or
/// colon.
///
/// The path must only contain ascii alphanumeric characters, and the characters
/// '$', '(', ')', '+', '-', '.', '@', '[', ']', '_', '{', '}', '~'.
///
/// Path's are case sensitive, but comparisons and hashing are case insensitive.
/// This matches the behaviour of the Windows FS APIs.
#[derive(Clone, Default)]
pub struct PackagePath {
  // the path exactly as provided (original casing preserved)
  path: String,
  // lowercased copy of `path`, populated only when `path` contains
  // uppercase characters; used for case-insensitive Eq/Hash
  lower: Option<String>,
}
impl PartialEq for PackagePath {
  /// Case-insensitive comparison: `lower` caches the lowercased path
  /// whenever the original contains uppercase characters.
  fn eq(&self, other: &Self) -> bool {
    let lhs = self.lower.as_deref().unwrap_or(self.path.as_str());
    let rhs = other.lower.as_deref().unwrap_or(other.path.as_str());
    lhs == rhs
  }
}
// The case-insensitive `PartialEq` above is reflexive, so `Eq` holds.
impl Eq for PackagePath {}
impl std::hash::Hash for PackagePath {
  /// Hashes the case-folded form so it stays consistent with `PartialEq`.
  fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
    self
      .lower
      .as_deref()
      .unwrap_or(self.path.as_str())
      .hash(state);
  }
}
impl PackagePath {
  /// Validates and constructs a `PackagePath` from a slash-prefixed path
  /// string, enforcing the rules documented on the type.
  ///
  /// Returns the first rule violation found as a
  /// `PackagePathValidationError`.
  pub fn new(path: String) -> Result<Self, PackagePathValidationError> {
    let len = path.len();
    if len > 160 {
      return Err(PackagePathValidationError::TooLong(len));
    }
    if len == 0 {
      return Err(PackagePathValidationError::MissingPrefix);
    }
    let mut components = path.split('/').peekable();
    // the leading '/' produces an empty first component
    let Some("") = components.next() else {
      return Err(PackagePathValidationError::MissingPrefix);
    };
    // track whether any uppercase character appears so the lowercased
    // copy (for case-insensitive Eq/Hash) is only allocated when needed
    let mut has_upper = false;
    let mut valid_char_mapper = |c: char| {
      if c.is_ascii_uppercase() {
        has_upper = true;
      }
      valid_char(c)
    };
    while let Some(component) = components.next() {
      if component.is_empty() {
        // empty last component => path ended with '/'; empty middle
        // component => '//' appeared
        if components.peek().is_none() {
          return Err(PackagePathValidationError::TrailingSlash);
        }
        return Err(PackagePathValidationError::EmptyComponent);
      }
      if component == "." || component == ".." {
        return Err(PackagePathValidationError::DotSegment);
      }
      if let Some(err) = component.chars().find_map(&mut valid_char_mapper) {
        return Err(err);
      }
      // check the portion before the last dot against the Windows
      // reserved device names (e.g. "con.txt" has basename "con")
      let basename = match component.rsplit_once('.') {
        Some((_, "")) => {
          return Err(PackagePathValidationError::TrailingDot(
            component.to_owned(),
          ));
        }
        Some((basename, _)) => basename,
        None => component,
      };
      let lower_basename = basename.to_ascii_lowercase();
      // WINDOWS_RESERVED_NAMES is sorted, so binary search is valid
      if WINDOWS_RESERVED_NAMES
        .binary_search(&&*lower_basename)
        .is_ok()
      {
        return Err(PackagePathValidationError::ReservedName(
          component.to_owned(),
        ));
      }
    }
    let lower = has_upper.then(|| path.to_ascii_lowercase());
    Ok(Self { path, lower })
  }
}
// Windows reserved device names, lowercase. Must remain sorted: it is
// searched with `binary_search` in `PackagePath::new`.
const WINDOWS_RESERVED_NAMES: [&str; 22] = [
  "aux", "com1", "com2", "com3", "com4", "com5", "com6", "com7", "com8",
  "com9", "con", "lpt1", "lpt2", "lpt3", "lpt4", "lpt5", "lpt6", "lpt7",
  "lpt8", "lpt9", "nul", "prn",
];
/// Checks a single path character, returning `None` when it is allowed
/// and a descriptive validation error when it is not.
fn valid_char(c: char) -> Option<PackagePathValidationError> {
  // ascii alphanumerics plus a small set of punctuation are allowed
  if c.is_ascii_alphanumeric()
    || matches!(
      c,
      '$' | '(' | ')' | '+' | '-' | '.' | '@' | '[' | ']' | '_' | '{' | '}'
        | '~'
    )
  {
    return None;
  }
  // classify the rejection so the error message can be informative
  Some(match c {
    '\\' | ':' => {
      PackagePathValidationError::InvalidWindowsPathSeparatorChar(c)
    }
    '<' | '>' | '"' | '|' | '?' | '*' => {
      PackagePathValidationError::InvalidWindowsChar(c)
    }
    ' ' | '\t' | '\n' | '\r' => {
      PackagePathValidationError::InvalidWhitespace(c)
    }
    '%' | '#' => PackagePathValidationError::InvalidSpecialUrlChar(c),
    other => PackagePathValidationError::InvalidOtherChar(other),
  })
}
/// Reasons a package path is rejected by `PackagePath::new`.
#[derive(Debug, Clone, Error)]
pub enum PackagePathValidationError {
  #[error(
    "package path must be at most 160 characters long, but is {0} characters long"
  )]
  TooLong(usize),
  #[error("package path must be prefixed with a slash")]
  MissingPrefix,
  #[error("package path must not end with a slash")]
  TrailingSlash,
  #[error("package path must not contain empty components")]
  EmptyComponent,
  #[error("package path must not contain dot segments like '.' or '..'")]
  DotSegment,
  #[error(
    "package path must not contain windows reserved names like 'CON' or 'PRN' (found '{0}')"
  )]
  ReservedName(String),
  #[error("path segment must not end in a dot (found '{0}')")]
  TrailingDot(String),
  #[error(
    "package path must not contain windows path separators like '\\' or ':' (found '{0}')"
  )]
  InvalidWindowsPathSeparatorChar(char),
  #[error(
    "package path must not contain windows reserved characters like '<', '>', '\"', '|', '?', or '*' (found '{0}')"
  )]
  InvalidWindowsChar(char),
  #[error("package path must not contain whitespace (found '{}')", .0.escape_debug())]
  InvalidWhitespace(char),
  #[error("package path must not contain special URL characters (found '{}')", .0.escape_debug())]
  InvalidSpecialUrlChar(char),
  #[error("package path must not contain invalid characters (found '{}')", .0.escape_debug())]
  InvalidOtherChar(char),
}
/// A file selected for inclusion in the published package.
pub struct CollectedPublishPath {
  /// File URL for the path.
  pub specifier: ModuleSpecifier,
  /// The path on disk.
  pub path: PathBuf,
  /// Relative path to use in the tarball. This should be prefixed with a `/`.
  pub relative_path: String,
  /// Specify the contents for any injected paths.
  pub maybe_content: Option<Vec<u8>>,
}
/// Inputs for `collect_publish_paths`.
pub struct CollectPublishPathsOptions<'a> {
  /// Directory all collected paths must live under.
  pub root_dir: &'a Path,
  pub cli_options: &'a CliOptions,
  /// Include/exclude patterns used to discover files.
  pub file_patterns: FilePatterns,
  /// Paths included even when not matched by `file_patterns`.
  pub force_include_paths: Vec<PathBuf>,
  /// Sink for diagnostics produced while collecting.
  pub diagnostics_collector: &'a PublishDiagnosticsCollector,
}
/// Collects the files to include in the published tarball, converting
/// each to a `CollectedPublishPath` and reporting invalid or duplicate
/// paths via the diagnostics collector.
pub fn collect_publish_paths(
  opts: CollectPublishPathsOptions,
) -> Result<Vec<CollectedPublishPath>, AnyError> {
  let diagnostics_collector = opts.diagnostics_collector;
  let publish_paths =
    collect_paths(opts.cli_options, diagnostics_collector, opts.file_patterns);
  let publish_paths_set = publish_paths.iter().cloned().collect::<HashSet<_>>();
  let capacity = publish_paths.len() + opts.force_include_paths.len();
  // tracks case-insensitive duplicates via PackagePath's Eq/Hash impls
  let mut paths = HashSet::with_capacity(capacity);
  let mut result = Vec::with_capacity(capacity);
  // only force-include paths that weren't already collected
  let force_include_paths = opts
    .force_include_paths
    .into_iter()
    .filter(|path| !publish_paths_set.contains(path));
  for path in publish_paths.into_iter().chain(force_include_paths) {
    // push diagnostics directly on the shared collector — the previous
    // `.to_owned()` before each push was a redundant clone (the sibling
    // `collect_paths` already pushes directly)
    let Ok(specifier) = ModuleSpecifier::from_file_path(&path) else {
      diagnostics_collector.push(PublishDiagnostic::InvalidPath {
        path: path.to_path_buf(),
        message: "unable to convert path to url".to_string(),
      });
      continue;
    };
    let Ok(relative_path) = path.strip_prefix(opts.root_dir) else {
      diagnostics_collector.push(PublishDiagnostic::InvalidPath {
        path: path.to_path_buf(),
        message: "path is not in publish directory".to_string(),
      });
      continue;
    };
    // rejoin the components with forward slashes so the tarball path is
    // platform independent and '/'-prefixed
    let relative_path =
      relative_path
        .components()
        .fold("".to_string(), |mut path, component| {
          path.push('/');
          match component {
            std::path::Component::Normal(normal) => {
              path.push_str(&normal.to_string_lossy())
            }
            std::path::Component::CurDir => path.push('.'),
            std::path::Component::ParentDir => path.push_str(".."),
            _ => unreachable!(),
          }
          path
        });
    match PackagePath::new(relative_path.clone()) {
      Ok(package_path) => {
        if !paths.insert(package_path) {
          diagnostics_collector.push(PublishDiagnostic::DuplicatePath {
            path: path.to_path_buf(),
          });
        }
      }
      Err(err) => {
        diagnostics_collector.push(PublishDiagnostic::InvalidPath {
          path: path.to_path_buf(),
          message: err.to_string(),
        });
      }
    }
    result.push(CollectedPublishPath {
      specifier,
      path,
      relative_path,
      maybe_content: None,
    });
  }
  Ok(result)
}
/// Walks the file patterns and returns the files eligible for publishing,
/// reporting unsupported entries (e.g. symlinks) as diagnostics.
fn collect_paths(
  cli_options: &CliOptions,
  diagnostics_collector: &PublishDiagnosticsCollector,
  file_patterns: FilePatterns,
) -> Vec<PathBuf> {
  FileCollector::new(|e| {
    if !e.metadata.file_type().is_file() {
      // non-regular files can't be published; surface a diagnostic
      if let Ok(specifier) = ModuleSpecifier::from_file_path(e.path) {
        diagnostics_collector.push(PublishDiagnostic::UnsupportedFileType {
          specifier,
          kind: if e.metadata.file_type().is_symlink() {
            "symlink".to_string()
          } else {
            "Unknown".to_string()
          },
        });
      }
      return false;
    }
    // skip editor/VCS noise files
    e.path
      .file_name()
      .map(|s| s != ".DS_Store" && s != ".gitignore")
      .unwrap_or(true)
  })
  .ignore_git_folder()
  .ignore_node_modules()
  .set_vendor_folder(cli_options.vendor_dir_path().map(ToOwned::to_owned))
  .use_gitignore()
  .collect_file_patterns(&CliSys::default(), &file_patterns)
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/tools/publish/provenance.rs | cli/tools/publish/provenance.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::collections::HashMap;
use std::env;
use aws_lc_rs::rand::SystemRandom;
use aws_lc_rs::signature::EcdsaKeyPair;
use aws_lc_rs::signature::KeyPair;
use base64::Engine as _;
use base64::engine::general_purpose::STANDARD_NO_PAD;
use base64::prelude::BASE64_STANDARD;
use deno_core::anyhow;
use deno_core::anyhow::bail;
use deno_core::error::AnyError;
use deno_core::serde_json;
use deno_core::url::Url;
use http_body_util::BodyExt;
use once_cell::sync::Lazy;
use p256::elliptic_curve;
use p256::pkcs8::AssociatedOid;
use serde::Deserialize;
use serde::Serialize;
use sha2::Digest;
use spki::der::EncodePem;
use spki::der::asn1;
use spki::der::pem::LineEnding;
use super::auth::gha_oidc_token;
use super::auth::is_gha;
use crate::http_util;
use crate::http_util::HttpClient;
use crate::registry::OidcTokenResponse;
const PAE_PREFIX: &str = "DSSEv1";

/// Computes the DSSE Pre-Authentication Encoding (PAE) of a payload:
/// `DSSEv1 <type-len> <type> <payload-len> <payload>`, where the lengths
/// are byte counts rendered in decimal.
///
/// https://github.com/secure-systems-lab/dsse/blob/master/protocol.md#signature-definition
fn pre_auth_encoding(payload_type: &str, payload: &str) -> Vec<u8> {
  [
    PAE_PREFIX.to_string(),
    payload_type.len().to_string(),
    payload_type.to_string(),
    payload.len().to_string(),
    payload.to_string(),
  ]
  .join(" ")
  .into_bytes()
}
/// A single DSSE signature entry.
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct Signature {
  // key hint; `attest` leaves this empty because the key is ephemeral
  keyid: &'static str,
  // base64-encoded signature bytes
  sig: String,
}

/// DSSE envelope wrapping the base64-encoded payload with its signatures.
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct Envelope {
  payload_type: String,
  payload: String,
  signatures: Vec<Signature>,
}

/// Signature bundle containing a DSSE envelope.
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SignatureBundle {
  #[serde(rename = "$case")]
  case: &'static str,
  dsse_envelope: Envelope,
}

/// Digest of an attestation subject.
#[derive(Serialize)]
pub struct SubjectDigest {
  // hex-encoded sha256 of the subject
  pub sha256: String,
}

/// An artifact covered by the provenance attestation.
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
pub struct Subject {
  pub name: String,
  pub digest: SubjectDigest,
}
/// Digest identifying the exact source revision that was built.
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct GhaResourceDigest {
  git_commit: String,
}

/// GitHub-specific values recorded under `internalParameters`.
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct GithubInternalParameters {
  event_name: String,
  repository_id: String,
  repository_owner_id: String,
}

/// A dependency resolved during the build (here: the repository itself).
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct ResourceDescriptor {
  uri: String,
  digest: Option<GhaResourceDigest>,
}

#[derive(Serialize)]
struct InternalParameters {
  github: GithubInternalParameters,
}

/// The workflow that performed the build.
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct GhaWorkflow {
  #[serde(rename = "ref")]
  ref_: String,
  repository: String,
  path: String,
}

#[derive(Serialize)]
struct ExternalParameters {
  workflow: GhaWorkflow,
}

/// SLSA v1 `buildDefinition` section.
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct BuildDefinition {
  build_type: &'static str,
  resolved_dependencies: [ResourceDescriptor; 1],
  internal_parameters: InternalParameters,
  external_parameters: ExternalParameters,
}

/// Identifies the runner that performed the build.
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct Builder {
  id: String,
}

#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct Metadata {
  invocation_id: String,
}

/// SLSA v1 `runDetails` section.
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct RunDetails {
  builder: Builder,
  metadata: Metadata,
}

/// SLSA v1 provenance predicate.
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct Predicate {
  build_definition: BuildDefinition,
  run_details: RunDetails,
}
impl Predicate {
  /// Builds a SLSA v1 provenance predicate from the `GITHUB_*` environment
  /// variables of the current GitHub Actions workflow run.
  ///
  /// # Panics
  ///
  /// Panics when the standard GitHub Actions variables are missing or when
  /// `GITHUB_WORKFLOW_REF` lacks an `@<ref>` suffix, i.e. when not running
  /// inside GitHub Actions (callers gate on `is_gha()`).
  pub fn new_github_actions() -> Self {
    let repo =
      std::env::var("GITHUB_REPOSITORY").expect("GITHUB_REPOSITORY not set");
    // GITHUB_WORKFLOW_REF looks like
    // `owner/repo/.github/workflows/ci.yml@refs/heads/main`; strip the
    // repository prefix, then split the workflow path from the git ref.
    let rel_ref = std::env::var("GITHUB_WORKFLOW_REF")
      .unwrap_or_default()
      .replace(&format!("{}/", &repo), "");
    let (workflow_path, workflow_ref) = rel_ref
      .split_once('@')
      .expect("GITHUB_WORKFLOW_REF has no '@<ref>' suffix");
    let server_url = std::env::var("GITHUB_SERVER_URL").unwrap();
    Self {
      build_definition: BuildDefinition {
        build_type: GITHUB_BUILD_TYPE,
        external_parameters: ExternalParameters {
          workflow: GhaWorkflow {
            ref_: workflow_ref.to_string(),
            repository: format!("{}/{}", server_url, &repo),
            path: workflow_path.to_string(),
          },
        },
        internal_parameters: InternalParameters {
          github: GithubInternalParameters {
            event_name: std::env::var("GITHUB_EVENT_NAME").unwrap_or_default(),
            repository_id: std::env::var("GITHUB_REPOSITORY_ID")
              .unwrap_or_default(),
            repository_owner_id: std::env::var("GITHUB_REPOSITORY_OWNER_ID")
              .unwrap_or_default(),
          },
        },
        // the only resolved dependency is the source repo at the built SHA
        resolved_dependencies: [ResourceDescriptor {
          uri: format!(
            "git+{}/{}@{}",
            server_url,
            &repo,
            std::env::var("GITHUB_REF").unwrap()
          ),
          digest: Some(GhaResourceDigest {
            git_commit: std::env::var("GITHUB_SHA").unwrap(),
          }),
        }],
      },
      run_details: RunDetails {
        builder: Builder {
          id: format!(
            "{}/{}",
            &GITHUB_BUILDER_ID_PREFIX,
            std::env::var("RUNNER_ENVIRONMENT").unwrap()
          ),
        },
        metadata: Metadata {
          // link back to the exact workflow run and attempt
          invocation_id: format!(
            "{}/{}/actions/runs/{}/attempts/{}",
            server_url,
            repo,
            std::env::var("GITHUB_RUN_ID").unwrap(),
            std::env::var("GITHUB_RUN_ATTEMPT").unwrap()
          ),
        },
      },
    }
  }
}
/// An in-toto statement pairing the subjects with the SLSA predicate.
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct ProvenanceAttestation {
  #[serde(rename = "type")]
  _type: &'static str,
  subject: Vec<Subject>,
  predicate_type: &'static str,
  predicate: Predicate,
}
impl ProvenanceAttestation {
  /// Creates an in-toto statement for `subjects` with a predicate built
  /// from the current GitHub Actions environment.
  pub fn new_github_actions(subjects: Vec<Subject>) -> Self {
    Self {
      _type: INTOTO_STATEMENT_TYPE,
      subject: subjects,
      predicate_type: SLSA_PREDICATE_TYPE,
      predicate: Predicate::new_github_actions(),
    }
  }
}
// in-toto / SLSA constants used to assemble the attestation statement.
const INTOTO_STATEMENT_TYPE: &str = "https://in-toto.io/Statement/v1";
const SLSA_PREDICATE_TYPE: &str = "https://slsa.dev/provenance/v1";
const INTOTO_PAYLOAD_TYPE: &str = "application/vnd.in-toto+json";
const GITHUB_BUILDER_ID_PREFIX: &str = "https://github.com/actions/runner";
const GITHUB_BUILD_TYPE: &str =
  "https://slsa-framework.github.io/github-actions-buildtypes/workflow/v1";
/// A certificate in the verification chain.
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct X509Certificate {
  // the leaf certificate exactly as returned in Fulcio's chain
  pub raw_bytes: String,
}

#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct X509CertificateChain {
  pub certificates: [X509Certificate; 1],
}

#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct VerificationMaterialContent {
  #[serde(rename = "$case")]
  pub case: &'static str,
  pub x509_certificate_chain: X509CertificateChain,
}

/// Reference to an entry in the transparency log.
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct TlogEntry {
  pub log_index: u64,
}

/// What a verifier needs: the certificate chain and the log entry.
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct VerificationMaterial {
  pub content: VerificationMaterialContent,
  pub tlog_entries: [TlogEntry; 1],
}

/// The provenance attestation bundle produced by `attest`.
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
pub struct ProvenanceBundle {
  pub media_type: &'static str,
  pub content: SignatureBundle,
  pub verification_material: VerificationMaterial,
}
/// Generates a signed SLSA provenance bundle for the given subjects.
///
/// Provenance relies on GitHub's OIDC identity, so running inside GitHub
/// Actions with the `id-token` permission is a hard requirement.
pub async fn generate_provenance(
  http_client: &HttpClient,
  subjects: Vec<Subject>,
) -> Result<ProvenanceBundle, AnyError> {
  if !is_gha() {
    bail!("Automatic provenance is only available in GitHub Actions");
  }
  if gha_oidc_token().is_none() {
    bail!(
      "Provenance generation in Github Actions requires 'id-token' permission"
    );
  }
  let attestation = serde_json::to_string(
    &ProvenanceAttestation::new_github_actions(subjects),
  )?;
  attest(http_client, &attestation, INTOTO_PAYLOAD_TYPE).await
}
/// Signs `data` (a DSSE payload of type `type_`) with an ephemeral key
/// certified by Fulcio, records it in the transparency log, and returns
/// the resulting provenance bundle.
pub async fn attest(
  http_client: &HttpClient,
  data: &str,
  type_: &str,
) -> Result<ProvenanceBundle, AnyError> {
  // DSSE Pre-Auth Encoding (PAE) payload
  let pae = pre_auth_encoding(type_, data);
  let signer = FulcioSigner::new(http_client)?;
  let (signature, key_material) = Box::pin(signer.sign(&pae)).await?;
  let content = SignatureBundle {
    case: "dsseSignature",
    dsse_envelope: Envelope {
      payload_type: type_.to_string(),
      // the envelope carries the raw (non-PAE) payload, base64-encoded
      payload: BASE64_STANDARD.encode(data),
      signatures: vec![Signature {
        keyid: "",
        sig: BASE64_STANDARD.encode(signature.as_ref()),
      }],
    },
  };
  // record the signature in the transparency log (see `testify`)
  let transparency_logs =
    testify(http_client, &content, &key_material.certificate).await?;
  // First log entry is the one we're interested in
  let (_, log_entry) = transparency_logs.iter().next().unwrap();
  let bundle = ProvenanceBundle {
    media_type: "application/vnd.in-toto+json",
    content,
    verification_material: VerificationMaterial {
      content: VerificationMaterialContent {
        case: "x509CertificateChain",
        x509_certificate_chain: X509CertificateChain {
          certificates: [X509Certificate {
            raw_bytes: key_material.certificate,
          }],
        },
      },
      tlog_entries: [TlogEntry {
        log_index: log_entry.log_index,
      }],
    },
  };
  Ok(bundle)
}
// Fulcio endpoint; overridable via the FULCIO_URL env var.
static DEFAULT_FULCIO_URL: Lazy<String> = Lazy::new(|| {
  env::var("FULCIO_URL")
    .unwrap_or_else(|_| "https://fulcio.sigstore.dev".to_string())
});

// ECDSA P-256 with SHA-256 (ASN.1 signatures) — the ephemeral key's algorithm.
static ALGORITHM: &aws_lc_rs::signature::EcdsaSigningAlgorithm =
  &aws_lc_rs::signature::ECDSA_P256_SHA256_ASN1_SIGNING;

/// Certificate material returned from Fulcio for the ephemeral key.
struct KeyMaterial {
  pub _case: &'static str,
  // the leaf certificate from the chain returned by Fulcio
  pub certificate: String,
}
/// Public key material sent to Fulcio.
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct PublicKey {
  algorithm: &'static str,
  // PEM-encoded SubjectPublicKeyInfo
  content: String,
}

#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct PublicKeyRequest {
  public_key: PublicKey,
  // base64 signature over the OIDC subject, proving key possession
  proof_of_possession: String,
}

#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct Credentials {
  oidc_identity_token: String,
}

/// Request body for Fulcio's `signingCert` endpoint.
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct CreateSigningCertificateRequest {
  credentials: Credentials,
  public_key_request: PublicKeyRequest,
}

#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct CertificateChain {
  certificates: Vec<String>,
}

#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct SignedCertificate {
  chain: CertificateChain,
}

/// Fulcio's response; one of the two fields is expected to be set.
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct SigningCertificateResponse {
  signed_certificate_embedded_sct: Option<SignedCertificate>,
  signed_certificate_detached_sct: Option<SignedCertificate>,
}
/// Signs payloads with an ephemeral ECDSA key whose public half gets
/// certified by Fulcio against the workflow's OIDC identity.
struct FulcioSigner<'a> {
  // The ephemeral key pair used to sign.
  ephemeral_signer: EcdsaKeyPair,
  rng: SystemRandom,
  http_client: &'a HttpClient,
}
impl<'a> FulcioSigner<'a> {
  /// Generates a fresh ephemeral ECDSA P-256 key pair for signing.
  pub fn new(http_client: &'a HttpClient) -> Result<Self, AnyError> {
    let rng = SystemRandom::new();
    let document = EcdsaKeyPair::generate_pkcs8(ALGORITHM, &rng)?;
    let ephemeral_signer =
      EcdsaKeyPair::from_pkcs8(ALGORITHM, document.as_ref())?;
    Ok(Self {
      ephemeral_signer,
      rng,
      http_client,
    })
  }

  /// Obtains a Fulcio certificate binding the ephemeral public key to the
  /// workflow's OIDC identity, then signs `data` with the ephemeral key.
  /// Returns the signature and the certificate material.
  pub async fn sign(
    self,
    data: &[u8],
  ) -> Result<(aws_lc_rs::signature::Signature, KeyMaterial), AnyError> {
    // Request token from GitHub Actions for audience "sigstore"
    let token = self.gha_request_token("sigstore").await?;
    // Extract the subject from the token
    let subject = extract_jwt_subject(&token)?;
    // Sign the subject to create a challenge (proof of key possession)
    let challenge =
      self.ephemeral_signer.sign(&self.rng, subject.as_bytes())?;
    // Encode the public key as a PEM SubjectPublicKeyInfo for Fulcio.
    let subject_public_key = self.ephemeral_signer.public_key().as_ref();
    let algorithm = spki::AlgorithmIdentifier {
      oid: elliptic_curve::ALGORITHM_OID,
      parameters: Some((&p256::NistP256::OID).into()),
    };
    let spki = spki::SubjectPublicKeyInfoRef {
      algorithm,
      subject_public_key: asn1::BitStringRef::from_bytes(subject_public_key)?,
    };
    let pem = spki.to_pem(LineEnding::LF)?;
    // Create signing certificate
    let certificates =
      Box::pin(self.create_signing_certificate(&token, pem, challenge)).await?;
    let signature = self.ephemeral_signer.sign(&self.rng, data)?;
    Ok((
      signature,
      KeyMaterial {
        _case: "x509Certificate",
        // first entry in the returned chain
        certificate: certificates[0].clone(),
      },
    ))
  }

  /// Calls Fulcio's `signingCert` endpoint and returns the certificate
  /// chain issued for the ephemeral key.
  async fn create_signing_certificate(
    &self,
    token: &str,
    public_key: String,
    challenge: aws_lc_rs::signature::Signature,
  ) -> Result<Vec<String>, AnyError> {
    let url = format!("{}/api/v2/signingCert", *DEFAULT_FULCIO_URL);
    let request_body = CreateSigningCertificateRequest {
      credentials: Credentials {
        oidc_identity_token: token.to_string(),
      },
      public_key_request: PublicKeyRequest {
        public_key: PublicKey {
          algorithm: "ECDSA",
          content: public_key,
        },
        proof_of_possession: BASE64_STANDARD.encode(challenge.as_ref()),
      },
    };
    let response = self
      .http_client
      .post_json(url.parse()?, &request_body)?
      .send()
      .await?;
    let body: SigningCertificateResponse =
      http_util::body_to_json(response).await?;
    // the certificate arrives with either an embedded or detached SCT
    let key = body
      .signed_certificate_embedded_sct
      .or(body.signed_certificate_detached_sct)
      .ok_or_else(|| anyhow::anyhow!("No certificate chain returned"))?;
    Ok(key.chain.certificates)
  }

  /// Exchanges the ambient GitHub Actions OIDC token for one scoped to
  /// the given audience.
  async fn gha_request_token(&self, aud: &str) -> Result<String, AnyError> {
    let Ok(req_url) = env::var("ACTIONS_ID_TOKEN_REQUEST_URL") else {
      bail!("Not running in GitHub Actions");
    };
    let Some(token) = gha_oidc_token() else {
      bail!("No OIDC token available");
    };
    let mut url = req_url.parse::<Url>()?;
    url.query_pairs_mut().append_pair("audience", aud);
    let res_bytes = self
      .http_client
      .get(url)?
      .header(
        http::header::AUTHORIZATION,
        format!("Bearer {}", token)
          .parse()
          .map_err(http::Error::from)?,
      )
      .send()
      .await?
      .collect()
      .await?
      .to_bytes();
    let res: OidcTokenResponse = serde_json::from_slice(&res_bytes)?;
    Ok(res.value)
  }
}
/// Subset of JWT claims needed to derive a subject identity from the
/// (unverified) token payload.
#[derive(Deserialize)]
struct JwtSubject<'a> {
  // Present for issuers that identify users by email (e.g. Google).
  email: Option<String>,
  // Standard JWT `sub` claim.
  sub: String,
  // Issuer URL; borrowed zero-copy from the decoded payload buffer.
  iss: &'a str,
}
fn extract_jwt_subject(token: &str) -> Result<String, AnyError> {
let parts: Vec<&str> = token.split('.').collect();
let payload = parts[1];
let payload = STANDARD_NO_PAD.decode(payload)?;
let subject: JwtSubject = serde_json::from_slice(&payload)?;
match subject.iss {
"https://accounts.google.com" | "https://oauth2.sigstore.dev/auth" => {
Ok(subject.email.unwrap_or(subject.sub))
}
_ => Ok(subject.sub),
}
}
/// Base URL of the Rekor transparency log; overridable through the
/// `REKOR_URL` environment variable.
static DEFAULT_REKOR_URL: Lazy<String> = Lazy::new(|| {
  match env::var("REKOR_URL") {
    Ok(url) => url,
    Err(_) => "https://rekor.sigstore.dev".to_string(),
  }
});
/// A single Rekor transparency log entry as returned by the
/// `POST /api/v1/log/entries` endpoint.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct LogEntry {
  #[allow(dead_code)]
  #[serde(rename = "logID")]
  pub log_id: String,
  pub log_index: u64,
}
// Rekor responds with a map keyed by the entry's UUID.
type RekorEntry = HashMap<String, LogEntry>;
/// Signature entry serialized into the Rekor DSSE envelope.
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct RekorSignature {
  sig: String,
  // `publicKey` is not the standard part of
  // DSSE, but it's required by Rekor.
  public_key: String,
}
/// DSSE envelope shape embedded in the proposed Rekor entry.
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct DsseEnvelope {
  payload: String,
  payload_type: String,
  signatures: [RekorSignature; 1],
}
/// Request body for proposing an "intoto" entry to Rekor.
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct ProposedIntotoEntry {
  api_version: &'static str,
  kind: &'static str,
  spec: ProposedIntotoEntrySpec,
}
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct ProposedIntotoEntrySpec {
  content: ProposedIntotoEntryContent,
}
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct ProposedIntotoEntryContent {
  envelope: DsseEnvelope,
  hash: ProposedIntotoEntryHash,
  payload_hash: ProposedIntotoEntryHash,
}
// An algorithm/value pair, e.g. ("sha256", hex-encoded digest).
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct ProposedIntotoEntryHash {
  algorithm: &'static str,
  value: String,
}
// Rekor witness
/// Submits the DSSE envelope to the Rekor transparency log as an
/// "intoto" entry and returns the created log entry (keyed by UUID).
///
/// NOTE: the digests below are computed over the exact serde-produced
/// JSON, so the field declaration order of `DsseEnvelope` is significant.
async fn testify(
  http_client: &HttpClient,
  content: &SignatureBundle,
  public_key: &str,
) -> Result<RekorEntry, AnyError> {
  // Rekor "intoto" entry for the given DSSE envelope and signature.
  //
  // Calculate the value for the payloadHash field into the Rekor entry
  let payload_hash = faster_hex::hex_string(&sha2::Sha256::digest(
    content.dsse_envelope.payload.as_bytes(),
  ));
  // Calculate the value for the hash field into the Rekor entry
  // (digest of the envelope with the raw, un-base64'd payload/sig).
  let envelope_hash = faster_hex::hex_string(&{
    let dsse = DsseEnvelope {
      payload: content.dsse_envelope.payload.clone(),
      payload_type: content.dsse_envelope.payload_type.clone(),
      signatures: [RekorSignature {
        sig: content.dsse_envelope.signatures[0].sig.clone(),
        public_key: public_key.to_string(),
      }],
    };
    sha2::Sha256::digest(serde_json::to_string(&dsse)?.as_bytes())
  });
  // Re-create the DSSE envelop. `publicKey` is not the standard part of
  // DSSE, but it's required by Rekor.
  //
  // Double-encode payload and signature cause that's what Rekor expects
  let dsse = DsseEnvelope {
    payload_type: content.dsse_envelope.payload_type.clone(),
    payload: BASE64_STANDARD.encode(content.dsse_envelope.payload.clone()),
    signatures: [RekorSignature {
      sig: BASE64_STANDARD
        .encode(content.dsse_envelope.signatures[0].sig.clone()),
      public_key: BASE64_STANDARD.encode(public_key),
    }],
  };
  let proposed_intoto_entry = ProposedIntotoEntry {
    api_version: "0.0.2",
    kind: "intoto",
    spec: ProposedIntotoEntrySpec {
      content: ProposedIntotoEntryContent {
        envelope: dsse,
        hash: ProposedIntotoEntryHash {
          algorithm: "sha256",
          value: envelope_hash,
        },
        payload_hash: ProposedIntotoEntryHash {
          algorithm: "sha256",
          value: payload_hash,
        },
      },
    },
  };
  let url = format!("{}/api/v1/log/entries", *DEFAULT_REKOR_URL);
  let res = http_client
    .post_json(url.parse()?, &proposed_intoto_entry)?
    .send()
    .await?;
  let body: RekorEntry = http_util::body_to_json(res).await?;
  Ok(body)
}
#[cfg(test)]
mod tests {
  use std::env;
  use super::ProvenanceAttestation;
  use super::Subject;
  use super::SubjectDigest;
  /// Checks that the SLSA attestation built from GitHub Actions env vars
  /// carries the subject(s) as an array, per the in-toto spec.
  #[test]
  fn slsa_github_actions() {
    // Set environment variable
    // NOTE(review): env vars are process-global, so this is only safe as
    // long as no other test concurrently reads/writes these variables.
    if env::var("GITHUB_ACTIONS").is_err() {
      #[allow(clippy::undocumented_unsafe_blocks)]
      unsafe {
        env::set_var("CI", "true");
        env::set_var("GITHUB_ACTIONS", "true");
        env::set_var("ACTIONS_ID_TOKEN_REQUEST_URL", "https://example.com");
        env::set_var("ACTIONS_ID_TOKEN_REQUEST_TOKEN", "dummy");
        env::set_var("GITHUB_REPOSITORY", "littledivy/deno_sdl2");
        env::set_var("GITHUB_SERVER_URL", "https://github.com");
        env::set_var("GITHUB_REF", "refs/tags/sdl2@0.0.1");
        env::set_var("GITHUB_SHA", "lol");
        env::set_var("GITHUB_RUN_ID", "1");
        env::set_var("GITHUB_RUN_ATTEMPT", "1");
        env::set_var("RUNNER_ENVIRONMENT", "github-hosted");
        env::set_var(
          "GITHUB_WORKFLOW_REF",
          "littledivy/deno_sdl2@refs/tags/sdl2@0.0.1",
        )
      }
    }
    let subject = Subject {
      name: "jsr:@divy/sdl2@0.0.1".to_string(),
      digest: SubjectDigest {
        sha256: "yourmom".to_string(),
      },
    };
    let slsa = ProvenanceAttestation::new_github_actions(vec![subject]);
    assert_eq!(
      slsa.subject.len(),
      1,
      "Subject should be an array per the in-toto specification"
    );
    assert_eq!(slsa.subject[0].name, "jsr:@divy/sdl2@0.0.1");
    assert_eq!(slsa.subject[0].digest.sha256, "yourmom");
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/tools/publish/auth.rs | cli/tools/publish/auth.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::io::IsTerminal;
use deno_core::anyhow;
use deno_core::anyhow::bail;
use deno_core::error::AnyError;
/// How the publish command authenticates with the registry.
pub enum AuthMethod {
  // Browser-based interactive authorization flow.
  Interactive,
  // Explicit API token (e.g. passed via `--token`).
  Token(String),
  // Tokenless publishing from GitHub Actions via OIDC.
  Oidc(OidcConfig),
}
/// GitHub Actions OIDC endpoint URL and request token used to mint
/// identity tokens for tokenless publishing.
pub struct OidcConfig {
  pub url: String,
  pub token: String,
}
/// True when running inside GitHub Actions (`GITHUB_ACTIONS` == "true").
pub(crate) fn is_gha() -> bool {
  matches!(std::env::var("GITHUB_ACTIONS").as_deref(), Ok("true"))
}
/// The runtime-provided bearer token for the GitHub Actions OIDC token
/// endpoint, if present in the environment.
pub(crate) fn gha_oidc_token() -> Option<String> {
  match std::env::var("ACTIONS_ID_TOKEN_REQUEST_TOKEN") {
    Ok(token) => Some(token),
    Err(_) => None,
  }
}
/// Reads the GitHub Actions OIDC env vars.
///
/// Returns `None` when not on GHA or when exactly one of the two vars is
/// set; `Some(Ok(...))` when both are set; `Some(Err(...))` when on GHA
/// but neither is set (OIDC not enabled for the workflow).
fn get_gh_oidc_env_vars() -> Option<Result<(String, String), AnyError>> {
  if std::env::var("GITHUB_ACTIONS").unwrap_or_default() != "true" {
    return None;
  }
  let url = std::env::var("ACTIONS_ID_TOKEN_REQUEST_URL").ok();
  let token = std::env::var("ACTIONS_ID_TOKEN_REQUEST_TOKEN").ok();
  match (url, token) {
    (Some(url), Some(token)) => Some(Ok((url, token))),
    (None, None) => Some(Err(anyhow::anyhow!(
      "No means to authenticate. Pass a token to `--token`, or enable tokenless publishing from GitHub Actions using OIDC. Learn more at https://deno.co/ghoidc"
    ))),
    _ => None,
  }
}
/// Decides how to authenticate a publish: explicit token, GHA OIDC,
/// interactive flow (TTY), or an error when none is possible.
pub fn get_auth_method(
  maybe_token: Option<String>,
  dry_run: bool,
) -> Result<AuthMethod, AnyError> {
  // We don't authenticate in dry-run mode.
  if dry_run {
    return Ok(AuthMethod::Interactive);
  }
  // An explicit token always wins.
  if let Some(token) = maybe_token {
    return Ok(AuthMethod::Token(token));
  }
  match get_gh_oidc_env_vars() {
    Some(Ok((url, token))) => Ok(AuthMethod::Oidc(OidcConfig { url, token })),
    Some(Err(err)) => Err(err),
    None => {
      if std::io::stdin().is_terminal() {
        Ok(AuthMethod::Interactive)
      } else {
        bail!("No means to authenticate. Pass a token to `--token`.")
      }
    }
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/tools/publish/module_content.rs | cli/tools/publish/module_content.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::path::Path;
use std::sync::Arc;
use deno_ast::MediaType;
use deno_ast::ParsedSource;
use deno_ast::SourceTextInfo;
use deno_ast::TextChange;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::url::Url;
use deno_graph::ModuleGraph;
use deno_resolver::cache::LazyGraphSourceParser;
use deno_resolver::cache::ParsedSourceCache;
use deno_resolver::deno_json::CompilerOptionsResolver;
use deno_resolver::workspace::ResolutionKind;
use lazy_regex::Lazy;
use sys_traits::FsMetadata;
use sys_traits::FsRead;
use super::diagnostics::PublishDiagnostic;
use super::diagnostics::PublishDiagnosticsCollector;
use super::unfurl::PositionOrSourceRangeRef;
use super::unfurl::SpecifierUnfurler;
use super::unfurl::SpecifierUnfurlerDiagnostic;
use crate::sys::CliSys;
/// JSX configuration resolved for a single module, used to synthesize
/// `/** @jsx... */` pragma comments at publish time.
struct JsxFolderOptions<'a> {
  // "automatic" or "classic".
  jsx_runtime: &'static str,
  // Factory options when the classic runtime is in effect.
  jsx_classic: Option<Cow<'a, deno_ast::JsxClassicOptions>>,
  jsx_import_source: Option<String>,
  jsx_import_source_types: Option<String>,
}
/// Produces the bytes of a module as they should appear in a published
/// tarball: specifiers unfurled and JSX pragma comments prepended.
pub struct ModuleContentProvider<TSys: FsMetadata + FsRead = CliSys> {
  specifier_unfurler: SpecifierUnfurler<TSys>,
  parsed_source_cache: Arc<ParsedSourceCache>,
  sys: TSys,
  compiler_options_resolver: Arc<CompilerOptionsResolver>,
}
impl<TSys: FsMetadata + FsRead> ModuleContentProvider<TSys> {
/// Creates a provider from its collaborating services.
pub fn new(
  parsed_source_cache: Arc<ParsedSourceCache>,
  specifier_unfurler: SpecifierUnfurler<TSys>,
  sys: TSys,
  compiler_options_resolver: Arc<CompilerOptionsResolver>,
) -> Self {
  Self {
    specifier_unfurler,
    parsed_source_cache,
    sys,
    compiler_options_resolver,
  }
}
/// Returns the publish-ready bytes for `specifier`.
///
/// Non-JS/TS media types are returned verbatim. JS/TS modules are
/// parsed (reusing the graph's cached parse when available), JSX pragma
/// comments are prepended for JSX files, and specifiers are unfurled.
/// Diagnostics produced along the way go to `diagnostics_collector`.
pub fn resolve_content_maybe_unfurling(
  &self,
  graph: &ModuleGraph,
  diagnostics_collector: &PublishDiagnosticsCollector,
  path: &Path,
  specifier: &Url,
) -> Result<Vec<u8>, AnyError> {
  let source_parser =
    LazyGraphSourceParser::new(&self.parsed_source_cache, graph);
  let media_type = MediaType::from_specifier(specifier);
  let parsed_source = match source_parser.get_or_parse_source(specifier)? {
    Some(parsed_source) => parsed_source,
    None => {
      // Not in the graph — read it from disk and parse it ourselves.
      let data = self.sys.fs_read(path).with_context(|| {
        format!("Unable to read file '{}'", path.display())
      })?;
      match media_type {
        MediaType::JavaScript
        | MediaType::Jsx
        | MediaType::Mjs
        | MediaType::Cjs
        | MediaType::TypeScript
        | MediaType::Mts
        | MediaType::Cts
        | MediaType::Dts
        | MediaType::Dmts
        | MediaType::Dcts
        | MediaType::Tsx => {
          // continue
        }
        MediaType::SourceMap
        | MediaType::Unknown
        | MediaType::Html
        | MediaType::Sql
        | MediaType::Json
        | MediaType::Jsonc
        | MediaType::Json5
        | MediaType::Wasm
        | MediaType::Css => {
          // not unfurlable data
          return Ok(data.into_owned());
        }
      }
      let text = String::from_utf8_lossy(&data);
      deno_ast::parse_module(deno_ast::ParseParams {
        specifier: specifier.clone(),
        text: text.into(),
        media_type,
        capture_tokens: false,
        maybe_syntax: None,
        scope_analysis: false,
      })?
    }
  };
  log::debug!("Unfurling {}", specifier);
  let mut reporter = |diagnostic| {
    diagnostics_collector
      .push(PublishDiagnostic::SpecifierUnfurl(diagnostic));
  };
  let text_info = parsed_source.text_info_lazy();
  let module_info =
    deno_graph::ast::ParserModuleAnalyzer::module_info(&parsed_source);
  // All edits are collected as text changes and applied in one pass.
  let mut text_changes = Vec::new();
  if media_type.is_jsx() {
    self.add_jsx_text_changes(
      specifier,
      &parsed_source,
      text_info,
      &module_info,
      &mut reporter,
      &mut text_changes,
    )?;
  }
  self.specifier_unfurler.unfurl_to_changes(
    specifier,
    &parsed_source,
    &module_info,
    &mut text_changes,
    &mut reporter,
  );
  let rewritten_text =
    deno_ast::apply_text_changes(text_info.text_str(), text_changes);
  Ok(rewritten_text.into_bytes())
}
/// Prepends JSX pragma comments (`@jsxRuntime`, `@jsxImportSource`,
/// `@jsxImportSourceTypes`, `@jsxFactory`, `@jsxFragmentFactory`)
/// derived from the resolved compiler options, but only for pragmas the
/// module does not already declare itself.
fn add_jsx_text_changes(
  &self,
  specifier: &Url,
  parsed_source: &ParsedSource,
  text_info: &SourceTextInfo,
  module_info: &deno_graph::analysis::ModuleInfo,
  diagnostic_reporter: &mut dyn FnMut(SpecifierUnfurlerDiagnostic),
  text_changes: &mut Vec<TextChange>,
) -> Result<(), AnyError> {
  // Detect pragmas the author already wrote so we don't duplicate them.
  static JSX_RUNTIME_RE: Lazy<regex::Regex> =
    lazy_regex::lazy_regex!(r"(?i)^[\s*]*@jsxRuntime\s+(\S+)");
  static JSX_FACTORY_RE: Lazy<regex::Regex> =
    lazy_regex::lazy_regex!(r"(?i)^[\s*]*@jsxFactory\s+(\S+)");
  static JSX_FRAGMENT_FACTORY_RE: Lazy<regex::Regex> =
    lazy_regex::lazy_regex!(r"(?i)^[\s*]*@jsxFragmentFactory\s+(\S+)");
  // Insert after the shebang line when there is one.
  let start_pos = if parsed_source.program_ref().shebang().is_some() {
    match text_info.text_str().find('\n') {
      Some(index) => index + 1,
      None => return Ok(()), // nothing in this file
    }
  } else {
    0
  };
  let mut add_text_change = |new_text: String| {
    text_changes.push(TextChange {
      range: start_pos..start_pos,
      new_text,
    })
  };
  let jsx_options =
    self.resolve_jsx_options(specifier, text_info, diagnostic_reporter)?;
  let leading_comments = parsed_source.get_leading_comments();
  let leading_comments_has_re = |regex: &regex::Regex| {
    leading_comments
      .as_ref()
      .map(|comments| {
        comments.iter().any(|c| {
          c.kind == deno_ast::swc::common::comments::CommentKind::Block
            && regex.is_match(c.text.as_str())
        })
      })
      .unwrap_or(false)
  };
  if !leading_comments_has_re(&JSX_RUNTIME_RE) {
    add_text_change(format!(
      "/** @jsxRuntime {} */",
      jsx_options.jsx_runtime,
    ));
  }
  // Import-source pragmas are tracked by deno_graph's module info rather
  // than by regex, so check there.
  if module_info.jsx_import_source.is_none()
    && let Some(import_source) = jsx_options.jsx_import_source
  {
    add_text_change(format!("/** @jsxImportSource {} */", import_source));
  }
  if module_info.jsx_import_source_types.is_none()
    && let Some(import_source) = jsx_options.jsx_import_source_types
  {
    add_text_change(format!(
      "/** @jsxImportSourceTypes {} */",
      import_source
    ));
  }
  // Factory pragmas only apply to the classic runtime.
  if let Some(classic_options) = &jsx_options.jsx_classic {
    if !leading_comments_has_re(&JSX_FACTORY_RE) {
      add_text_change(format!(
        "/** @jsxFactory {} */",
        classic_options.factory,
      ));
    }
    if !leading_comments_has_re(&JSX_FRAGMENT_FACTORY_RE) {
      add_text_change(format!(
        "/** @jsxFragmentFactory {} */",
        classic_options.fragment_factory,
      ));
    }
  }
  Ok(())
}
/// Resolves the effective JSX options for `specifier` from its compiler
/// options, unfurling the import-source specifiers so they are valid in
/// the published package.
fn resolve_jsx_options<'a>(
  &'a self,
  specifier: &Url,
  text_info: &SourceTextInfo,
  diagnostic_reporter: &mut dyn FnMut(SpecifierUnfurlerDiagnostic),
) -> Result<JsxFolderOptions<'a>, AnyError> {
  let compiler_options =
    self.compiler_options_resolver.for_specifier(specifier);
  let jsx_config = compiler_options.jsx_import_source_config()?;
  let transpile_options = &compiler_options.transpile_options()?.transpile;
  let jsx_runtime = match &transpile_options.jsx {
    Some(
      deno_ast::JsxRuntime::Automatic(_)
      | deno_ast::JsxRuntime::Precompile(_),
    ) => "automatic",
    // No explicit jsx option defaults to the classic runtime.
    None | Some(deno_ast::JsxRuntime::Classic(_)) => "classic",
  };
  // Falls back to the original specifier when unfurling yields nothing.
  let mut unfurl_import_source =
    |import_source: &str, referrer: &Url, resolution_kind: ResolutionKind| {
      let maybe_import_source = self
        .specifier_unfurler
        .unfurl_specifier_reporting_diagnostic(
          referrer,
          import_source,
          resolution_kind,
          text_info,
          PositionOrSourceRangeRef::PositionRange(
            &deno_graph::PositionRange::zeroed(),
          ),
          diagnostic_reporter,
        );
      maybe_import_source.unwrap_or_else(|| import_source.to_string())
    };
  let jsx_import_source = jsx_config
    .and_then(|c| c.import_source.as_ref())
    .map(|jsx_import_source| {
      unfurl_import_source(
        &jsx_import_source.specifier,
        &jsx_import_source.base,
        ResolutionKind::Execution,
      )
    });
  let jsx_import_source_types = jsx_config
    .and_then(|c| c.import_source_types.as_ref())
    .map(|jsx_import_source_types| {
      unfurl_import_source(
        &jsx_import_source_types.specifier,
        &jsx_import_source_types.base,
        ResolutionKind::Types,
      )
    });
  let classic_options = match &transpile_options.jsx {
    None => Some(Cow::Owned(deno_ast::JsxClassicOptions::default())),
    Some(deno_ast::JsxRuntime::Classic(classic_options)) => {
      Some(Cow::Borrowed(classic_options))
    }
    Some(
      deno_ast::JsxRuntime::Precompile(_)
      | deno_ast::JsxRuntime::Automatic(_),
    ) => None,
  };
  Ok(JsxFolderOptions {
    jsx_runtime,
    jsx_classic: classic_options,
    jsx_import_source,
    jsx_import_source_types,
  })
}
}
#[cfg(test)]
mod test {
use std::path::PathBuf;
use deno_config::workspace::WorkspaceDiscoverStart;
use deno_path_util::url_from_file_path;
use deno_resolver::deno_json::CompilerOptionsOverrides;
use deno_resolver::factory::ConfigDiscoveryOption;
use deno_resolver::npm::ByonmNpmResolverCreateOptions;
use deno_resolver::npm::CreateInNpmPkgCheckerOptions;
use deno_resolver::npm::DenoInNpmPackageChecker;
use deno_resolver::npm::NpmResolverCreateOptions;
use deno_resolver::workspace::WorkspaceResolver;
use node_resolver::DenoIsBuiltInNodeModuleChecker;
use node_resolver::NodeResolver;
use node_resolver::NodeResolverOptions;
use node_resolver::PackageJsonResolver;
use node_resolver::cache::NodeResolutionSys;
use pretty_assertions::assert_eq;
use sys_traits::FsCreateDirAll;
use sys_traits::FsWrite;
use sys_traits::impls::InMemorySys;
use super::*;
use crate::npm::CliNpmResolver;
/// End-to-end check of JSX pragma insertion + import-source unfurling
/// for the automatic, precompile, and classic runtimes.
#[test]
fn test_module_content_jsx() {
  // Fixtures: (path, file contents, expected published output).
  // `None` means the file is configuration only and is not checked.
  // Fix: the `imports` maps below were missing commas between entries,
  // which is not valid JSON(C); the commas have been added.
  run_test(&[
    (
      "/deno.json",
      r#"{ "workspace": ["package-a", "package-b", "package-c", "package-d"] }"#,
      None,
    ),
    (
      "/package-a/deno.json",
      r#"{ "compilerOptions": {
      "jsx": "react-jsx",
      "jsxImportSource": "react",
      "jsxImportSourceTypes": "@types/react",
    },
    "imports": {
      "react": "npm:react",
      "@types/react": "npm:@types/react"
    }
  }"#,
      None,
    ),
    (
      "/package-b/deno.json",
      r#"{
    "compilerOptions": { "jsx": "react-jsx" },
    "imports": {
      "react": "npm:react",
      "@types/react": "npm:@types/react"
    }
  }"#,
      None,
    ),
    (
      "/package-c/deno.json",
      r#"{
    "compilerOptions": {
      "jsx": "precompile",
      "jsxImportSource": "react",
      "jsxImportSourceTypes": "@types/react",
    },
    "imports": {
      "react": "npm:react",
      "@types/react": "npm:@types/react"
    }
  }"#,
      None,
    ),
    (
      "/package-d/deno.json",
      r#"{
    "compilerOptions": { "jsx": "react" },
    "imports": {
      "react": "npm:react",
      "@types/react": "npm:@types/react"
    }
  }"#,
      None,
    ),
    (
      "/package-a/main.tsx",
      "export const component = <div></div>;",
      Some(
        "/** @jsxRuntime automatic *//** @jsxImportSource npm:react *//** @jsxImportSourceTypes npm:@types/react */export const component = <div></div>;",
      ),
    ),
    // No jsxImportSourceTypes configured: types fall back to the
    // execution import source.
    (
      "/package-b/main.tsx",
      "export const componentB = <div></div>;",
      Some(
        "/** @jsxRuntime automatic *//** @jsxImportSource npm:react *//** @jsxImportSourceTypes npm:react */export const componentB = <div></div>;",
      ),
    ),
    // All pragmas already present: the file passes through unchanged.
    (
      "/package-a/other.tsx",
      "/** @jsxImportSource npm:preact */
/** @jsxFragmentFactory h1 */
/** @jsxImportSourceTypes npm:@types/example */
/** @jsxFactory h2 */
/** @jsxRuntime automatic */
export const component = <div></div>;",
      Some(
        "/** @jsxImportSource npm:preact */
/** @jsxFragmentFactory h1 */
/** @jsxImportSourceTypes npm:@types/example */
/** @jsxFactory h2 */
/** @jsxRuntime automatic */
export const component = <div></div>;",
      ),
    ),
    (
      "/package-c/main.tsx",
      "export const component = <div></div>;",
      Some(
        "/** @jsxRuntime automatic *//** @jsxImportSource npm:react *//** @jsxImportSourceTypes npm:@types/react */export const component = <div></div>;",
      ),
    ),
    // Classic runtime gets factory pragmas with the default factories.
    (
      "/package-d/main.tsx",
      "export const component = <div></div>;",
      Some(
        "/** @jsxRuntime classic *//** @jsxFactory React.createElement *//** @jsxFragmentFactory React.Fragment */export const component = <div></div>;",
      ),
    ),
  ]);
}
/// Converts a unix-style fixture path into a platform-appropriate
/// absolute path (prefixes `C:` and flips separators on Windows).
fn get_path(path: &str) -> PathBuf {
  let platform_path = if cfg!(windows) {
    format!("C:{}", path.replace('/', "\\"))
  } else {
    path.to_string()
  };
  PathBuf::from(platform_path)
}
/// Writes all fixture files into an in-memory file system, then asserts
/// that each file with an expected output unfurls to exactly that output.
fn run_test(files: &[(&'static str, &'static str, Option<&'static str>)]) {
  let in_memory_sys = InMemorySys::default();
  for (path, text, _) in files {
    let path = get_path(path);
    in_memory_sys
      .fs_create_dir_all(path.parent().unwrap())
      .unwrap();
    in_memory_sys.fs_write(path, text).unwrap();
  }
  let provider = module_content_provider(in_memory_sys);
  for (path, _, expected) in files {
    // Entries without an expectation are fixtures only (e.g. configs).
    let Some(expected) = expected else {
      continue;
    };
    let path = get_path(path);
    let bytes = provider
      .resolve_content_maybe_unfurling(
        // An empty graph forces the provider down the read-and-parse path.
        &ModuleGraph::new(deno_graph::GraphKind::All),
        &Default::default(),
        &path,
        &url_from_file_path(&path).unwrap(),
      )
      .unwrap();
    assert_eq!(String::from_utf8_lossy(&bytes), *expected);
  }
}
/// Builds a `ModuleContentProvider` wired against the given in-memory
/// file system, discovering the workspace rooted at `/`.
fn module_content_provider(
  sys: InMemorySys,
) -> ModuleContentProvider<InMemorySys> {
  let workspace_dir = deno_config::workspace::WorkspaceDirectory::discover(
    &sys,
    WorkspaceDiscoverStart::Paths(&[get_path("/")]),
    &Default::default(),
  )
  .unwrap();
  let resolver = Arc::new(
    WorkspaceResolver::from_workspace(
      &workspace_dir.workspace,
      sys.clone(),
      Default::default(),
    )
    .unwrap(),
  );
  let package_json_resolver =
    Arc::new(PackageJsonResolver::new(sys.clone(), None));
  // BYONM node resolution without a node_modules dir; sufficient for
  // these tests since nothing is actually resolved from npm.
  let node_resolver = Arc::new(NodeResolver::new(
    DenoInNpmPackageChecker::new(CreateInNpmPkgCheckerOptions::Byonm),
    DenoIsBuiltInNodeModuleChecker,
    CliNpmResolver::new(NpmResolverCreateOptions::Byonm(
      ByonmNpmResolverCreateOptions {
        root_node_modules_dir: None,
        sys: NodeResolutionSys::new(sys.clone(), None),
        pkg_json_resolver: package_json_resolver.clone(),
      },
    )),
    package_json_resolver,
    NodeResolutionSys::new(sys.clone(), None),
    NodeResolverOptions::default(),
  ));
  let compiler_options_resolver = Arc::new(CompilerOptionsResolver::new(
    &sys,
    &workspace_dir.workspace,
    &node_resolver,
    &ConfigDiscoveryOption::DiscoverCwd,
    &CompilerOptionsOverrides::default(),
  ));
  resolver.set_compiler_options_resolver(compiler_options_resolver.clone());
  let specifier_unfurler = SpecifierUnfurler::new(None, resolver, false);
  ModuleContentProvider::new(
    Arc::new(ParsedSourceCache::default()),
    specifier_unfurler,
    sys,
    compiler_options_resolver,
  )
}
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/tools/publish/mod.rs | cli/tools/publish/mod.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::collections::HashMap;
use std::collections::HashSet;
use std::io::IsTerminal;
use std::path::Path;
use std::path::PathBuf;
use std::process::Stdio;
use std::rc::Rc;
use std::sync::Arc;
use base64::Engine;
use base64::prelude::BASE64_STANDARD;
use deno_ast::ModuleSpecifier;
use deno_ast::SourceTextInfo;
use deno_config::deno_json::ConfigFile;
use deno_config::workspace::JsrPackageConfig;
use deno_config::workspace::Workspace;
use deno_core::anyhow::Context;
use deno_core::anyhow::bail;
use deno_core::error::AnyError;
use deno_core::futures::FutureExt;
use deno_core::futures::StreamExt;
use deno_core::futures::future::LocalBoxFuture;
use deno_core::futures::stream::FuturesUnordered;
use deno_core::serde_json;
use deno_core::serde_json::Value;
use deno_core::serde_json::json;
use deno_core::url::Url;
use deno_resolver::collections::FolderScopedMap;
use deno_runtime::deno_fetch;
use deno_terminal::colors;
use http_body_util::BodyExt;
use serde::Deserialize;
use serde::Serialize;
use sha2::Digest;
use tokio::process::Command;
use self::diagnostics::PublishDiagnostic;
use self::diagnostics::PublishDiagnosticsCollector;
use self::diagnostics::RelativePackageImportDiagnosticReferrer;
use self::graph::GraphDiagnosticsCollector;
use self::module_content::ModuleContentProvider;
use self::paths::CollectedPublishPath;
use self::tar::PublishableTarball;
use crate::args::CliOptions;
use crate::args::Flags;
use crate::args::PublishFlags;
use crate::args::jsr_api_url;
use crate::args::jsr_url;
use crate::factory::CliFactory;
use crate::graph_util::CreatePublishGraphOptions;
use crate::graph_util::ModuleGraphCreator;
use crate::http_util::HttpClient;
use crate::registry;
use crate::tools::lint::collect_no_slow_type_diagnostics;
use crate::type_checker::CheckOptions;
use crate::type_checker::TypeChecker;
use crate::util::display::human_size;
mod auth;
mod diagnostics;
mod graph;
mod module_content;
mod paths;
mod provenance;
mod publish_order;
mod tar;
mod unfurl;
use auth::AuthMethod;
use auth::get_auth_method;
use publish_order::PublishOrderGraph;
use unfurl::SpecifierUnfurler;
/// Entry point for `deno publish`: validates the workspace's publishable
/// packages, prepares tarballs (unfurling, fast check, type check), and
/// uploads them to the registry unless `--dry-run` was passed.
pub async fn publish(
  flags: Arc<Flags>,
  publish_flags: PublishFlags,
) -> Result<(), AnyError> {
  let cli_factory = CliFactory::from_flags(flags);
  let auth_method =
    get_auth_method(publish_flags.token, publish_flags.dry_run)?;
  let cli_options = cli_factory.cli_options()?;
  let directory_path = cli_options.initial_cwd();
  let mut publish_configs = cli_options.start_dir.jsr_packages_for_publish();
  if publish_configs.is_empty() {
    // Explain *why* nothing is publishable with the most specific error.
    match cli_options.start_dir.member_deno_json() {
      Some(deno_json) => {
        debug_assert!(!deno_json.is_package() || !deno_json.should_publish());
        if deno_json.json.name.is_none() {
          bail!("Missing 'name' field in '{}'.", deno_json.specifier);
        }
        if !deno_json.should_publish() {
          bail!(
            "Package 'publish' field is false in '{}'.",
            deno_json.specifier
          );
        }
        error_missing_exports_field(deno_json)?;
      }
      None => {
        bail!(
          "Couldn't find a deno.json, deno.jsonc, jsr.json or jsr.jsonc configuration file in {}.",
          directory_path.display()
        );
      }
    }
  }
  // --set-version only makes sense for a single package.
  if let Some(version) = &publish_flags.set_version {
    if publish_configs.len() > 1 {
      bail!(
        "Cannot use --set-version when publishing a workspace. Change your cwd to an individual package instead."
      );
    }
    if let Some(publish_config) = publish_configs.get_mut(0) {
      let mut config_file = publish_config.config_file.as_ref().clone();
      config_file.json.version = Some(version.clone());
      publish_config.config_file = Arc::new(config_file);
    }
  }
  let specifier_unfurler = SpecifierUnfurler::new(
    Some(cli_factory.node_resolver().await?.clone()),
    cli_factory.workspace_resolver().await?.clone(),
    cli_options.unstable_bare_node_builtins(),
  );
  let diagnostics_collector = PublishDiagnosticsCollector::default();
  let parsed_source_cache = cli_factory.parsed_source_cache()?;
  let module_content_provider = Arc::new(ModuleContentProvider::new(
    parsed_source_cache.clone(),
    specifier_unfurler,
    cli_factory.sys(),
    cli_factory.compiler_options_resolver()?.clone(),
  ));
  let publish_preparer = PublishPreparer::new(
    GraphDiagnosticsCollector::new(parsed_source_cache.clone()),
    cli_factory.module_graph_creator().await?.clone(),
    cli_factory.type_checker().await?.clone(),
    cli_options.clone(),
    module_content_provider,
  );
  let prepared_data = publish_preparer
    .prepare_packages_for_publishing(
      publish_flags.allow_slow_types,
      &diagnostics_collector,
      publish_configs,
    )
    .await?;
  diagnostics_collector.print_and_error()?;
  if prepared_data.package_by_name.is_empty() {
    bail!("No packages to publish");
  }
  // Refuse to publish from a dirty git repo unless explicitly allowed.
  if std::env::var("DENO_TESTING_DISABLE_GIT_CHECK")
    .ok()
    .is_none()
    && !publish_flags.allow_dirty
    && let Some(dirty_text) =
      check_if_git_repo_dirty(cli_options.initial_cwd()).await
  {
    log::error!("\nUncommitted changes:\n\n{}\n", dirty_text);
    bail!(
      "Aborting due to uncommitted changes. Check in source code or run with --allow-dirty"
    );
  }
  // Dry run: report what would be uploaded, then stop.
  if publish_flags.dry_run {
    for (_, package) in prepared_data.package_by_name {
      log::info!(
        "{} of {} with files:",
        colors::green_bold("Simulating publish"),
        colors::gray(package.display_name()),
      );
      for file in &package.tarball.files {
        log::info!(" {} ({})", file.specifier, human_size(file.size as f64),);
      }
    }
    log::warn!("{} Dry run complete", colors::green("Success"));
    return Ok(());
  }
  perform_publish(
    &cli_factory.http_client_provider().get_or_create()?,
    prepared_data.publish_order_graph,
    prepared_data.package_by_name,
    auth_method,
    !publish_flags.no_provenance,
  )
  .await?;
  Ok(())
}
/// A package that has been tarred up and is ready to upload.
struct PreparedPublishPackage {
  scope: String,
  package: String,
  version: String,
  tarball: PublishableTarball,
  // Serialized config file text included in the publish request.
  config: String,
  exports: HashMap<String, String>,
}
impl PreparedPublishPackage {
  /// Human-readable `@scope/package@version` identifier for logs.
  pub fn display_name(&self) -> String {
    format!("@{}/{}@{}", self.scope, self.package, self.version)
  }
}
/// All prepared packages plus the dependency order to publish them in.
struct PreparePackagesData {
  publish_order_graph: PublishOrderGraph,
  package_by_name: HashMap<String, Rc<PreparedPublishPackage>>,
}
/// Builds the module graph, runs fast/type checks, and produces
/// publishable tarballs for every package in the workspace.
struct PublishPreparer {
  graph_diagnostics_collector: GraphDiagnosticsCollector,
  module_graph_creator: Arc<ModuleGraphCreator>,
  type_checker: Arc<TypeChecker>,
  cli_options: Arc<CliOptions>,
  module_content_provider: Arc<ModuleContentProvider>,
}
impl PublishPreparer {
/// Creates a preparer from its collaborating services.
pub fn new(
  graph_diagnostics_collector: GraphDiagnosticsCollector,
  module_graph_creator: Arc<ModuleGraphCreator>,
  type_checker: Arc<TypeChecker>,
  cli_options: Arc<CliOptions>,
  module_content_provider: Arc<ModuleContentProvider>,
) -> Self {
  Self {
    graph_diagnostics_collector,
    module_graph_creator,
    type_checker,
    cli_options,
    module_content_provider,
  }
}
/// Builds and checks the publish graph, then prepares every package's
/// tarball concurrently, returning them keyed by package name together
/// with the dependency-respecting publish order.
pub async fn prepare_packages_for_publishing(
  &self,
  allow_slow_types: bool,
  diagnostics_collector: &PublishDiagnosticsCollector,
  publish_configs: Vec<JsrPackageConfig>,
) -> Result<PreparePackagesData, AnyError> {
  if publish_configs.len() > 1 {
    log::info!("Publishing a workspace...");
  }
  // create the module graph
  let graph = self
    .build_and_check_graph_for_publish(
      allow_slow_types,
      diagnostics_collector,
      &publish_configs,
    )
    .await?;
  let mut package_by_name = HashMap::with_capacity(publish_configs.len());
  let publish_order_graph =
    publish_order::build_publish_order_graph(&graph, &publish_configs)?;
  // Prepare all packages concurrently on the current thread.
  let results = publish_configs
    .into_iter()
    .map(|member| {
      let graph = graph.clone();
      async move {
        let package = self
          .prepare_publish(&member, graph, diagnostics_collector)
          .await
          .with_context(|| format!("Failed preparing '{}'.", member.name))?;
        Ok::<_, AnyError>((member.name, package))
      }
      .boxed()
    })
    .collect::<Vec<_>>();
  let results = deno_core::futures::future::join_all(results).await;
  for result in results {
    let (package_name, package) = result?;
    package_by_name.insert(package_name, package);
  }
  Ok(PreparePackagesData {
    publish_order_graph,
    package_by_name,
  })
}
/// Builds the publish module graph and validates it.
///
/// With `allow_slow_types` the fast-check graph is skipped and only a
/// warning is printed. Otherwise slow-type diagnostics are collected
/// and, when clean, the fast-check output is additionally type checked.
/// `DENO_INTERNAL_FAST_CHECK_OVERWRITE=1` is an internal escape hatch
/// that writes fast-check output over the source files and aborts.
async fn build_and_check_graph_for_publish(
  &self,
  allow_slow_types: bool,
  diagnostics_collector: &PublishDiagnosticsCollector,
  package_configs: &[JsrPackageConfig],
) -> Result<Arc<deno_graph::ModuleGraph>, deno_core::anyhow::Error> {
  let build_fast_check_graph = !allow_slow_types;
  let graph = self
    .module_graph_creator
    .create_publish_graph(CreatePublishGraphOptions {
      packages: package_configs,
      build_fast_check_graph,
      validate_graph: true,
    })
    .await?;
  // todo(dsherret): move to lint rule
  self
    .graph_diagnostics_collector
    .collect_diagnostics_for_graph(&graph, diagnostics_collector)?;
  if allow_slow_types {
    log::info!(
      concat!(
        "{} Publishing a library with slow types is not recommended. ",
        "This may lead to poor type checking performance for users of ",
        "your package, may affect the quality of automatic documentation ",
        "generation, and your package will not be shipped with a .d.ts ",
        "file for Node.js users."
      ),
      colors::yellow("Warning"),
    );
    Ok(Arc::new(graph))
  } else if std::env::var("DENO_INTERNAL_FAST_CHECK_OVERWRITE").as_deref()
    == Ok("1")
  {
    // Internal tooling path: overwrite sources with fast-check output.
    if check_if_git_repo_dirty(self.cli_options.initial_cwd())
      .await
      .is_some()
    {
      bail!(
        "When using DENO_INTERNAL_FAST_CHECK_OVERWRITE, the git repo must be in a clean state."
      );
    }
    for module in graph.modules() {
      if module.specifier().scheme() != "file" {
        continue;
      }
      let Some(js) = module.js() else {
        continue;
      };
      if let Some(module) = js.fast_check_module() {
        std::fs::write(
          js.specifier.to_file_path().unwrap(),
          module.source.as_ref(),
        )?;
      }
    }
    bail!("Exiting due to DENO_INTERNAL_FAST_CHECK_OVERWRITE")
  } else {
    log::info!("Checking for slow types in the public API...");
    for package in package_configs {
      let export_urls = package.config_file.resolve_export_value_urls()?;
      let diagnostics =
        collect_no_slow_type_diagnostics(&graph, &export_urls);
      if !diagnostics.is_empty() {
        for diagnostic in diagnostics {
          diagnostics_collector
            .push(PublishDiagnostic::FastCheck(diagnostic));
        }
      }
    }
    // skip type checking the slow type graph if there are any errors because
    // errors like remote modules existing will cause type checking to crash
    if diagnostics_collector.has_error() {
      Ok(Arc::new(graph))
    } else {
      // fast check passed, type check the output as a temporary measure
      // until we know that it's reliable and stable
      let mut diagnostics_by_folder = self.type_checker.check_diagnostics(
        graph,
        CheckOptions {
          build_fast_check_graph: false, // already built
          lib: self.cli_options.ts_type_lib_window(),
          reload: self.cli_options.reload_flag(),
          type_check_mode: self.cli_options.type_check_mode(),
        },
      )?;
      // ignore unused parameter diagnostics that may occur due to fast check
      // not having function body implementations
      for result in diagnostics_by_folder.by_ref() {
        let check_diagnostics = result?;
        let check_diagnostics =
          check_diagnostics.filter(|d| d.include_when_remote());
        if check_diagnostics.has_diagnostic() {
          bail!(
            concat!(
              "Failed ensuring public API type output is valid.\n\n",
              "{:#}\n\n",
              "You may have discovered a bug in Deno. Please open an issue at: ",
              "https://github.com/denoland/deno/issues/"
            ),
            check_diagnostics
          );
        }
      }
      Ok(diagnostics_by_folder.into_graph())
    }
  }
}
#[allow(clippy::too_many_arguments)]
async fn prepare_publish(
&self,
package: &JsrPackageConfig,
graph: Arc<deno_graph::ModuleGraph>,
diagnostics_collector: &PublishDiagnosticsCollector,
) -> Result<Rc<PreparedPublishPackage>, AnyError> {
let deno_json = &package.config_file;
let config_path = deno_json.specifier.to_file_path().unwrap();
let root_dir = config_path.parent().unwrap().to_path_buf();
let version = deno_json.json.version.clone().ok_or_else(|| {
deno_core::anyhow::anyhow!(
"{} is missing 'version' field",
deno_json.specifier
)
})?;
let Some(name_no_at) = package.name.strip_prefix('@') else {
bail!("Invalid package name, use '@<scope_name>/<package_name> format");
};
let Some((scope, name_no_scope)) = name_no_at.split_once('/') else {
bail!("Invalid package name, use '@<scope_name>/<package_name> format");
};
let file_patterns = package.member_dir.to_publish_config()?.files;
let tarball = deno_core::unsync::spawn_blocking({
let diagnostics_collector = diagnostics_collector.clone();
let module_content_provider = self.module_content_provider.clone();
let cli_options = self.cli_options.clone();
let config_path = config_path.clone();
let config_url = deno_json.specifier.clone();
let has_license_field = package.license.is_some();
let current_package_name = package.name.clone();
move || {
let root_specifier =
ModuleSpecifier::from_directory_path(&root_dir).unwrap();
let mut publish_paths =
paths::collect_publish_paths(paths::CollectPublishPathsOptions {
root_dir: &root_dir,
cli_options: &cli_options,
diagnostics_collector: &diagnostics_collector,
file_patterns,
force_include_paths: vec![config_path],
})?;
let all_jsr_packages = FolderScopedMap::from_map(
cli_options
.workspace()
.jsr_packages()
.map(|pkg| (pkg.member_dir.dir_url().clone(), pkg))
.collect(),
);
collect_excluded_module_diagnostics(
&root_specifier,
&graph,
¤t_package_name,
&all_jsr_packages,
&publish_paths,
&diagnostics_collector,
);
if !has_license_field
&& !has_license_file(publish_paths.iter().map(|p| &p.specifier))
{
if let Some(license_path) =
resolve_license_file(&root_dir, cli_options.workspace())
{
// force including the license file from the package or workspace root
publish_paths.push(CollectedPublishPath {
specifier: ModuleSpecifier::from_file_path(&license_path)
.unwrap(),
relative_path: "/LICENSE".to_string(),
maybe_content: Some(std::fs::read(&license_path).with_context(
|| format!("failed reading '{}'.", license_path.display()),
)?),
path: license_path,
});
} else {
diagnostics_collector.push(PublishDiagnostic::MissingLicense {
config_specifier: config_url,
});
}
}
tar::create_gzipped_tarball(
&module_content_provider,
&graph,
&diagnostics_collector,
publish_paths,
)
.context("Failed to create a tarball")
}
})
.await??;
log::debug!("Tarball size ({}): {}", package.name, tarball.bytes.len());
Ok(Rc::new(PreparedPublishPackage {
scope: scope.to_string(),
package: name_no_scope.to_string(),
version: version.to_string(),
tarball,
exports: match &deno_json.json.exports {
Some(Value::Object(exports)) => exports
.into_iter()
.map(|(k, v)| (k.to_string(), v.as_str().unwrap().to_string()))
.collect(),
Some(Value::String(exports)) => {
let mut map = HashMap::new();
map.insert(".".to_string(), exports.to_string());
map
}
_ => HashMap::new(),
},
// the config file is always at the root of a publishing dir,
// so getting the file name is always correct
config: config_path
.file_name()
.unwrap()
.to_string_lossy()
.into_owned(),
}))
}
}
/// A permission requested from the registry when creating an
/// authorization; serialized into the request body with a
/// `"permission"` tag field.
#[derive(Serialize)]
#[serde(tag = "permission")]
pub enum Permission<'s> {
  /// Permission to publish one specific package version, pinned to the
  /// hash of the tarball being uploaded.
  #[serde(rename = "package/publish", rename_all = "camelCase")]
  VersionPublish {
    scope: &'s str,
    package: &'s str,
    version: &'s str,
    tarball_hash: &'s str,
  },
}
/// Resolves an `Authorization` header value for every package that will
/// be published, keyed by `(scope, package, version)`.
///
/// Depending on `auth_method`:
/// - `Interactive`: creates an authorization on the registry, points the
///   user at a browser approval page, and polls the exchange endpoint
///   until approved; one bearer token covers all packages.
/// - `Token`: uses the provided bearer token for all packages.
/// - `Oidc`: fetches a GitHub Actions OIDC token per chunk of 16
///   packages, encoding the chunk's permissions into the audience.
async fn get_auth_headers(
  client: &HttpClient,
  registry_url: &Url,
  packages: &[Rc<PreparedPublishPackage>],
  auth_method: AuthMethod,
) -> Result<HashMap<(String, String, String), Rc<str>>, AnyError> {
  // one publish permission per package, pinned to its tarball hash
  let permissions = packages
    .iter()
    .map(|package| Permission::VersionPublish {
      scope: &package.scope,
      package: &package.package,
      version: &package.version,
      tarball_hash: &package.tarball.hash,
    })
    .collect::<Vec<_>>();

  let mut authorizations = HashMap::with_capacity(packages.len());
  match auth_method {
    AuthMethod::Interactive => {
      // challenge = base64(sha256(verifier)); the verifier is only
      // revealed later in the exchange request
      let verifier = uuid::Uuid::new_v4().to_string();
      let challenge = BASE64_STANDARD.encode(sha2::Sha256::digest(&verifier));
      let response = client
        .post_json(
          format!("{}authorizations", registry_url).parse()?,
          &serde_json::json!({
            "challenge": challenge,
            "permissions": permissions,
          }),
        )?
        .send()
        .await
        .context("Failed to create interactive authorization")?;
      let auth = registry::parse_response::<
        registry::CreateAuthorizationResponse,
      >(response)
      .await
      .context("Failed to create interactive authorization")?;

      // direct the user to the browser approval page
      let auth_url = format!("{}?code={}", auth.verification_url, auth.code);
      let pkgs_text = if packages.len() > 1 {
        format!("{} packages", packages.len())
      } else {
        format!("@{}/{}", packages[0].scope, packages[0].package)
      };
      log::warn!(
        "Visit {} to authorize publishing of {}",
        colors::cyan(&auth_url),
        pkgs_text,
      );
      ring_bell();
      log::info!("{}", colors::gray("Waiting..."));
      let _ = open::that_detached(&auth_url);

      // poll the exchange endpoint at the server-suggested interval until
      // the user approves (or a real error occurs)
      let interval = std::time::Duration::from_secs(auth.poll_interval);
      loop {
        tokio::time::sleep(interval).await;
        let response = client
          .post_json(
            format!("{}authorizations/exchange", registry_url).parse()?,
            &serde_json::json!({
              "exchangeToken": auth.exchange_token,
              "verifier": verifier,
            }),
          )?
          .send()
          .await
          .context("Failed to exchange authorization")?;
        let res = registry::parse_response::<
          registry::ExchangeAuthorizationResponse,
        >(response)
        .await;
        match res {
          Ok(res) => {
            log::info!(
              "{} {} {}",
              colors::green("Authorization successful."),
              colors::gray("Authenticated as"),
              colors::cyan(res.user.name)
            );
            // the same bearer token authorizes every package
            let authorization: Rc<str> = format!("Bearer {}", res.token).into();
            for pkg in packages {
              authorizations.insert(
                (pkg.scope.clone(), pkg.package.clone(), pkg.version.clone()),
                authorization.clone(),
              );
            }
            break;
          }
          Err(err) => {
            // "authorizationPending" just means the user hasn't approved yet
            if err.code == "authorizationPending" {
              continue;
            } else {
              return Err(err).context("Failed to exchange authorization");
            }
          }
        }
      }
    }
    AuthMethod::Token(token) => {
      let authorization: Rc<str> = format!("Bearer {}", token).into();
      for pkg in packages {
        authorizations.insert(
          (pkg.scope.clone(), pkg.package.clone(), pkg.version.clone()),
          authorization.clone(),
        );
      }
    }
    AuthMethod::Oidc(oidc_config) => {
      // packages and permissions are chunked in lockstep (16 at a time) so
      // each OIDC token covers exactly the permissions of its chunk
      let mut chunked_packages = packages.chunks(16);
      for permissions in permissions.chunks(16) {
        // the chunk's permissions are encoded into the OIDC audience
        let audience = json!({ "permissions": permissions }).to_string();
        let url = format!(
          "{}&audience={}",
          oidc_config.url,
          percent_encoding::percent_encode(
            audience.as_bytes(),
            percent_encoding::NON_ALPHANUMERIC
          )
        );
        let response = client
          .get(url.parse()?)?
          .header(
            http::header::AUTHORIZATION,
            format!("Bearer {}", oidc_config.token).parse()?,
          )
          .send()
          .await
          .context("Failed to get OIDC token")?;
        let status = response.status();
        let text = crate::http_util::body_to_string(response)
          .await
          .with_context(|| {
            format!("Failed to get OIDC token: status {}", status)
          })?;
        if !status.is_success() {
          bail!(
            "Failed to get OIDC token: status {}, response: '{}'",
            status,
            text
          );
        }
        let registry::OidcTokenResponse { value } = serde_json::from_str(&text)
          .with_context(|| {
            format!(
              "Failed to parse OIDC token: '{}' (status {})",
              text, status
            )
          })?;
        let authorization: Rc<str> = format!("githuboidc {}", value).into();
        for pkg in chunked_packages.next().unwrap() {
          authorizations.insert(
            (pkg.scope.clone(), pkg.package.clone(), pkg.version.clone()),
            authorization.clone(),
          );
        }
      }
    }
  };
  Ok(authorizations)
}
/// A package that does not exist in the registry yet, together with the
/// management-panel URL where the user can create it.
#[derive(Debug)]
struct CreatePackageInfo {
  scope: String,
  package: String,
  // link to the registry management UI, pre-filled with scope and package
  create_url: String,
}
/// Checks whether `scope`/`package` already exist in the registry.
/// Returns `None` when the package is found; otherwise returns a
/// `CreatePackageInfo` whose `create_url` points at the management
/// panel page for creating it.
async fn check_if_scope_and_package_exist(
  client: &HttpClient,
  registry_api_url: &Url,
  registry_manage_url: &Url,
  scope: &str,
  package: &str,
) -> Result<Option<CreatePackageInfo>, AnyError> {
  let response =
    registry::get_package(client, registry_api_url, scope, package).await?;
  // anything other than a 404 means the package (and thus scope) exists
  if response.status() != 404 {
    return Ok(None);
  }
  Ok(Some(CreatePackageInfo {
    scope: scope.to_string(),
    package: package.to_string(),
    create_url: format!(
      "{}new?scope={}&package={}&from=cli",
      registry_manage_url, scope, package
    ),
  }))
}
/// Ensures the scope and package for every prepared package exist in the
/// registry. When stdin is not a terminal, missing packages cause an
/// error listing the creation links; otherwise each creation page is
/// opened in the browser and the registry is polled every 3 seconds
/// until the package has been created.
async fn ensure_scopes_and_packages_exist(
  client: &HttpClient,
  registry_api_url: &Url,
  registry_manage_url: &Url,
  packages: &[Rc<PreparedPublishPackage>],
) -> Result<(), AnyError> {
  // check all packages concurrently
  let mut futures = FuturesUnordered::new();
  for package in packages {
    let future = check_if_scope_and_package_exist(
      client,
      registry_api_url,
      registry_manage_url,
      &package.scope,
      &package.package,
    );
    futures.push(future);
  }
  let mut missing_packages = vec![];
  while let Some(maybe_create_package_info) = futures.next().await {
    if let Some(create_package_info) = maybe_create_package_info? {
      missing_packages.push(create_package_info);
    };
  }

  // non-interactive: cannot wait for the user, so fail with the links
  if !std::io::stdin().is_terminal() {
    let missing_packages_lines: Vec<_> = missing_packages
      .into_iter()
      .map(|info| format!("- {}", info.create_url))
      .collect();
    if !missing_packages_lines.is_empty() {
      bail!(
        "Following packages don't exist, follow the links and create them:\n{}",
        missing_packages_lines.join("\n")
      );
    }
    return Ok(());
  }

  // interactive: open the creation page and poll until the package exists
  for create_package_info in missing_packages {
    ring_bell();
    log::warn!(
      "'@{}/{}' doesn't exist yet. Visit {} to create the package",
      &create_package_info.scope,
      &create_package_info.package,
      colors::cyan_with_underline(&create_package_info.create_url)
    );
    log::warn!("{}", colors::gray("Waiting..."));
    let _ = open::that_detached(&create_package_info.create_url);

    let package_api_url = registry::get_package_api_url(
      registry_api_url,
      &create_package_info.scope,
      &create_package_info.package,
    );
    loop {
      tokio::time::sleep(std::time::Duration::from_secs(3)).await;
      let response = client.get(package_api_url.parse()?)?.send().await?;
      // a 200 from the package endpoint means it now exists
      if response.status() == 200 {
        let name = format!(
          "@{}/{}",
          create_package_info.scope, create_package_info.package
        );
        log::info!("Package {} created", colors::green(name));
        break;
      }
    }
  }
  Ok(())
}
/// Publishes all prepared packages in dependency order.
///
/// First ensures every scope/package exists and obtains one authorization
/// per package, then repeatedly takes the next batch of publishable
/// packages from the order graph, publishes them concurrently, and feeds
/// completions back into the graph to unlock dependents.
async fn perform_publish(
  http_client: &HttpClient,
  mut publish_order_graph: PublishOrderGraph,
  mut prepared_package_by_name: HashMap<String, Rc<PreparedPublishPackage>>,
  auth_method: AuthMethod,
  provenance: bool,
) -> Result<(), AnyError> {
  let registry_api_url = jsr_api_url();
  let registry_url = jsr_url();
  let packages = prepared_package_by_name
    .values()
    .cloned()
    .collect::<Vec<_>>();
  ensure_scopes_and_packages_exist(
    http_client,
    registry_api_url,
    registry_url,
    &packages,
  )
  .await?;
  let mut authorizations =
    get_auth_headers(http_client, registry_api_url, &packages, auth_method)
      .await?;
  // invariant: every prepared package received an authorization
  assert_eq!(prepared_package_by_name.len(), authorizations.len());
  let mut futures: FuturesUnordered<LocalBoxFuture<Result<String, AnyError>>> =
    Default::default();
  loop {
    // start publishing every package whose dependencies are done
    let next_batch = publish_order_graph.next();
    for package_name in next_batch {
      let package = prepared_package_by_name.remove(&package_name).unwrap();

      // todo(dsherret): output something that looks better than this even not in debug
      if log::log_enabled!(log::Level::Debug) {
        log::debug!("Publishing {}", package.display_name());
        for file in &package.tarball.files {
          log::debug!(
            " Tarball file {} {}",
            human_size(file.size as f64),
            file.specifier
          );
        }
      }

      let authorization = authorizations
        .remove(&(
          package.scope.clone(),
          package.package.clone(),
          package.version.clone(),
        ))
        .unwrap();
      futures.push(
        async move {
          let display_name = package.display_name();
          Box::pin(publish_package(
            http_client,
            package,
            registry_api_url,
            registry_url,
            &authorization,
            provenance,
          ))
          .await
          .with_context(|| format!("Failed to publish {}", display_name))?;
          Ok(package_name)
        }
        .boxed_local(),
      );
    }

    let Some(result) = futures.next().await else {
      // done, ensure no circular dependency
      publish_order_graph.ensure_no_pending()?;
      break;
    };
    let package_name = result?;
    // unblock packages that depended on the one that just finished
    publish_order_graph.finish_package(&package_name);
  }
  Ok(())
}
async fn publish_package(
http_client: &HttpClient,
package: Rc<PreparedPublishPackage>,
registry_api_url: &Url,
registry_url: &Url,
authorization: &str,
provenance: bool,
) -> Result<(), AnyError> {
log::info!(
"{} @{}/{}@{} ...",
colors::intense_blue("Publishing"),
package.scope,
package.package,
package.version
);
let url = format!(
"{}scopes/{}/packages/{}/versions/{}?config=/{}",
registry_api_url,
package.scope,
package.package,
package.version,
package.config
);
let body = deno_fetch::ReqBody::full(package.tarball.bytes.clone());
let response = http_client
.post(url.parse()?, body)?
.header(
http::header::AUTHORIZATION,
authorization.parse().map_err(http::Error::from)?,
)
.header(
http::header::CONTENT_ENCODING,
"gzip".parse().map_err(http::Error::from)?,
)
.send()
.await?;
let res =
registry::parse_response::<registry::PublishingTask>(response).await;
let mut task = match res {
Ok(task) => task,
Err(mut err) if err.code == "duplicateVersionPublish" => {
let task = serde_json::from_value::<registry::PublishingTask>(
err.data.get_mut("task").unwrap().take(),
)
.unwrap();
if task.status == "success" {
log::info!(
"{} @{}/{}@{}",
colors::yellow("Warning: Skipping, already published"),
package.scope,
package.package,
package.version
);
return Ok(());
}
log::info!(
"{} @{}/{}@{}",
colors::yellow("Already uploaded, waiting for publishing"),
package.scope,
package.package,
package.version
);
task
}
Err(err) => {
return Err(err).with_context(|| {
format!(
"Failed to publish @{}/{} at {}",
package.scope, package.package, package.version
)
});
}
};
let interval = std::time::Duration::from_secs(2);
while task.status != "success" && task.status != "failure" {
tokio::time::sleep(interval).await;
let resp = http_client
.get(format!("{}publish_status/{}", registry_api_url, task.id).parse()?)?
.send()
.await
.with_context(|| {
format!(
"Failed to get publishing status for @{}/{} at {}",
package.scope, package.package, package.version
)
})?;
task = registry::parse_response::<registry::PublishingTask>(resp)
.await
.with_context(|| {
format!(
"Failed to get publishing status for @{}/{} at {}",
package.scope, package.package, package.version
)
})?;
}
if let Some(error) = task.error {
bail!(
"{} @{}/{} at {}: {}",
colors::red("Failed to publish"),
package.scope,
package.package,
package.version,
error.message
);
}
let enable_provenance = std::env::var("DISABLE_JSR_PROVENANCE").is_err()
&& (auth::is_gha() && auth::gha_oidc_token().is_some() && provenance);
// Enable provenance by default on Github actions with OIDC token
if enable_provenance {
// Get the version manifest from the registry
let meta_url = jsr_url().join(&format!(
"@{}/{}/{}_meta.json",
package.scope, package.package, package.version
))?;
let resp = http_client.get(meta_url)?.send().await?;
let meta_bytes = resp.collect().await?.to_bytes();
if std::env::var("DISABLE_JSR_MANIFEST_VERIFICATION_FOR_TESTING").is_err() {
verify_version_manifest(&meta_bytes, &package)?;
}
let subject = provenance::Subject {
name: format!(
"pkg:jsr/@{}/{}@{}",
package.scope, package.package, package.version
),
digest: provenance::SubjectDigest {
sha256: faster_hex::hex_string(&sha2::Sha256::digest(&meta_bytes)),
},
};
let bundle =
Box::pin(provenance::generate_provenance(http_client, vec![subject]))
.await?;
let tlog_entry = &bundle.verification_material.tlog_entries[0];
log::info!(
"{}",
colors::green(format!(
"Provenance transparency log available at https://search.sigstore.dev/?logIndex={}",
tlog_entry.log_index
))
);
// Submit bundle to JSR
let provenance_url = format!(
"{}scopes/{}/packages/{}/versions/{}/provenance",
registry_api_url, package.scope, package.package, package.version
);
http_client
.post_json(provenance_url.parse()?, &json!({ "bundle": bundle }))?
.header(http::header::AUTHORIZATION, authorization.parse()?)
.send()
.await?;
}
log::info!(
"{} @{}/{}@{}",
colors::green("Successfully published"),
package.scope,
package.package,
package.version
);
log::info!(
"{}",
colors::gray(format!(
"Visit {}@{}/{}@{} for details",
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | true |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/tools/publish/graph.rs | cli/tools/publish/graph.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::collections::HashSet;
use std::sync::Arc;
use deno_ast::ParsedSource;
use deno_ast::SourceRangedForSpanned;
use deno_ast::SourceTextInfo;
use deno_ast::swc::common::comments::CommentKind;
use deno_core::error::AnyError;
use deno_core::url::Url;
use deno_graph::ModuleEntryRef;
use deno_graph::ModuleGraph;
use deno_graph::ResolutionResolved;
use deno_graph::WalkOptions;
use deno_resolver::cache::ParsedSourceCache;
use deno_semver::jsr::JsrPackageReqReference;
use deno_semver::npm::NpmPackageReqReference;
use super::diagnostics::PublishDiagnostic;
use super::diagnostics::PublishDiagnosticsCollector;
/// Walks a module graph and collects publish-time diagnostics (invalid
/// external imports, missing version constraints, syntax errors, and
/// banned triple-slash directives).
pub struct GraphDiagnosticsCollector {
  // used to obtain parsed sources so syntax/comment-based checks can run
  parsed_source_cache: Arc<ParsedSourceCache>,
}
impl GraphDiagnosticsCollector {
  pub fn new(parsed_source_cache: Arc<ParsedSourceCache>) -> Self {
    Self {
      parsed_source_cache,
    }
  }

  /// Walks the whole module graph (following dynamic imports, not just
  /// the fast-check subset) and pushes publish diagnostics for:
  /// - `jsr:`/`npm:` specifiers with a `*` (missing) version constraint,
  /// - remote `http`/`https` and unknown-scheme imports,
  /// - parse diagnostics (syntax errors) in each JS/TS module,
  /// - banned triple-slash directives,
  /// - raw/asset imports (reported as unstable).
  pub fn collect_diagnostics_for_graph(
    &self,
    graph: &ModuleGraph,
    diagnostics_collector: &PublishDiagnosticsCollector,
  ) -> Result<(), AnyError> {
    // resolved specifiers we've already checked once
    let mut visited = HashSet::new();
    // specifiers whose dependency subtrees should not be descended into
    let mut skip_specifiers: HashSet<Url> = HashSet::new();
    // shared check applied to both the code and type resolution of a dep
    let mut collect_if_invalid =
      |skip_specifiers: &mut HashSet<Url>,
       source_text: &Arc<str>,
       specifier_text: &str,
       resolution: &ResolutionResolved| {
        if visited.insert(resolution.specifier.clone()) {
          match resolution.specifier.scheme() {
            // these schemes are acceptable as-is
            "file" | "data" | "node" | "bun" | "virtual" | "cloudflare" => {}
            "jsr" => {
              skip_specifiers.insert(resolution.specifier.clone());

              // check for a missing version constraint
              if let Ok(jsr_req_ref) =
                JsrPackageReqReference::from_specifier(&resolution.specifier)
                && jsr_req_ref.req().version_req.version_text() == "*"
              {
                // include the version the wildcard resolved to, if known
                let maybe_version = graph
                  .packages
                  .mappings()
                  .get(jsr_req_ref.req())
                  .map(|nv| nv.version.clone());
                diagnostics_collector.push(
                  PublishDiagnostic::MissingConstraint {
                    specifier: resolution.specifier.clone(),
                    specifier_text: specifier_text.to_string(),
                    resolved_version: maybe_version,
                    text_info: SourceTextInfo::new(source_text.clone()),
                    referrer: resolution.range.clone(),
                  },
                );
              }
            }
            "npm" => {
              skip_specifiers.insert(resolution.specifier.clone());

              // check for a missing version constraint
              if let Ok(jsr_req_ref) =
                NpmPackageReqReference::from_specifier(&resolution.specifier)
                && jsr_req_ref.req().version_req.version_text() == "*"
              {
                let maybe_version = graph
                  .get(&resolution.specifier)
                  .and_then(|m| m.npm())
                  .map(|n| {
                    // TODO(dsherret): ok to use for now, but we should use the req in the future
                    #[allow(deprecated)]
                    let nv = n.nv_reference.nv();
                    nv.version.clone()
                  });
                diagnostics_collector.push(
                  PublishDiagnostic::MissingConstraint {
                    specifier: resolution.specifier.clone(),
                    specifier_text: specifier_text.to_string(),
                    resolved_version: maybe_version,
                    text_info: SourceTextInfo::new(source_text.clone()),
                    referrer: resolution.range.clone(),
                  },
                );
              }
            }
            "http" | "https" => {
              // remote non-JSR imports are not allowed in published code
              skip_specifiers.insert(resolution.specifier.clone());
              diagnostics_collector.push(
                PublishDiagnostic::InvalidExternalImport {
                  kind: format!("non-JSR '{}'", resolution.specifier.scheme()),
                  text_info: SourceTextInfo::new(source_text.clone()),
                  imported: resolution.specifier.clone(),
                  referrer: resolution.range.clone(),
                },
              );
            }
            _ => {
              // any other scheme is an invalid external import
              skip_specifiers.insert(resolution.specifier.clone());
              diagnostics_collector.push(
                PublishDiagnostic::InvalidExternalImport {
                  kind: format!("'{}'", resolution.specifier.scheme()),
                  text_info: SourceTextInfo::new(source_text.clone()),
                  imported: resolution.specifier.clone(),
                  referrer: resolution.range.clone(),
                },
              );
            }
          }
        }
      };

    let options = WalkOptions {
      check_js: deno_graph::CheckJsOption::True,
      follow_dynamic: true,
      // search the entire graph and not just the fast check subset
      prefer_fast_check_graph: false,
      kind: deno_graph::GraphKind::All,
    };
    let mut iter = graph.walk(graph.roots.iter(), options);
    while let Some((specifier, entry)) = iter.next() {
      if skip_specifiers.contains(specifier) {
        // don't descend into dependencies of already-flagged specifiers
        iter.skip_previous_dependencies();
        continue;
      }
      let ModuleEntryRef::Module(module) = entry else {
        continue;
      };
      let Some(module) = module.js() else {
        continue;
      };
      let parsed_source = self
        .parsed_source_cache
        .get_parsed_source_from_js_module(module)?;

      // surface syntax errors
      for diagnostic in parsed_source.diagnostics() {
        diagnostics_collector
          .push(PublishDiagnostic::SyntaxError(diagnostic.clone()));
      }

      check_for_banned_triple_slash_directives(
        &parsed_source,
        diagnostics_collector,
      );

      for (specifier_text, dep) in &module.dependencies {
        // raw/asset imports are flagged as unstable for publishing
        for asset_import in
          dep.imports.iter().filter(|i| i.attributes.has_asset())
        {
          diagnostics_collector.push(PublishDiagnostic::UnstableRawImport {
            text_info: parsed_source.text_info_lazy().clone(),
            referrer: asset_import.specifier_range.clone(),
          });
        }
        if let Some(resolved) = dep.maybe_code.ok() {
          collect_if_invalid(
            &mut skip_specifiers,
            &module.source.text,
            specifier_text,
            resolved,
          );
        }
        if let Some(resolved) = dep.maybe_type.ok() {
          collect_if_invalid(
            &mut skip_specifiers,
            &module.source.text,
            specifier_text,
            resolved,
          );
        }
      }
    }

    Ok(())
  }
}
/// Pushes a diagnostic for every leading line comment that is a
/// `/// <reference no-default-lib="true" />` or
/// `/// <reference lib="..." />` triple-slash directive, which are
/// banned in published code.
fn check_for_banned_triple_slash_directives(
  parsed_source: &ParsedSource,
  diagnostics_collector: &PublishDiagnosticsCollector,
) {
  // a line comment starting with `///` has text beginning with `/`,
  // hence the leading `/` in the pattern
  let triple_slash_re = lazy_regex::regex!(
    r#"^/\s+<reference\s+(no-default-lib\s*=\s*"true"|lib\s*=\s*("[^"]+"|'[^']+'))\s*/>\s*$"#
  );
  let Some(comments) = parsed_source.get_leading_comments() else {
    return;
  };
  let banned = comments
    .iter()
    .filter(|c| c.kind == CommentKind::Line)
    .filter(|c| triple_slash_re.is_match(&c.text));
  for comment in banned {
    diagnostics_collector.push(
      PublishDiagnostic::BannedTripleSlashDirectives {
        specifier: parsed_source.specifier().clone(),
        range: comment.range(),
        text_info: parsed_source.text_info_lazy().clone(),
      },
    );
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/tools/publish/unfurl.rs | cli/tools/publish/unfurl.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::sync::Arc;
use deno_ast::ParsedSource;
use deno_ast::SourcePos;
use deno_ast::SourceRange;
use deno_ast::SourceRangedForSpanned;
use deno_ast::SourceTextInfo;
use deno_ast::SourceTextProvider;
use deno_ast::TextChange;
use deno_ast::diagnostics::Diagnostic;
use deno_ast::diagnostics::DiagnosticLevel;
use deno_ast::diagnostics::DiagnosticLocation;
use deno_ast::diagnostics::DiagnosticSnippet;
use deno_ast::diagnostics::DiagnosticSnippetHighlight;
use deno_ast::diagnostics::DiagnosticSnippetHighlightStyle;
use deno_ast::diagnostics::DiagnosticSourcePos;
use deno_ast::diagnostics::DiagnosticSourceRange;
use deno_ast::swc::ast::Callee;
use deno_ast::swc::ast::Expr;
use deno_ast::swc::ast::Lit;
use deno_ast::swc::ast::MemberProp;
use deno_ast::swc::ast::MetaPropKind;
use deno_ast::swc::atoms::Atom;
use deno_ast::swc::ecma_visit::Visit;
use deno_ast::swc::ecma_visit::VisitWith;
use deno_ast::swc::ecma_visit::noop_visit_type;
use deno_core::ModuleSpecifier;
use deno_core::anyhow;
use deno_graph::analysis::DependencyDescriptor;
use deno_graph::analysis::DynamicTemplatePart;
use deno_graph::analysis::StaticDependencyKind;
use deno_graph::analysis::TypeScriptReference;
use deno_package_json::PackageJsonDepValue;
use deno_package_json::PackageJsonDepWorkspaceReq;
use deno_resolver::workspace::MappedResolution;
use deno_resolver::workspace::PackageJsonDepResolution;
use deno_resolver::workspace::WorkspaceResolver;
use deno_runtime::deno_node::is_builtin_node_module;
use deno_semver::Version;
use deno_semver::VersionReq;
use sys_traits::FsMetadata;
use sys_traits::FsRead;
use crate::node::CliNodeResolver;
use crate::sys::CliSys;
/// Diagnostics produced while "unfurling" specifiers (rewriting
/// import-map / package.json based specifiers) for publishing. Each
/// variant carries the referrer module, its text, and the source range
/// of the offending import.
#[derive(Debug, Clone)]
pub enum SpecifierUnfurlerDiagnostic {
  /// A dynamic import whose argument could not be statically analyzed,
  /// so it cannot be rewritten (warning).
  UnanalyzableDynamicImport {
    specifier: ModuleSpecifier,
    text_info: SourceTextInfo,
    range: SourceRange,
  },
  /// An `import.meta.resolve` call whose argument could not be
  /// statically analyzed (warning).
  UnanalyzableImportMetaResolve {
    specifier: ModuleSpecifier,
    text_info: SourceTextInfo,
    range: SourceRange,
  },
  /// An npm workspace package could not be resolved; `reason` holds the
  /// resolution failure detail (error).
  ResolvingNpmWorkspacePackage {
    specifier: ModuleSpecifier,
    package_name: String,
    text_info: SourceTextInfo,
    range: SourceRange,
    reason: String,
  },
  /// A package.json dependency using a `file:` style specifier, which
  /// cannot be published (error).
  UnsupportedPkgJsonFileSpecifier {
    specifier: ModuleSpecifier,
    text_info: SourceTextInfo,
    range: SourceRange,
    package_name: String,
  },
  /// A package.json dependency using a JSR specifier, which must live in
  /// deno.json instead (error).
  UnsupportedPkgJsonJsrSpecifier {
    specifier: ModuleSpecifier,
    text_info: SourceTextInfo,
    range: SourceRange,
    package_name: String,
  },
}
impl Diagnostic for SpecifierUnfurlerDiagnostic {
fn level(&self) -> DiagnosticLevel {
match self {
SpecifierUnfurlerDiagnostic::UnanalyzableDynamicImport { .. } => {
DiagnosticLevel::Warning
}
SpecifierUnfurlerDiagnostic::UnanalyzableImportMetaResolve { .. } => {
DiagnosticLevel::Warning
}
SpecifierUnfurlerDiagnostic::ResolvingNpmWorkspacePackage { .. } => {
DiagnosticLevel::Error
}
SpecifierUnfurlerDiagnostic::UnsupportedPkgJsonFileSpecifier {
..
} => DiagnosticLevel::Error,
SpecifierUnfurlerDiagnostic::UnsupportedPkgJsonJsrSpecifier {
..
} => DiagnosticLevel::Error,
}
}
fn code(&self) -> Cow<'_, str> {
match self {
Self::UnanalyzableDynamicImport { .. } => "unanalyzable-dynamic-import",
Self::UnanalyzableImportMetaResolve { .. } => {
"unanalyzable-import-meta-resolve"
}
Self::ResolvingNpmWorkspacePackage { .. } => "npm-workspace-package",
Self::UnsupportedPkgJsonFileSpecifier { .. } => {
"unsupported-file-specifier"
}
Self::UnsupportedPkgJsonJsrSpecifier { .. } => {
"unsupported-jsr-specifier"
}
}
.into()
}
fn message(&self) -> Cow<'_, str> {
match self {
Self::UnanalyzableDynamicImport { .. } => {
"unable to analyze dynamic import".into()
}
Self::UnanalyzableImportMetaResolve { .. } => {
"unable to analyze import.meta.resolve".into()
}
Self::ResolvingNpmWorkspacePackage {
package_name,
reason,
..
} => format!(
"failed resolving npm workspace package '{}': {}",
package_name, reason
)
.into(),
Self::UnsupportedPkgJsonFileSpecifier { package_name, .. } => format!(
"unsupported package.json file specifier for '{}'",
package_name
)
.into(),
Self::UnsupportedPkgJsonJsrSpecifier { package_name, .. } => format!(
"unsupported package.json JSR specifier for '{}'",
package_name
)
.into(),
}
}
fn location(&self) -> deno_ast::diagnostics::DiagnosticLocation<'_> {
match self {
SpecifierUnfurlerDiagnostic::UnanalyzableDynamicImport {
specifier,
text_info,
range,
} => DiagnosticLocation::ModulePosition {
specifier: Cow::Borrowed(specifier),
text_info: Cow::Borrowed(text_info),
source_pos: DiagnosticSourcePos::SourcePos(range.start),
},
SpecifierUnfurlerDiagnostic::UnanalyzableImportMetaResolve {
specifier,
text_info,
range,
} => DiagnosticLocation::ModulePosition {
specifier: Cow::Borrowed(specifier),
text_info: Cow::Borrowed(text_info),
source_pos: DiagnosticSourcePos::SourcePos(range.start),
},
SpecifierUnfurlerDiagnostic::ResolvingNpmWorkspacePackage {
specifier,
text_info,
range,
..
} => DiagnosticLocation::ModulePosition {
specifier: Cow::Borrowed(specifier),
text_info: Cow::Borrowed(text_info),
source_pos: DiagnosticSourcePos::SourcePos(range.start),
},
SpecifierUnfurlerDiagnostic::UnsupportedPkgJsonFileSpecifier {
specifier,
text_info,
range,
..
} => DiagnosticLocation::ModulePosition {
specifier: Cow::Borrowed(specifier),
text_info: Cow::Borrowed(text_info),
source_pos: DiagnosticSourcePos::SourcePos(range.start),
},
SpecifierUnfurlerDiagnostic::UnsupportedPkgJsonJsrSpecifier {
specifier,
text_info,
range,
..
} => DiagnosticLocation::ModulePosition {
specifier: Cow::Borrowed(specifier),
text_info: Cow::Borrowed(text_info),
source_pos: DiagnosticSourcePos::SourcePos(range.start),
},
}
}
fn snippet(&self) -> Option<deno_ast::diagnostics::DiagnosticSnippet<'_>> {
match self {
SpecifierUnfurlerDiagnostic::UnanalyzableDynamicImport {
text_info,
range,
..
} => Some(DiagnosticSnippet {
source: Cow::Borrowed(text_info),
highlights: vec![DiagnosticSnippetHighlight {
style: DiagnosticSnippetHighlightStyle::Warning,
range: DiagnosticSourceRange {
start: DiagnosticSourcePos::SourcePos(range.start),
end: DiagnosticSourcePos::SourcePos(range.end),
},
description: Some("the unanalyzable dynamic import".into()),
}],
}),
SpecifierUnfurlerDiagnostic::UnanalyzableImportMetaResolve {
text_info,
range,
..
} => Some(DiagnosticSnippet {
source: Cow::Borrowed(text_info),
highlights: vec![DiagnosticSnippetHighlight {
style: DiagnosticSnippetHighlightStyle::Warning,
range: DiagnosticSourceRange {
start: DiagnosticSourcePos::SourcePos(range.start),
end: DiagnosticSourcePos::SourcePos(range.end),
},
description: Some("the unanalyzable import.meta.resolve call".into()),
}],
}),
SpecifierUnfurlerDiagnostic::ResolvingNpmWorkspacePackage {
text_info,
range,
..
} => Some(DiagnosticSnippet {
source: Cow::Borrowed(text_info),
highlights: vec![DiagnosticSnippetHighlight {
style: DiagnosticSnippetHighlightStyle::Warning,
range: DiagnosticSourceRange {
start: DiagnosticSourcePos::SourcePos(range.start),
end: DiagnosticSourcePos::SourcePos(range.end),
},
description: Some("the unresolved import".into()),
}],
}),
SpecifierUnfurlerDiagnostic::UnsupportedPkgJsonFileSpecifier {
text_info,
range,
..
} => Some(DiagnosticSnippet {
source: Cow::Borrowed(text_info),
highlights: vec![DiagnosticSnippetHighlight {
style: DiagnosticSnippetHighlightStyle::Warning,
range: DiagnosticSourceRange {
start: DiagnosticSourcePos::SourcePos(range.start),
end: DiagnosticSourcePos::SourcePos(range.end),
},
description: Some("the import".into()),
}],
}),
SpecifierUnfurlerDiagnostic::UnsupportedPkgJsonJsrSpecifier {
text_info,
range,
..
} => Some(DiagnosticSnippet {
source: Cow::Borrowed(text_info),
highlights: vec![DiagnosticSnippetHighlight {
style: DiagnosticSnippetHighlightStyle::Warning,
range: DiagnosticSourceRange {
start: DiagnosticSourcePos::SourcePos(range.start),
end: DiagnosticSourcePos::SourcePos(range.end),
},
description: Some("the import".into()),
}],
}),
}
}
fn hint(&self) -> Option<Cow<'_, str>> {
match self {
SpecifierUnfurlerDiagnostic::UnanalyzableDynamicImport { .. } | SpecifierUnfurlerDiagnostic::UnanalyzableImportMetaResolve { .. } => {
None
}
SpecifierUnfurlerDiagnostic::ResolvingNpmWorkspacePackage { .. } => Some(
"make sure the npm workspace package is resolvable and has a version field in its package.json".into()
),
SpecifierUnfurlerDiagnostic::UnsupportedPkgJsonFileSpecifier { .. } => Some(
"change the package dependency to point to something on npm instead".into()
),
SpecifierUnfurlerDiagnostic::UnsupportedPkgJsonJsrSpecifier { .. } => Some(
"move the JSR package dependency to deno.json instead".into()
),
}
}
fn snippet_fixed(
&self,
) -> Option<deno_ast::diagnostics::DiagnosticSnippet<'_>> {
None
}
fn info(&self) -> Cow<'_, [Cow<'_, str>]> {
match self {
SpecifierUnfurlerDiagnostic::UnanalyzableDynamicImport { .. } => {
Cow::Borrowed(&[
Cow::Borrowed(
"after publishing this package, imports from the local import map / package.json do not work",
),
Cow::Borrowed(
"dynamic imports that can not be analyzed at publish time will not be rewritten automatically",
),
Cow::Borrowed(
"make sure the dynamic import is resolvable at runtime without an import map / package.json",
),
])
}
SpecifierUnfurlerDiagnostic::UnanalyzableImportMetaResolve { .. } => {
Cow::Borrowed(&[
Cow::Borrowed(
"after publishing this package, import.meta.resolve calls from the local import map / package.json do not work",
),
Cow::Borrowed(
"import.meta.resolve calls that can not be analyzed at publish time will not be rewritten automatically",
),
Cow::Borrowed(
"make sure the import.meta.resolve call is resolvable at runtime without an import map / package.json",
),
])
}
SpecifierUnfurlerDiagnostic::ResolvingNpmWorkspacePackage { .. } => {
Cow::Borrowed(&[])
}
SpecifierUnfurlerDiagnostic::UnsupportedPkgJsonFileSpecifier {
..
} => Cow::Borrowed(&[]),
SpecifierUnfurlerDiagnostic::UnsupportedPkgJsonJsrSpecifier {
..
} => Cow::Borrowed(&[]),
}
}
/// No dedicated documentation page exists for these diagnostics.
fn docs_url(&self) -> Option<Cow<'_, str>> {
  None
}
}
/// Internal error produced by `unfurl_specifier`; mapped to a
/// `SpecifierUnfurlerDiagnostic` (with source position info attached)
/// in `unfurl_specifier_reporting_diagnostic`.
enum UnfurlSpecifierError {
  /// A package.json dependency used a `file:` specifier, which can't
  /// be published.
  UnsupportedPkgJsonFileSpecifier {
    package_name: String,
  },
  /// A package.json dependency used a JSR specifier, which isn't
  /// supported there.
  UnsupportedPkgJsonJsrSpecifier {
    package_name: String,
  },
  /// Failed resolving a `workspace:` dependency; `reason` carries the
  /// underlying error text.
  Workspace {
    package_name: String,
    reason: String,
  },
}
/// A source location expressed either as a deno_graph line/column
/// position range or as an already-computed deno_ast source range.
/// Lets callers pass whichever form they have without converting.
#[derive(Copy, Clone)]
pub enum PositionOrSourceRangeRef<'a> {
  PositionRange(&'a deno_graph::PositionRange),
  SourceRange(SourceRange<SourcePos>),
}
/// Filesystem capabilities the unfurler needs (metadata + read);
/// auto-implemented for any type providing both traits.
#[sys_traits::auto_impl]
pub trait SpecifierUnfurlerSys: FsMetadata + FsRead {}
/// Rewrites ("unfurls") import specifiers that rely on local import
/// maps / package.json so they remain resolvable after publishing.
pub struct SpecifierUnfurler<TSys: SpecifierUnfurlerSys = CliSys> {
  // optional for testing
  node_resolver: Option<Arc<CliNodeResolver>>,
  workspace_resolver: Arc<WorkspaceResolver<TSys>>,
  // when true, bare specifiers naming Node built-ins are rewritten
  // to explicit `node:` specifiers
  bare_node_builtins: bool,
}
impl<TSys: SpecifierUnfurlerSys> SpecifierUnfurler<TSys> {
/// Creates a new unfurler.
///
/// Debug-asserts that the workspace resolver performs package.json
/// dependency resolution, since unfurling depends on it.
pub fn new(
  node_resolver: Option<Arc<CliNodeResolver>>,
  workspace_resolver: Arc<WorkspaceResolver<TSys>>,
  bare_node_builtins: bool,
) -> Self {
  debug_assert_eq!(
    workspace_resolver.pkg_json_dep_resolution(),
    PackageJsonDepResolution::Enabled
  );
  Self {
    node_resolver,
    workspace_resolver,
    bare_node_builtins,
  }
}
/// Unfurls `specifier` as imported from `referrer`, reporting any
/// failure through `diagnostic_reporter` instead of returning an error.
///
/// Returns `Some(new_specifier)` when the specifier should be
/// rewritten, `None` when it should be left alone (either nothing to
/// unfurl or a diagnostic was emitted).
pub fn unfurl_specifier_reporting_diagnostic(
  &self,
  referrer: &ModuleSpecifier,
  specifier: &str,
  resolution_kind: deno_resolver::workspace::ResolutionKind,
  text_info: &SourceTextInfo,
  range: PositionOrSourceRangeRef<'_>,
  diagnostic_reporter: &mut dyn FnMut(SpecifierUnfurlerDiagnostic),
) -> Option<String> {
  match self.unfurl_specifier(referrer, specifier, resolution_kind) {
    Ok(maybe_unfurled) => maybe_unfurled,
    Err(diagnostic) => {
      // normalize whichever range form the caller gave us into a
      // SourceRange for the diagnostic
      let range = match range {
        PositionOrSourceRangeRef::PositionRange(position_range) => {
          let range = to_range(text_info, position_range);
          SourceRange::new(
            text_info.start_pos() + range.start,
            text_info.start_pos() + range.end,
          )
        }
        PositionOrSourceRangeRef::SourceRange(source_range) => source_range,
      };
      // map each internal error variant to its user-facing diagnostic
      match diagnostic {
        UnfurlSpecifierError::UnsupportedPkgJsonFileSpecifier {
          package_name,
        } => {
          diagnostic_reporter(
            SpecifierUnfurlerDiagnostic::UnsupportedPkgJsonFileSpecifier {
              specifier: referrer.clone(),
              package_name,
              text_info: text_info.clone(),
              range,
            },
          );
          None
        }
        UnfurlSpecifierError::UnsupportedPkgJsonJsrSpecifier {
          package_name,
        } => {
          diagnostic_reporter(
            SpecifierUnfurlerDiagnostic::UnsupportedPkgJsonJsrSpecifier {
              specifier: referrer.clone(),
              package_name,
              text_info: text_info.clone(),
              range,
            },
          );
          None
        }
        UnfurlSpecifierError::Workspace {
          package_name,
          reason,
        } => {
          diagnostic_reporter(
            SpecifierUnfurlerDiagnostic::ResolvingNpmWorkspacePackage {
              specifier: referrer.clone(),
              package_name,
              text_info: text_info.clone(),
              range,
            },
          );
          None
        }
      }
    }
  }
}
/// Core unfurling logic: resolves `specifier` through the workspace
/// resolver and converts the result into a publishable specifier
/// string.
///
/// Returns `Ok(None)` when the specifier needs no rewrite,
/// `Ok(Some(text))` with the replacement text otherwise, and `Err`
/// for package.json dependency forms that cannot be published.
fn unfurl_specifier(
  &self,
  referrer: &ModuleSpecifier,
  specifier: &str,
  resolution_kind: deno_resolver::workspace::ResolutionKind,
) -> Result<Option<String>, UnfurlSpecifierError> {
  let resolved = match self.workspace_resolver.resolve(
    specifier,
    referrer,
    resolution_kind,
  ) {
    Ok(resolved) => {
      match resolved {
        MappedResolution::Normal { specifier, .. } => Some(specifier),
        // a workspace JSR member becomes a `jsr:` req specifier
        MappedResolution::WorkspaceJsrPackage { pkg_req_ref, .. } => {
          Some(ModuleSpecifier::parse(&pkg_req_ref.to_string()).unwrap())
        }
        // a workspace npm member becomes an `npm:` specifier pinned
        // to a caret req on its current version (if it has one)
        MappedResolution::WorkspaceNpmPackage {
          target_pkg_json: pkg_json,
          pkg_name,
          sub_path,
        } => {
          // todo(#24612): consider warning or error when this is also a jsr package?
          ModuleSpecifier::parse(&format!(
            "npm:{}{}{}",
            pkg_name,
            pkg_json
              .version
              .as_ref()
              .map(|v| format!("@^{}", v))
              .unwrap_or_default(),
            sub_path
              .as_ref()
              .map(|s| format!("/{}", s))
              .unwrap_or_default()
          ))
          .ok()
        }
        MappedResolution::PackageJson {
          alias,
          sub_path,
          dep_result,
          ..
        } => match dep_result {
          Ok(dep) => match dep {
            // `file:` deps can't exist after publish
            PackageJsonDepValue::File(_) => {
              return Err(
                UnfurlSpecifierError::UnsupportedPkgJsonFileSpecifier {
                  package_name: alias.to_string(),
                },
              );
            }
            // JSR deps belong in deno.json, not package.json
            PackageJsonDepValue::JsrReq(_) => {
              return Err(
                UnfurlSpecifierError::UnsupportedPkgJsonJsrSpecifier {
                  package_name: alias.to_string(),
                },
              );
            }
            PackageJsonDepValue::Req(pkg_req) => {
              // todo(#24612): consider warning or error when this is an npm workspace
              // member that's also a jsr package?
              ModuleSpecifier::parse(&format!(
                "npm:{}{}",
                pkg_req,
                sub_path
                  .as_ref()
                  .map(|s| format!("/{}", s))
                  .unwrap_or_default()
              ))
              .ok()
            }
            // `workspace:` deps are materialized into a concrete
            // version requirement based on the member's version
            PackageJsonDepValue::Workspace(workspace_version_req) => {
              let version_req = match workspace_version_req {
                PackageJsonDepWorkspaceReq::VersionReq(version_req) => {
                  Cow::Borrowed(version_req)
                }
                PackageJsonDepWorkspaceReq::Caret => {
                  let version = self
                    .find_workspace_npm_dep_version(alias)
                    .map_err(|err| UnfurlSpecifierError::Workspace {
                      package_name: alias.to_string(),
                      reason: err.to_string(),
                    })?;
                  // version was validated, so ok to unwrap
                  Cow::Owned(
                    VersionReq::parse_from_npm(&format!("^{}", version))
                      .unwrap(),
                  )
                }
                PackageJsonDepWorkspaceReq::Tilde => {
                  let version = self
                    .find_workspace_npm_dep_version(alias)
                    .map_err(|err| UnfurlSpecifierError::Workspace {
                      package_name: alias.to_string(),
                      reason: err.to_string(),
                    })?;
                  // version was validated, so ok to unwrap
                  Cow::Owned(
                    VersionReq::parse_from_npm(&format!("~{}", version))
                      .unwrap(),
                  )
                }
              };
              // todo(#24612): warn when this is also a jsr package telling
              // people to map the specifiers in the import map
              ModuleSpecifier::parse(&format!(
                "npm:{}@{}{}",
                alias,
                version_req,
                sub_path
                  .as_ref()
                  .map(|s| format!("/{}", s))
                  .unwrap_or_default()
              ))
              .ok()
            }
          },
          // a broken package.json dep is not fatal; warn and fall
          // through to the non-resolved handling below
          Err(err) => {
            log::warn!(
              "Ignoring failed to resolve package.json dependency. {:#}",
              err
            );
            None
          }
        },
        // package.json "imports" field entries resolve via the node
        // resolver when one was provided (it is optional for tests)
        MappedResolution::PackageJsonImport { pkg_json } => {
          self.node_resolver.as_ref().and_then(|resolver| {
            resolver
              .resolve_package_import(
                specifier,
                Some(&node_resolver::UrlOrPathRef::from_url(referrer)),
                Some(pkg_json),
                node_resolver::ResolutionMode::Import,
                node_resolver::NodeResolutionKind::Execution,
              )
              .ok()
              .and_then(|s| s.into_url().ok())
          })
        }
      }
    }
    Err(_) => None,
  };
  let resolved = match resolved {
    Some(resolved) => resolved,
    // optionally map bare node built-in names to `node:` specifiers
    None if self.bare_node_builtins && is_builtin_node_module(specifier) => {
      format!("node:{specifier}").parse().unwrap()
    }
    // fall back to resolving relative to the referrer; if even that
    // fails there is nothing sensible to rewrite
    None => match ModuleSpecifier::options()
      .base_url(Some(referrer))
      .parse(specifier)
      .ok()
    {
      Some(value) => value,
      None => return Ok(None),
    },
  };
  // TODO(lucacasonato): this requires integration in deno_graph first
  // let resolved = if let Ok(specifier) =
  //   NpmPackageReqReference::from_specifier(&resolved)
  // {
  //   if let Some(scope_name) = specifier.req().name.strip_prefix("@jsr/") {
  //     let (scope, name) = scope_name.split_once("__")?;
  //     let new_specifier = JsrPackageReqReference::new(PackageReqReference {
  //       req: PackageReq {
  //         name: format!("@{scope}/{name}"),
  //         version_req: specifier.req().version_req.clone(),
  //       },
  //       sub_path: specifier.sub_path().map(ToOwned::to_owned),
  //     })
  //     .to_string();
  //     ModuleSpecifier::parse(&new_specifier).unwrap()
  //   } else {
  //     resolved
  //   }
  // } else {
  //   resolved
  // };
  let relative_resolved = relative_url(&resolved, referrer);
  if relative_resolved == specifier {
    Ok(None) // nothing to unfurl
  } else {
    log::debug!(
      "Unfurled specifier: {} from {} -> {}",
      specifier,
      referrer,
      relative_resolved
    );
    Ok(Some(relative_resolved))
  }
}
/// Looks up the published version of the workspace npm member named
/// `pkg_name`, erroring when the package can't be found or has no
/// version field in its package.json.
// todo(#24612): warn when this is also a jsr package telling
// people to map the specifiers in the import map
fn find_workspace_npm_dep_version(
  &self,
  pkg_name: &str,
) -> Result<Version, anyhow::Error> {
  let maybe_pkg_json = self
    .workspace_resolver
    .package_jsons()
    .find(|pkg| pkg.name.as_deref() == Some(pkg_name));
  let Some(pkg_json) = maybe_pkg_json else {
    return Err(anyhow::anyhow!("unable to find npm package in workspace"));
  };
  match &pkg_json.version {
    Some(version) => Ok(Version::parse_from_npm(version)?),
    None => Err(anyhow::anyhow!(
      "missing version in package.json of npm package",
    )),
  }
}
/// Attempts to unfurl the dynamic dependency returning `true` on success
/// or `false` when the import was not analyzable.
fn try_unfurl_dynamic_dep(
  &self,
  module_url: &ModuleSpecifier,
  text_info: &SourceTextInfo,
  dep: &deno_graph::analysis::DynamicDependencyDescriptor,
  text_changes: &mut Vec<deno_ast::TextChange>,
  diagnostic_reporter: &mut dyn FnMut(SpecifierUnfurlerDiagnostic),
) -> bool {
  match &dep.argument {
    // plain string literal argument: rewrite it in place
    deno_graph::analysis::DynamicArgument::String(specifier) => {
      let range = to_range(text_info, &dep.argument_range);
      // locate the literal text inside the argument range to get the
      // precise replacement offset
      let maybe_relative_index =
        text_info.text_str()[range.start..range.end].find(specifier);
      let Some(relative_index) = maybe_relative_index else {
        return true; // always say it's analyzable for a string
      };
      let maybe_unfurled = self.unfurl_specifier_reporting_diagnostic(
        module_url,
        specifier,
        deno_resolver::workspace::ResolutionKind::Execution, // dynamic imports are always execution
        text_info,
        PositionOrSourceRangeRef::PositionRange(&dep.argument_range),
        diagnostic_reporter,
      );
      if let Some(unfurled) = maybe_unfurled {
        let start = range.start + relative_index;
        text_changes.push(deno_ast::TextChange {
          range: start..start + specifier.len(),
          new_text: unfurled,
        });
      }
      true
    }
    // template literal: only analyzable when it starts with a string
    // part that is either relative or a directory-style prefix
    deno_graph::analysis::DynamicArgument::Template(parts) => {
      match parts.first() {
        Some(DynamicTemplatePart::String { value: specifier }) => {
          // relative doesn't need to be modified
          let is_relative =
            specifier.starts_with("./") || specifier.starts_with("../");
          if is_relative {
            return true;
          }
          // a non-directory prefix (no trailing slash) can't be
          // safely rewritten
          if !specifier.ends_with('/') {
            return false;
          }
          let unfurled = self.unfurl_specifier_reporting_diagnostic(
            module_url,
            specifier,
            deno_resolver::workspace::ResolutionKind::Execution, // dynamic imports are always execution
            text_info,
            PositionOrSourceRangeRef::PositionRange(&dep.argument_range),
            diagnostic_reporter,
          );
          let Some(unfurled) = unfurled else {
            return true; // nothing to unfurl
          };
          let range = to_range(text_info, &dep.argument_range);
          // NOTE(review): unlike the string branch above, this search
          // is unbounded past range.end — presumably fine because the
          // prefix occurs within the argument; confirm intent
          let maybe_relative_index =
            text_info.text_str()[range.start..].find(specifier);
          let Some(relative_index) = maybe_relative_index else {
            return false;
          };
          let start = range.start + relative_index;
          text_changes.push(deno_ast::TextChange {
            range: start..start + specifier.len(),
            new_text: unfurled,
          });
          true
        }
        Some(DynamicTemplatePart::Expr) => {
          false // failed analyzing
        }
        None => {
          true // ignore
        }
      }
    }
    deno_graph::analysis::DynamicArgument::Expr => {
      false // failed analyzing
    }
  }
}
/// Walks all specifier sites in a module (static/dynamic imports,
/// triple-slash references, JSDoc imports, JSX import sources, and
/// `import.meta.resolve` calls) and appends the text edits needed to
/// unfurl them into `text_changes`, reporting problems through
/// `diagnostic_reporter`.
pub fn unfurl_to_changes(
  &self,
  url: &ModuleSpecifier,
  parsed_source: &ParsedSource,
  module_info: &deno_graph::analysis::ModuleInfo,
  text_changes: &mut Vec<TextChange>,
  diagnostic_reporter: &mut dyn FnMut(SpecifierUnfurlerDiagnostic),
) {
  let text_info = parsed_source.text_info_lazy();
  // shared helper: unfurl one specifier and record the edit at the
  // proper byte range for whichever range form we were given
  let analyze_specifier =
    |specifier: &str,
     range: PositionOrSourceRangeRef,
     resolution_kind: deno_resolver::workspace::ResolutionKind,
     text_changes: &mut Vec<deno_ast::TextChange>,
     diagnostic_reporter: &mut dyn FnMut(SpecifierUnfurlerDiagnostic)| {
      if let Some(unfurled) = self.unfurl_specifier_reporting_diagnostic(
        url,
        specifier,
        resolution_kind,
        text_info,
        range,
        diagnostic_reporter,
      ) {
        text_changes.push(deno_ast::TextChange {
          range: match range {
            PositionOrSourceRangeRef::PositionRange(position_range) => {
              to_range(text_info, position_range)
            }
            PositionOrSourceRangeRef::SourceRange(source_range) => {
              source_range.as_byte_range(parsed_source.start_pos())
            }
          },
          new_text: unfurled,
        });
      }
    };
  for dep in &module_info.dependencies {
    match dep {
      DependencyDescriptor::Static(dep) => {
        // declaration files always resolve for types; otherwise the
        // dependency kind decides execution vs types resolution
        let resolution_kind = if parsed_source.media_type().is_declaration() {
          deno_resolver::workspace::ResolutionKind::Types
        } else {
          match dep.kind {
            StaticDependencyKind::Export
            | StaticDependencyKind::Import
            | StaticDependencyKind::ImportSource
            | StaticDependencyKind::ExportEquals
            | StaticDependencyKind::ImportEquals => {
              deno_resolver::workspace::ResolutionKind::Execution
            }
            StaticDependencyKind::ExportType
            | StaticDependencyKind::ImportType
            | StaticDependencyKind::MaybeTsModuleAugmentation => {
              deno_resolver::workspace::ResolutionKind::Types
            }
          }
        };
        analyze_specifier(
          &dep.specifier,
          PositionOrSourceRangeRef::PositionRange(&dep.specifier_range),
          resolution_kind,
          text_changes,
          diagnostic_reporter,
        );
      }
      DependencyDescriptor::Dynamic(dep) => {
        let success = self.try_unfurl_dynamic_dep(
          url,
          text_info,
          dep,
          text_changes,
          diagnostic_reporter,
        );
        // report unanalyzable dynamic imports at their argument range
        if !success {
          let start_pos = text_info.line_start(dep.argument_range.start.line)
            + dep.argument_range.start.character;
          let end_pos = text_info.line_start(dep.argument_range.end.line)
            + dep.argument_range.end.character;
          diagnostic_reporter(
            SpecifierUnfurlerDiagnostic::UnanalyzableDynamicImport {
              specifier: url.to_owned(),
              range: SourceRange::new(start_pos, end_pos),
              text_info: text_info.clone(),
            },
          );
        }
      }
    }
  }
  // triple-slash references always resolve for types
  for ts_ref in &module_info.ts_references {
    let specifier_with_range = match ts_ref {
      TypeScriptReference::Path(s) => s,
      TypeScriptReference::Types { specifier, .. } => specifier,
    };
    analyze_specifier(
      &specifier_with_range.text,
      PositionOrSourceRangeRef::PositionRange(&specifier_with_range.range),
      deno_resolver::workspace::ResolutionKind::Types,
      text_changes,
      diagnostic_reporter,
    );
  }
  for jsdoc in &module_info.jsdoc_imports {
    analyze_specifier(
      &jsdoc.specifier.text,
      PositionOrSourceRangeRef::PositionRange(&jsdoc.specifier.range),
      deno_resolver::workspace::ResolutionKind::Types,
      text_changes,
      diagnostic_reporter,
    );
  }
  if let Some(specifier_with_range) = &module_info.jsx_import_source {
    analyze_specifier(
      &specifier_with_range.text,
      PositionOrSourceRangeRef::PositionRange(&specifier_with_range.range),
      deno_resolver::workspace::ResolutionKind::Execution,
      text_changes,
      diagnostic_reporter,
    );
  }
  if let Some(specifier_with_range) = &module_info.jsx_import_source_types {
    analyze_specifier(
      &specifier_with_range.text,
      PositionOrSourceRangeRef::PositionRange(&specifier_with_range.range),
      deno_resolver::workspace::ResolutionKind::Types,
      text_changes,
      diagnostic_reporter,
    );
  }
  // import.meta.resolve sites come from an AST visitor rather than
  // module_info
  let mut collector = ImportMetaResolveCollector::default();
  parsed_source.program().visit_with(&mut collector);
  for (range, specifier) in collector.specifiers {
    analyze_specifier(
      &specifier,
      PositionOrSourceRangeRef::SourceRange(range),
      deno_resolver::workspace::ResolutionKind::Execution,
      text_changes,
      diagnostic_reporter,
    );
  }
  for range in collector.diagnostic_ranges {
    diagnostic_reporter(
      SpecifierUnfurlerDiagnostic::UnanalyzableImportMetaResolve {
        specifier: url.to_owned(),
        range,
        text_info: text_info.clone(),
      },
    );
  }
}
}
/// Renders `resolved` as the specifier text to use from `referrer`:
/// a `./`/`../`-prefixed relative path for file URLs, or the absolute
/// URL string otherwise.
fn relative_url(
  resolved: &ModuleSpecifier,
  referrer: &ModuleSpecifier,
) -> String {
  if resolved.scheme() != "file" {
    return resolved.to_string();
  }
  let relative = referrer.make_relative(resolved).unwrap();
  if relative.is_empty() {
    // the referrer points at the same file; reference it by name
    let last = resolved.path_segments().unwrap().next_back().unwrap();
    format!("./{last}")
  } else if relative.starts_with("../") {
    relative
  } else {
    format!("./{relative}")
  }
}
/// Converts a deno_graph position range into a byte range in the
/// source text, shrinking it to exclude a leading/trailing quote so
/// only the specifier contents are covered.
fn to_range(
  text_info: &SourceTextInfo,
  range: &deno_graph::PositionRange,
) -> std::ops::Range<usize> {
  let mut byte_range = range
    .as_source_range(text_info)
    .as_byte_range(text_info.range().start);
  let text = &text_info.text_str()[byte_range.clone()];
  if text.starts_with(['"', '\'']) {
    byte_range.start += 1;
  }
  if text.ends_with(['"', '\'']) {
    byte_range.end -= 1;
  }
  byte_range
}
/// AST visitor state collecting `import.meta.resolve(...)` call sites:
/// statically analyzable string arguments go into `specifiers`,
/// unanalyzable calls are recorded in `diagnostic_ranges`.
#[derive(Default)]
struct ImportMetaResolveCollector {
  specifiers: Vec<(SourceRange<SourcePos>, Atom)>,
  diagnostic_ranges: Vec<SourceRange<SourcePos>>,
}
impl Visit for ImportMetaResolveCollector {
noop_visit_type!();
fn visit_call_expr(&mut self, node: &deno_ast::swc::ast::CallExpr) {
if node.args.len() == 1
&& let Some(first_arg) = node.args.first()
&& let Callee::Expr(callee) = &node.callee
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | true |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/tools/jupyter/install.rs | cli/tools/jupyter/install.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::env::current_exe;
use std::io::Write;
use std::path::Path;
use std::path::PathBuf;
use deno_core::anyhow::Context;
use deno_core::anyhow::bail;
use deno_core::error::AnyError;
use deno_core::serde_json;
use deno_core::serde_json::json;
// Env var that overrides the Jupyter user data dir (read in
// `get_user_data_dir`); presumably set by integration tests — the
// name suggests so.
static TEST_ENV_VAR_NAME: &str = "DENO_TEST_JUPYTER_PATH";
// Deno logo assets embedded into the binary at compile time and
// written next to the kernelspec on install.
const DENO_ICON_32: &[u8] = include_bytes!("./resources/deno-logo-32x32.png");
const DENO_ICON_64: &[u8] = include_bytes!("./resources/deno-logo-64x64.png");
const DENO_ICON_SVG: &[u8] = include_bytes!("./resources/deno-logo-svg.svg");
/// Returns the Jupyter user data directory, honoring the test
/// override env var when it is set.
fn get_user_data_dir() -> Result<PathBuf, AnyError> {
  match std::env::var_os(TEST_ENV_VAR_NAME) {
    Some(overridden) => Ok(PathBuf::from(overridden)),
    None => Ok(jupyter_runtime::dirs::user_data_dir()?),
  }
}
/// Reports whether the Deno kernelspec (named `maybe_name`, default
/// "deno") is installed, logging either a confirmation or the exact
/// command to run for installation.
pub fn status(maybe_name: Option<&str>) -> Result<(), AnyError> {
  let kernel_name = maybe_name.unwrap_or("deno");
  let kernel_spec_dir_path =
    get_user_data_dir()?.join("kernels").join(kernel_name);
  if kernel_spec_dir_path.join("kernel.json").exists() {
    log::info!(
      "✅ Deno kernel already installed at {}",
      kernel_spec_dir_path.display()
    );
  } else {
    // include the custom name in the suggested command when given
    let install_cmd = match maybe_name {
      Some(name) => format!("deno jupyter --install --name {}", name),
      None => "deno jupyter --install".to_string(),
    };
    log::warn!(
      "ℹ️ Deno kernel is not yet installed, run `{}` to set it up",
      install_cmd
    );
  }
  Ok(())
}
fn install_icon(
dir_path: &Path,
filename: &str,
icon_data: &[u8],
) -> Result<(), AnyError> {
let path = dir_path.join(filename);
let mut file = std::fs::File::create(path)?;
file.write_all(icon_data)?;
Ok(())
}
/// Installs the Deno Jupyter kernelspec (kernel.json plus logo
/// assets) under the user data dir, refusing to overwrite an
/// existing one unless `force` is set.
pub fn install(
  maybe_name: Option<&str>,
  maybe_display_name: Option<&str>,
  force: bool,
) -> Result<(), AnyError> {
  let user_data_dir = get_user_data_dir()?;
  let kernel_name = maybe_name.unwrap_or("deno");
  let kernel_spec_dir_path = user_data_dir.join("kernels").join(kernel_name);
  let kernel_spec_path = kernel_spec_dir_path.join("kernel.json");
  std::fs::create_dir_all(&kernel_spec_dir_path).with_context(|| {
    format!(
      "Failed to create kernel directory at {}",
      kernel_spec_dir_path.display()
    )
  })?;
  if kernel_spec_path.exists() && !force {
    bail!(
      "Deno kernel already exists at {}, run again with `--force` to overwrite it",
      kernel_spec_dir_path.display()
    );
  }
  let display_name = maybe_display_name.unwrap_or("Deno");
  // the kernelspec argv points back at this very executable
  let current_exe_path = current_exe()
    .context("Failed to get current executable path")?
    .to_string_lossy()
    .into_owned();
  // TODO(bartlomieju): add remaining fields as per
  // https://jupyter-client.readthedocs.io/en/stable/kernels.html#kernel-specs
  let json_data = json!({
    "argv": [current_exe_path, "jupyter", "--kernel", "--conn", "{connection_file}"],
    "display_name": display_name,
    "language": "typescript",
  });
  let f = std::fs::File::create(&kernel_spec_path).with_context(|| {
    format!(
      "Failed to create kernelspec file at {}",
      kernel_spec_path.display()
    )
  })?;
  serde_json::to_writer_pretty(f, &json_data).with_context(|| {
    format!(
      "Failed to write kernelspec file at {}",
      kernel_spec_path.display()
    )
  })?;
  let failed_icon_fn =
    || format!("Failed to copy icon to {}", kernel_spec_dir_path.display());
  install_icon(&kernel_spec_dir_path, "logo-32x32.png", DENO_ICON_32)
    .with_context(failed_icon_fn)?;
  install_icon(&kernel_spec_dir_path, "logo-64x64.png", DENO_ICON_64)
    .with_context(failed_icon_fn)?;
  install_icon(&kernel_spec_dir_path, "logo-svg.svg", DENO_ICON_SVG)
    .with_context(failed_icon_fn)?;
  log::info!(
    "✅ Deno kernelspec installed successfully at {}.",
    kernel_spec_dir_path.display()
  );
  Ok(())
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/tools/jupyter/mod.rs | cli/tools/jupyter/mod.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::sync::Arc;
use deno_core::anyhow::Context;
use deno_core::anyhow::anyhow;
use deno_core::anyhow::bail;
use deno_core::error::AnyError;
use deno_core::futures::FutureExt;
use deno_core::located_script_name;
use deno_core::serde_json;
use deno_core::serde_json::json;
use deno_core::url::Url;
use deno_path_util::resolve_url_or_path;
use deno_runtime::WorkerExecutionMode;
use deno_runtime::deno_io::Stdio;
use deno_runtime::deno_io::StdioPipe;
use deno_runtime::deno_permissions::PermissionsContainer;
use deno_terminal::colors;
use jupyter_protocol::messaging::StreamContent;
use jupyter_runtime::ConnectionInfo;
use tokio::sync::mpsc;
use tokio::sync::mpsc::UnboundedSender;
use tokio::sync::oneshot;
use tokio_util::sync::CancellationToken;
use crate::CliFactory;
use crate::args::Flags;
use crate::args::JupyterFlags;
use crate::cdp;
use crate::lsp::ReplCompletionItem;
use crate::ops;
use crate::tools::repl;
use crate::tools::test::TestEventWorkerSender;
use crate::tools::test::TestFailureFormatOptions;
use crate::tools::test::create_single_test_event_channel;
use crate::tools::test::reporters::PrettyTestReporter;
mod install;
pub mod server;
/// Entry point for `deno jupyter`: dispatches to status/install when
/// requested, otherwise boots the kernel — a REPL-backed worker on
/// this thread plus a Jupyter ZeroMQ server on a dedicated thread,
/// bridged by unbounded channels.
pub async fn kernel(
  flags: Arc<Flags>,
  jupyter_flags: JupyterFlags,
) -> Result<(), AnyError> {
  log::info!(
    "{} \"deno jupyter\" is unstable and might change in the future.",
    colors::yellow("Warning"),
  );
  // with neither --install nor --kernel, just report install status
  if !jupyter_flags.install && !jupyter_flags.kernel {
    install::status(jupyter_flags.name.as_deref())?;
    return Ok(());
  }
  if jupyter_flags.install {
    install::install(
      jupyter_flags.name.as_deref(),
      jupyter_flags.display.as_deref(),
      jupyter_flags.force,
    )?;
    return Ok(());
  }
  // --kernel mode: a connection file must have been provided
  let connection_filepath = jupyter_flags.conn_file.unwrap();
  let factory = CliFactory::from_flags(flags);
  let cli_options = factory.cli_options()?;
  // synthetic specifier representing the REPL "module"
  let main_module =
    resolve_url_or_path("./$deno$jupyter.mts", cli_options.initial_cwd())
      .unwrap();
  // TODO(bartlomieju): should we run with all permissions?
  let permissions =
    PermissionsContainer::allow_all(factory.permission_desc_parser()?.clone());
  let npm_installer = factory.npm_installer_if_managed().await?.cloned();
  let compiler_options_resolver = factory.compiler_options_resolver()?;
  let resolver = factory.resolver().await?.clone();
  let worker_factory = factory.create_cli_main_worker_factory().await?;
  // channel carrying stdout/stderr stream content to the iopub socket
  let (stdio_tx, stdio_rx) = mpsc::unbounded_channel();
  let conn_file =
    std::fs::read_to_string(&connection_filepath).with_context(|| {
      format!("Couldn't read connection file: {:?}", connection_filepath)
    })?;
  let spec: ConnectionInfo =
    serde_json::from_str(&conn_file).with_context(|| {
      format!(
        "Connection file is not a valid JSON: {:?}",
        connection_filepath
      )
    })?;
  let (worker, test_event_receiver) = create_single_test_event_channel();
  let TestEventWorkerSender {
    sender: test_event_sender,
    stdout,
    stderr,
  } = worker;
  let mut worker = worker_factory
    .create_custom_worker(
      WorkerExecutionMode::Jupyter,
      main_module.clone(),
      // `deno jupyter` doesn't support preloading modules
      vec![],
      // `deno jupyter` doesn't support require modules
      vec![],
      permissions,
      vec![
        ops::jupyter::deno_jupyter::init(stdio_tx.clone()),
        ops::testing::deno_test::init(test_event_sender),
      ],
      // FIXME(nayeemrmn): Test output capturing currently doesn't work.
      Stdio {
        stdin: StdioPipe::inherit(),
        stdout: StdioPipe::file(stdout),
        stderr: StdioPipe::file(stderr),
      },
      None,
    )
    .await?;
  worker.setup_repl().await?;
  worker.execute_script_static(
    located_script_name!(),
    "Deno[Deno.internal].enableJupyter();",
  )?;
  let worker = worker.into_main_worker();
  let mut repl_session = repl::ReplSession::initialize(
    cli_options,
    npm_installer,
    resolver,
    compiler_options_resolver,
    worker,
    main_module,
    test_event_receiver,
  )
  .await?;
  // forwards test-reporter output to the frontend as stdout stream
  // messages
  struct TestWriter(UnboundedSender<StreamContent>);
  impl std::io::Write for TestWriter {
    fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
      self
        .0
        .send(StreamContent::stdout(&String::from_utf8_lossy(buf)))
        .ok();
      Ok(buf.len())
    }
    fn flush(&mut self) -> std::io::Result<()> {
      Ok(())
    }
  }
  let cwd_url =
    Url::from_directory_path(cli_options.initial_cwd()).map_err(|_| {
      anyhow!(
        "Unable to construct URL from the path of cwd: {}",
        cli_options.initial_cwd().to_string_lossy(),
      )
    })?;
  repl_session.set_test_reporter_factory(Box::new(move || {
    Box::new(
      PrettyTestReporter::new(
        false,
        true,
        false,
        true,
        cwd_url.clone(),
        TestFailureFormatOptions::default(),
      )
      .with_writer(Box::new(TestWriter(stdio_tx.clone()))),
    )
  }));
  // request/response channel pair bridging the server thread and the
  // REPL session running on this thread
  let (tx1, rx1) = mpsc::unbounded_channel();
  let (tx2, rx2) = mpsc::unbounded_channel();
  let (startup_data_tx, startup_data_rx) =
    oneshot::channel::<server::StartupData>();
  let mut repl_session_proxy = JupyterReplSession {
    repl_session,
    rx: rx1,
    tx: tx2,
  };
  let repl_session_proxy_channels = JupyterReplProxy { tx: tx1, rx: rx2 };
  // the Jupyter server runs on its own thread with its own runtime
  let join_handle = std::thread::spawn(move || {
    let fut = server::JupyterServer::start(
      spec,
      stdio_rx,
      repl_session_proxy_channels,
      startup_data_tx,
    )
    .boxed_local();
    deno_runtime::tokio_util::create_and_run_current_thread(fut)
  });
  let Ok(startup_data) = startup_data_rx.await else {
    bail!("Failed to acquire startup data");
  };
  // expose the server's connections to the worker's ops via OpState
  {
    let op_state_rc =
      repl_session_proxy.repl_session.worker.js_runtime.op_state();
    let mut op_state = op_state_rc.borrow_mut();
    op_state.put(startup_data.iopub_connection.clone());
    op_state.put(startup_data.last_execution_request.clone());
    op_state.put(startup_data.stdin_connection_proxy.clone());
  }
  repl_session_proxy.start().await;
  let server_result = join_handle.join();
  match server_result {
    Ok(result) => {
      result?;
    }
    Err(e) => {
      bail!("Jupyter kernel error: {:?}", e);
    }
  };
  Ok(())
}
/// Requests sent from the Jupyter server thread to the REPL session
/// thread; each variant has a matching `JupyterReplResponse` variant.
pub enum JupyterReplRequest {
  LspCompletions {
    line_text: String,
    position: usize,
  },
  JsGetProperties {
    object_id: String,
  },
  JsEvaluate {
    expr: String,
  },
  JsGlobalLexicalScopeNames,
  JsEvaluateLineWithObjectWrapping {
    line: String,
  },
  JsCallFunctionOnArgs {
    function_declaration: String,
    args: Vec<cdp::RemoteObject>,
  },
  JsCallFunctionOn {
    arg0: cdp::CallArgument,
    arg1: cdp::CallArgument,
  },
}
/// Responses sent from the REPL session thread back to the Jupyter
/// server thread, mirroring `JupyterReplRequest` variant-for-variant.
pub enum JupyterReplResponse {
  LspCompletions(Vec<ReplCompletionItem>),
  JsGetProperties(Option<cdp::GetPropertiesResponse>),
  JsEvaluate(Option<cdp::EvaluateResponse>),
  JsGlobalLexicalScopeNames(cdp::GlobalLexicalScopeNamesResponse),
  JsEvaluateLineWithObjectWrapping(Result<repl::TsEvaluateResponse, AnyError>),
  JsCallFunctionOnArgs(Result<cdp::CallFunctionOnResponse, AnyError>),
  JsCallFunctionOn(Option<cdp::CallFunctionOnResponse>),
}
/// Server-thread handle to the REPL session: sends a request and
/// awaits the corresponding response over a channel pair.
pub struct JupyterReplProxy {
  tx: mpsc::UnboundedSender<JupyterReplRequest>,
  rx: mpsc::UnboundedReceiver<JupyterReplResponse>,
}
// Each method below follows the same lock-step protocol: send one
// request variant, then await exactly one response and destructure
// the matching variant. The `unreachable!()` arms rely on the other
// side always replying in kind and in order.
impl JupyterReplProxy {
  /// Requests LSP completions for `line_text` at `position`.
  pub async fn lsp_completions(
    &mut self,
    line_text: String,
    position: usize,
  ) -> Vec<ReplCompletionItem> {
    let _ = self.tx.send(JupyterReplRequest::LspCompletions {
      line_text,
      position,
    });
    let Some(JupyterReplResponse::LspCompletions(resp)) = self.rx.recv().await
    else {
      unreachable!()
    };
    resp
  }
  /// Fetches the properties of a remote object via CDP.
  pub async fn get_properties(
    &mut self,
    object_id: String,
  ) -> Option<cdp::GetPropertiesResponse> {
    let _ = self
      .tx
      .send(JupyterReplRequest::JsGetProperties { object_id });
    let Some(JupyterReplResponse::JsGetProperties(resp)) = self.rx.recv().await
    else {
      unreachable!()
    };
    resp
  }
  /// Evaluates a JS expression via CDP.
  pub async fn evaluate(
    &mut self,
    expr: String,
  ) -> Option<cdp::EvaluateResponse> {
    let _ = self.tx.send(JupyterReplRequest::JsEvaluate { expr });
    let Some(JupyterReplResponse::JsEvaluate(resp)) = self.rx.recv().await
    else {
      unreachable!()
    };
    resp
  }
  /// Lists names in the global lexical scope.
  pub async fn global_lexical_scope_names(
    &mut self,
  ) -> cdp::GlobalLexicalScopeNamesResponse {
    let _ = self.tx.send(JupyterReplRequest::JsGlobalLexicalScopeNames);
    let Some(JupyterReplResponse::JsGlobalLexicalScopeNames(resp)) =
      self.rx.recv().await
    else {
      unreachable!()
    };
    resp
  }
  /// Evaluates a REPL line with object wrapping (TS-aware evaluate).
  pub async fn evaluate_line_with_object_wrapping(
    &mut self,
    line: String,
  ) -> Result<repl::TsEvaluateResponse, AnyError> {
    let _ = self
      .tx
      .send(JupyterReplRequest::JsEvaluateLineWithObjectWrapping { line });
    let Some(JupyterReplResponse::JsEvaluateLineWithObjectWrapping(resp)) =
      self.rx.recv().await
    else {
      unreachable!()
    };
    resp
  }
  /// Calls a JS function with the given remote-object arguments.
  pub async fn call_function_on_args(
    &mut self,
    function_declaration: String,
    args: Vec<cdp::RemoteObject>,
  ) -> Result<cdp::CallFunctionOnResponse, AnyError> {
    let _ = self.tx.send(JupyterReplRequest::JsCallFunctionOnArgs {
      function_declaration,
      args,
    });
    let Some(JupyterReplResponse::JsCallFunctionOnArgs(resp)) =
      self.rx.recv().await
    else {
      unreachable!()
    };
    resp
  }
  // TODO(bartlomieju): rename to "broadcast_result"?
  pub async fn call_function_on(
    &mut self,
    arg0: cdp::CallArgument,
    arg1: cdp::CallArgument,
  ) -> Option<cdp::CallFunctionOnResponse> {
    let _ = self
      .tx
      .send(JupyterReplRequest::JsCallFunctionOn { arg0, arg1 });
    let Some(JupyterReplResponse::JsCallFunctionOn(resp)) =
      self.rx.recv().await
    else {
      unreachable!()
    };
    resp
  }
}
/// REPL-thread end of the proxy: owns the actual `ReplSession` and
/// the channel pair paired with `JupyterReplProxy`.
pub struct JupyterReplSession {
  repl_session: repl::ReplSession,
  rx: mpsc::UnboundedReceiver<JupyterReplRequest>,
  tx: mpsc::UnboundedSender<JupyterReplResponse>,
}
impl JupyterReplSession {
pub async fn start(&mut self) {
let mut poll_worker = true;
loop {
tokio::select! {
biased;
maybe_message = self.rx.recv() => {
let Some(msg) = maybe_message else {
break;
};
if self.handle_message(msg).await.is_err() {
break;
}
poll_worker = true;
},
_ = self.repl_session.run_event_loop(), if poll_worker => {
poll_worker = false;
}
}
}
}
async fn handle_message(
&mut self,
msg: JupyterReplRequest,
) -> Result<(), AnyError> {
let resp = match msg {
JupyterReplRequest::LspCompletions {
line_text,
position,
} => JupyterReplResponse::LspCompletions(
self
.lsp_completions(&line_text, position, CancellationToken::new())
.await,
),
JupyterReplRequest::JsGetProperties { object_id } => {
JupyterReplResponse::JsGetProperties(
self.get_properties(object_id).await,
)
}
JupyterReplRequest::JsEvaluate { expr } => {
JupyterReplResponse::JsEvaluate(self.evaluate(expr).await)
}
JupyterReplRequest::JsGlobalLexicalScopeNames => {
JupyterReplResponse::JsGlobalLexicalScopeNames(
self.global_lexical_scope_names().await,
)
}
JupyterReplRequest::JsEvaluateLineWithObjectWrapping { line } => {
JupyterReplResponse::JsEvaluateLineWithObjectWrapping(
self.evaluate_line_with_object_wrapping(&line).await,
)
}
JupyterReplRequest::JsCallFunctionOnArgs {
function_declaration,
args,
} => JupyterReplResponse::JsCallFunctionOnArgs(
self
.call_function_on_args(function_declaration, &args)
.await,
),
JupyterReplRequest::JsCallFunctionOn { arg0, arg1 } => {
JupyterReplResponse::JsCallFunctionOn(
self.call_function_on(arg0, arg1).await,
)
}
};
self.tx.send(resp).map_err(|e| e.into())
}
pub async fn lsp_completions(
&mut self,
line_text: &str,
position: usize,
token: CancellationToken,
) -> Vec<ReplCompletionItem> {
self
.repl_session
.language_server
.completions(line_text, position, token)
.await
}
pub async fn get_properties(
&mut self,
object_id: String,
) -> Option<cdp::GetPropertiesResponse> {
let get_properties_response = self
.repl_session
.post_message_with_event_loop(
"Runtime.getProperties",
Some(cdp::GetPropertiesArgs {
object_id,
own_properties: None,
accessor_properties_only: None,
generate_preview: None,
non_indexed_properties_only: Some(true),
}),
)
.await;
serde_json::from_value(get_properties_response).ok()
}
pub async fn evaluate(
&mut self,
expr: String,
) -> Option<cdp::EvaluateResponse> {
let evaluate_response: serde_json::Value = self
.repl_session
.post_message_with_event_loop(
"Runtime.evaluate",
Some(cdp::EvaluateArgs {
expression: expr,
object_group: None,
include_command_line_api: None,
silent: None,
context_id: Some(self.repl_session.context_id),
return_by_value: None,
generate_preview: None,
user_gesture: None,
await_promise: None,
throw_on_side_effect: Some(true),
timeout: Some(200),
disable_breaks: None,
repl_mode: None,
allow_unsafe_eval_blocked_by_csp: None,
unique_context_id: None,
}),
)
.await;
serde_json::from_value(evaluate_response).ok()
}
pub async fn global_lexical_scope_names(
&mut self,
) -> cdp::GlobalLexicalScopeNamesResponse {
let evaluate_response = self
.repl_session
.post_message_with_event_loop(
"Runtime.globalLexicalScopeNames",
Some(cdp::GlobalLexicalScopeNamesArgs {
execution_context_id: Some(self.repl_session.context_id),
}),
)
.await;
serde_json::from_value(evaluate_response).unwrap()
}
pub async fn evaluate_line_with_object_wrapping(
&mut self,
line: &str,
) -> Result<repl::TsEvaluateResponse, AnyError> {
self
.repl_session
.evaluate_line_with_object_wrapping(line)
.await
}
pub async fn call_function_on_args(
&mut self,
function_declaration: String,
args: &[cdp::RemoteObject],
) -> Result<cdp::CallFunctionOnResponse, AnyError> {
self
.repl_session
.call_function_on_args(function_declaration, args)
.await
}
// TODO(bartlomieju): rename to "broadcast_result"?
/// Invokes the internal `Deno[Deno.internal].jupyter.broadcastResult`
/// helper in the REPL context via CDP `Runtime.callFunctionOn`, passing
/// `arg0` (the execution count) and `arg1` (the result to broadcast).
///
/// Returns `None` if the response cannot be deserialized.
pub async fn call_function_on(
  &mut self,
  arg0: cdp::CallArgument,
  arg1: cdp::CallArgument,
) -> Option<cdp::CallFunctionOnResponse> {
  let response = self.repl_session
    .post_message_with_event_loop(
      "Runtime.callFunctionOn",
      Some(json!({
        "functionDeclaration": r#"async function (execution_count, result) {
await Deno[Deno.internal].jupyter.broadcastResult(execution_count, result);
}"#,
        "arguments": [arg0, arg1],
        "executionContextId": self.repl_session.context_id,
        "awaitPromise": true,
      })),
    )
    .await;
  serde_json::from_value(response).ok()
}
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/tools/jupyter/server.rs | cli/tools/jupyter/server.rs | // Copyright 2018-2025 the Deno authors. MIT license.
// This file is forked/ported from <https://github.com/evcxr/evcxr>
// Copyright 2020 The Evcxr Authors. MIT license.
// NOTE(bartlomieju): unfortunately it appears that clippy is broken
// and can't allow a single line ignore for `await_holding_lock`.
#![allow(clippy::await_holding_lock)]
use std::collections::HashMap;
use std::rc::Rc;
use std::sync::Arc;
use deno_core::CancelFuture;
use deno_core::CancelHandle;
use deno_core::anyhow::bail;
use deno_core::error::AnyError;
use deno_core::futures;
use deno_core::parking_lot::Mutex;
use deno_core::serde_json;
use deno_lib::version::DENO_VERSION_INFO;
use jupyter_protocol::messaging;
use jupyter_runtime::ConnectionInfo;
use jupyter_runtime::ExecutionCount;
use jupyter_runtime::JupyterMessage;
use jupyter_runtime::JupyterMessageContent;
use jupyter_runtime::KernelControlConnection;
use jupyter_runtime::KernelIoPubConnection;
use jupyter_runtime::KernelShellConnection;
use jupyter_runtime::ReplyError;
use jupyter_runtime::ReplyStatus;
use jupyter_runtime::StreamContent;
use tokio::sync::mpsc;
use tokio::sync::oneshot;
use uuid::Uuid;
use super::JupyterReplProxy;
use crate::cdp;
/// The Deno Jupyter kernel server.
///
/// Owns the state shared between the channel tasks: the cell execution
/// counter, the most recent `execute_request` (used to parent stream
/// output to the right cell), the IOPub broadcast connection, and a
/// proxy to the REPL session.
pub struct JupyterServer {
  execution_count: ExecutionCount,
  last_execution_request: Arc<Mutex<Option<JupyterMessage>>>,
  iopub_connection: Arc<Mutex<KernelIoPubConnection>>,
  repl_session_proxy: JupyterReplProxy,
}
/// Channel pair proxying messages to and from the kernel's stdin
/// connection (see the stdin task in [`JupyterServer::start`]).
pub struct StdinConnectionProxy {
  /// Messages to forward to the stdin connection.
  pub tx: mpsc::UnboundedSender<JupyterMessage>,
  /// Replies read back from the stdin connection.
  pub rx: mpsc::UnboundedReceiver<JupyterMessage>,
}
/// Handles created during kernel startup that the caller of
/// [`JupyterServer::start`] needs; sent back through the `setup_tx`
/// oneshot channel.
pub struct StartupData {
  pub iopub_connection: Arc<Mutex<KernelIoPubConnection>>,
  pub stdin_connection_proxy: Arc<Mutex<StdinConnectionProxy>>,
  pub last_execution_request: Arc<Mutex<Option<JupyterMessage>>>,
}
impl JupyterServer {
/// Binds all Jupyter kernel channels (heartbeat, shell, control, stdin,
/// iopub), hands the shared handles back through `setup_tx`, then runs
/// one task per channel until a shutdown request cancels everything or
/// a task fails.
pub async fn start(
  connection_info: ConnectionInfo,
  mut stdio_rx: mpsc::UnboundedReceiver<StreamContent>,
  repl_session_proxy: JupyterReplProxy,
  setup_tx: oneshot::Sender<StartupData>,
) -> Result<(), AnyError> {
  let session_id = Uuid::new_v4().to_string();
  let mut heartbeat =
    jupyter_runtime::create_kernel_heartbeat_connection(&connection_info)
      .await?;
  let shell_connection = jupyter_runtime::create_kernel_shell_connection(
    &connection_info,
    &session_id,
  )
  .await?;
  let control_connection = jupyter_runtime::create_kernel_control_connection(
    &connection_info,
    &session_id,
  )
  .await?;
  let mut stdin_connection = jupyter_runtime::create_kernel_stdin_connection(
    &connection_info,
    &session_id,
  )
  .await?;
  let iopub_connection = jupyter_runtime::create_kernel_iopub_connection(
    &connection_info,
    &session_id,
  )
  .await?;
  let iopub_connection = Arc::new(Mutex::new(iopub_connection));
  let last_execution_request = Arc::new(Mutex::new(None));
  // Two unbounded channels form a bidirectional proxy around the stdin
  // connection: requests flow in via `stdin_tx1`/`stdin_rx1`, replies
  // flow back via `stdin_tx2`/`stdin_rx2`.
  let (stdin_tx1, mut stdin_rx1) =
    mpsc::unbounded_channel::<JupyterMessage>();
  let (stdin_tx2, stdin_rx2) = mpsc::unbounded_channel::<JupyterMessage>();
  let stdin_connection_proxy = Arc::new(Mutex::new(StdinConnectionProxy {
    tx: stdin_tx1,
    rx: stdin_rx2,
  }));
  let Ok(()) = setup_tx.send(StartupData {
    iopub_connection: iopub_connection.clone(),
    last_execution_request: last_execution_request.clone(),
    stdin_connection_proxy,
  }) else {
    bail!("Failed to send startup data");
  };
  let cancel_handle = CancelHandle::new_rc();
  let mut server = Self {
    execution_count: ExecutionCount::new(0),
    iopub_connection: iopub_connection.clone(),
    last_execution_request: last_execution_request.clone(),
    repl_session_proxy,
  };
  // Relay loop: each request sent by the proxy is forwarded to the
  // stdin connection, and its reply is forwarded back. Any channel or
  // connection failure ends the task.
  let stdin_fut = deno_core::unsync::spawn(async move {
    loop {
      let Some(msg) = stdin_rx1.recv().await else {
        return;
      };
      let Ok(()) = stdin_connection.send(msg).await else {
        return;
      };
      let Ok(msg) = stdin_connection.read().await else {
        return;
      };
      let Ok(()) = stdin_tx2.send(msg) else {
        return;
      };
    }
  });
  // NOTE(review): "hearbeat_fut" is a typo for "heartbeat_fut".
  let hearbeat_fut = deno_core::unsync::spawn(async move {
    loop {
      if let Err(err) = heartbeat.single_heartbeat().await {
        log::error!(
          "Heartbeat error: {}\nBacktrace:\n{}",
          err,
          err.backtrace()
        );
      }
    }
  });
  let control_fut = deno_core::unsync::spawn({
    let cancel_handle = cancel_handle.clone();
    async move {
      if let Err(err) =
        Self::handle_control(control_connection, cancel_handle).await
      {
        log::error!(
          "Control error: {}\nBacktrace:\n{}",
          err,
          err.backtrace()
        );
      }
    }
  });
  let shell_fut = deno_core::unsync::spawn(async move {
    if let Err(err) = server.handle_shell(shell_connection).await {
      log::error!("Shell error: {}\nBacktrace:\n{}", err, err.backtrace());
    }
  });
  // Forward captured stdout/stderr stream content onto IOPub.
  let stdio_fut = deno_core::unsync::spawn(async move {
    while let Some(stdio_msg) = stdio_rx.recv().await {
      Self::handle_stdio_msg(
        iopub_connection.clone(),
        last_execution_request.clone(),
        stdio_msg,
      )
      .await;
    }
  });
  // NOTE(review): this task is empty; presumably a leftover — confirm.
  let repl_session_fut = deno_core::unsync::spawn(async move {});
  let join_fut = futures::future::try_join_all(vec![
    hearbeat_fut,
    control_fut,
    shell_fut,
    stdio_fut,
    repl_session_fut,
    stdin_fut,
  ]);
  // Run until a task finishes/errors, or a shutdown request cancels the
  // join via `cancel_handle` (in which case cancellation is not an error).
  if let Ok(result) = join_fut.or_cancel(cancel_handle).await {
    result?;
  }
  Ok(())
}
/// Forwards a captured stdout/stderr `StreamContent` message to the
/// IOPub channel, parented to the most recent execution request so
/// frontends attribute the output to the right cell.
///
/// If no execution request has been seen yet, the output is dropped.
async fn handle_stdio_msg(
  iopub_connection: Arc<Mutex<KernelIoPubConnection>>,
  last_execution_request: Arc<Mutex<Option<JupyterMessage>>>,
  stdio_msg: StreamContent,
) {
  let maybe_exec_result = last_execution_request.lock().clone();
  let Some(exec_request) = maybe_exec_result else {
    return;
  };
  let result = iopub_connection
    .lock()
    .send(stdio_msg.as_child_of(&exec_request))
    .await;
  if let Err(err) = result {
    log::error!("Output error: {}", err);
  }
}
/// Processes messages from the control channel until the connection
/// errors out.
///
/// A shutdown request cancels the whole server via `cancel_handle`;
/// interrupt and debug requests are currently unsupported and only
/// logged.
async fn handle_control(
  mut connection: KernelControlConnection,
  cancel_handle: Rc<CancelHandle>,
) -> Result<(), AnyError> {
  loop {
    let msg = connection.read().await?;
    match msg.content {
      JupyterMessageContent::KernelInfoRequest(_) => {
        // normally kernel info is sent from the shell channel
        // however, some frontends will send it on the control channel
        // and it's no harm to send a kernel info reply on control
        connection.send(kernel_info().as_child_of(&msg)).await?;
      }
      JupyterMessageContent::ShutdownRequest(_) => {
        cancel_handle.cancel();
      }
      JupyterMessageContent::InterruptRequest(_) => {
        log::error!("Interrupt request currently not supported");
      }
      JupyterMessageContent::DebugRequest(_) => {
        log::error!("Debug request currently not supported");
        // See https://jupyter-client.readthedocs.io/en/latest/messaging.html#debug-request
        // and https://microsoft.github.io/debug-adapter-protocol/
      }
      _ => {
        log::error!(
          "Unrecognized control message type: {}",
          msg.message_type()
        );
      }
    }
  }
}
/// Reads and dispatches shell-channel messages until the connection
/// errors out.
async fn handle_shell(
  &mut self,
  mut connection: KernelShellConnection,
) -> Result<(), AnyError> {
  loop {
    let msg = connection.read().await?;
    self.handle_shell_message(msg, &mut connection).await?;
  }
}
/// Dispatches a single shell-channel message, bracketing the work with
/// IOPub `status: busy` / `status: idle` messages.
///
/// Handles execution, completion, inspection, is-complete, kernel-info,
/// comm, and history requests; everything else is logged and ignored.
async fn handle_shell_message(
  &mut self,
  msg: JupyterMessage,
  connection: &mut KernelShellConnection,
) -> Result<(), AnyError> {
  let parent = &msg.clone();
  self
    .send_iopub(messaging::Status::busy().as_child_of(parent))
    .await?;
  match msg.content {
    JupyterMessageContent::ExecuteRequest(execute_request) => {
      self
        .handle_execution_request(execute_request, parent, connection)
        .await?;
    }
    JupyterMessageContent::CompleteRequest(req) => {
      let user_code = req.code;
      let cursor_pos = req.cursor_pos;
      // Prefer LSP-provided completions; fall back to asking the
      // inspector for property/scope names below.
      let lsp_completions = self
        .repl_session_proxy
        .lsp_completions(user_code.clone(), cursor_pos)
        .await;
      if !lsp_completions.is_empty() {
        let matches: Vec<String> = lsp_completions
          .iter()
          .map(|item| item.new_text.clone())
          .collect();
        let cursor_start = lsp_completions
          .first()
          .map(|item| item.range.start)
          .unwrap_or(cursor_pos);
        let cursor_end = lsp_completions
          .last()
          .map(|item| item.range.end)
          .unwrap_or(cursor_pos);
        connection
          .send(
            messaging::CompleteReply {
              matches,
              cursor_start,
              cursor_end,
              metadata: Default::default(),
              status: ReplyStatus::Ok,
              error: None,
            }
            .as_child_of(parent),
          )
          .await?;
      } else {
        let expr = get_expr_from_line_at_pos(&user_code, cursor_pos);
        // check if the expression is in the form `obj.prop`
        let (completions, cursor_start) = if let Some(index) = expr.rfind('.')
        {
          let sub_expr = &expr[..index];
          let prop_name = &expr[index + 1..];
          // complete against the properties of `sub_expr`, filtering out
          // symbol keys and non-matching prefixes
          let candidates = get_expression_property_names(
            &mut self.repl_session_proxy,
            sub_expr,
          )
          .await
          .into_iter()
          .filter(|n| !n.starts_with("Symbol(") && n.starts_with(prop_name))
          .collect();
          if prop_name.len() > cursor_pos {
            // TODO(bartlomieju): most likely not correct, but better than panicking because of sub with overflow
            (candidates, cursor_pos)
          } else {
            (candidates, cursor_pos - prop_name.len())
          }
        } else {
          // combine results of declarations and globalThis properties
          let mut candidates = get_expression_property_names(
            &mut self.repl_session_proxy,
            "globalThis",
          )
          .await
          .into_iter()
          .chain(
            get_global_lexical_scope_names(&mut self.repl_session_proxy)
              .await,
          )
          .filter(|n| n.starts_with(expr))
          .collect::<Vec<_>>();
          // sort and remove duplicates
          candidates.sort();
          candidates.dedup(); // make sure to sort first
          if expr.len() > cursor_pos {
            // TODO(bartlomieju): most likely not correct, but better than panicking because of sub with overflow
            (candidates, cursor_pos)
          } else {
            (candidates, cursor_pos - expr.len())
          }
        };
        connection
          .send(
            messaging::CompleteReply {
              matches: completions,
              cursor_start,
              cursor_end: cursor_pos,
              metadata: Default::default(),
              status: ReplyStatus::Ok,
              error: None,
            }
            .as_child_of(parent),
          )
          .await?;
      }
    }
    JupyterMessageContent::InspectRequest(_req) => {
      // TODO(bartlomieju?): implement introspection request
      // The inspect request is used to get information about an object at cursor position.
      // There are two detail levels: 0 is typically documentation, 1 is typically source code
      // The response includes a MimeBundle to render the object:
      // {
      //   "status": "ok",
      //   "found": true,
      //   "data": {
      //     "text/plain": "Plain documentation here",
      //     "text/html": "<div>Rich documentation here</div>",
      //     "application/json": {
      //       "key1": "value1",
      //       "key2": "value2"
      //     }
      //   },
      // }
      connection
        .send(
          messaging::InspectReply {
            status: ReplyStatus::Ok,
            found: false,
            data: Default::default(),
            metadata: Default::default(),
            error: None,
          }
          .as_child_of(parent),
        )
        .await?;
    }
    JupyterMessageContent::IsCompleteRequest(_) => {
      connection
        .send(messaging::IsCompleteReply::complete().as_child_of(parent))
        .await?;
    }
    JupyterMessageContent::KernelInfoRequest(_) => {
      connection.send(kernel_info().as_child_of(parent)).await?;
    }
    JupyterMessageContent::CommOpen(comm) => {
      // reply with an immediate close — comm messages are otherwise
      // not handled (see `CommMsg`/`CommClose` below)
      connection
        .send(
          messaging::CommClose {
            comm_id: comm.comm_id,
            data: Default::default(),
          }
          .as_child_of(parent),
        )
        .await?;
    }
    JupyterMessageContent::HistoryRequest(_req) => {
      connection
        .send(
          messaging::HistoryReply {
            history: vec![],
            error: None,
            status: ReplyStatus::Ok,
          }
          .as_child_of(parent),
        )
        .await?;
    }
    JupyterMessageContent::InputReply(_rep) => {
      // TODO(@zph): implement input reply from https://github.com/denoland/deno/pull/23592
      // NOTE: This will belong on the stdin channel, not the shell channel
    }
    JupyterMessageContent::CommInfoRequest(_req) => {
      connection
        .send(
          messaging::CommInfoReply {
            comms: Default::default(),
            status: ReplyStatus::Ok,
            error: None,
          }
          .as_child_of(parent),
        )
        .await?;
    }
    JupyterMessageContent::CommMsg(_)
    | JupyterMessageContent::CommClose(_) => {
      // Do nothing with regular comm messages
    }
    // Any unknown message type is ignored
    _ => {
      log::error!(
        "Unrecognized shell message type: {}",
        msg.content.message_type()
      );
    }
  }
  self
    .send_iopub(messaging::Status::idle().as_child_of(parent))
    .await?;
  Ok(())
}
/// Executes a code cell: bumps the execution counter (for non-silent,
/// history-storing requests), broadcasts `execute_input`, evaluates the
/// code through the REPL session, publishes either the result or a
/// formatted error on IOPub, and finally sends the `execute_reply` on
/// the shell channel.
async fn handle_execution_request(
  &mut self,
  execute_request: messaging::ExecuteRequest,
  parent_message: &JupyterMessage,
  connection: &mut KernelShellConnection,
) -> Result<(), AnyError> {
  if !execute_request.silent && execute_request.store_history {
    self.execution_count.increment();
  }
  // remember the request so stdout/stderr output is parented to this cell
  *self.last_execution_request.lock() = Some(parent_message.clone());
  self
    .send_iopub(
      messaging::ExecuteInput {
        execution_count: self.execution_count,
        code: execute_request.code.clone(),
      }
      .as_child_of(parent_message),
    )
    .await?;
  let result = self
    .repl_session_proxy
    .evaluate_line_with_object_wrapping(execute_request.code)
    .await;
  let evaluate_response = match result {
    Ok(eval_response) => eval_response,
    Err(err) => {
      // The evaluation itself failed (as opposed to user code throwing):
      // report the error and an error reply, but keep the kernel alive.
      self
        .send_iopub(
          messaging::ErrorOutput {
            ename: err.to_string(),
            evalue: err.to_string(),
            traceback: vec![],
          }
          .as_child_of(parent_message),
        )
        .await?;
      connection
        .send(
          messaging::ExecuteReply {
            execution_count: self.execution_count,
            status: ReplyStatus::Error,
            payload: Default::default(),
            user_expressions: None,
            error: None,
          }
          .as_child_of(parent_message),
        )
        .await?;
      return Ok(());
    }
  };
  let cdp::EvaluateResponse {
    result,
    exception_details,
  } = evaluate_response.value;
  if exception_details.is_none() {
    // success: broadcast the result and reply Ok
    publish_result(
      &mut self.repl_session_proxy,
      &result,
      self.execution_count,
    )
    .await?;
    connection
      .send(
        messaging::ExecuteReply {
          execution_count: self.execution_count,
          status: ReplyStatus::Ok,
          user_expressions: None,
          payload: Default::default(),
          error: None,
        }
        .as_child_of(parent_message),
      )
      .await?;
    // Let's sleep here for a few ms, so we give a chance to the task that is
    // handling stdout and stderr streams to receive and flush the content.
    // Otherwise, executing multiple cells one-by-one might lead to output
    // from various cells be grouped together in another cell result.
    tokio::time::sleep(std::time::Duration::from_millis(5)).await;
  } else if let Some(exception_details) = exception_details {
    // Determine the exception value and name
    let (name, message, stack) = if let Some(exception) =
      exception_details.exception
    {
      // serialize the thrown value's name/message/stack inside the
      // runtime, where `instanceof Error` can be checked
      let result = self
        .repl_session_proxy
        .call_function_on_args(
          r#"
function(object) {
if (object instanceof Error) {
const name = "name" in object ? String(object.name) : "";
const message = "message" in object ? String(object.message) : "";
const stack = "stack" in object ? String(object.stack) : "";
return JSON.stringify({ name, message, stack });
} else {
const message = String(object);
return JSON.stringify({ name: "", message, stack: "" });
}
}
"#
          .into(),
          vec![exception],
        )
        .await?;
      match result.result.value {
        Some(serde_json::Value::String(str)) => {
          if let Ok(object) =
            serde_json::from_str::<HashMap<String, String>>(&str)
          {
            let get = |k| object.get(k).cloned().unwrap_or_default();
            (get("name"), get("message"), get("stack"))
          } else {
            log::error!("Unexpected result while parsing JSON {str}");
            ("".into(), "".into(), "".into())
          }
        }
        _ => {
          log::error!("Unexpected result while parsing exception {result:?}");
          ("".into(), "".into(), "".into())
        }
      }
    } else {
      log::error!("Unexpectedly missing exception {exception_details:?}");
      ("".into(), "".into(), "".into())
    };
    // synthesize a minimal stack when the thrown value had none
    let stack = if stack.is_empty() {
      format!(
        "{}\n at <unknown>",
        serde_json::to_string(&message).unwrap()
      )
    } else {
      stack
    };
    let traceback = format!("Stack trace:\n{stack}")
      .split('\n')
      .map(|s| s.to_owned())
      .collect::<Vec<_>>();
    let ename = if name.is_empty() {
      "Unknown error".into()
    } else {
      name
    };
    let evalue = if message.is_empty() {
      "(none)".into()
    } else {
      message
    };
    self
      .send_iopub(
        messaging::ErrorOutput {
          ename: ename.clone(),
          evalue: evalue.clone(),
          traceback: traceback.clone(),
        }
        .as_child_of(parent_message),
      )
      .await?;
    connection
      .send(
        messaging::ExecuteReply {
          execution_count: self.execution_count,
          status: ReplyStatus::Error,
          error: Some(Box::new(ReplyError {
            ename,
            evalue,
            traceback,
          })),
          user_expressions: None,
          payload: Default::default(),
        }
        .as_child_of(parent_message),
      )
      .await?;
  }
  Ok(())
}
/// Sends `message` on the IOPub broadcast channel.
///
/// The `parking_lot` mutex guard is held across the `.await`; this is
/// covered by the file-level `#![allow(clippy::await_holding_lock)]`.
async fn send_iopub(
  &mut self,
  message: JupyterMessage,
) -> Result<(), AnyError> {
  // `message` is owned and not used again, so it can be moved into
  // `send` directly — the previous `message.clone()` was a pointless
  // allocation per broadcast.
  self.iopub_connection.lock().send(message).await
}
}
/// Builds the kernel info reply for this Deno kernel: protocol 5.3,
/// TypeScript language info, and a link to the Deno docs.
fn kernel_info() -> messaging::KernelInfoReply {
  let language_info = messaging::LanguageInfo {
    name: "typescript".to_string(),
    version: DENO_VERSION_INFO.typescript.to_string(),
    mimetype: "text/x.typescript".to_string(),
    file_extension: ".ts".to_string(),
    pygments_lexer: "typescript".to_string(),
    codemirror_mode: messaging::CodeMirrorMode::typescript(),
    nbconvert_exporter: "script".to_string(),
  };
  let help_links = vec![messaging::HelpLink {
    text: "Visit Deno manual".to_string(),
    url: "https://docs.deno.com".to_string(),
  }];
  messaging::KernelInfoReply {
    status: ReplyStatus::Ok,
    protocol_version: "5.3".to_string(),
    implementation: "Deno kernel".to_string(),
    implementation_version: DENO_VERSION_INFO.deno.to_string(),
    language_info,
    banner: "Welcome to Deno kernel".to_string(),
    help_links,
    debugger: false,
    error: None,
  }
}
/// Broadcasts an evaluation result to frontends by invoking the
/// runtime's internal `broadcastResult` helper (via
/// `JupyterReplProxy::call_function_on`) with the execution count and
/// the result object.
///
/// Currently always returns `Ok(None)`; failures in the broadcast are
/// logged and swallowed so a misbehaving result cannot take the kernel
/// down.
async fn publish_result(
  repl_session_proxy: &mut JupyterReplProxy,
  evaluate_result: &cdp::RemoteObject,
  execution_count: ExecutionCount,
) -> Result<Option<HashMap<String, serde_json::Value>>, AnyError> {
  let arg0 = cdp::CallArgument {
    value: Some(execution_count.into()),
    unserializable_value: None,
    object_id: None,
  };
  let arg1 = cdp::CallArgument::from(evaluate_result);
  let Some(response) = repl_session_proxy.call_function_on(arg0, arg1).await
  else {
    return Ok(None);
  };
  if let Some(exception_details) = &response.exception_details {
    // If the object doesn't have a Jupyter.display method or it throws an
    // exception, we just ignore it and let the caller handle it.
    log::error!("Exception encountered: {}", exception_details.text);
    return Ok(None);
  }
  Ok(None)
}
// TODO(bartlomieju): dedup with repl::editor
/// Extracts the (possibly dotted) expression the cursor is touching in
/// `line`, e.g. for `Deno.ver` with the cursor at the end it returns
/// `Deno.ver`. `.`, `_` and `$` count as word characters (see
/// [`is_word_boundary`]) so member-access chains stay intact.
///
/// NOTE(review): `cursor_pos` is assumed to be a valid byte index on a
/// char boundary of `line`; slicing would panic otherwise — confirm the
/// caller guarantees this.
fn get_expr_from_line_at_pos(line: &str, cursor_pos: usize) -> &str {
  // the last boundary before the cursor starts the word...
  let start = line[..cursor_pos].rfind(is_word_boundary).unwrap_or(0);
  // ...and the FIRST boundary at/after the cursor ends it. This used
  // `rfind`, which picked the *last* boundary in the rest of the line
  // and therefore swallowed every following word (e.g. `"a b c"` with
  // the cursor after `a` yielded `"a b"` instead of `"a"`).
  let end = line[cursor_pos..]
    .find(is_word_boundary)
    .map(|i| cursor_pos + i)
    .unwrap_or(cursor_pos);
  let word = &line[start..end];
  // the boundary characters themselves are not part of the expression
  let word = word.strip_prefix(is_word_boundary).unwrap_or(word);
  word.strip_suffix(is_word_boundary).unwrap_or(word)
}
// TODO(bartlomieju): dedup with repl::editor
/// Returns `true` if `c` separates words for completion purposes.
/// `.`, `_` and `$` are valid identifier/member-access characters and
/// are explicitly *not* boundaries; otherwise any ASCII whitespace or
/// punctuation is.
fn is_word_boundary(c: char) -> bool {
  if matches!(c, '.' | '_' | '$') {
    false
  } else {
    char::is_ascii_whitespace(&c) || char::is_ascii_punctuation(&c)
  }
}
// TODO(bartlomieju): dedup with repl::editor
/// Fetches the names in the global lexical scope of the REPL context,
/// used as completion candidates.
async fn get_global_lexical_scope_names(
  repl_session_proxy: &mut JupyterReplProxy,
) -> Vec<String> {
  repl_session_proxy.global_lexical_scope_names().await.names
}
// TODO(bartlomieju): dedup with repl::editor
/// Returns completion candidates for `expr`'s properties.
///
/// First tries the expression's own object properties; when that fails,
/// falls back to the prototype matching the expression's runtime type.
async fn get_expression_property_names(
  repl_session_proxy: &mut JupyterReplProxy,
  expr: &str,
) -> Vec<String> {
  // try to get the properties from the expression
  if let Some(properties) =
    get_object_expr_properties(repl_session_proxy, expr).await
  {
    return properties;
  }
  // otherwise fall back to the prototype
  let expr_type = get_expression_type(repl_session_proxy, expr).await;
  let object_expr = match expr_type.as_deref() {
    // possibilities: https://chromedevtools.github.io/devtools-protocol/v8/Runtime/#type-RemoteObject
    Some("object") => "Object.prototype",
    Some("function") => "Function.prototype",
    Some("string") => "String.prototype",
    Some("boolean") => "Boolean.prototype",
    Some("bigint") => "BigInt.prototype",
    Some("number") => "Number.prototype",
    _ => return Vec::new(), // undefined, symbol, and unhandled
  };
  get_object_expr_properties(repl_session_proxy, object_expr)
    .await
    .unwrap_or_default()
}
// TODO(bartlomieju): dedup with repl::editor
/// Evaluates `expr` and returns its CDP `RemoteObject` type string
/// (e.g. "object", "function"), or `None` when evaluation failed or
/// threw.
async fn get_expression_type(
  repl_session_proxy: &mut JupyterReplProxy,
  expr: &str,
) -> Option<String> {
  evaluate_expression(repl_session_proxy, expr)
    .await
    .map(|res| res.result.kind)
}
// TODO(bartlomieju): dedup with repl::editor
/// Evaluates `object_expr` and, if it yields a remote object, returns
/// the names of that object's properties. `None` when evaluation fails,
/// throws, or produces a value with no object id (e.g. a primitive).
async fn get_object_expr_properties(
  repl_session_proxy: &mut JupyterReplProxy,
  object_expr: &str,
) -> Option<Vec<String>> {
  let evaluate_result =
    evaluate_expression(repl_session_proxy, object_expr).await?;
  let object_id = evaluate_result.result.object_id?;
  // `object_id` is not used after this call, so move it instead of the
  // previous redundant `.clone()`.
  let get_properties_response =
    repl_session_proxy.get_properties(object_id).await?;
  Some(
    get_properties_response
      .result
      .into_iter()
      .map(|prop| prop.name)
      .collect(),
  )
}
// TODO(bartlomieju): dedup with repl::editor
/// Evaluates `expr` in the REPL context, returning the response only
/// when evaluation succeeded without throwing an exception.
async fn evaluate_expression(
  repl_session_proxy: &mut JupyterReplProxy,
  expr: &str,
) -> Option<cdp::EvaluateResponse> {
  repl_session_proxy
    .evaluate(expr.to_string())
    .await
    .filter(|response| response.exception_details.is_none())
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/tools/lint/linter.rs | cli/tools/lint/linter.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::collections::HashSet;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use ::tokio_util::sync::CancellationToken;
use deno_ast::MediaType;
use deno_ast::ModuleSpecifier;
use deno_ast::ParsedSource;
use deno_ast::SourceTextInfo;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::futures::FutureExt as _;
use deno_core::parking_lot::Mutex;
use deno_graph::ModuleGraph;
use deno_lint::diagnostic::LintDiagnostic;
use deno_lint::linter::ExternalLinterCb;
use deno_lint::linter::ExternalLinterResult;
use deno_lint::linter::LintConfig as DenoLintConfig;
use deno_lint::linter::LintFileOptions;
use deno_lint::linter::Linter as DenoLintLinter;
use deno_lint::linter::LinterOptions;
use deno_path_util::fs::atomic_write_file_with_retries;
use deno_runtime::tokio_util;
use super::ConfiguredRules;
use super::plugins;
use super::plugins::PluginHostProxy;
use super::rules::FileOrPackageLintRule;
use super::rules::PackageLintRule;
use crate::sys::CliSys;
use crate::util::fs::specifier_from_file_path;
use crate::util::text_encoding::Utf16Map;
/// Options used to construct a [`CliLinter`].
pub struct CliLinterOptions {
  /// The configured lint rules (file-level and package-level).
  pub configured_rules: ConfiguredRules,
  /// Whether to apply available quick fixes and write files back.
  pub fix: bool,
  pub deno_lint_config: DenoLintConfig,
  /// Plugin host for JS lint plugin rules, if any are configured.
  pub maybe_plugin_runner: Option<Arc<PluginHostProxy>>,
}
/// Wraps `deno_lint`'s linter with CLI concerns: package-level rules,
/// optional JS plugin rules, and `--fix` support.
#[derive(Debug)]
pub struct CliLinter {
  /// Whether quick fixes are applied and written back to disk.
  fix: bool,
  /// Rules operating on the whole module graph rather than one file.
  package_rules: Vec<Box<dyn PackageLintRule>>,
  linter: DenoLintLinter,
  deno_lint_config: DenoLintConfig,
  /// Plugin host used to run JS plugin lint rules, if configured.
  maybe_plugin_runner: Option<Arc<PluginHostProxy>>,
}
impl CliLinter {
/// Creates a linter from `options`, splitting the configured rules into
/// file-level rules (handed to `deno_lint`) and package-level rules
/// (kept for [`Self::lint_package`]).
pub fn new(options: CliLinterOptions) -> Self {
  let configured = options.configured_rules;
  let mut file_rules = Vec::with_capacity(configured.rules.len());
  let mut package_rules = Vec::with_capacity(configured.rules.len());
  for rule in configured.rules {
    match rule.into_file_or_pkg_rule() {
      FileOrPackageLintRule::File(file_rule) => file_rules.push(file_rule),
      FileOrPackageLintRule::Package(pkg_rule) => package_rules.push(pkg_rule),
    }
  }
  Self {
    fix: options.fix,
    package_rules,
    linter: DenoLintLinter::new(LinterOptions {
      rules: file_rules,
      all_rule_codes: configured.all_rule_codes,
      custom_ignore_file_directive: None,
      custom_ignore_diagnostic_directive: None,
    }),
    deno_lint_config: options.deno_lint_config,
    maybe_plugin_runner: options.maybe_plugin_runner,
  }
}
/// Returns `true` if any package-level (module-graph-wide) rules are
/// configured.
pub fn has_package_rules(&self) -> bool {
  !self.package_rules.is_empty()
}
/// Runs every package-level rule against the module graph and collects
/// all of their diagnostics.
pub fn lint_package(
  &self,
  graph: &ModuleGraph,
  entrypoints: &[ModuleSpecifier],
) -> Vec<LintDiagnostic> {
  self
    .package_rules
    .iter()
    .flat_map(|rule| rule.lint_package(graph, entrypoints))
    .collect()
}
/// Lints an already-parsed source, honoring `token` for cancelling
/// plugin execution.
pub fn lint_with_ast(
  &self,
  parsed_source: &ParsedSource,
  token: CancellationToken,
) -> Result<Vec<LintDiagnostic>, AnyError> {
  let external_linter_container = ExternalLinterContainer::new(
    self.maybe_plugin_runner.clone(),
    Some(token),
  );
  let d = self.linter.lint_with_ast(
    parsed_source,
    self.deno_lint_config.clone(),
    external_linter_container.get_callback(),
  );
  // plugin failures are surfaced through the container, not through
  // `lint_with_ast`'s return value
  if let Some(err) = external_linter_container.take_error() {
    return Err(err);
  }
  Ok(d)
}
/// Lints a single file's source text, returning the parsed source and
/// its diagnostics.
///
/// The media type comes from `ext` when given, defaults to TypeScript
/// for extension-less paths, and is otherwise derived from the
/// specifier. When `fix` is enabled, quick fixes are applied and the
/// file is rewritten via [`Self::lint_file_and_fix`].
pub fn lint_file(
  &self,
  file_path: &Path,
  source_code: String,
  ext: Option<&str>,
) -> Result<(ParsedSource, Vec<LintDiagnostic>), AnyError> {
  let specifier = specifier_from_file_path(file_path)?;
  let media_type = if let Some(ext) = ext {
    MediaType::from_str(&format!("placeholder.{ext}"))
  } else if file_path.extension().is_none() {
    // no extension — assume TypeScript
    MediaType::TypeScript
  } else {
    MediaType::from_specifier(&specifier)
  };
  let external_linter_container =
    ExternalLinterContainer::new(self.maybe_plugin_runner.clone(), None);
  if self.fix {
    self.lint_file_and_fix(
      &specifier,
      media_type,
      source_code,
      file_path,
      external_linter_container,
    )
  } else {
    let (source, diagnostics) = self
      .linter
      .lint_file(LintFileOptions {
        specifier,
        media_type,
        source_code,
        config: self.deno_lint_config.clone(),
        external_linter: external_linter_container.get_callback(),
      })
      .map_err(AnyError::from)?;
    if let Some(err) = external_linter_container.take_error() {
      return Err(err);
    }
    Ok((source, diagnostics))
  }
}
/// Lints, then repeatedly applies quick fixes and relints until no more
/// fixes apply (or an iteration cap is hit), and finally writes the
/// fixed source back to `file_path` if anything changed.
fn lint_file_and_fix(
  &self,
  specifier: &ModuleSpecifier,
  media_type: MediaType,
  source_code: String,
  file_path: &Path,
  external_linter_container: ExternalLinterContainer,
) -> Result<(ParsedSource, Vec<LintDiagnostic>), deno_core::anyhow::Error> {
  // initial lint
  let (source, diagnostics) = self.linter.lint_file(LintFileOptions {
    specifier: specifier.clone(),
    media_type,
    source_code,
    config: self.deno_lint_config.clone(),
    external_linter: external_linter_container.get_callback(),
  })?;
  if let Some(err) = external_linter_container.take_error() {
    return Err(err);
  }
  // Try applying fixes repeatedly until the file has none left or
  // a maximum number of iterations is reached. This is necessary
  // because lint fixes may overlap and so we can't always apply
  // them in one pass.
  let mut source = source;
  let mut diagnostics = diagnostics;
  let mut fix_iterations = 0;
  loop {
    let change = apply_lint_fixes_and_relint(
      specifier,
      media_type,
      &self.linter,
      self.deno_lint_config.clone(),
      &source,
      &diagnostics,
      &external_linter_container,
    )?;
    match change {
      Some(change) => {
        source = change.0;
        diagnostics = change.1;
      }
      None => {
        // no applicable fixes remain
        break;
      }
    }
    fix_iterations += 1;
    if fix_iterations > 5 {
      log::warn!(
        concat!(
          "Reached maximum number of fix iterations for '{}'. There's ",
          "probably a bug in the lint rule. Please fix this file manually.",
        ),
        specifier,
      );
      break;
    }
  }
  if fix_iterations > 0 {
    // everything looks good and the file still parses, so write it out
    atomic_write_file_with_retries(
      &CliSys::default(),
      file_path,
      source.text().as_bytes(),
      crate::cache::CACHE_PERM,
    )
    .context("Failed writing fix to file.")?;
  }
  Ok((source, diagnostics))
}
}
/// Applies the first quick fix of each diagnostic to `original_source`
/// and lints the result again.
///
/// Returns `Ok(None)` when there was nothing to fix. If the fixed text
/// introduces a parse error, diagnostics are dropped from the end one at
/// a time to identify which rule's fix broke the file, and that rule is
/// named in the returned error.
fn apply_lint_fixes_and_relint(
  specifier: &ModuleSpecifier,
  media_type: MediaType,
  linter: &DenoLintLinter,
  config: DenoLintConfig,
  original_source: &ParsedSource,
  diagnostics: &[LintDiagnostic],
  external_linter_container: &ExternalLinterContainer,
) -> Result<Option<(ParsedSource, Vec<LintDiagnostic>)>, AnyError> {
  let text_info = original_source.text_info_lazy();
  let Some(new_text) = apply_lint_fixes(text_info, diagnostics) else {
    return Ok(None);
  };
  // lints `new_text`, erroring if it has any *new* parse diagnostic that
  // the original source didn't already have
  let lint_with_text = |new_text: String| {
    let (source, diagnostics) = linter.lint_file(LintFileOptions {
      specifier: specifier.clone(),
      source_code: new_text,
      media_type,
      config: config.clone(),
      external_linter: external_linter_container.get_callback(),
    })?;
    let mut new_diagnostics = source.diagnostics().clone();
    new_diagnostics.retain(|d| !original_source.diagnostics().contains(d));
    if let Some(diagnostic) = new_diagnostics.pop() {
      return Err(AnyError::from(diagnostic));
    }
    Ok((source, diagnostics))
  };
  let (source, diagnostics) = match lint_with_text(new_text) {
    Ok(result) => result,
    Err(err) => {
      let utf16_map = Utf16Map::new(text_info.text_str());
      // figure out which diagnostic caused a syntax error
      let mut diagnostics = diagnostics.to_vec();
      while let Some(last_diagnostic) = diagnostics.pop() {
        let Some(lint_fix) = last_diagnostic.details.fixes.first() else {
          continue;
        };
        let success = match apply_lint_fixes(text_info, &diagnostics) {
          Some(new_text) => lint_with_text(new_text).is_ok(),
          None => true,
        };
        if success {
          // removing `last_diagnostic` fixed the parse, so its fix is the
          // culprit; report its changes as UTF-16 offsets (presumably to
          // match editor positions — confirm)
          let mut changes_text = String::new();
          for change in &lint_fix.changes {
            let utf8_start =
              (change.range.start - text_info.range().start) as u32;
            let utf8_end = (change.range.end - text_info.range().start) as u32;
            let utf16_start = utf16_map
              .utf8_to_utf16_offset(utf8_start.into())
              .unwrap_or(utf8_start.into());
            let utf16_end = utf16_map
              .utf8_to_utf16_offset(utf8_end.into())
              .unwrap_or(utf8_end.into());
            changes_text.push_str(&format!(
              "Range: [{}, {}]\n",
              u32::from(utf16_start),
              u32::from(utf16_end)
            ));
            changes_text.push_str(&format!("Text: {:?}\n\n", &change.new_text));
          }
          return Err(err).context(format!(
            "The '{}' rule caused a syntax error applying '{}'.\n\n{}",
            last_diagnostic.details.code, lint_fix.description, changes_text
          ));
        }
      }
      return Err(err).context(
        "A lint fix caused a syntax error. This is a bug in a lint rule.",
      );
    }
  };
  if let Some(err) = external_linter_container.take_error() {
    return Err(err);
  }
  Ok(Some((source, diagnostics)))
}
/// Applies the *first* quick fix of every diagnostic to the source text.
///
/// Returns `None` when there are no diagnostics or none of them carry a
/// fix. Overlapping fixes are dropped here and picked up by the caller's
/// next iteration.
fn apply_lint_fixes(
  text_info: &SourceTextInfo,
  diagnostics: &[LintDiagnostic],
) -> Option<String> {
  if diagnostics.is_empty() {
    return None;
  }
  let file_start = text_info.range().start;
  let quick_fixes = diagnostics
    .iter()
    // use the first quick fix
    .filter_map(|d| d.details.fixes.first())
    .flat_map(|fix| fix.changes.iter())
    .map(|change| deno_ast::TextChange {
      range: change.range.as_byte_range(file_start),
      new_text: change.new_text.to_string(),
    })
    .collect::<Vec<_>>();
  if quick_fixes.is_empty() {
    return None;
  }
  Some(deno_ast::apply_text_changes(
    text_info.text_str(),
    // remove any overlapping text changes, we'll circle
    // back for another pass to fix the remaining
    filter_overlapping_text_changes(quick_fixes),
  ))
}
/// Sorts the changes by start offset and drops any change that overlaps
/// a previously kept one, as well as duplicate `import ` insertions
/// (the same import may be suggested by multiple diagnostics).
fn filter_overlapping_text_changes(
  mut text_changes: Vec<deno_ast::TextChange>,
) -> Vec<deno_ast::TextChange> {
  text_changes.sort_by_key(|change| change.range.start);
  let mut kept_imports = HashSet::new();
  let mut kept: Vec<deno_ast::TextChange> =
    Vec::with_capacity(text_changes.len());
  for candidate in text_changes {
    // skip anything that starts at or before the end of the last edit
    // we decided to keep
    if let Some(previous) = kept.last() {
      if candidate.range.start <= previous.range.end {
        continue;
      }
    }
    if candidate.new_text.trim_start().starts_with("import ") {
      // `insert` returns false for a duplicate — drop repeats of an
      // identical import insertion
      if !kept_imports.insert(candidate.new_text.trim().to_owned()) {
        continue;
      }
    }
    kept.push(candidate);
  }
  kept
}
/// Runs every configured JS plugin rule against `parsed_source`.
///
/// The plugin host is async, so the work runs on a fresh current-thread
/// tokio runtime; the function itself is synchronous so it can be
/// called from the `deno_lint` external-linter callback.
fn run_plugins(
  plugin_runner: Arc<PluginHostProxy>,
  parsed_source: ParsedSource,
  file_path: PathBuf,
  maybe_token: Option<CancellationToken>,
) -> Result<ExternalLinterResult, AnyError> {
  let source_text_info = parsed_source.text_info_lazy().clone();
  let plugin_info = plugin_runner
    .get_plugin_rules()
    .into_iter()
    .map(Cow::from)
    .collect();
  let fut = async move {
    // the serialized AST is accompanied by a UTF-16 offset map —
    // presumably plugin positions are UTF-16 based; see `plugins`
    let utf16_map = Utf16Map::new(parsed_source.text().as_ref());
    let serialized_ast =
      plugin_runner.serialize_ast(&parsed_source, &utf16_map)?;
    plugins::run_rules_for_ast(
      &plugin_runner,
      &file_path,
      serialized_ast,
      source_text_info,
      utf16_map,
      maybe_token,
    )
    .await
  }
  .boxed_local();
  let plugin_diagnostics = tokio_util::create_and_run_current_thread(fut)?;
  Ok(ExternalLinterResult {
    diagnostics: plugin_diagnostics,
    rules: plugin_info,
  })
}
/// Pairs the external-linter (plugin) callback with a slot for any error
/// the callback hit, since the callback's signature cannot return an
/// error directly.
struct ExternalLinterContainer {
  cb: Option<ExternalLinterCb>,
  error: Option<Arc<Mutex<Option<AnyError>>>>,
}
impl ExternalLinterContainer {
  /// Builds the container. When a plugin runner is supplied, a callback
  /// is created that runs the plugins for a parsed source; any error it
  /// hits is stashed in `error` (the callback can only return
  /// `Option<ExternalLinterResult>`) and must be retrieved afterwards
  /// via [`Self::take_error`].
  pub fn new(
    maybe_plugin_runner: Option<Arc<PluginHostProxy>>,
    maybe_token: Option<CancellationToken>,
  ) -> Self {
    let mut s = Self {
      cb: None,
      error: None,
    };
    if let Some(plugin_runner) = maybe_plugin_runner {
      s.error = Some(Arc::new(Mutex::new(None)));
      let error_ = s.error.clone();
      let cb = Arc::new(move |parsed_source: ParsedSource| {
        let token_ = maybe_token.clone();
        let file_path =
          match deno_path_util::url_to_file_path(parsed_source.specifier()) {
            Ok(path) => path,
            Err(err) => {
              // `error_` is always Some here — set just above
              *error_.as_ref().unwrap().lock() = Some(err.into());
              return None;
            }
          };
        let r =
          run_plugins(plugin_runner.clone(), parsed_source, file_path, token_);
        match r {
          Ok(d) => Some(d),
          Err(err) => {
            *error_.as_ref().unwrap().lock() = Some(err);
            None
          }
        }
      });
      s.cb = Some(cb);
    }
    s
  }

  /// Returns the callback to hand to `deno_lint`, if plugins are
  /// configured.
  pub fn get_callback(&self) -> Option<ExternalLinterCb> {
    self.cb.clone()
  }

  /// Takes (and clears) any error raised by the plugin callback.
  pub fn take_error(&self) -> Option<AnyError> {
    self.error.as_ref().and_then(|e| e.lock().take())
  }
}
#[cfg(test)]
mod test {
  use deno_ast::TextChange;
  use super::*;
  // A change fully contained in an earlier, wider change (and a zero-width
  // change starting at the same position) must be dropped, keeping only the
  // first (widest) edit.
  #[test]
  fn test_filter_overlapping_text_changes() {
    let changes = filter_overlapping_text_changes(vec![
      TextChange {
        range: 0..125,
        new_text: "".into(),
      },
      TextChange {
        range: 0..0,
        new_text: "".into(),
      },
      TextChange {
        range: 81..96,
        new_text: "".into(),
      },
    ]);
    assert_eq!(changes.len(), 1);
    assert_eq!(changes[0].range, 0..125);
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/tools/lint/mod.rs | cli/tools/lint/mod.rs | // Copyright 2018-2025 the Deno authors. MIT license.
//! This module provides file linting utilities using
//! [`deno_lint`](https://github.com/denoland/deno_lint).
use std::collections::HashSet;
use std::fs;
use std::io::Read;
use std::io::stdin;
use std::path::PathBuf;
use std::rc::Rc;
use std::sync::Arc;
use deno_ast::ModuleSpecifier;
use deno_ast::ParsedSource;
use deno_config::deno_json::LintRulesConfig;
use deno_config::glob::FileCollector;
use deno_config::glob::FilePatterns;
use deno_config::workspace::WorkspaceDirectory;
use deno_config::workspace::WorkspaceDirectoryRc;
use deno_core::anyhow::anyhow;
use deno_core::anyhow::bail;
use deno_core::error::AnyError;
use deno_core::futures::FutureExt;
use deno_core::futures::future::LocalBoxFuture;
use deno_core::parking_lot::Mutex;
use deno_core::serde_json;
use deno_core::unsync::future::LocalFutureExt;
use deno_core::unsync::future::SharedLocal;
use deno_graph::ModuleGraph;
use deno_lib::util::hash::FastInsecureHasher;
use deno_lint::diagnostic::LintDiagnostic;
use deno_resolver::deno_json::CompilerOptionsResolver;
use log::debug;
use reporters::LintReporter;
use reporters::create_reporter;
use crate::args::CliOptions;
use crate::args::Flags;
use crate::args::LintFlags;
use crate::args::LintOptions;
use crate::args::WorkspaceLintOptions;
use crate::cache::CacheDBHash;
use crate::cache::Caches;
use crate::cache::IncrementalCache;
use crate::colors;
use crate::factory::CliFactory;
use crate::graph_util::CreatePublishGraphOptions;
use crate::graph_util::ModuleGraphCreator;
use crate::sys::CliSys;
use crate::tools::fmt::run_parallelized;
use crate::util::display;
use crate::util::file_watcher;
use crate::util::file_watcher::WatcherCommunicator;
use crate::util::fs::canonicalize_path;
use crate::util::path::is_script_ext;
use crate::util::sync::AtomicFlag;
mod ast_buffer;
mod linter;
mod plugins;
mod reporters;
mod rules;
// TODO(bartlomieju): remove once we wire plugins through the CLI linter
pub use ast_buffer::serialize_ast_to_buffer;
pub use linter::CliLinter;
pub use linter::CliLinterOptions;
pub use plugins::PluginHostProxy;
pub use plugins::PluginLogger;
pub use plugins::create_runner_and_load_plugins;
pub use rules::ConfiguredRules;
pub use rules::LintRuleProvider;
pub use rules::collect_no_slow_type_diagnostics;
// Version of the JSON shape emitted by `deno lint --rules --json`.
const JSON_SCHEMA_VERSION: u8 = 1;
// Synthetic file name used when linting source piped via stdin.
static STDIN_FILE_NAME: &str = "$deno$stdin.mts";
/// Entry point for `deno lint`.
///
/// Dispatches to watch mode or stdin linting when requested; otherwise lints
/// each workspace member's file batch and exits the process with code 1 when
/// any diagnostics were found.
pub async fn lint(
  flags: Arc<Flags>,
  lint_flags: LintFlags,
) -> Result<(), AnyError> {
  if lint_flags.watch.is_some() {
    // Watch mode reads from disk on every change, so stdin is unsupported.
    if lint_flags.is_stdin() {
      return Err(anyhow!("Lint watch on standard input is not supported.",));
    }
    return lint_with_watch(flags, lint_flags).await;
  }
  let factory = CliFactory::from_flags(flags);
  let cli_options = factory.cli_options()?;
  let lint_rule_provider = factory.lint_rule_provider().await?;
  let is_stdin = lint_flags.is_stdin();
  let compiler_options_resolver = factory.compiler_options_resolver()?;
  let workspace_lint_options =
    cli_options.resolve_workspace_lint_options(&lint_flags)?;
  let success = if is_stdin {
    lint_stdin(
      cli_options,
      lint_rule_provider,
      workspace_lint_options,
      lint_flags,
      compiler_options_resolver,
    )?
  } else {
    let mut linter = WorkspaceLinter::new(
      factory.caches()?.clone(),
      lint_rule_provider,
      factory.module_graph_creator().await?.clone(),
      compiler_options_resolver.clone(),
      cli_options.start_dir.clone(),
      &workspace_lint_options,
    );
    // One batch per workspace member, each with its own lint options.
    let paths_with_options_batches =
      resolve_paths_with_options_batches(cli_options, &lint_flags)?;
    for paths_with_options in paths_with_options_batches {
      linter
        .lint_files(
          cli_options,
          paths_with_options.options,
          paths_with_options.dir,
          paths_with_options.paths,
        )
        .await?;
    }
    linter.finish()
  };
  if !success {
    deno_runtime::exit(1);
  }
  Ok(())
}
/// One iteration of `deno lint --watch`: re-resolves the target files,
/// registers them with the watcher, and lints each member's batch — all of
/// it on the first run, and only batches containing a changed path on
/// subsequent runs.
async fn lint_with_watch_inner(
  flags: Arc<Flags>,
  lint_flags: LintFlags,
  watcher_communicator: Arc<WatcherCommunicator>,
  changed_paths: Option<Vec<PathBuf>>,
) -> Result<(), AnyError> {
  let factory = CliFactory::from_flags(flags);
  let cli_options = factory.cli_options()?;
  let mut paths_with_options_batches =
    resolve_paths_with_options_batches(cli_options, &lint_flags)?;
  for paths_with_options in &mut paths_with_options_batches {
    _ = watcher_communicator.watch_paths(paths_with_options.paths.clone());
    let files = std::mem::take(&mut paths_with_options.paths);
    paths_with_options.paths = if let Some(paths) = &changed_paths {
      // lint all files on any changed (https://github.com/denoland/deno/issues/12446)
      if files.iter().any(|path| {
        canonicalize_path(path)
          .map(|p| paths.contains(&p))
          .unwrap_or(false)
      }) {
        files
      } else {
        // No changed path belongs to this batch — skip it this round.
        [].to_vec()
      }
    } else {
      files
    };
  }
  let mut linter = WorkspaceLinter::new(
    factory.caches()?.clone(),
    factory.lint_rule_provider().await?,
    factory.module_graph_creator().await?.clone(),
    factory.compiler_options_resolver()?.clone(),
    cli_options.start_dir.clone(),
    &cli_options.resolve_workspace_lint_options(&lint_flags)?,
  );
  for paths_with_options in paths_with_options_batches {
    linter
      .lint_files(
        cli_options,
        paths_with_options.options,
        paths_with_options.dir,
        paths_with_options.paths,
      )
      .await?;
  }
  // Watch mode never exits on lint errors, so the success flag is ignored.
  linter.finish();
  Ok(())
}
/// Entry point for `deno lint --watch`: wraps [`lint_with_watch_inner`] in
/// the file-watcher loop, re-running it whenever watched paths change.
async fn lint_with_watch(
  flags: Arc<Flags>,
  lint_flags: LintFlags,
) -> Result<(), AnyError> {
  // The caller (`lint`) only dispatches here when the watch flag is present.
  let watch_flags = lint_flags.watch.as_ref().unwrap();
  file_watcher::watch_func(
    flags,
    file_watcher::PrintConfig::new("Lint", !watch_flags.no_clear_screen),
    move |flags, watcher_communicator, changed_paths| {
      let lint_flags = lint_flags.clone();
      watcher_communicator.show_path_changed(changed_paths.clone());
      Ok(lint_with_watch_inner(
        flags,
        lint_flags,
        watcher_communicator,
        changed_paths,
      ))
    },
  )
  .await
}
/// The matched file paths and resolved lint options for one workspace member.
struct PathsWithOptions {
  dir: WorkspaceDirectoryRc,
  paths: Vec<PathBuf>,
  options: LintOptions,
}
/// Resolves, for every workspace member, the set of files to lint together
/// with the lint options that apply to that member.
///
/// Members without any matching files are dropped; when nothing matched at
/// all and `--permit-no-files` wasn't passed, an error is returned.
fn resolve_paths_with_options_batches(
  cli_options: &CliOptions,
  lint_flags: &LintFlags,
) -> Result<Vec<PathsWithOptions>, AnyError> {
  let members_lint_options =
    cli_options.resolve_lint_options_for_members(lint_flags)?;
  let mut batches = Vec::with_capacity(members_lint_options.len());
  for (dir, options) in members_lint_options {
    let paths = collect_lint_files(cli_options, options.files.clone());
    if paths.is_empty() {
      continue;
    }
    batches.push(PathsWithOptions { dir, paths, options });
  }
  if batches.is_empty() && !lint_flags.permit_no_files {
    return Err(anyhow!("No target files found."));
  }
  Ok(batches)
}
// Shared (cloneable) future resolving to the workspace-wide publish module
// graph; `Rc` wrappers make both the success and error values cheap to clone
// across awaiters.
type WorkspaceModuleGraphFuture =
  SharedLocal<LocalBoxFuture<'static, Result<Rc<ModuleGraph>, Rc<AnyError>>>>;
/// Lints file batches across all workspace members, aggregating diagnostics
/// into a single reporter and tracking overall success.
struct WorkspaceLinter {
  caches: Arc<Caches>,
  lint_rule_provider: LintRuleProvider,
  module_graph_creator: Arc<ModuleGraphCreator>,
  compiler_options_resolver: Arc<CompilerOptionsResolver>,
  workspace_dir: Arc<WorkspaceDirectory>,
  // Shared with per-file futures so diagnostics produced by parallel lints
  // are serialized through a single reporter.
  reporter_lock: Arc<Mutex<Box<dyn LintReporter + Send>>>,
  // Lazily created on the first package-rule run, then shared.
  workspace_module_graph: Option<WorkspaceModuleGraphFuture>,
  has_error: Arc<AtomicFlag>,
  file_count: usize,
}
impl WorkspaceLinter {
  /// Creates a workspace linter that aggregates results from all member
  /// directories into one shared reporter.
  pub fn new(
    caches: Arc<Caches>,
    lint_rule_provider: LintRuleProvider,
    module_graph_creator: Arc<ModuleGraphCreator>,
    compiler_options_resolver: Arc<CompilerOptionsResolver>,
    workspace_dir: Arc<WorkspaceDirectory>,
    workspace_options: &WorkspaceLintOptions,
  ) -> Self {
    let reporter_lock =
      Arc::new(Mutex::new(create_reporter(workspace_options.reporter_kind)));
    Self {
      caches,
      lint_rule_provider,
      module_graph_creator,
      compiler_options_resolver,
      workspace_dir,
      reporter_lock,
      workspace_module_graph: None,
      has_error: Default::default(),
      file_count: 0,
    }
  }
  /// Lints one batch of files belonging to a single workspace member.
  ///
  /// File-level linting runs in parallel. Package-level rules, when
  /// applicable, run concurrently with it — except when `--fix` is enabled,
  /// in which case the futures run sequentially to lower the chance of a
  /// file-level fix invalidating a package-level diagnostic.
  pub async fn lint_files(
    &mut self,
    cli_options: &Arc<CliOptions>,
    lint_options: LintOptions,
    member_dir: WorkspaceDirectoryRc,
    paths: Vec<PathBuf>,
  ) -> Result<(), AnyError> {
    self.file_count += paths.len();
    let exclude = lint_options.rules.exclude.clone();
    let plugin_specifiers = lint_options.plugins.clone();
    let lint_rules = self
      .lint_rule_provider
      .resolve_lint_rules(lint_options.rules, Some(&member_dir));
    let mut maybe_incremental_cache = None;
    // TODO(bartlomieju): for now we don't support incremental caching if plugins are being used.
    // https://github.com/denoland/deno/issues/28025
    if lint_rules.supports_incremental_cache() && plugin_specifiers.is_empty() {
      // Plugins are guaranteed empty here, so the cache state only depends
      // on the configured rules.
      let mut hasher = FastInsecureHasher::new_deno_versioned();
      hasher.write_hashable(lint_rules.incremental_cache_state());
      let state_hash = hasher.finish();
      maybe_incremental_cache = Some(Arc::new(IncrementalCache::new(
        self.caches.lint_incremental_cache_db(),
        CacheDBHash::new(state_hash),
        &paths,
      )));
    }
    #[allow(clippy::print_stdout)]
    #[allow(clippy::print_stderr)]
    fn logger_printer(msg: &str, is_err: bool) {
      if is_err {
        eprint!("{}", msg);
      } else {
        print!("{}", msg);
      }
    }
    let mut plugin_runner = None;
    if !plugin_specifiers.is_empty() {
      let logger = plugins::PluginLogger::new(logger_printer);
      let runner = plugins::create_runner_and_load_plugins(
        plugin_specifiers,
        logger,
        exclude,
      )
      .await?;
      plugin_runner = Some(Arc::new(runner));
    } else if lint_rules.rules.is_empty() {
      // Without plugins there must be at least one built-in rule to run.
      bail!("No rules have been configured")
    }
    let linter = Arc::new(CliLinter::new(CliLinterOptions {
      configured_rules: lint_rules,
      fix: lint_options.fix,
      deno_lint_config: resolve_lint_config(
        &self.compiler_options_resolver,
        member_dir.dir_url(),
      )?,
      maybe_plugin_runner: plugin_runner,
    }));
    let has_error = self.has_error.clone();
    let reporter_lock = self.reporter_lock.clone();
    let mut futures = Vec::with_capacity(2);
    if linter.has_package_rules()
      && let Some(fut) = self.run_package_rules(&linter, &member_dir, &paths)
    {
      futures.push(fut);
    }
    let maybe_incremental_cache_ = maybe_incremental_cache.clone();
    let linter = linter.clone();
    let cli_options = cli_options.clone();
    let fut = async move {
      let operation = move |file_path: PathBuf| {
        let file_text = deno_ast::strip_bom(fs::read_to_string(&file_path)?);
        // don't bother rechecking this file if it didn't have any diagnostics before
        if let Some(incremental_cache) = &maybe_incremental_cache_
          && incremental_cache.is_file_same(&file_path, &file_text)
        {
          return Ok(());
        }
        let r = linter.lint_file(
          &file_path,
          file_text,
          cli_options.ext_flag().as_deref(),
        );
        if let Ok((file_source, file_diagnostics)) = &r
          && let Some(incremental_cache) = &maybe_incremental_cache_
          && file_diagnostics.is_empty()
        {
          // update the incremental cache if there were no diagnostics
          incremental_cache.update_file(
            &file_path,
            // ensure the returned text is used here as it may have been modified via --fix
            file_source.text(),
          )
        }
        let success = handle_lint_result(
          &file_path.to_string_lossy(),
          r,
          reporter_lock.clone(),
        );
        if !success {
          has_error.raise();
        }
        Ok(())
      };
      run_parallelized(paths, operation).await
    }
    .boxed_local();
    futures.push(fut);
    if lint_options.fix {
      // run sequentially when using `--fix` to lower the chances of weird
      // bugs where a file level fix affects a package level diagnostic though
      // it probably will happen anyway
      for future in futures {
        future.await?;
      }
    } else {
      deno_core::futures::future::try_join_all(futures).await?;
    }
    if let Some(incremental_cache) = &maybe_incremental_cache {
      incremental_cache.wait_completion().await;
    }
    Ok(())
  }
  /// Runs package-level rules for the member, lazily creating a shared
  /// workspace publish graph on first use.
  ///
  /// Returns `None` when the member is not a publishable package; the
  /// returned future is a no-op when none of the member's exports are part
  /// of the current lint paths.
  fn run_package_rules(
    &mut self,
    linter: &Arc<CliLinter>,
    member_dir: &WorkspaceDirectoryRc,
    paths: &[PathBuf],
  ) -> Option<LocalBoxFuture<'_, Result<(), AnyError>>> {
    if self.workspace_module_graph.is_none() {
      let module_graph_creator = self.module_graph_creator.clone();
      let packages = self.workspace_dir.jsr_packages_for_publish();
      self.workspace_module_graph = Some(
        async move {
          module_graph_creator
            .create_publish_graph(CreatePublishGraphOptions {
              packages: &packages,
              build_fast_check_graph: true,
              validate_graph: false,
            })
            .await
            .map(Rc::new)
            .map_err(Rc::new)
        }
        .boxed_local()
        .shared_local(),
      );
    }
    let workspace_module_graph_future =
      self.workspace_module_graph.as_ref().unwrap().clone();
    let maybe_publish_config = member_dir.maybe_package_config();
    let publish_config = maybe_publish_config?;
    let has_error = self.has_error.clone();
    let reporter_lock = self.reporter_lock.clone();
    let linter = linter.clone();
    let path_urls = paths
      .iter()
      .filter_map(|p| ModuleSpecifier::from_file_path(p).ok())
      .collect::<HashSet<_>>();
    let fut = async move {
      let graph = workspace_module_graph_future
        .await
        .map_err(|err| anyhow!("{:#}", err))?;
      let export_urls =
        publish_config.config_file.resolve_export_value_urls()?;
      if !export_urls.iter().any(|url| path_urls.contains(url)) {
        return Ok(()); // entrypoint is not specified, so skip
      }
      let diagnostics = linter.lint_package(&graph, &export_urls);
      if !diagnostics.is_empty() {
        has_error.raise();
        let mut reporter = reporter_lock.lock();
        for diagnostic in &diagnostics {
          reporter.visit_diagnostic(diagnostic);
        }
      }
      Ok(())
    }
    .boxed_local();
    Some(fut)
  }
  /// Closes the reporter and returns `true` when no lint errors occurred.
  pub fn finish(self) -> bool {
    debug!("Found {} files", self.file_count);
    self.reporter_lock.lock().close(self.file_count);
    !self.has_error.is_raised() // success
  }
}
/// Collects the lint target files matching `files`, honoring gitignore and
/// skipping `.git`, `node_modules` and the configured vendor folder.
fn collect_lint_files(
  cli_options: &CliOptions,
  files: FilePatterns,
) -> Vec<PathBuf> {
  // Extension-less files are only picked up when `--ext` was provided.
  let include_no_ext = cli_options.ext_flag().is_some();
  let collector = FileCollector::new(move |entry| {
    if is_script_ext(entry.path) {
      return true;
    }
    entry.path.extension().is_none() && include_no_ext
  })
  .ignore_git_folder()
  .ignore_node_modules()
  .use_gitignore()
  .set_vendor_folder(cli_options.vendor_dir_path().map(ToOwned::to_owned));
  collector.collect_file_patterns(&CliSys::default(), &files)
}
#[allow(clippy::print_stdout)]
pub fn print_rules_list(json: bool, maybe_rules_tags: Option<Vec<String>>) {
let rule_provider = LintRuleProvider::new(None);
let mut all_rules = rule_provider.all_rules();
let configured_rules = rule_provider.resolve_lint_rules(
LintRulesConfig {
tags: maybe_rules_tags.clone(),
include: None,
exclude: None,
},
None,
);
all_rules.sort_by_cached_key(|rule| rule.code().to_string());
if json {
let json_output = serde_json::json!({
"version": JSON_SCHEMA_VERSION,
"rules": all_rules
.iter()
.map(|rule| {
// TODO(bartlomieju): print if rule enabled
serde_json::json!({
"code": rule.code(),
"tags": rule.tags().iter().map(|t| t.display()).collect::<Vec<_>>(),
"docs": rule.help_docs_url(),
})
})
.collect::<Vec<serde_json::Value>>(),
});
display::write_json_to_stdout(&json_output).unwrap();
} else {
// The rules should still be printed even if `--quiet` option is enabled,
// so use `println!` here instead of `info!`.
println!("Available rules:");
for rule in all_rules.iter() {
// TODO(bartlomieju): this is O(n) search, fix before landing
let enabled = if configured_rules.rules.contains(rule) {
"✓"
} else {
""
};
println!("- {} {}", rule.code(), colors::green(enabled),);
println!(
"{}",
colors::gray(format!(" help: {}", rule.help_docs_url()))
);
if rule.tags().is_empty() {
println!(" {}", colors::gray("tags:"));
} else {
println!(
" {}",
colors::gray(format!(
"tags: {}",
rule
.tags()
.iter()
.map(|t| t.display())
.collect::<Vec<_>>()
.join(", ")
))
);
}
println!();
}
}
}
/// Lint stdin and write result to stdout.
/// Treats input as TypeScript.
/// Compatible with `--json` flag.
///
/// Returns `Ok(true)` when the input produced no lint diagnostics. The
/// synthetic file name `$deno$stdin.mts` is used unless `--ext` overrides
/// the extension. Plugins and `--fix` are not supported for stdin.
fn lint_stdin(
  cli_options: &Arc<CliOptions>,
  lint_rule_provider: LintRuleProvider,
  workspace_lint_options: WorkspaceLintOptions,
  lint_flags: LintFlags,
  compiler_options_resolver: &CompilerOptionsResolver,
) -> Result<bool, AnyError> {
  let start_dir = &cli_options.start_dir;
  let reporter_lock = Arc::new(Mutex::new(create_reporter(
    workspace_lint_options.reporter_kind,
  )));
  let lint_config = start_dir
    .to_lint_config(FilePatterns::new_with_base(start_dir.dir_path()))?;
  let deno_lint_config =
    resolve_lint_config(compiler_options_resolver, start_dir.dir_url())?;
  let lint_options = LintOptions::resolve(lint_config, &lint_flags)?;
  let configured_rules = lint_rule_provider
    .resolve_lint_rules_err_empty(lint_options.rules, Some(start_dir))?;
  let mut file_path = cli_options.initial_cwd().join(STDIN_FILE_NAME);
  if let Some(ext) = cli_options.ext_flag() {
    file_path.set_extension(ext);
  }
  let mut source_code = String::new();
  if stdin().read_to_string(&mut source_code).is_err() {
    return Err(anyhow!("Failed to read from stdin"));
  }
  let linter = CliLinter::new(CliLinterOptions {
    fix: false,
    configured_rules,
    deno_lint_config,
    maybe_plugin_runner: None,
  });
  let r = linter.lint_file(&file_path, deno_ast::strip_bom(source_code), None);
  let success =
    handle_lint_result(&file_path.to_string_lossy(), r, reporter_lock.clone());
  // Exactly one "file" (stdin) was processed.
  reporter_lock.lock().close(1);
  Ok(success)
}
/// Reports the outcome of linting a single file through the shared reporter
/// and returns `true` when no lint diagnostics were produced.
fn handle_lint_result(
  file_path: &str,
  result: Result<(ParsedSource, Vec<LintDiagnostic>), AnyError>,
  reporter_lock: Arc<Mutex<Box<dyn LintReporter + Send>>>,
) -> bool {
  let mut reporter = reporter_lock.lock();
  let (source, mut file_diagnostics) = match result {
    Ok(ok) => ok,
    Err(err) => {
      reporter.visit_error(file_path, &err);
      return false;
    }
  };
  // Surface any parse diagnostics as warnings.
  for parse_diagnostic in source.diagnostics() {
    log::warn!("{}: {}", colors::yellow("warn"), parse_diagnostic);
  }
  // Deterministic output order: by specifier, then start position, then code.
  file_diagnostics.sort_by(|a, b| {
    let start_of = |d: &LintDiagnostic| d.range.as_ref().map(|r| r.range.start);
    a.specifier
      .cmp(&b.specifier)
      .then_with(|| start_of(a).cmp(&start_of(b)))
      .then_with(|| a.details.code.cmp(&b.details.code))
  });
  for diagnostic in &file_diagnostics {
    reporter.visit_diagnostic(diagnostic);
  }
  file_diagnostics.is_empty()
}
/// Derives the `deno_lint` config (default JSX factory names) from the
/// compiler options that apply to the given specifier.
fn resolve_lint_config(
  compiler_options_resolver: &CompilerOptionsResolver,
  specifier: &ModuleSpecifier,
) -> Result<deno_lint::linter::LintConfig, AnyError> {
  let transpile_options = &compiler_options_resolver
    .for_specifier(specifier)
    .transpile_options()?
    .transpile;
  // Only the classic JSX runtime carries explicit factory names.
  let jsx_classic_options = match transpile_options.jsx.as_ref() {
    Some(deno_ast::JsxRuntime::Classic(classic)) => Some(classic),
    _ => None,
  };
  Ok(deno_lint::linter::LintConfig {
    default_jsx_factory: jsx_classic_options.map(|o| o.factory.clone()),
    default_jsx_fragment_factory: jsx_classic_options
      .map(|o| o.fragment_factory.clone()),
  })
}
#[cfg(test)]
mod tests {
  use pretty_assertions::assert_eq;
  use serde::Deserialize;
  use serde::Serialize;
  use test_util as util;
  use super::*;
  // Mirrors the structure of cli/schemas/lint-rules.v1.json.
  #[derive(Serialize, Deserialize)]
  struct RulesPattern {
    r#type: String,
    pattern: String,
  }
  #[derive(Serialize, Deserialize)]
  struct RulesEnum {
    r#enum: Vec<String>,
  }
  #[derive(Serialize, Deserialize)]
  struct RulesSchema {
    #[serde(rename = "$schema")]
    schema: String,
    #[serde(rename = "oneOf")]
    one_of: (RulesPattern, RulesEnum),
  }
  // Sorted codes of every rule known to the default rule provider.
  fn get_all_rules() -> Vec<String> {
    let rule_provider = LintRuleProvider::new(None);
    let configured_rules =
      rule_provider.resolve_lint_rules(Default::default(), None);
    let mut all_rules = configured_rules
      .all_rule_codes
      .into_iter()
      .map(|s| s.to_string())
      .collect::<Vec<String>>();
    all_rules.sort();
    all_rules
  }
  // TODO(bartlomieju): do the same for tags, once https://github.com/denoland/deno/pull/27162 lands
  // Ensures the rule enum in the JSON schema stays in sync with the
  // registered rules; run with UPDATE_EXPECTED=1 to regenerate the file.
  #[test]
  fn all_lint_rules_are_listed_in_schema_file() {
    let all_rules = get_all_rules();
    let rules_schema_path =
      util::root_path().join("cli/schemas/lint-rules.v1.json");
    let rules_schema_file =
      std::fs::read_to_string(&rules_schema_path).unwrap();
    let schema: RulesSchema = serde_json::from_str(&rules_schema_file).unwrap();
    const UPDATE_ENV_VAR_NAME: &str = "UPDATE_EXPECTED";
    let rules_list = schema.one_of.1.r#enum;
    if std::env::var(UPDATE_ENV_VAR_NAME).ok().is_none() {
      assert_eq!(
        rules_list, all_rules,
        "Lint rules schema file not up to date. Run again with {}=1 to update the expected output",
        UPDATE_ENV_VAR_NAME
      );
      return;
    }
    let new_schema = RulesSchema {
      schema: schema.schema,
      one_of: (schema.one_of.0, RulesEnum { r#enum: all_rules }),
    };
    std::fs::write(
      &rules_schema_path,
      format!("{}\n", serde_json::to_string_pretty(&new_schema).unwrap(),),
    )
    .unwrap();
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/tools/lint/plugins.rs | cli/tools/lint/plugins.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::path::Path;
use std::path::PathBuf;
use std::rc::Rc;
use std::sync::Arc;
use ::tokio_util::sync::CancellationToken;
use deno_ast::ModuleSpecifier;
use deno_ast::ParsedSource;
use deno_ast::SourceTextInfo;
use deno_core::PollEventLoopOptions;
use deno_core::anyhow::bail;
use deno_core::error::AnyError;
use deno_core::error::JsError;
use deno_core::futures::FutureExt;
use deno_core::parking_lot::Mutex;
use deno_core::v8;
use deno_lint::diagnostic::LintDiagnostic;
use deno_path_util::resolve_url_or_path;
use deno_path_util::url_from_file_path;
use deno_runtime::WorkerExecutionMode;
use deno_runtime::tokio_util;
use deno_runtime::worker::MainWorker;
use tokio::sync::mpsc;
use tokio::sync::oneshot;
use crate::args::DenoSubcommand;
use crate::args::Flags;
use crate::args::LintFlags;
use crate::args::PermissionFlags;
use crate::factory::CliFactory;
use crate::ops::lint::LintPluginContainer;
use crate::tools::lint::serialize_ast_to_buffer;
use crate::util::text_encoding::Utf16Map;
/// Messages sent from [`PluginHostProxy`] to the [`PluginHost`] thread.
#[derive(Debug)]
pub enum PluginHostRequest {
  /// Load and install the given plugin modules, optionally excluding some
  /// rules; the host replies on `tx`.
  LoadPlugins {
    specifiers: Vec<ModuleSpecifier>,
    exclude_rules: Option<Vec<String>>,
    tx: oneshot::Sender<PluginHostResponse>,
  },
  /// Run the installed plugin rules against one file's serialized AST;
  /// the host replies on `tx`.
  Run {
    serialized_ast: Vec<u8>,
    file_path: PathBuf,
    source_text_info: SourceTextInfo,
    utf16_map: Utf16Map,
    maybe_token: Option<CancellationToken>,
    tx: oneshot::Sender<PluginHostResponse>,
  },
}
/// Replies from the [`PluginHost`] to [`PluginHostRequest`] messages.
pub enum PluginHostResponse {
  // TODO: write to structs
  LoadPlugin(Result<Vec<PluginInfo>, AnyError>),
  Run(Result<Vec<LintDiagnostic>, AnyError>),
}
impl std::fmt::Debug for PluginHostResponse {
  // Hand-written so the (potentially large) payloads are omitted; only the
  // variant name is printed.
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    let variant_name = match self {
      Self::LoadPlugin(_) => "LoadPlugin",
      Self::Run(_) => "Run",
    };
    f.debug_tuple(variant_name).finish()
  }
}
/// Thin wrapper around a function pointer used by the plugin host to emit
/// log output; the `bool` argument distinguishes error output from regular
/// output.
#[derive(Clone, Debug)]
pub struct PluginLogger {
  print: fn(&str, bool),
}
impl PluginLogger {
  /// Wraps `print`, a callback receiving `(message, is_error)`.
  pub fn new(print: fn(&str, bool)) -> Self {
    Self { print }
  }
  /// Emits a regular (non-error) message.
  pub fn log(&self, msg: &str) {
    (self.print)(msg, false)
  }
  /// Emits an error message.
  pub fn error(&self, msg: &str) {
    (self.print)(msg, true)
  }
}
// Declares `FastStaticString` constants for ASCII strings that get looked up
// as properties on V8 objects.
macro_rules! v8_static_strings {
  ($($ident:ident = $str:literal),* $(,)?) => {
    $(
      pub static $ident: deno_core::FastStaticString = deno_core::ascii_str!($str);
    )*
  };
}

// Property names of the JS-side lint plugin API read off `Deno[Deno.internal]`
// and plugin module namespaces.
v8_static_strings! {
  DEFAULT = "default",
  INSTALL_PLUGINS = "installPlugins",
  RUN_PLUGINS_FOR_FILE = "runPluginsForFile",
}
/// Handle for communicating with the [`PluginHost`] running on its own
/// thread.
#[derive(Debug)]
pub struct PluginHostProxy {
  // Request channel into the host thread.
  tx: mpsc::Sender<PluginHostRequest>,
  // Info about the currently installed plugins; filled in by `load_plugins`.
  pub(crate) plugin_info: Arc<Mutex<Vec<PluginInfo>>>,
  // Kept so the host thread's handle lives as long as the proxy.
  #[allow(unused)]
  join_handle: std::thread::JoinHandle<Result<(), AnyError>>,
}
impl PluginHostProxy {
  /// Returns the fully-qualified (`plugin-name/rule-name`) names of every
  /// rule provided by the currently installed plugins.
  pub fn get_plugin_rules(&self) -> Vec<String> {
    self
      .plugin_info
      .lock()
      .iter()
      .flat_map(|info| info.get_rules())
      .collect()
  }
}
/// The plugin host proper: a worker hosting the lint plugin JS runtime,
/// driven by requests received over `rx`.
pub struct PluginHost {
  worker: MainWorker,
  // JS `installPlugins` function captured from `Deno[Deno.internal]`.
  install_plugins_fn: Rc<v8::Global<v8::Function>>,
  // JS `runPluginsForFile` function captured from `Deno[Deno.internal]`.
  run_plugins_for_file_fn: Rc<v8::Global<v8::Function>>,
  rx: mpsc::Receiver<PluginHostRequest>,
}
/// Builds the [`PluginHost`]: creates a `MainWorker` with the lint op
/// extension installed and captures the JS `installPlugins` /
/// `runPluginsForFile` entrypoints from `Deno[Deno.internal]`.
async fn create_plugin_runner_inner(
  logger: PluginLogger,
  rx_req: mpsc::Receiver<PluginHostRequest>,
) -> Result<PluginHost, AnyError> {
  let flags = Flags {
    subcommand: DenoSubcommand::Lint(LintFlags::default()),
    // Lint plugins have unconditional access to env vars and file reading.
    permissions: PermissionFlags {
      allow_env: Some(vec![]),
      allow_read: Some(vec![]),
      no_prompt: true,
      ..Default::default()
    },
    ..Default::default()
  };
  let flags = Arc::new(flags);
  let factory = CliFactory::from_flags(flags.clone());
  let cli_options = factory.cli_options()?;
  // Synthetic main module; plugins are loaded as side modules later.
  let main_module =
    resolve_url_or_path("./$deno$lint.mts", cli_options.initial_cwd()).unwrap();
  let permissions = factory.root_permissions_container()?.clone();
  // let npm_resolver = factory.npm_resolver().await?.clone();
  // let resolver = factory.resolver().await?.clone();
  let worker_factory = factory.create_cli_main_worker_factory().await?;
  let worker = worker_factory
    .create_custom_worker(
      // TODO(bartlomieju): add "lint" execution mode
      WorkerExecutionMode::Run,
      main_module.clone(),
      // `deno lint` doesn't support preloading modules
      vec![],
      // `deno lint` doesn't support require modules
      vec![],
      permissions,
      vec![crate::ops::lint::deno_lint_ext::init(logger.clone())],
      Default::default(),
      None,
    )
    .await?;
  let mut worker = worker.into_main_worker();
  let runtime = &mut worker.js_runtime;
  // The plugin API surface lives on `Deno[Deno.internal]`.
  let obj = runtime.execute_script("lint.js", "Deno[Deno.internal]")?;
  log::debug!("Lint plugins loaded, capturing default exports");
  let (install_plugins_fn, run_plugins_for_file_fn) = {
    deno_core::scope!(scope, runtime);
    let module_exports: v8::Local<v8::Object> =
      v8::Local::new(scope, obj).try_into().unwrap();
    let install_plugins_fn_name = INSTALL_PLUGINS.v8_string(scope).unwrap();
    let install_plugins_fn_val = module_exports
      .get(scope, install_plugins_fn_name.into())
      .unwrap();
    let install_plugins_fn: v8::Local<v8::Function> =
      install_plugins_fn_val.try_into().unwrap();
    let run_plugins_for_file_fn_name =
      RUN_PLUGINS_FOR_FILE.v8_string(scope).unwrap();
    let run_plugins_for_file_fn_val = module_exports
      .get(scope, run_plugins_for_file_fn_name.into())
      .unwrap();
    let run_plugins_for_file_fn: v8::Local<v8::Function> =
      run_plugins_for_file_fn_val.try_into().unwrap();
    // Promote the locals to globals so they outlive this scope.
    (
      Rc::new(v8::Global::new(scope, install_plugins_fn)),
      Rc::new(v8::Global::new(scope, run_plugins_for_file_fn)),
    )
  };
  Ok(PluginHost {
    worker,
    install_plugins_fn,
    run_plugins_for_file_fn,
    rx: rx_req,
  })
}
/// Metadata about a single installed lint plugin, as reported by the JS side.
#[derive(Debug, serde::Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct PluginInfo {
  pub name: String,
  pub rule_names: Vec<String>,
}
impl PluginInfo {
  /// Returns this plugin's rules as fully-qualified
  /// `plugin-name/rule-name` strings.
  pub fn get_rules(&self) -> Vec<String> {
    self
      .rule_names
      .iter()
      .map(|rule_name| format!("{}/{}", self.name, rule_name))
      .collect()
  }
}
impl PluginHost {
  /// Spawns the plugin host on its own thread with a dedicated
  /// current-thread tokio runtime and returns a proxy for talking to it.
  fn create(logger: PluginLogger) -> Result<PluginHostProxy, AnyError> {
    let (tx_req, rx_req) = mpsc::channel(10);
    let logger_ = logger.clone();
    let join_handle = std::thread::spawn(move || {
      let logger = logger_;
      log::debug!("Lint PluginHost thread spawned");
      let start = std::time::Instant::now();
      let fut = async move {
        let runner = create_plugin_runner_inner(logger.clone(), rx_req).await?;
        // Fixed typo in debug message ("PlugibnHost" -> "PluginHost").
        log::debug!("Lint PluginHost running loop");
        runner.run_loop().await?;
        log::debug!(
          "Lint PluginHost thread finished, took {:?}",
          std::time::Instant::now() - start
        );
        Ok(())
      }
      .boxed_local();
      tokio_util::create_and_run_current_thread(fut)
    });
    let proxy = PluginHostProxy {
      tx: tx_req,
      plugin_info: Arc::new(Mutex::new(vec![])),
      join_handle,
    };
    Ok(proxy)
  }
  /// Services requests from the proxy until the request channel closes.
  async fn run_loop(mut self) -> Result<(), AnyError> {
    log::debug!("Lint PluginHost is waiting for message");
    while let Some(req) = self.rx.recv().await {
      log::debug!("Lint PluginHost has received a message");
      match req {
        PluginHostRequest::LoadPlugins {
          specifiers,
          exclude_rules,
          tx,
        } => {
          let r = self.load_plugins(specifiers, exclude_rules).await;
          let _ = tx.send(PluginHostResponse::LoadPlugin(r));
        }
        PluginHostRequest::Run {
          serialized_ast,
          file_path,
          source_text_info,
          utf16_map,
          maybe_token,
          tx,
        } => {
          let start = std::time::Instant::now();
          let r = match self.run_plugins(
            &file_path,
            serialized_ast,
            source_text_info,
            utf16_map,
            maybe_token,
          ) {
            Ok(()) => Ok(self.take_diagnostics()),
            Err(err) => Err(err),
          };
          log::debug!(
            "Running plugins lint rules took {:?}",
            std::time::Instant::now() - start
          );
          let _ = tx.send(PluginHostResponse::Run(r));
        }
      }
    }
    log::debug!("Lint PluginHost run loop finished");
    Ok(())
  }
  /// Drains the diagnostics accumulated in the op state's
  /// `LintPluginContainer` during the last plugin run.
  fn take_diagnostics(&mut self) -> Vec<LintDiagnostic> {
    let op_state = self.worker.js_runtime.op_state();
    let mut state = op_state.borrow_mut();
    let container = state.borrow_mut::<LintPluginContainer>();
    std::mem::take(&mut container.diagnostics)
  }
  /// Invokes the JS `runPluginsForFile` entrypoint for a single file's
  /// serialized AST; diagnostics end up in the op state and are retrieved
  /// via [`Self::take_diagnostics`].
  fn run_plugins(
    &mut self,
    file_path: &Path,
    serialized_ast: Vec<u8>,
    source_text_info: SourceTextInfo,
    utf16_map: Utf16Map,
    maybe_token: Option<CancellationToken>,
  ) -> Result<(), AnyError> {
    {
      // Make the file's text info available to the lint ops before running.
      let state = self.worker.js_runtime.op_state();
      let mut state = state.borrow_mut();
      let container = state.borrow_mut::<LintPluginContainer>();
      container.set_info_for_file(
        url_from_file_path(file_path)?,
        source_text_info,
        utf16_map,
        maybe_token,
      );
    }
    deno_core::scope!(scope, &mut self.worker.js_runtime);
    let file_name_v8: v8::Local<v8::Value> =
      v8::String::new(scope, &file_path.display().to_string())
        .unwrap()
        .into();
    // Hand the serialized AST to V8 without copying the byte buffer.
    let store = v8::ArrayBuffer::new_backing_store_from_vec(serialized_ast);
    let ast_buf =
      v8::ArrayBuffer::with_backing_store(scope, &store.make_shared());
    let ast_bin_v8: v8::Local<v8::Value> =
      v8::Uint8Array::new(scope, ast_buf, 0, ast_buf.byte_length())
        .unwrap()
        .into();
    let run_plugins_for_file =
      v8::Local::new(scope, &*self.run_plugins_for_file_fn);
    let undefined = v8::undefined(scope);
    let _run_plugins_result = {
      // Catch JS exceptions and convert them into Rust errors.
      v8::tc_scope!(tc_scope, scope);
      let _run_plugins_result = run_plugins_for_file.call(
        tc_scope,
        undefined.into(),
        &[file_name_v8, ast_bin_v8],
      );
      if let Some(exception) = tc_scope.exception() {
        let error = JsError::from_v8_exception(tc_scope, exception);
        return Err(error.into());
      }
      _run_plugins_result
    };
    Ok(())
  }
  /// Loads and evaluates the plugin modules, then calls the JS
  /// `installPlugins` entrypoint with their default exports and the list of
  /// excluded rules, returning the reported plugin metadata.
  async fn load_plugins(
    &mut self,
    plugin_specifiers: Vec<ModuleSpecifier>,
    exclude: Option<Vec<String>>,
  ) -> Result<Vec<PluginInfo>, AnyError> {
    let mut load_futures = Vec::with_capacity(plugin_specifiers.len());
    for specifier in plugin_specifiers {
      let mod_id = self
        .worker
        .js_runtime
        .load_side_es_module(&specifier)
        .await?;
      let mod_future =
        self.worker.js_runtime.mod_evaluate(mod_id).boxed_local();
      load_futures.push((mod_future, mod_id));
    }
    // Drive the event loop so the module evaluations can complete.
    self
      .worker
      .js_runtime
      .run_event_loop(PollEventLoopOptions::default())
      .await?;
    let mut plugin_handles = Vec::with_capacity(load_futures.len());
    for (fut, mod_id) in load_futures {
      fut.await?;
      let module = self.worker.js_runtime.get_module_namespace(mod_id).unwrap();
      deno_core::scope!(scope, &mut self.worker.js_runtime);
      let module_local = v8::Local::new(scope, module);
      let default_export_str = DEFAULT.v8_string(scope).unwrap();
      let default_export =
        module_local.get(scope, default_export_str.into()).unwrap();
      let default_export_global = v8::Global::new(scope, default_export);
      plugin_handles.push(default_export_global);
    }
    deno_core::scope!(scope, &mut self.worker.js_runtime);
    let install_plugins_local =
      v8::Local::new(scope, &*self.install_plugins_fn.clone());
    let exclude_v8: v8::Local<v8::Value> =
      exclude.map_or(v8::null(scope).into(), |v| {
        let elems = v
          .iter()
          .map(|item| v8::String::new(scope, item).unwrap().into())
          .collect::<Vec<_>>();
        v8::Array::new_with_elements(scope, elems.as_slice()).into()
      });
    let undefined = v8::undefined(scope);
    let local_handles = {
      let arr = v8::Array::new(scope, plugin_handles.len().try_into().unwrap());
      for (idx, plugin_handle) in plugin_handles.into_iter().enumerate() {
        let handle = v8::Local::new(scope, plugin_handle);
        arr
          .set_index(scope, idx.try_into().unwrap(), handle)
          .unwrap();
      }
      arr
    };
    let args = &[local_handles.into(), exclude_v8];
    log::debug!("Installing lint plugins...");
    let plugins_info_result = {
      // Catch JS exceptions and convert them into Rust errors.
      v8::tc_scope!(tc_scope, scope);
      let plugins_info_result =
        install_plugins_local.call(tc_scope, undefined.into(), args);
      if let Some(exception) = tc_scope.exception() {
        let error = JsError::from_v8_exception(tc_scope, exception);
        return Err(error.into());
      }
      plugins_info_result
    };
    let plugins_info = plugins_info_result.unwrap();
    let infos: Vec<PluginInfo> =
      deno_core::serde_v8::from_v8(scope, plugins_info)?;
    log::debug!("Plugins installed: {}", infos.len());
    Ok(infos)
  }
}
impl PluginHostProxy {
  /// Asks the plugin host to load the given plugin modules and caches the
  /// returned plugin metadata in `self.plugin_info`.
  ///
  /// Errors if the host has shut down or any plugin fails to load.
  pub async fn load_plugins(
    &self,
    specifiers: Vec<ModuleSpecifier>,
    exclude_rules: Option<Vec<String>>,
  ) -> Result<(), AnyError> {
    let (tx, rx) = oneshot::channel();
    self
      .tx
      .send(PluginHostRequest::LoadPlugins {
        specifiers,
        exclude_rules,
        tx,
      })
      .await?;
    if let Ok(val) = rx.await {
      // The host only ever answers a LoadPlugins request with LoadPlugin.
      let PluginHostResponse::LoadPlugin(result) = val else {
        unreachable!()
      };
      let infos = result?;
      *self.plugin_info.lock() = infos;
      return Ok(());
    }
    // The response channel was dropped, i.e. the host terminated.
    bail!("Plugin host has closed")
  }
  /// Runs the installed plugin rules against a serialized AST and returns
  /// the diagnostics they produced.
  pub async fn run_rules(
    &self,
    specifier: &Path,
    serialized_ast: Vec<u8>,
    source_text_info: SourceTextInfo,
    utf16_map: Utf16Map,
    maybe_token: Option<CancellationToken>,
  ) -> Result<Vec<LintDiagnostic>, AnyError> {
    let (tx, rx) = oneshot::channel();
    self
      .tx
      .send(PluginHostRequest::Run {
        serialized_ast,
        file_path: specifier.to_path_buf(),
        source_text_info,
        utf16_map,
        maybe_token,
        tx,
      })
      .await?;
    if let Ok(PluginHostResponse::Run(diagnostics_result)) = rx.await {
      return diagnostics_result;
    }
    bail!("Plugin host has closed")
  }
  /// Serializes a parsed source into the binary AST format consumed by the
  /// plugin host, logging how long serialization took.
  pub fn serialize_ast(
    &self,
    parsed_source: &ParsedSource,
    utf16_map: &Utf16Map,
  ) -> Result<Vec<u8>, AnyError> {
    let start = std::time::Instant::now();
    let r = serialize_ast_to_buffer(parsed_source, utf16_map);
    log::debug!(
      "Serializing an AST took {:?}",
      std::time::Instant::now() - start
    );
    Ok(r)
  }
}
/// Spins up a plugin host and immediately loads `plugin_specifiers` into it.
///
/// Returns the proxy used to communicate with the freshly created host.
pub async fn create_runner_and_load_plugins(
  plugin_specifiers: Vec<ModuleSpecifier>,
  logger: PluginLogger,
  exclude: Option<Vec<String>>,
) -> Result<PluginHostProxy, AnyError> {
  let proxy = PluginHost::create(logger)?;
  proxy.load_plugins(plugin_specifiers, exclude).await?;
  Ok(proxy)
}
/// Convenience wrapper forwarding a serialized AST to
/// [`PluginHostProxy::run_rules`] and returning the resulting diagnostics.
pub async fn run_rules_for_ast(
  host_proxy: &PluginHostProxy,
  specifier: &Path,
  serialized_ast: Vec<u8>,
  source_text_info: SourceTextInfo,
  utf16_map: Utf16Map,
  maybe_token: Option<CancellationToken>,
) -> Result<Vec<LintDiagnostic>, AnyError> {
  // Propagate the host's Result directly; no re-wrapping needed.
  host_proxy
    .run_rules(
      specifier,
      serialized_ast,
      source_text_info,
      utf16_map,
      maybe_token,
    )
    .await
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/tools/lint/reporters.rs | cli/tools/lint/reporters.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use deno_ast::diagnostics::Diagnostic;
use deno_core::error::AnyError;
use deno_core::serde_json;
use deno_lib::util::result::js_error_downcast_ref;
use deno_lint::diagnostic::LintDiagnostic;
use deno_runtime::colors;
use deno_runtime::fmt_errors::format_js_error;
use log::info;
use serde::Serialize;
use crate::args::LintReporterKind;
const JSON_SCHEMA_VERSION: u8 = 1;
pub fn create_reporter(kind: LintReporterKind) -> Box<dyn LintReporter + Send> {
match kind {
LintReporterKind::Pretty => Box::new(PrettyLintReporter::new()),
LintReporterKind::Json => Box::new(JsonLintReporter::new()),
LintReporterKind::Compact => Box::new(CompactLintReporter::new()),
}
}
/// Receives lint results as they are produced and emits them in a
/// format-specific way once linting completes.
pub trait LintReporter {
  /// Called for every diagnostic found in a linted file.
  fn visit_diagnostic(&mut self, d: &LintDiagnostic);
  /// Called when linting a file failed entirely (e.g. it could not be parsed).
  fn visit_error(&mut self, file_path: &str, err: &AnyError);
  /// Called once after all files were processed; `check_count` is the number
  /// of files that were checked.
  fn close(&mut self, check_count: usize);
}
/// Reporter that pretty-prints every diagnostic with full display output.
struct PrettyLintReporter {
  // Total number of diagnostics seen so far.
  lint_count: u32,
  // How many of those diagnostics carry automatic fixes.
  fixable_diagnostics: u32,
}

impl PrettyLintReporter {
  /// Creates a reporter with both counters zeroed.
  fn new() -> PrettyLintReporter {
    Self {
      lint_count: 0,
      fixable_diagnostics: 0,
    }
  }
}
impl LintReporter for PrettyLintReporter {
  fn visit_diagnostic(&mut self, d: &LintDiagnostic) {
    self.lint_count += 1;
    if !d.details.fixes.is_empty() {
      self.fixable_diagnostics += 1;
    }
    log::error!("{}\n", d.display());
  }

  fn visit_error(&mut self, file_path: &str, err: &AnyError) {
    log::error!("Error linting: {file_path}");
    // JS errors get the dedicated formatter; everything else uses anyhow's
    // alternate ("{:#}") rendering.
    let text = match js_error_downcast_ref(err) {
      Some(js_error) => format_js_error(js_error, None),
      None => format!("{err:#}"),
    };
    for line in text.split('\n') {
      if line.is_empty() {
        log::error!("");
      } else {
        log::error!(" {}", line);
      }
    }
  }

  fn close(&mut self, check_count: usize) {
    // Mention how many problems could be auto-fixed, if any.
    let fixable_suffix = if self.fixable_diagnostics > 0 {
      colors::gray(format!(" ({} fixable via --fix)", self.fixable_diagnostics))
        .to_string()
    } else {
      String::new()
    };
    if self.lint_count == 1 {
      info!("Found 1 problem{}", fixable_suffix);
    } else if self.lint_count > 1 {
      info!("Found {} problems{}", self.lint_count, fixable_suffix);
    }
    if check_count == 1 {
      info!("Checked 1 file");
    } else {
      info!("Checked {} files", check_count);
    }
  }
}
/// Reporter that prints one terse line per diagnostic.
struct CompactLintReporter {
  // Number of diagnostics reported so far.
  lint_count: u32,
}

impl CompactLintReporter {
  /// Creates a reporter with the diagnostic counter at zero.
  fn new() -> CompactLintReporter {
    Self { lint_count: 0 }
  }
}
impl LintReporter for CompactLintReporter {
  fn visit_diagnostic(&mut self, d: &LintDiagnostic) {
    self.lint_count += 1;
    // Diagnostics with a source range get a "line N, col M" prefix.
    if let Some(range) = &d.range {
      let text_info = &range.text_info;
      let pos = text_info.line_and_column_display(range.range.start);
      log::error!(
        "{}: line {}, col {} - {} ({})",
        d.specifier,
        pos.line_number,
        pos.column_number,
        d.message(),
        d.code(),
      )
    } else {
      log::error!("{}: {} ({})", d.specifier, d.message(), d.code())
    }
  }

  fn visit_error(&mut self, file_path: &str, err: &AnyError) {
    log::error!("Error linting: {file_path}");
    log::error!(" {err}");
  }

  fn close(&mut self, check_count: usize) {
    if self.lint_count == 1 {
      info!("Found 1 problem")
    } else if self.lint_count > 1 {
      info!("Found {} problems", self.lint_count)
    }
    if check_count == 1 {
      info!("Checked 1 file")
    } else {
      info!("Checked {} files", check_count)
    }
  }
}
// WARNING: Ensure doesn't change because it's used in the JSON output
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct JsonDiagnosticLintPosition {
  /// The 1-indexed line number.
  pub line: usize,
  /// The 0-indexed column index.
  pub col: usize,
  // Byte offset of the position from the start of the file.
  pub byte_pos: usize,
}

impl JsonDiagnosticLintPosition {
  /// Converts a byte index plus a 0-indexed line/column pair into the JSON
  /// representation (line becomes 1-indexed, column stays 0-indexed).
  pub fn new(byte_index: usize, loc: deno_ast::LineAndColumnIndex) -> Self {
    JsonDiagnosticLintPosition {
      line: loc.line_index + 1,
      col: loc.column_index,
      byte_pos: byte_index,
    }
  }
}
// WARNING: Ensure doesn't change because it's used in the JSON output
#[derive(Debug, Clone, PartialEq, Eq, Serialize)]
struct JsonLintDiagnosticRange {
  pub start: JsonDiagnosticLintPosition,
  pub end: JsonDiagnosticLintPosition,
}

// WARNING: Ensure doesn't change because it's used in the JSON output
#[derive(Clone, Serialize)]
struct JsonLintDiagnostic {
  pub filename: String,
  // None for diagnostics without a concrete source location.
  pub range: Option<JsonLintDiagnosticRange>,
  pub message: String,
  pub code: String,
  pub hint: Option<String>,
}

/// A file-level failure (e.g. a parse error) included in the JSON output.
#[derive(Serialize)]
struct LintError {
  file_path: String,
  message: String,
}

/// Reporter that buffers all results and prints a single JSON document on
/// `close`.
#[derive(Serialize)]
struct JsonLintReporter {
  // Schema version of the emitted JSON document.
  version: u8,
  diagnostics: Vec<JsonLintDiagnostic>,
  errors: Vec<LintError>,
  checked_files: Vec<String>,
}
impl JsonLintReporter {
  /// Creates an empty reporter stamped with the current JSON schema version.
  fn new() -> JsonLintReporter {
    Self {
      version: JSON_SCHEMA_VERSION,
      diagnostics: Vec::new(),
      errors: Vec::new(),
      checked_files: Vec::new(),
    }
  }
}
impl LintReporter for JsonLintReporter {
  fn visit_diagnostic(&mut self, d: &LintDiagnostic) {
    // Convert the diagnostic's span (when present) into byte/line/col
    // positions relative to the start of the file.
    self.diagnostics.push(JsonLintDiagnostic {
      filename: d.specifier.to_string(),
      range: d.range.as_ref().map(|range| {
        let text_info = &range.text_info;
        let range = range.range;
        JsonLintDiagnosticRange {
          start: JsonDiagnosticLintPosition::new(
            range.start.as_byte_index(text_info.range().start),
            text_info.line_and_column_index(range.start),
          ),
          end: JsonDiagnosticLintPosition::new(
            range.end.as_byte_index(text_info.range().start),
            text_info.line_and_column_index(range.end),
          ),
        }
      }),
      message: d.message().to_string(),
      code: d.code().to_string(),
      hint: d.hint().map(|h| h.to_string()),
    });
    // NOTE(review): assumes the specifier is a file:// URL — `to_file_path()`
    // panics otherwise; confirm callers guarantee this.
    let file_path = d
      .specifier
      .to_file_path()
      .unwrap()
      .to_string_lossy()
      .into_owned();
    if !self.checked_files.contains(&file_path) {
      self.checked_files.push(file_path);
    }
  }
  fn visit_error(&mut self, file_path: &str, err: &AnyError) {
    self.errors.push(LintError {
      file_path: file_path.to_string(),
      message: err.to_string(),
    });
    if !self.checked_files.contains(&file_path.to_string()) {
      self.checked_files.push(file_path.to_string());
    }
  }
  fn close(&mut self, _check_count: usize) {
    // Sort for deterministic output (useful for tests).
    sort_diagnostics(&mut self.diagnostics);
    self.checked_files.sort();
    let json = serde_json::to_string_pretty(&self);
    #[allow(clippy::print_stdout)]
    {
      println!("{}", json.unwrap());
    }
  }
}
/// Sorts diagnostics by filename, then start line, then start column so the
/// emitted JSON is deterministic (useful for tests). Diagnostics without a
/// range sort after those with one within the same file.
fn sort_diagnostics(diagnostics: &mut [JsonLintDiagnostic]) {
  diagnostics.sort_by(|a, b| {
    use std::cmp::Ordering;
    a.filename
      .cmp(&b.filename)
      .then_with(|| match (&a.range, &b.range) {
        (Some(a_range), Some(b_range)) => a_range
          .start
          .line
          .cmp(&b_range.start.line)
          .then_with(|| a_range.start.col.cmp(&b_range.start.col)),
        (Some(_), None) => Ordering::Less,
        (None, Some(_)) => Ordering::Greater,
        (None, None) => Ordering::Equal,
      })
  });
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/tools/lint/ast_buffer/ts_estree.rs | cli/tools/lint/ast_buffer/ts_estree.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::fmt;
use std::fmt::Debug;
use std::fmt::Display;
use deno_ast::swc::common::Span;
use deno_ast::view::TruePlusMinus;
use super::buffer::AstBufSerializer;
use super::buffer::CommentKind;
use super::buffer::NodeRef;
use super::buffer::SerializeCtx;
use crate::util::text_encoding::Utf16Map;
#[derive(Debug, Clone, PartialEq)]
pub enum AstNode {
// First node must always be the empty/invalid node
Invalid,
RefArray,
// Typically the
Program,
// Module declarations
ExportAllDeclaration,
ExportDefaultDeclaration,
ExportNamedDeclaration,
ImportDeclaration,
ImportSpecifier,
ImportAttribute,
ImportDefaultSpecifier,
ImportNamespaceSpecifier,
TSExportAssignment,
TSImportEqualss,
TSNamespaceExport,
TSNamespaceExportDeclaration,
TSImportEqualsDeclaration,
TSExternalModuleReference,
TSModuleDeclaration,
TSModuleBlock,
// Decls
ClassDeclaration,
FunctionDeclaration,
TSEnumDeclaration,
TSInterface,
TSInterfaceDeclaration,
TSModule,
TSTypeAliasDeclaration,
Using,
VariableDeclaration,
// Statements
BlockStatement,
BreakStatement,
ContinueStatement,
DebuggerStatement,
DoWhileStatement,
EmptyStatement,
ExpressionStatement,
ForInStatement,
ForOfStatement,
ForStatement,
IfStatement,
LabeledStatement,
ReturnStatement,
SwitchCase,
SwitchStatement,
ThrowStatement,
TryStatement,
WhileStatement,
WithStatement,
// Expressions
ArrayExpression,
ArrowFunctionExpression,
AssignmentExpression,
AwaitExpression,
BinaryExpression,
CallExpression,
ChainExpression,
ClassExpression,
ConditionalExpression,
EmptyExpr,
FunctionExpression,
Identifier,
ImportExpression,
LogicalExpression,
MemberExpression,
MetaProperty,
NewExpression,
ObjectExpression,
PrivateIdentifier,
SequenceExpression,
Super,
TaggedTemplateExpression,
TemplateLiteral,
ThisExpression,
TSAsExpression,
TSNonNullExpression,
TSSatisfiesExpression,
TSTypeAssertion,
UnaryExpression,
UpdateExpression,
YieldExpression,
// Other
Literal,
SpreadElement,
Property,
VariableDeclarator,
CatchClause,
RestElement,
ExportSpecifier,
TemplateElement,
MethodDefinition,
ClassBody,
PropertyDefinition,
Decorator,
StaticBlock,
AccessorProperty,
// Patterns
ArrayPattern,
AssignmentPattern,
ObjectPattern,
// JSX
JSXAttribute,
JSXClosingElement,
JSXClosingFragment,
JSXElement,
JSXEmptyExpression,
JSXExpressionContainer,
JSXFragment,
JSXIdentifier,
JSXMemberExpression,
JSXNamespacedName,
JSXOpeningElement,
JSXOpeningFragment,
JSXSpreadAttribute,
JSXSpreadChild,
JSXText,
TSTypeAnnotation,
TSTypeParameterDeclaration,
TSTypeParameter,
TSTypeParameterInstantiation,
TSEnumMember,
TSInterfaceBody,
TSInterfaceHeritage,
TSTypeReference,
TSThisType,
TSLiteralType,
TSTypeLiteral,
TSInferType,
TSConditionalType,
TSUnionType,
TSIntersectionType,
TSMappedType,
TSTypeQuery,
TSTupleType,
TSNamedTupleMember,
TSFunctionType,
TSCallSignatureDeclaration,
TSPropertySignature,
TSMethodSignature,
TSIndexSignature,
TSIndexedAccessType,
TSTypeOperator,
TSTypePredicate,
TSImportType,
TSRestType,
TSArrayType,
TSClassImplements,
TSAbstractMethodDefinition,
TSAbstractPropertyDefinition,
TSEmptyBodyFunctionExpression,
TSParameterProperty,
TSConstructSignatureDeclaration,
TSQualifiedName,
TSOptionalType,
TSTemplateLiteralType,
TSDeclareFunction,
TSInstantiationExpression,
TSAnyKeyword,
TSBigIntKeyword,
TSBooleanKeyword,
TSIntrinsicKeyword,
TSNeverKeyword,
TSNullKeyword,
TSNumberKeyword,
TSObjectKeyword,
TSStringKeyword,
TSSymbolKeyword,
TSUndefinedKeyword,
TSUnknownKeyword,
TSVoidKeyword,
TSEnumBody, // Last value is used for max value
}
impl Display for AstNode {
  /// Renders the variant name; reuses the `Debug` representation.
  fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
    Debug::fmt(self, f)
  }
}

impl From<AstNode> for u8 {
  /// Converts the variant to its positional discriminant, which is the value
  /// written into the serialized AST buffer.
  fn from(m: AstNode) -> u8 {
    m as u8
  }
}
#[derive(Debug, Clone)]
pub enum AstProp {
// Base, these must be in sync with JS in the same order.
Invalid,
Type,
Parent,
Range,
Length, // Not used in AST, but can be used in attr selectors
// Starting from here the order doesn't matter.
// Following are all possible AST node properties.
Abstract,
Accessibility,
Alternate,
Argument,
Arguments,
Asserts,
Async,
Attributes,
Await,
BigInt,
Block,
Body,
Callee,
Cases,
Children,
CheckType,
ClosingElement,
ClosingFragment,
Computed,
Consequent,
Const,
Constraint,
Cooked,
Declaration,
Declarations,
Declare,
Decorators,
Default,
Definite,
Delegate,
Discriminant,
Elements,
ElementType,
ElementTypes,
ExprName,
Expression,
Expressions,
Exported,
ExportKind,
Extends,
ExtendsType,
FalseType,
Finalizer,
Flags,
Generator,
Global,
Handler,
Id,
In,
IndexType,
Init,
Initializer,
Implements,
Imported,
ImportKind,
Key,
Kind,
Label,
Left,
Literal,
Local,
Members,
Meta,
Method,
ModuleReference,
Name,
Namespace,
NameType,
Object,
ObjectType,
OpeningElement,
OpeningFragment,
Operator,
Optional,
Options,
Out,
Override,
Param,
Parameter,
Parameters,
ParameterName,
Params,
Pattern,
Prefix,
Properties,
Property,
Qualifier,
Quasi,
Quasis,
Raw,
Readonly,
Regex,
ReturnType,
Right,
SelfClosing,
Shorthand,
Source,
SourceType,
Specifiers,
Static,
SuperClass,
SuperTypeArguments,
Tag,
Tail,
Test,
TrueType,
TypeAnnotation,
TypeArguments,
TypeName,
TypeParameter,
TypeParameters,
Types,
Update,
Value, // Last value is used for max value
}
impl Display for AstProp {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let s = match self {
AstProp::Invalid => "__invalid__", // unused
AstProp::Parent => "parent",
AstProp::Range => "range",
AstProp::Type => "type",
AstProp::Length => "length",
AstProp::Abstract => "abstract",
AstProp::Accessibility => "accessibility",
AstProp::Alternate => "alternate",
AstProp::Argument => "argument",
AstProp::Arguments => "arguments",
AstProp::Asserts => "asserts",
AstProp::Async => "async",
AstProp::Attributes => "attributes",
AstProp::Await => "await",
AstProp::BigInt => "bigint",
AstProp::Block => "block",
AstProp::Body => "body",
AstProp::Callee => "callee",
AstProp::Cases => "cases",
AstProp::Children => "children",
AstProp::CheckType => "checkType",
AstProp::ClosingElement => "closingElement",
AstProp::ClosingFragment => "closingFragment",
AstProp::Computed => "computed",
AstProp::Consequent => "consequent",
AstProp::Const => "const",
AstProp::Constraint => "constraint",
AstProp::Cooked => "cooked",
AstProp::Declaration => "declaration",
AstProp::Declarations => "declarations",
AstProp::Declare => "declare",
AstProp::Decorators => "decorators",
AstProp::Default => "default",
AstProp::Definite => "definite",
AstProp::Delegate => "delegate",
AstProp::Discriminant => "discriminant",
AstProp::Elements => "elements",
AstProp::ElementType => "elementType",
AstProp::ElementTypes => "elementTypes",
AstProp::ExprName => "exprName",
AstProp::Expression => "expression",
AstProp::Expressions => "expressions",
AstProp::ExportKind => "exportKind",
AstProp::Exported => "exported",
AstProp::Extends => "extends",
AstProp::ExtendsType => "extendsType",
AstProp::FalseType => "falseType",
AstProp::Finalizer => "finalizer",
AstProp::Flags => "flags",
AstProp::Generator => "generator",
AstProp::Global => "global",
AstProp::Handler => "handler",
AstProp::Id => "id",
AstProp::In => "in",
AstProp::IndexType => "indexType",
AstProp::Init => "init",
AstProp::Initializer => "initializer",
AstProp::Implements => "implements",
AstProp::Imported => "imported",
AstProp::ImportKind => "importKind",
AstProp::Key => "key",
AstProp::Kind => "kind",
AstProp::Label => "label",
AstProp::Left => "left",
AstProp::Literal => "literal",
AstProp::Local => "local",
AstProp::Members => "members",
AstProp::Meta => "meta",
AstProp::Method => "method",
AstProp::ModuleReference => "moduleReference",
AstProp::Name => "name",
AstProp::Namespace => "namespace",
AstProp::NameType => "nameType",
AstProp::Object => "object",
AstProp::ObjectType => "objectType",
AstProp::OpeningElement => "openingElement",
AstProp::OpeningFragment => "openingFragment",
AstProp::Operator => "operator",
AstProp::Optional => "optional",
AstProp::Options => "options",
AstProp::Out => "out",
AstProp::Override => "override",
AstProp::Param => "param",
AstProp::Parameter => "parameter",
AstProp::Parameters => "parameters",
AstProp::ParameterName => "parameterName",
AstProp::Params => "params",
AstProp::Pattern => "pattern",
AstProp::Prefix => "prefix",
AstProp::Properties => "properties",
AstProp::Property => "property",
AstProp::Qualifier => "qualifier",
AstProp::Quasi => "quasi",
AstProp::Quasis => "quasis",
AstProp::Raw => "raw",
AstProp::Readonly => "readonly",
AstProp::Regex => "regex",
AstProp::ReturnType => "returnType",
AstProp::Right => "right",
AstProp::SelfClosing => "selfClosing",
AstProp::Shorthand => "shorthand",
AstProp::Source => "source",
AstProp::SourceType => "sourceType",
AstProp::Specifiers => "specifiers",
AstProp::Static => "static",
AstProp::SuperClass => "superClass",
AstProp::SuperTypeArguments => "superTypeArguments",
AstProp::Tag => "tag",
AstProp::Tail => "tail",
AstProp::Test => "test",
AstProp::TrueType => "trueType",
AstProp::TypeAnnotation => "typeAnnotation",
AstProp::TypeArguments => "typeArguments",
AstProp::TypeName => "typeName",
AstProp::TypeParameter => "typeParameter",
AstProp::TypeParameters => "typeParameters",
AstProp::Types => "types",
AstProp::Update => "update",
AstProp::Value => "value",
};
write!(f, "{}", s)
}
}
impl From<AstProp> for u8 {
  /// Converts the property to its positional discriminant for the binary
  /// AST buffer.
  fn from(m: AstProp) -> u8 {
    m as u8
  }
}

/// Builder that serializes a TS-ESTree-shaped AST into the compact binary
/// buffer consumed by lint plugins.
pub struct TsEsTreeBuilder {
  ctx: SerializeCtx,
}

impl AstBufSerializer for TsEsTreeBuilder {
  fn serialize(&mut self) -> Vec<u8> {
    self.ctx.serialize()
  }
}
impl TsEsTreeBuilder {
  /// Creates a builder whose serialization context is sized for the full
  /// range of node kinds and property names (last enum variant + 1 each).
  pub fn new() -> Self {
    // Max values
    let kind_max_count: u8 = u8::from(AstNode::TSEnumBody) + 1;
    let prop_max_count: u8 = u8::from(AstProp::Value) + 1;
    Self {
      ctx: SerializeCtx::new(kind_max_count, prop_max_count),
    }
  }
  /// Re-maps all recorded UTF-8 byte spans to UTF-16 offsets via the given
  /// map (the JS side works in UTF-16 positions).
  pub fn map_utf8_spans_to_utf16(&mut self, map: &Utf16Map) {
    self.ctx.map_utf8_spans_to_utf16(map);
  }
  /// Writes the root `Program` node with its `sourceType` ("module" or
  /// "script") and body, and registers it as the root of the tree.
  pub fn write_program(
    &mut self,
    span: &Span,
    source_kind: SourceKind,
    body: Vec<NodeRef>,
  ) -> NodeRef {
    let id = self.ctx.append_node(AstNode::Program, span);
    let kind = match source_kind {
      SourceKind::Module => "module",
      SourceKind::Script => "script",
    };
    self.ctx.write_str(AstProp::SourceType, kind);
    self.ctx.write_ref_vec(AstProp::Body, &id, body);
    self.ctx.set_root_idx(id.0);
    self.ctx.commit_node(id)
  }
  /// Writes an `ImportDeclaration` node; `type_only` distinguishes
  /// `import type` ("type") from value imports ("value").
  pub fn write_import_decl(
    &mut self,
    span: &Span,
    type_only: bool,
    source: NodeRef,
    specifiers: Vec<NodeRef>,
    attributes: Vec<NodeRef>,
  ) -> NodeRef {
    let id = self.ctx.append_node(AstNode::ImportDeclaration, span);
    let kind = if type_only { "type" } else { "value" };
    self.ctx.write_str(AstProp::ImportKind, kind);
    self.ctx.write_ref(AstProp::Source, &id, source);
    self.ctx.write_ref_vec(AstProp::Specifiers, &id, specifiers);
    self.ctx.write_ref_vec(AstProp::Attributes, &id, attributes);
    self.ctx.commit_node(id)
  }
pub fn write_import_spec(
&mut self,
span: &Span,
type_only: bool,
local: NodeRef,
imported: NodeRef,
) -> NodeRef {
let id = self.ctx.append_node(AstNode::ImportSpecifier, span);
let kind = if type_only { "type" } else { "value" };
self.ctx.write_str(AstProp::ImportKind, kind);
self.ctx.write_ref(AstProp::Imported, &id, imported);
self.ctx.write_ref(AstProp::Local, &id, local);
self.ctx.commit_node(id)
}
pub fn write_import_attr(
&mut self,
span: &Span,
key: NodeRef,
value: NodeRef,
) -> NodeRef {
let id = self.ctx.append_node(AstNode::ImportAttribute, span);
self.ctx.write_ref(AstProp::Key, &id, key);
self.ctx.write_ref(AstProp::Value, &id, value);
self.ctx.commit_node(id)
}
pub fn write_import_default_spec(
&mut self,
span: &Span,
local: NodeRef,
) -> NodeRef {
let id = self.ctx.append_node(AstNode::ImportDefaultSpecifier, span);
self.ctx.write_ref(AstProp::Local, &id, local);
self.ctx.commit_node(id)
}
pub fn write_import_ns_spec(
&mut self,
span: &Span,
local: NodeRef,
) -> NodeRef {
let id = self
.ctx
.append_node(AstNode::ImportNamespaceSpecifier, span);
self.ctx.write_ref(AstProp::Local, &id, local);
self.ctx.commit_node(id)
}
pub fn write_export_all_decl(
&mut self,
span: &Span,
is_type_only: bool,
source: NodeRef,
exported: Option<NodeRef>,
attributes: Vec<NodeRef>,
) -> NodeRef {
let id = self.ctx.append_node(AstNode::ExportAllDeclaration, span);
let value = if is_type_only { "type" } else { "value" };
self.ctx.write_str(AstProp::ExportKind, value);
self.ctx.write_maybe_ref(AstProp::Exported, &id, exported);
self.ctx.write_ref(AstProp::Source, &id, source);
self.ctx.write_ref_vec(AstProp::Attributes, &id, attributes);
self.ctx.commit_node(id)
}
pub fn write_export_default_decl(
&mut self,
span: &Span,
is_type_only: bool,
decl: NodeRef,
) -> NodeRef {
let id = self
.ctx
.append_node(AstNode::ExportDefaultDeclaration, span);
let value = if is_type_only { "type" } else { "value" };
self.ctx.write_str(AstProp::ExportKind, value);
self.ctx.write_ref(AstProp::Declaration, &id, decl);
self.ctx.commit_node(id)
}
pub fn write_export_named_decl(
&mut self,
span: &Span,
is_type_only: bool,
specifiers: Vec<NodeRef>,
source: Option<NodeRef>,
attributes: Vec<NodeRef>,
declaration: Option<NodeRef>,
) -> NodeRef {
let id = self.ctx.append_node(AstNode::ExportNamedDeclaration, span);
let value = if is_type_only { "type" } else { "value" };
self.ctx.write_str(AstProp::ExportKind, value);
self.ctx.write_ref_vec(AstProp::Specifiers, &id, specifiers);
self.ctx.write_maybe_ref(AstProp::Source, &id, source);
self
.ctx
.write_maybe_ref(AstProp::Declaration, &id, declaration);
self.ctx.write_ref_vec(AstProp::Attributes, &id, attributes);
self.ctx.commit_node(id)
}
pub fn write_export_ts_namespace(
&mut self,
span: &Span,
ident: NodeRef,
) -> NodeRef {
let id = self
.ctx
.append_node(AstNode::TSNamespaceExportDeclaration, span);
self.ctx.write_ref(AstProp::Id, &id, ident);
self.ctx.commit_node(id)
}
pub fn write_export_ts_import_equals(
&mut self,
span: &Span,
is_type_only: bool,
ident: NodeRef,
reference: NodeRef,
) -> NodeRef {
let id = self
.ctx
.append_node(AstNode::TSImportEqualsDeclaration, span);
let value = if is_type_only { "type" } else { "value" };
self.ctx.write_str(AstProp::ImportKind, value);
self.ctx.write_ref(AstProp::Id, &id, ident);
self.ctx.write_ref(AstProp::ModuleReference, &id, reference);
self.ctx.commit_node(id)
}
pub fn write_ts_external_mod_ref(
&mut self,
span: &Span,
expr: NodeRef,
) -> NodeRef {
let id = self
.ctx
.append_node(AstNode::TSExternalModuleReference, span);
self.ctx.write_ref(AstProp::Expression, &id, expr);
self.ctx.commit_node(id)
}
pub fn write_export_spec(
&mut self,
span: &Span,
type_only: bool,
local: NodeRef,
exported: NodeRef,
) -> NodeRef {
let id = self.ctx.append_node(AstNode::ExportSpecifier, span);
let kind = if type_only { "type" } else { "value" };
self.ctx.write_str(AstProp::ExportKind, kind);
self.ctx.write_ref(AstProp::Exported, &id, exported);
self.ctx.write_ref(AstProp::Local, &id, local);
self.ctx.commit_node(id)
}
pub fn write_var_decl(
&mut self,
span: &Span,
declare: bool,
kind: &str,
decls: Vec<NodeRef>,
) -> NodeRef {
let id = self.ctx.append_node(AstNode::VariableDeclaration, span);
self.ctx.write_bool(AstProp::Declare, declare);
self.ctx.write_str(AstProp::Kind, kind);
self.ctx.write_ref_vec(AstProp::Declarations, &id, decls);
self.ctx.commit_node(id)
}
pub fn write_var_declarator(
&mut self,
span: &Span,
ident: NodeRef,
init: Option<NodeRef>,
definite: bool,
) -> NodeRef {
let id = self.ctx.append_node(AstNode::VariableDeclarator, span);
self.ctx.write_ref(AstProp::Id, &id, ident);
self.ctx.write_maybe_ref(AstProp::Init, &id, init);
self.ctx.write_bool(AstProp::Definite, definite);
self.ctx.commit_node(id)
}
#[allow(clippy::too_many_arguments)]
pub fn write_ts_decl_fn(
&mut self,
span: &Span,
is_declare: bool,
is_async: bool,
is_generator: bool,
ident: Option<NodeRef>,
type_param: Option<NodeRef>,
return_type: Option<NodeRef>,
params: Vec<NodeRef>,
) -> NodeRef {
let id = self.ctx.append_node(AstNode::TSDeclareFunction, span);
self.ctx.write_bool(AstProp::Declare, is_declare);
self.ctx.write_bool(AstProp::Async, is_async);
self.ctx.write_bool(AstProp::Generator, is_generator);
self.ctx.write_maybe_ref(AstProp::Id, &id, ident);
self
.ctx
.write_maybe_undef_ref(AstProp::TypeParameters, &id, type_param);
self
.ctx
.write_maybe_undef_ref(AstProp::ReturnType, &id, return_type);
self.ctx.write_undefined(AstProp::Body);
self.ctx.write_ref_vec(AstProp::Params, &id, params);
self.ctx.commit_node(id)
}
#[allow(clippy::too_many_arguments)]
pub fn write_fn_decl(
&mut self,
span: &Span,
is_declare: bool,
is_async: bool,
is_generator: bool,
// Ident is required in most cases, but optional as default export
// declaration. TsEstree is weird...
ident: Option<NodeRef>,
type_param: Option<NodeRef>,
return_type: Option<NodeRef>,
body: NodeRef,
params: Vec<NodeRef>,
) -> NodeRef {
let id = self.ctx.append_node(AstNode::FunctionDeclaration, span);
self.ctx.write_bool(AstProp::Declare, is_declare);
self.ctx.write_bool(AstProp::Async, is_async);
self.ctx.write_bool(AstProp::Generator, is_generator);
self.ctx.write_maybe_ref(AstProp::Id, &id, ident);
self
.ctx
.write_maybe_undef_ref(AstProp::TypeParameters, &id, type_param);
self
.ctx
.write_maybe_undef_ref(AstProp::ReturnType, &id, return_type);
self.ctx.write_ref(AstProp::Body, &id, body);
self.ctx.write_ref_vec(AstProp::Params, &id, params);
self.ctx.commit_node(id)
}
pub fn write_decorator(&mut self, span: &Span, expr: NodeRef) -> NodeRef {
let id = self.ctx.append_node(AstNode::Decorator, span);
self.ctx.write_ref(AstProp::Expression, &id, expr);
self.ctx.commit_node(id)
}
#[allow(clippy::too_many_arguments)]
pub fn write_class_decl(
&mut self,
span: &Span,
is_declare: bool,
is_abstract: bool,
// Ident is required in most cases, but optional as default export
// declaration. TsEstree is weird...
ident: Option<NodeRef>,
super_class: Option<NodeRef>,
implements: Vec<NodeRef>,
body: NodeRef,
decorators: Vec<NodeRef>,
) -> NodeRef {
let id = self.ctx.append_node(AstNode::ClassDeclaration, span);
self.ctx.write_bool(AstProp::Declare, is_declare);
self.ctx.write_bool(AstProp::Abstract, is_abstract);
self.ctx.write_maybe_ref(AstProp::Id, &id, ident);
self
.ctx
.write_maybe_ref(AstProp::SuperClass, &id, super_class);
self.ctx.write_ref_vec(AstProp::Implements, &id, implements);
self.ctx.write_ref(AstProp::Body, &id, body);
self.ctx.write_ref_vec(AstProp::Decorators, &id, decorators);
self.ctx.commit_node(id)
}
#[allow(clippy::too_many_arguments)]
pub fn write_class_expr(
&mut self,
span: &Span,
is_declare: bool,
is_abstract: bool,
ident: Option<NodeRef>,
super_class: Option<NodeRef>,
super_type_args: Option<NodeRef>,
type_params: Option<NodeRef>,
implements: Vec<NodeRef>,
body: NodeRef,
) -> NodeRef {
let id = self.ctx.append_node(AstNode::ClassExpression, span);
self.ctx.write_bool(AstProp::Declare, is_declare);
self.ctx.write_bool(AstProp::Abstract, is_abstract);
self.ctx.write_maybe_ref(AstProp::Id, &id, ident);
self
.ctx
.write_maybe_ref(AstProp::SuperClass, &id, super_class);
self.ctx.write_maybe_undef_ref(
AstProp::SuperTypeArguments,
&id,
super_type_args,
);
self
.ctx
.write_maybe_undef_ref(AstProp::TypeParameters, &id, type_params);
self.ctx.write_ref_vec(AstProp::Implements, &id, implements);
self.ctx.write_ref(AstProp::Body, &id, body);
self.ctx.commit_node(id)
}
pub fn write_class_body(
&mut self,
span: &Span,
body: Vec<NodeRef>,
) -> NodeRef {
let id = self.ctx.append_node(AstNode::ClassBody, span);
self.ctx.write_ref_vec(AstProp::Body, &id, body);
self.ctx.commit_node(id)
}
pub fn write_static_block(&mut self, span: &Span, body: NodeRef) -> NodeRef {
let id = self.ctx.append_node(AstNode::StaticBlock, span);
self.ctx.write_ref(AstProp::Body, &id, body);
self.ctx.commit_node(id)
}
#[allow(clippy::too_many_arguments)]
pub fn write_accessor_property(
&mut self,
span: &Span,
is_declare: bool,
is_computed: bool,
is_optional: bool,
is_override: bool,
is_readonly: bool,
is_static: bool,
accessibility: Option<String>,
decorators: Vec<NodeRef>,
key: NodeRef,
value: Option<NodeRef>,
) -> NodeRef {
let id = self.ctx.append_node(AstNode::AccessorProperty, span);
self.ctx.write_bool(AstProp::Declare, is_declare);
self.ctx.write_bool(AstProp::Computed, is_computed);
self.ctx.write_bool(AstProp::Optional, is_optional);
self.ctx.write_bool(AstProp::Override, is_override);
self.ctx.write_bool(AstProp::Readonly, is_readonly);
self.ctx.write_bool(AstProp::Static, is_static);
self.write_accessibility(accessibility);
self.ctx.write_ref_vec(AstProp::Decorators, &id, decorators);
self.ctx.write_ref(AstProp::Key, &id, key);
self.ctx.write_maybe_ref(AstProp::Value, &id, value);
self.ctx.commit_node(id)
}
#[allow(clippy::too_many_arguments)]
pub fn write_class_prop(
&mut self,
span: &Span,
is_declare: bool,
is_computed: bool,
is_optional: bool,
is_override: bool,
is_readonly: bool,
is_static: bool,
accessibility: Option<String>,
decorators: Vec<NodeRef>,
key: NodeRef,
value: Option<NodeRef>,
type_ann: Option<NodeRef>,
) -> NodeRef {
let id = self.ctx.append_node(AstNode::PropertyDefinition, span);
self.ctx.write_bool(AstProp::Declare, is_declare);
self.ctx.write_bool(AstProp::Computed, is_computed);
self.ctx.write_bool(AstProp::Optional, is_optional);
self.ctx.write_bool(AstProp::Override, is_override);
self.ctx.write_bool(AstProp::Readonly, is_readonly);
self.ctx.write_bool(AstProp::Static, is_static);
self.write_accessibility(accessibility);
self.ctx.write_ref_vec(AstProp::Decorators, &id, decorators);
self.ctx.write_ref(AstProp::Key, &id, key);
self.ctx.write_maybe_ref(AstProp::Value, &id, value);
self
.ctx
.write_maybe_undef_ref(AstProp::TypeAnnotation, &id, type_ann);
self.ctx.commit_node(id)
}
#[allow(clippy::too_many_arguments)]
pub fn write_class_method(
&mut self,
span: &Span,
is_declare: bool,
is_computed: bool,
is_optional: bool,
is_override: bool,
is_static: bool,
kind: MethodKind,
accessibility: Option<String>,
key: NodeRef,
value: NodeRef,
decorators: Vec<NodeRef>,
) -> NodeRef {
let id = self.ctx.append_node(AstNode::MethodDefinition, span);
self.ctx.write_bool(AstProp::Declare, is_declare);
self.ctx.write_bool(AstProp::Computed, is_computed);
self.ctx.write_bool(AstProp::Optional, is_optional);
self.ctx.write_bool(AstProp::Override, is_override);
self.ctx.write_bool(AstProp::Static, is_static);
let kind_str = match kind {
MethodKind::Constructor => "constructor",
MethodKind::Get => "get",
MethodKind::Method => "method",
MethodKind::Set => "set",
};
self.ctx.write_str(AstProp::Kind, kind_str);
self.write_accessibility(accessibility);
self.ctx.write_ref(AstProp::Key, &id, key);
self.ctx.write_ref(AstProp::Value, &id, value);
self.ctx.write_ref_vec(AstProp::Decorators, &id, decorators);
self.ctx.commit_node(id)
}
  /// Serializes a `BlockStatement` (`{ ... }`) with its child statements.
  pub fn write_block_stmt(
    &mut self,
    span: &Span,
    body: Vec<NodeRef>,
  ) -> NodeRef {
    let id = self.ctx.append_node(AstNode::BlockStatement, span);
    self.ctx.write_ref_vec(AstProp::Body, &id, body);
    self.ctx.commit_node(id)
  }
  /// Serializes a `DebuggerStatement` (`debugger;`). Has no properties.
  pub fn write_debugger_stmt(&mut self, span: &Span) -> NodeRef {
    let id = self.ctx.append_node(AstNode::DebuggerStatement, span);
    self.ctx.commit_node(id)
  }
  /// Serializes a `WithStatement` (`with (obj) body`).
  pub fn write_with_stmt(
    &mut self,
    span: &Span,
    obj: NodeRef,
    body: NodeRef,
  ) -> NodeRef {
    let id = self.ctx.append_node(AstNode::WithStatement, span);
    self.ctx.write_ref(AstProp::Object, &id, obj);
    self.ctx.write_ref(AstProp::Body, &id, body);
    self.ctx.commit_node(id)
  }
  /// Serializes a `ReturnStatement`; `arg` is `None` for a bare `return;`.
  pub fn write_return_stmt(
    &mut self,
    span: &Span,
    arg: Option<NodeRef>,
  ) -> NodeRef {
    let id = self.ctx.append_node(AstNode::ReturnStatement, span);
    self.ctx.write_maybe_ref(AstProp::Argument, &id, arg);
    self.ctx.commit_node(id)
  }
  /// Serializes a `LabeledStatement` (`label: body`).
  pub fn write_labeled_stmt(
    &mut self,
    span: &Span,
    label: NodeRef,
    body: NodeRef,
  ) -> NodeRef {
    let id = self.ctx.append_node(AstNode::LabeledStatement, span);
    self.ctx.write_ref(AstProp::Label, &id, label);
    self.ctx.write_ref(AstProp::Body, &id, body);
    self.ctx.commit_node(id)
  }
  /// Serializes a `BreakStatement`; `label` is `None` for a plain `break;`.
  pub fn write_break_stmt(
    &mut self,
    span: &Span,
    label: Option<NodeRef>,
  ) -> NodeRef {
    let id = self.ctx.append_node(AstNode::BreakStatement, span);
    self.ctx.write_maybe_ref(AstProp::Label, &id, label);
    self.ctx.commit_node(id)
  }
  /// Serializes a `ContinueStatement`; `label` is `None` for `continue;`.
  pub fn write_continue_stmt(
    &mut self,
    span: &Span,
    label: Option<NodeRef>,
  ) -> NodeRef {
    let id = self.ctx.append_node(AstNode::ContinueStatement, span);
    self.ctx.write_maybe_ref(AstProp::Label, &id, label);
    self.ctx.commit_node(id)
  }
  /// Serializes an `IfStatement`; `alternate` is the optional `else` branch.
  pub fn write_if_stmt(
    &mut self,
    span: &Span,
    test: NodeRef,
    consequent: NodeRef,
    alternate: Option<NodeRef>,
  ) -> NodeRef {
    let id = self.ctx.append_node(AstNode::IfStatement, span);
    self.ctx.write_ref(AstProp::Test, &id, test);
    self.ctx.write_ref(AstProp::Consequent, &id, consequent);
    self.ctx.write_maybe_ref(AstProp::Alternate, &id, alternate);
    self.ctx.commit_node(id)
  }
  /// Serializes a `SwitchStatement` with its discriminant and case list.
  pub fn write_switch_stmt(
    &mut self,
    span: &Span,
    discriminant: NodeRef,
    cases: Vec<NodeRef>,
  ) -> NodeRef {
    let id = self.ctx.append_node(AstNode::SwitchStatement, span);
    self.ctx.write_ref(AstProp::Discriminant, &id, discriminant);
    self.ctx.write_ref_vec(AstProp::Cases, &id, cases);
    self.ctx.commit_node(id)
  }
  /// Serializes a `SwitchCase`; `test` is `None` for the `default:` case.
  pub fn write_switch_case(
    &mut self,
    span: &Span,
    test: Option<NodeRef>,
    consequent: Vec<NodeRef>,
  ) -> NodeRef {
    let id = self.ctx.append_node(AstNode::SwitchCase, span);
    self.ctx.write_maybe_ref(AstProp::Test, &id, test);
    self.ctx.write_ref_vec(AstProp::Consequent, &id, consequent);
    self.ctx.commit_node(id)
  }
  /// Serializes a `ThrowStatement` (`throw arg;`).
  pub fn write_throw_stmt(&mut self, span: &Span, arg: NodeRef) -> NodeRef {
    let id = self.ctx.append_node(AstNode::ThrowStatement, span);
    self.ctx.write_ref(AstProp::Argument, &id, arg);
    self.ctx.commit_node(id)
  }
  /// Serializes a `WhileStatement` (`while (test) body`).
  pub fn write_while_stmt(
    &mut self,
    span: &Span,
    test: NodeRef,
    body: NodeRef,
  ) -> NodeRef {
    let id = self.ctx.append_node(AstNode::WhileStatement, span);
    self.ctx.write_ref(AstProp::Test, &id, test);
    self.ctx.write_ref(AstProp::Body, &id, body);
    self.ctx.commit_node(id)
  }
  /// Serializes a `DoWhileStatement` (`do body while (test);`).
  pub fn write_do_while_stmt(
    &mut self,
    span: &Span,
    test: NodeRef,
    body: NodeRef,
  ) -> NodeRef {
    let id = self.ctx.append_node(AstNode::DoWhileStatement, span);
    self.ctx.write_ref(AstProp::Test, &id, test);
    self.ctx.write_ref(AstProp::Body, &id, body);
    self.ctx.commit_node(id)
  }
  /// Serializes a C-style `ForStatement`; init/test/update are each optional
  /// (`for (;;) body` leaves all three `None`).
  pub fn write_for_stmt(
    &mut self,
    span: &Span,
    init: Option<NodeRef>,
    test: Option<NodeRef>,
    update: Option<NodeRef>,
    body: NodeRef,
  ) -> NodeRef {
    let id = self.ctx.append_node(AstNode::ForStatement, span);
    self.ctx.write_maybe_ref(AstProp::Init, &id, init);
    self.ctx.write_maybe_ref(AstProp::Test, &id, test);
    self.ctx.write_maybe_ref(AstProp::Update, &id, update);
    self.ctx.write_ref(AstProp::Body, &id, body);
    self.ctx.commit_node(id)
  }
  /// Serializes a `ForInStatement` (`for (left in right) body`).
  pub fn write_for_in_stmt(
    &mut self,
    span: &Span,
    left: NodeRef,
    right: NodeRef,
    body: NodeRef,
  ) -> NodeRef {
    let id = self.ctx.append_node(AstNode::ForInStatement, span);
    self.ctx.write_ref(AstProp::Left, &id, left);
    self.ctx.write_ref(AstProp::Right, &id, right);
    self.ctx.write_ref(AstProp::Body, &id, body);
    self.ctx.commit_node(id)
  }
  /// Serializes a `ForOfStatement`; `is_await` marks `for await (...)`.
  pub fn write_for_of_stmt(
    &mut self,
    span: &Span,
    is_await: bool,
    left: NodeRef,
    right: NodeRef,
    body: NodeRef,
  ) -> NodeRef {
    let id = self.ctx.append_node(AstNode::ForOfStatement, span);
    self.ctx.write_bool(AstProp::Await, is_await);
    self.ctx.write_ref(AstProp::Left, &id, left);
    self.ctx.write_ref(AstProp::Right, &id, right);
    self.ctx.write_ref(AstProp::Body, &id, body);
    self.ctx.commit_node(id)
  }
  /// Serializes an `ExpressionStatement` wrapping a single expression.
  pub fn write_expr_stmt(&mut self, span: &Span, expr: NodeRef) -> NodeRef {
    let id = self.ctx.append_node(AstNode::ExpressionStatement, span);
    self.ctx.write_ref(AstProp::Expression, &id, expr);
    self.ctx.commit_node(id)
  }
  /// Serializes a `TryStatement`; `handler` is the optional `catch` clause
  /// and `finalizer` the optional `finally` block.
  pub fn write_try_stmt(
    &mut self,
    span: &Span,
    block: NodeRef,
    handler: Option<NodeRef>,
    finalizer: Option<NodeRef>,
  ) -> NodeRef {
    let id = self.ctx.append_node(AstNode::TryStatement, span);
    self.ctx.write_ref(AstProp::Block, &id, block);
    self.ctx.write_maybe_ref(AstProp::Handler, &id, handler);
    self.ctx.write_maybe_ref(AstProp::Finalizer, &id, finalizer);
    self.ctx.commit_node(id)
  }
  /// Serializes a `CatchClause`; `param` is `None` for `catch { ... }`
  /// without a binding.
  pub fn write_catch_clause(
    &mut self,
    span: &Span,
    param: Option<NodeRef>,
    body: NodeRef,
  ) -> NodeRef {
    let id = self.ctx.append_node(AstNode::CatchClause, span);
    self.ctx.write_maybe_ref(AstProp::Param, &id, param);
    self.ctx.write_ref(AstProp::Body, &id, body);
    self.ctx.commit_node(id)
  }
  /// Serializes an `ArrayExpression` (`[a, b, ...]`).
  pub fn write_arr_expr(
    &mut self,
    span: &Span,
    elems: Vec<NodeRef>,
  ) -> NodeRef {
    let id = self.ctx.append_node(AstNode::ArrayExpression, span);
    self.ctx.write_ref_vec(AstProp::Elements, &id, elems);
    self.ctx.commit_node(id)
  }
  /// Serializes an `ObjectExpression` (`{ a: 1, ... }`).
  pub fn write_obj_expr(
    &mut self,
    span: &Span,
    props: Vec<NodeRef>,
  ) -> NodeRef {
    let id = self.ctx.append_node(AstNode::ObjectExpression, span);
    self.ctx.write_ref_vec(AstProp::Properties, &id, props);
    self.ctx.commit_node(id)
  }
pub fn write_bin_expr(
&mut self,
span: &Span,
operator: &str,
left: NodeRef,
right: NodeRef,
) -> NodeRef {
let id = self.ctx.append_node(AstNode::BinaryExpression, span);
self.ctx.write_str(AstProp::Operator, operator);
self.ctx.write_ref(AstProp::Left, &id, left);
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | true |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/tools/lint/ast_buffer/swc.rs | cli/tools/lint/ast_buffer/swc.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use deno_ast::ParsedSource;
use deno_ast::swc::ast::AssignTarget;
use deno_ast::swc::ast::AssignTargetPat;
use deno_ast::swc::ast::BindingIdent;
use deno_ast::swc::ast::BlockStmtOrExpr;
use deno_ast::swc::ast::Callee;
use deno_ast::swc::ast::ClassMember;
use deno_ast::swc::ast::Decl;
use deno_ast::swc::ast::Decorator;
use deno_ast::swc::ast::DefaultDecl;
use deno_ast::swc::ast::ExportSpecifier;
use deno_ast::swc::ast::Expr;
use deno_ast::swc::ast::ExprOrSpread;
use deno_ast::swc::ast::FnExpr;
use deno_ast::swc::ast::ForHead;
use deno_ast::swc::ast::Function;
use deno_ast::swc::ast::Ident;
use deno_ast::swc::ast::IdentName;
use deno_ast::swc::ast::ImportSpecifier;
use deno_ast::swc::ast::JSXAttrName;
use deno_ast::swc::ast::JSXAttrOrSpread;
use deno_ast::swc::ast::JSXAttrValue;
use deno_ast::swc::ast::JSXElement;
use deno_ast::swc::ast::JSXElementChild;
use deno_ast::swc::ast::JSXElementName;
use deno_ast::swc::ast::JSXExpr;
use deno_ast::swc::ast::JSXExprContainer;
use deno_ast::swc::ast::JSXFragment;
use deno_ast::swc::ast::JSXMemberExpr;
use deno_ast::swc::ast::JSXNamespacedName;
use deno_ast::swc::ast::JSXObject;
use deno_ast::swc::ast::JSXOpeningElement;
use deno_ast::swc::ast::Key;
use deno_ast::swc::ast::Lit;
use deno_ast::swc::ast::MemberExpr;
use deno_ast::swc::ast::MemberProp;
use deno_ast::swc::ast::ModuleDecl;
use deno_ast::swc::ast::ModuleExportName;
use deno_ast::swc::ast::ModuleItem;
use deno_ast::swc::ast::ObjectLit;
use deno_ast::swc::ast::ObjectPatProp;
use deno_ast::swc::ast::OptChainBase;
use deno_ast::swc::ast::Param;
use deno_ast::swc::ast::ParamOrTsParamProp;
use deno_ast::swc::ast::Pat;
use deno_ast::swc::ast::PrivateName;
use deno_ast::swc::ast::Program;
use deno_ast::swc::ast::Prop;
use deno_ast::swc::ast::PropName;
use deno_ast::swc::ast::PropOrSpread;
use deno_ast::swc::ast::SimpleAssignTarget;
use deno_ast::swc::ast::Stmt;
use deno_ast::swc::ast::Str;
use deno_ast::swc::ast::SuperProp;
use deno_ast::swc::ast::TsEntityName;
use deno_ast::swc::ast::TsEnumMemberId;
use deno_ast::swc::ast::TsExprWithTypeArgs;
use deno_ast::swc::ast::TsFnOrConstructorType;
use deno_ast::swc::ast::TsFnParam;
use deno_ast::swc::ast::TsIndexSignature;
use deno_ast::swc::ast::TsLit;
use deno_ast::swc::ast::TsLitType;
use deno_ast::swc::ast::TsModuleName;
use deno_ast::swc::ast::TsModuleRef;
use deno_ast::swc::ast::TsNamespaceBody;
use deno_ast::swc::ast::TsParamPropParam;
use deno_ast::swc::ast::TsThisTypeOrIdent;
use deno_ast::swc::ast::TsType;
use deno_ast::swc::ast::TsTypeAnn;
use deno_ast::swc::ast::TsTypeElement;
use deno_ast::swc::ast::TsTypeParam;
use deno_ast::swc::ast::TsTypeParamDecl;
use deno_ast::swc::ast::TsTypeParamInstantiation;
use deno_ast::swc::ast::TsTypeQueryExpr;
use deno_ast::swc::ast::TsUnionOrIntersectionType;
use deno_ast::swc::ast::VarDeclOrExpr;
use deno_ast::swc::common::Span;
use deno_ast::swc::common::Spanned;
use deno_ast::swc::common::SyntaxContext;
use deno_ast::view::Accessibility;
use deno_ast::view::AssignOp;
use deno_ast::view::BinaryOp;
use deno_ast::view::MetaPropKind;
use deno_ast::view::MethodKind;
use deno_ast::view::TsKeywordTypeKind;
use deno_ast::view::TsTypeOperatorOp;
use deno_ast::view::UnaryOp;
use deno_ast::view::UpdateOp;
use deno_ast::view::VarDeclKind;
use super::buffer::AstBufSerializer;
use super::buffer::CommentKind;
use super::buffer::NodeRef;
use super::ts_estree::AstNode;
use super::ts_estree::MethodKind as TsEstreeMethodKind;
use super::ts_estree::PropertyKind;
use super::ts_estree::SourceKind;
use super::ts_estree::TsEsTreeBuilder;
use super::ts_estree::TsKeywordKind;
use super::ts_estree::TsModuleKind;
use crate::util::text_encoding::Utf16Map;
/// Serializes a parsed SWC program into the flat binary AST buffer format.
///
/// Walks the module/script body, appends all comments, remaps spans and
/// returns the finished byte buffer.
pub fn serialize_swc_to_buffer(
  parsed_source: &ParsedSource,
  utf16_map: &Utf16Map,
) -> Vec<u8> {
  let mut ctx = TsEsTreeBuilder::new();
  let program = &parsed_source.program();
  match program.as_ref() {
    Program::Module(module) => {
      let children = module
        .body
        .iter()
        .map(|item| match item {
          ModuleItem::ModuleDecl(module_decl) => {
            serialize_module_decl(&mut ctx, module_decl)
          }
          ModuleItem::Stmt(stmt) => serialize_stmt(&mut ctx, stmt),
        })
        .collect::<Vec<_>>();
      ctx.write_program(&module.span, SourceKind::Module, children);
    }
    Program::Script(script) => {
      let children = script
        .body
        .iter()
        .map(|stmt| serialize_stmt(&mut ctx, stmt))
        .collect::<Vec<_>>();
      ctx.write_program(&script.span, SourceKind::Script, children);
    }
  }
  // Comments are stored separately from the AST nodes; map SWC's comment
  // kind to the buffer's own `CommentKind`.
  for comment in parsed_source.comments().get_vec() {
    let kind = match comment.kind {
      deno_ast::swc::common::comments::CommentKind::Line => CommentKind::Line,
      deno_ast::swc::common::comments::CommentKind::Block => CommentKind::Block,
    };
    ctx.write_comment(kind, &comment.text, &comment.span);
  }
  // SWC spans are UTF-8 byte offsets; remap them to UTF-16 offsets before
  // emitting the final buffer.
  ctx.map_utf8_spans_to_utf16(utf16_map);
  ctx.serialize()
}
/// Serializes a top-level module declaration (`import`/`export` forms,
/// including the TS-only `import =`, `export =` and
/// `export as namespace` variants) into the AST buffer.
fn serialize_module_decl(
  ctx: &mut TsEsTreeBuilder,
  module_decl: &ModuleDecl,
) -> NodeRef {
  match module_decl {
    ModuleDecl::Import(node) => {
      let src = serialize_lit(ctx, &Lit::Str(node.src.as_ref().clone()));
      let attrs = serialize_import_attrs(ctx, &node.with);
      let specifiers = node
        .specifiers
        .iter()
        .map(|spec| match spec {
          ImportSpecifier::Named(spec) => {
            let local = serialize_ident(ctx, &spec.local, None);
            // For `import { a }` SWC has no separate `imported` ident, but
            // the estree shape expects one, so fall back to the local ident.
            let imported = spec
              .imported
              .as_ref()
              .map_or(serialize_ident(ctx, &spec.local, None), |v| {
                serialize_module_export_name(ctx, v)
              });
            ctx.write_import_spec(
              &spec.span,
              spec.is_type_only,
              local,
              imported,
            )
          }
          ImportSpecifier::Default(spec) => {
            let local = serialize_ident(ctx, &spec.local, None);
            ctx.write_import_default_spec(&spec.span, local)
          }
          ImportSpecifier::Namespace(spec) => {
            let local = serialize_ident(ctx, &spec.local, None);
            ctx.write_import_ns_spec(&spec.span, local)
          }
        })
        .collect::<Vec<_>>();
      ctx.write_import_decl(&node.span, node.type_only, src, specifiers, attrs)
    }
    ModuleDecl::ExportDecl(node) => {
      // TS type-level declarations are flagged as type-only exports.
      let is_type_only = match &node.decl {
        Decl::Class(_) => false,
        Decl::Fn(_) => false,
        Decl::Var(_) => false,
        Decl::Using(_) => false,
        Decl::TsInterface(_) => true,
        Decl::TsTypeAlias(_) => true,
        Decl::TsEnum(_) => true,
        Decl::TsModule(_) => true,
      };
      let decl = serialize_decl(ctx, &node.decl);
      ctx.write_export_named_decl(
        &node.span,
        is_type_only,
        vec![],
        None,
        vec![],
        Some(decl),
      )
    }
    ModuleDecl::ExportNamed(node) => {
      let attrs = serialize_import_attrs(ctx, &node.with);
      let source = node
        .src
        .as_ref()
        .map(|src| serialize_lit(ctx, &Lit::Str(*src.clone())));
      // `export * as ns from "..."` is stored by SWC as a named export with
      // a single namespace specifier; emit it as an export-all node instead.
      if let Some(ExportSpecifier::Namespace(ns)) = node.specifiers.first() {
        let exported = serialize_module_export_name(ctx, &ns.name);
        ctx.write_export_all_decl(
          &node.span,
          node.type_only,
          // Namespaced export must always have a source, so this
          // scenario where it's optional can't happen. I think
          // it's just the way SWC stores things internally, since they
          // don't have a dedicated node for namespace exports.
          source.unwrap_or(NodeRef(0)),
          Some(exported),
          attrs,
        )
      } else {
        let specifiers = node
          .specifiers
          .iter()
          .map(|spec| {
            match spec {
              ExportSpecifier::Named(spec) => {
                let local = serialize_module_export_name(ctx, &spec.orig);
                let exported = spec.exported.as_ref().map_or(
                  serialize_module_export_name(ctx, &spec.orig),
                  |exported| serialize_module_export_name(ctx, exported),
                );
                ctx.write_export_spec(
                  &spec.span,
                  spec.is_type_only,
                  local,
                  exported,
                )
              }
              // Already handled earlier
              ExportSpecifier::Namespace(_) => unreachable!(),
              // this is not syntactically valid
              ExportSpecifier::Default(_) => {
                // Ignore syntax errors
                NodeRef(0)
              }
            }
          })
          .collect::<Vec<_>>();
        ctx.write_export_named_decl(
          &node.span,
          node.type_only,
          specifiers,
          source,
          attrs,
          None,
        )
      }
    }
    ModuleDecl::ExportDefaultDecl(node) => {
      let (is_type_only, decl) = match &node.decl {
        DefaultDecl::Class(node) => {
          let ident = node
            .ident
            .as_ref()
            .map(|ident| serialize_ident(ctx, ident, None));
          let super_class = node
            .class
            .super_class
            .as_ref()
            .map(|expr| serialize_expr(ctx, expr.as_ref()));
          let implements = node
            .class
            .implements
            .iter()
            .map(|item| serialize_ts_expr_with_type_args(ctx, item))
            .collect::<Vec<_>>();
          let members = node
            .class
            .body
            .iter()
            .filter_map(|member| serialize_class_member(ctx, member))
            .collect::<Vec<_>>();
          let body = ctx.write_class_body(&node.class.span, members);
          let decorators = node
            .class
            .decorators
            .iter()
            .map(|deco| serialize_decorator(ctx, deco))
            .collect::<Vec<_>>();
          let decl = ctx.write_class_decl(
            &node.class.span,
            false,
            node.class.is_abstract,
            ident,
            super_class,
            implements,
            body,
            decorators,
          );
          (false, decl)
        }
        DefaultDecl::Fn(node) => {
          let ident = node
            .ident
            .as_ref()
            .map(|ident| serialize_ident(ctx, ident, None));
          let fn_obj = node.function.as_ref();
          let type_params =
            maybe_serialize_ts_type_param_decl(ctx, &fn_obj.type_params);
          let params = fn_obj
            .params
            .iter()
            .map(|param| {
              let decorators = param
                .decorators
                .iter()
                .map(|deco| serialize_decorator(ctx, deco))
                .collect::<Vec<_>>();
              serialize_pat(ctx, &param.pat, Some(decorators))
            })
            .collect::<Vec<_>>();
          let return_type =
            maybe_serialize_ts_type_ann(ctx, &fn_obj.return_type);
          let body = fn_obj
            .body
            .as_ref()
            .map(|block| serialize_stmt(ctx, &Stmt::Block(block.clone())));
          // A function without a body is a TS declaration/overload
          // signature, which is written as a different node kind.
          let decl = if let Some(body) = body {
            ctx.write_fn_decl(
              &fn_obj.span,
              false,
              fn_obj.is_async,
              fn_obj.is_generator,
              ident,
              type_params,
              return_type,
              body,
              params,
            )
          } else {
            ctx.write_ts_decl_fn(
              &fn_obj.span,
              false,
              fn_obj.is_async,
              fn_obj.is_generator,
              ident,
              type_params,
              return_type,
              params,
            )
          };
          (false, decl)
        }
        DefaultDecl::TsInterfaceDecl(node) => {
          let ident_id = serialize_ident(ctx, &node.id, None);
          let type_param =
            maybe_serialize_ts_type_param_decl(ctx, &node.type_params);
          let extend_ids = node
            .extends
            .iter()
            .map(|item| {
              let expr = serialize_expr(ctx, &item.expr);
              let type_args = item
                .type_args
                .clone()
                .map(|params| serialize_ts_param_inst(ctx, params.as_ref()));
              ctx.write_ts_interface_heritage(&item.span, expr, type_args)
            })
            .collect::<Vec<_>>();
          let body_elem_ids = node
            .body
            .body
            .iter()
            .map(|item| serialize_ts_type_elem(ctx, item))
            .collect::<Vec<_>>();
          let body_pos =
            ctx.write_ts_interface_body(&node.body.span, body_elem_ids);
          let decl = ctx.write_ts_interface_decl(
            &node.span,
            node.declare,
            ident_id,
            type_param,
            extend_ids,
            body_pos,
          );
          (true, decl)
        }
      };
      ctx.write_export_default_decl(&node.span, is_type_only, decl)
    }
    ModuleDecl::ExportDefaultExpr(node) => {
      let expr = serialize_expr(ctx, &node.expr);
      ctx.write_export_default_decl(&node.span, false, expr)
    }
    ModuleDecl::ExportAll(node) => {
      let src = serialize_lit(ctx, &Lit::Str(node.src.as_ref().clone()));
      let attrs = serialize_import_attrs(ctx, &node.with);
      ctx.write_export_all_decl(&node.span, node.type_only, src, None, attrs)
    }
    ModuleDecl::TsImportEquals(node) => {
      let ident = serialize_ident(ctx, &node.id, None);
      let module_ref = match &node.module_ref {
        TsModuleRef::TsEntityName(entity) => {
          serialize_ts_entity_name(ctx, entity)
        }
        TsModuleRef::TsExternalModuleRef(external) => {
          // `import foo = require("...")`
          let expr = serialize_lit(ctx, &Lit::Str(external.expr.clone()));
          ctx.write_ts_external_mod_ref(&external.span, expr)
        }
      };
      ctx.write_export_ts_import_equals(
        &node.span,
        node.is_type_only,
        ident,
        module_ref,
      )
    }
    ModuleDecl::TsExportAssignment(node) => {
      let expr = serialize_expr(ctx, &node.expr);
      ctx.write_export_assign(&node.span, expr)
    }
    ModuleDecl::TsNamespaceExport(node) => {
      let decl = serialize_ident(ctx, &node.id, None);
      ctx.write_export_ts_namespace(&node.span, decl)
    }
  }
}
/// Serializes the import attributes object (`with { type: "json" }`) of an
/// import/export declaration. Returns an empty list when no attributes
/// object is present.
fn serialize_import_attrs(
  ctx: &mut TsEsTreeBuilder,
  raw_attrs: &Option<Box<ObjectLit>>,
) -> Vec<NodeRef> {
  let Some(obj) = raw_attrs.as_ref() else {
    return vec![];
  };
  let mut attrs = Vec::with_capacity(obj.props.len());
  for prop in &obj.props {
    let (key, value) = match prop {
      // A spread here is a syntax error; emit the "no node" sentinel.
      PropOrSpread::Spread(_) => (NodeRef(0), NodeRef(0)),
      PropOrSpread::Prop(prop) => match prop.as_ref() {
        // `{ type }` — key and value are the same identifier.
        Prop::Shorthand(ident) => (
          serialize_ident(ctx, ident, None),
          serialize_ident(ctx, ident, None),
        ),
        Prop::KeyValue(kv) => (
          serialize_prop_name(ctx, &kv.key),
          serialize_expr(ctx, kv.value.as_ref()),
        ),
        // None of these prop kinds are valid attribute syntax; ignore
        // the syntax error and emit sentinels.
        Prop::Assign(_)
        | Prop::Getter(_)
        | Prop::Setter(_)
        | Prop::Method(_) => (NodeRef(0), NodeRef(0)),
      },
    };
    attrs.push(ctx.write_import_attr(&prop.span(), key, value));
  }
  attrs
}
/// Serializes a single statement (recursing into nested statements and
/// expressions) into the AST buffer.
fn serialize_stmt(ctx: &mut TsEsTreeBuilder, stmt: &Stmt) -> NodeRef {
  match stmt {
    Stmt::Block(node) => {
      let children = node
        .stmts
        .iter()
        .map(|stmt| serialize_stmt(ctx, stmt))
        .collect::<Vec<_>>();
      ctx.write_block_stmt(&node.span, children)
    }
    // Empty statements are dropped; NodeRef(0) is the "no node" sentinel
    // used throughout this serializer.
    Stmt::Empty(_) => NodeRef(0),
    Stmt::Debugger(node) => ctx.write_debugger_stmt(&node.span),
    Stmt::With(node) => {
      let obj = serialize_expr(ctx, &node.obj);
      let body = serialize_stmt(ctx, &node.body);
      ctx.write_with_stmt(&node.span, obj, body)
    }
    Stmt::Return(node) => {
      let arg = node.arg.as_ref().map(|arg| serialize_expr(ctx, arg));
      ctx.write_return_stmt(&node.span, arg)
    }
    Stmt::Labeled(node) => {
      let ident = serialize_ident(ctx, &node.label, None);
      let stmt = serialize_stmt(ctx, &node.body);
      ctx.write_labeled_stmt(&node.span, ident, stmt)
    }
    Stmt::Break(node) => {
      let arg = node
        .label
        .as_ref()
        .map(|label| serialize_ident(ctx, label, None));
      ctx.write_break_stmt(&node.span, arg)
    }
    Stmt::Continue(node) => {
      let arg = node
        .label
        .as_ref()
        .map(|label| serialize_ident(ctx, label, None));
      ctx.write_continue_stmt(&node.span, arg)
    }
    Stmt::If(node) => {
      let test = serialize_expr(ctx, node.test.as_ref());
      let cons = serialize_stmt(ctx, node.cons.as_ref());
      let alt = node.alt.as_ref().map(|alt| serialize_stmt(ctx, alt));
      ctx.write_if_stmt(&node.span, test, cons, alt)
    }
    Stmt::Switch(node) => {
      let disc = serialize_expr(ctx, &node.discriminant);
      let cases = node
        .cases
        .iter()
        .map(|case| {
          // `test` is None for the `default:` case.
          let test = case.test.as_ref().map(|test| serialize_expr(ctx, test));
          let cons = case
            .cons
            .iter()
            .map(|cons| serialize_stmt(ctx, cons))
            .collect::<Vec<_>>();
          ctx.write_switch_case(&case.span, test, cons)
        })
        .collect::<Vec<_>>();
      ctx.write_switch_stmt(&node.span, disc, cases)
    }
    Stmt::Throw(node) => {
      let arg = serialize_expr(ctx, &node.arg);
      ctx.write_throw_stmt(&node.span, arg)
    }
    Stmt::Try(node) => {
      let block = serialize_stmt(ctx, &Stmt::Block(node.block.clone()));
      let handler = node.handler.as_ref().map(|catch| {
        let param = catch
          .param
          .as_ref()
          .map(|param| serialize_pat(ctx, param, None));
        let body = serialize_stmt(ctx, &Stmt::Block(catch.body.clone()));
        ctx.write_catch_clause(&catch.span, param, body)
      });
      let finalizer = node
        .finalizer
        .as_ref()
        .map(|finalizer| serialize_stmt(ctx, &Stmt::Block(finalizer.clone())));
      ctx.write_try_stmt(&node.span, block, handler, finalizer)
    }
    Stmt::While(node) => {
      let test = serialize_expr(ctx, node.test.as_ref());
      let stmt = serialize_stmt(ctx, node.body.as_ref());
      ctx.write_while_stmt(&node.span, test, stmt)
    }
    Stmt::DoWhile(node) => {
      let expr = serialize_expr(ctx, node.test.as_ref());
      let stmt = serialize_stmt(ctx, node.body.as_ref());
      ctx.write_do_while_stmt(&node.span, expr, stmt)
    }
    Stmt::For(node) => {
      // The init clause is either a `var`/`let`/`const` declaration or an
      // arbitrary expression.
      let init = node.init.as_ref().map(|init| match init {
        VarDeclOrExpr::VarDecl(var_decl) => {
          serialize_stmt(ctx, &Stmt::Decl(Decl::Var(var_decl.clone())))
        }
        VarDeclOrExpr::Expr(expr) => serialize_expr(ctx, expr),
      });
      let test = node.test.as_ref().map(|expr| serialize_expr(ctx, expr));
      let update = node.update.as_ref().map(|expr| serialize_expr(ctx, expr));
      let body = serialize_stmt(ctx, node.body.as_ref());
      ctx.write_for_stmt(&node.span, init, test, update, body)
    }
    Stmt::ForIn(node) => {
      let left = serialize_for_head(ctx, &node.left);
      let right = serialize_expr(ctx, node.right.as_ref());
      let body = serialize_stmt(ctx, node.body.as_ref());
      ctx.write_for_in_stmt(&node.span, left, right, body)
    }
    Stmt::ForOf(node) => {
      let left = serialize_for_head(ctx, &node.left);
      let right = serialize_expr(ctx, node.right.as_ref());
      let body = serialize_stmt(ctx, node.body.as_ref());
      ctx.write_for_of_stmt(&node.span, node.is_await, left, right, body)
    }
    Stmt::Decl(node) => serialize_decl(ctx, node),
    Stmt::Expr(node) => {
      let expr = serialize_expr(ctx, node.expr.as_ref());
      ctx.write_expr_stmt(&node.span, expr)
    }
  }
}
fn serialize_expr(ctx: &mut TsEsTreeBuilder, expr: &Expr) -> NodeRef {
match expr {
Expr::This(node) => ctx.write_this_expr(&node.span),
Expr::Array(node) => {
let elems = node
.elems
.iter()
.map(|item| {
item
.as_ref()
.map_or(NodeRef(0), |item| serialize_expr_or_spread(ctx, item))
})
.collect::<Vec<_>>();
ctx.write_arr_expr(&node.span, elems)
}
Expr::Object(node) => {
let props = node
.props
.iter()
.map(|prop| serialize_prop_or_spread(ctx, prop))
.collect::<Vec<_>>();
ctx.write_obj_expr(&node.span, props)
}
Expr::Fn(node) => {
let fn_obj = node.function.as_ref();
let ident = node
.ident
.as_ref()
.map(|ident| serialize_ident(ctx, ident, None));
let type_params =
maybe_serialize_ts_type_param_decl(ctx, &fn_obj.type_params);
let params = fn_obj
.params
.iter()
.map(|param| {
let decorators = param
.decorators
.iter()
.map(|deco| serialize_decorator(ctx, deco))
.collect::<Vec<_>>();
serialize_pat(ctx, ¶m.pat, Some(decorators))
})
.collect::<Vec<_>>();
let return_id = maybe_serialize_ts_type_ann(ctx, &fn_obj.return_type);
let body = fn_obj
.body
.as_ref()
.map(|block| serialize_stmt(ctx, &Stmt::Block(block.clone())));
ctx.write_fn_expr(
&fn_obj.span,
fn_obj.is_async,
fn_obj.is_generator,
ident,
type_params,
params,
return_id,
body,
)
}
Expr::Unary(node) => {
let arg = serialize_expr(ctx, &node.arg);
let op = match node.op {
UnaryOp::Minus => "-",
UnaryOp::Plus => "+",
UnaryOp::Bang => "!",
UnaryOp::Tilde => "~",
UnaryOp::TypeOf => "typeof",
UnaryOp::Void => "void",
UnaryOp::Delete => "delete",
};
ctx.write_unary_expr(&node.span, op, arg)
}
Expr::Update(node) => {
let arg = serialize_expr(ctx, node.arg.as_ref());
let op = match node.op {
UpdateOp::PlusPlus => "++",
UpdateOp::MinusMinus => "--",
};
ctx.write_update_expr(&node.span, node.prefix, op, arg)
}
Expr::Bin(node) => {
let (node_type, flag_str) = match node.op {
BinaryOp::LogicalAnd => (AstNode::LogicalExpression, "&&"),
BinaryOp::LogicalOr => (AstNode::LogicalExpression, "||"),
BinaryOp::NullishCoalescing => (AstNode::LogicalExpression, "??"),
BinaryOp::EqEq => (AstNode::BinaryExpression, "=="),
BinaryOp::NotEq => (AstNode::BinaryExpression, "!="),
BinaryOp::EqEqEq => (AstNode::BinaryExpression, "==="),
BinaryOp::NotEqEq => (AstNode::BinaryExpression, "!=="),
BinaryOp::Lt => (AstNode::BinaryExpression, "<"),
BinaryOp::LtEq => (AstNode::BinaryExpression, "<="),
BinaryOp::Gt => (AstNode::BinaryExpression, ">"),
BinaryOp::GtEq => (AstNode::BinaryExpression, ">="),
BinaryOp::LShift => (AstNode::BinaryExpression, "<<"),
BinaryOp::RShift => (AstNode::BinaryExpression, ">>"),
BinaryOp::ZeroFillRShift => (AstNode::BinaryExpression, ">>>"),
BinaryOp::Add => (AstNode::BinaryExpression, "+"),
BinaryOp::Sub => (AstNode::BinaryExpression, "-"),
BinaryOp::Mul => (AstNode::BinaryExpression, "*"),
BinaryOp::Div => (AstNode::BinaryExpression, "/"),
BinaryOp::Mod => (AstNode::BinaryExpression, "%"),
BinaryOp::BitOr => (AstNode::BinaryExpression, "|"),
BinaryOp::BitXor => (AstNode::BinaryExpression, "^"),
BinaryOp::BitAnd => (AstNode::BinaryExpression, "&"),
BinaryOp::In => (AstNode::BinaryExpression, "in"),
BinaryOp::InstanceOf => (AstNode::BinaryExpression, "instanceof"),
BinaryOp::Exp => (AstNode::BinaryExpression, "**"),
};
let left = serialize_expr(ctx, node.left.as_ref());
let right = serialize_expr(ctx, node.right.as_ref());
match node_type {
AstNode::LogicalExpression => {
ctx.write_logical_expr(&node.span, flag_str, left, right)
}
AstNode::BinaryExpression => {
ctx.write_bin_expr(&node.span, flag_str, left, right)
}
_ => unreachable!(),
}
}
Expr::Assign(node) => {
let left = match &node.left {
AssignTarget::Simple(simple_assign_target) => {
match simple_assign_target {
SimpleAssignTarget::Ident(target) => {
serialize_binding_ident(ctx, target, None)
}
SimpleAssignTarget::Member(target) => {
serialize_expr(ctx, &Expr::Member(target.clone()))
}
SimpleAssignTarget::SuperProp(target) => {
serialize_expr(ctx, &Expr::SuperProp(target.clone()))
}
SimpleAssignTarget::Paren(target) => {
serialize_expr(ctx, &target.expr)
}
SimpleAssignTarget::OptChain(target) => {
serialize_expr(ctx, &Expr::OptChain(target.clone()))
}
SimpleAssignTarget::TsAs(target) => {
serialize_expr(ctx, &Expr::TsAs(target.clone()))
}
SimpleAssignTarget::TsSatisfies(target) => {
serialize_expr(ctx, &Expr::TsSatisfies(target.clone()))
}
SimpleAssignTarget::TsNonNull(target) => {
serialize_expr(ctx, &Expr::TsNonNull(target.clone()))
}
SimpleAssignTarget::TsTypeAssertion(target) => {
serialize_expr(ctx, &Expr::TsTypeAssertion(target.clone()))
}
SimpleAssignTarget::TsInstantiation(target) => {
serialize_expr(ctx, &Expr::TsInstantiation(target.clone()))
}
SimpleAssignTarget::Invalid(_) => {
// Ignore syntax errors
NodeRef(0)
}
}
}
AssignTarget::Pat(target) => match target {
AssignTargetPat::Array(array_pat) => {
serialize_pat(ctx, &Pat::Array(array_pat.clone()), None)
}
AssignTargetPat::Object(object_pat) => {
serialize_pat(ctx, &Pat::Object(object_pat.clone()), None)
}
AssignTargetPat::Invalid(_) => {
// Ignore syntax errors
NodeRef(0)
}
},
};
let right = serialize_expr(ctx, node.right.as_ref());
let op = match node.op {
AssignOp::Assign => "=",
AssignOp::AddAssign => "+=",
AssignOp::SubAssign => "-=",
AssignOp::MulAssign => "*=",
AssignOp::DivAssign => "/=",
AssignOp::ModAssign => "%=",
AssignOp::LShiftAssign => "<<=",
AssignOp::RShiftAssign => ">>=",
AssignOp::ZeroFillRShiftAssign => ">>>=",
AssignOp::BitOrAssign => "|=",
AssignOp::BitXorAssign => "^=",
AssignOp::BitAndAssign => "&=",
AssignOp::ExpAssign => "**=",
AssignOp::AndAssign => "&&=",
AssignOp::OrAssign => "||=",
AssignOp::NullishAssign => "??=",
};
ctx.write_assignment_expr(&node.span, op, left, right)
}
Expr::Member(node) => serialize_member_expr(ctx, node, false),
Expr::SuperProp(node) => {
let obj = ctx.write_super(&node.obj.span);
let mut computed = false;
let prop = match &node.prop {
SuperProp::Ident(ident_name) => serialize_ident_name(ctx, ident_name),
SuperProp::Computed(prop) => {
computed = true;
serialize_expr(ctx, &prop.expr)
}
};
ctx.write_member_expr(&node.span, false, computed, obj, prop)
}
Expr::Cond(node) => {
let test = serialize_expr(ctx, node.test.as_ref());
let cons = serialize_expr(ctx, node.cons.as_ref());
let alt = serialize_expr(ctx, node.alt.as_ref());
ctx.write_conditional_expr(&node.span, test, cons, alt)
}
Expr::Call(node) => {
if let Callee::Import(_) = node.callee {
let source = node
.args
.first()
.map_or(NodeRef(0), |arg| serialize_expr_or_spread(ctx, arg));
let options = node
.args
.get(1)
.map(|arg| serialize_expr_or_spread(ctx, arg));
ctx.write_import_expr(&node.span, source, options)
} else {
let callee = match &node.callee {
Callee::Super(super_node) => ctx.write_super(&super_node.span),
Callee::Import(_) => unreachable!("Already handled"),
Callee::Expr(expr) => serialize_expr(ctx, expr),
};
let type_arg = node
.type_args
.clone()
.map(|param_node| serialize_ts_param_inst(ctx, param_node.as_ref()));
let args = node
.args
.iter()
.map(|arg| serialize_expr_or_spread(ctx, arg))
.collect::<Vec<_>>();
ctx.write_call_expr(&node.span, false, callee, type_arg, args)
}
}
Expr::New(node) => {
let callee = serialize_expr(ctx, node.callee.as_ref());
let args: Vec<NodeRef> = node.args.as_ref().map_or(vec![], |args| {
args
.iter()
.map(|arg| serialize_expr_or_spread(ctx, arg))
.collect::<Vec<_>>()
});
let type_args = node
.type_args
.clone()
.map(|param_node| serialize_ts_param_inst(ctx, param_node.as_ref()));
ctx.write_new_expr(&node.span, callee, type_args, args)
}
Expr::Seq(node) => {
let children = node
.exprs
.iter()
.map(|expr| serialize_expr(ctx, expr))
.collect::<Vec<_>>();
ctx.write_sequence_expr(&node.span, children)
}
Expr::Ident(node) => serialize_ident(ctx, node, None),
Expr::Lit(node) => serialize_lit(ctx, node),
Expr::Tpl(node) => {
let quasis = node
.quasis
.iter()
.map(|quasi| {
ctx.write_template_elem(
&quasi.span,
quasi.tail,
&quasi.raw,
&quasi
.cooked
.as_ref()
.map_or("".to_string(), |v| v.to_string_lossy().to_string()),
)
})
.collect::<Vec<_>>();
let exprs = node
.exprs
.iter()
.map(|expr| serialize_expr(ctx, expr))
.collect::<Vec<_>>();
ctx.write_template_lit(&node.span, quasis, exprs)
}
Expr::TaggedTpl(node) => {
let tag = serialize_expr(ctx, &node.tag);
let type_param = node
.type_params
.clone()
.map(|params| serialize_ts_param_inst(ctx, params.as_ref()));
let quasi = serialize_expr(ctx, &Expr::Tpl(*node.tpl.clone()));
ctx.write_tagged_template_expr(&node.span, tag, type_param, quasi)
}
Expr::Arrow(node) => {
let type_param =
maybe_serialize_ts_type_param_decl(ctx, &node.type_params);
let params = node
.params
.iter()
.map(|param| serialize_pat(ctx, param, None))
.collect::<Vec<_>>();
let body = match node.body.as_ref() {
BlockStmtOrExpr::BlockStmt(block_stmt) => {
serialize_stmt(ctx, &Stmt::Block(block_stmt.clone()))
}
BlockStmtOrExpr::Expr(expr) => serialize_expr(ctx, expr.as_ref()),
};
let return_type = maybe_serialize_ts_type_ann(ctx, &node.return_type);
ctx.write_arrow_fn_expr(
&node.span,
node.is_async,
node.is_generator,
type_param,
params,
return_type,
body,
)
}
Expr::Class(node) => {
let ident = node
.ident
.as_ref()
.map(|ident| serialize_ident(ctx, ident, None));
let type_params =
maybe_serialize_ts_type_param_decl(ctx, &node.class.type_params);
let super_class = node
.class
.super_class
.as_ref()
.map(|expr| serialize_expr(ctx, expr.as_ref()));
let super_type_args = node
.class
.super_type_params
.as_ref()
.map(|param| serialize_ts_param_inst(ctx, param.as_ref()));
let implements = node
.class
.implements
.iter()
.map(|item| serialize_ts_expr_with_type_args(ctx, item))
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | true |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/tools/lint/ast_buffer/mod.rs | cli/tools/lint/ast_buffer/mod.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use deno_ast::ParsedSource;
use swc::serialize_swc_to_buffer;
use crate::util::text_encoding::Utf16Map;
mod buffer;
mod swc;
mod ts_estree;
/// Serializes the parsed module into the binary AST buffer format that is
/// later decoded on the JS side (see `buffer.rs` for the wire layout).
///
/// `utf16_map` is used by the serializer to convert UTF-8 byte offsets into
/// the UTF-16 offsets JavaScript expects.
pub fn serialize_ast_to_buffer(
  parsed_source: &ParsedSource,
  utf16_map: &Utf16Map,
) -> Vec<u8> {
  // TODO: We could support multiple languages here
  serialize_swc_to_buffer(parsed_source, utf16_map)
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/tools/lint/ast_buffer/buffer.rs | cli/tools/lint/ast_buffer/buffer.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::fmt::Display;
use deno_ast::swc::common::DUMMY_SP;
use deno_ast::swc::common::Span;
use indexmap::IndexMap;
use crate::util::text_encoding::Utf16Map;
/// Each property has this flag to mark what kind of value it holds.
/// Plain objects and arrays are not supported yet, but could be easily
/// added if needed.
///
/// The discriminant values are part of the wire format (see the matching
/// decoder on the JS side), so variant order must not change.
#[derive(Debug, PartialEq)]
pub enum PropFlags {
  Ref,
  RefArr,
  String,
  Number,
  Bool,
  Null,
  Undefined,
  Object,
  Regex,
  BigInt,
  Array,
}
/// A `PropFlags` value is serialized as its discriminant byte.
impl From<PropFlags> for u8 {
  fn from(m: PropFlags) -> u8 {
    m as u8
  }
}
impl TryFrom<u8> for PropFlags {
type Error = &'static str;
fn try_from(value: u8) -> Result<Self, Self::Error> {
match value {
0 => Ok(PropFlags::Ref),
1 => Ok(PropFlags::RefArr),
2 => Ok(PropFlags::String),
3 => Ok(PropFlags::Number),
4 => Ok(PropFlags::Bool),
5 => Ok(PropFlags::Null),
6 => Ok(PropFlags::Undefined),
7 => Ok(PropFlags::Object),
8 => Ok(PropFlags::Regex),
9 => Ok(PropFlags::BigInt),
10 => Ok(PropFlags::Array),
_ => Err("Unknown Prop flag"),
}
}
}
/// Index of a node inside `SerializeCtx::nodes`.
pub type Index = u32;

/// Reserved node kind used for synthetic "group" nodes that hold the
/// children of a ref-array property (see `write_ref_vec`).
const GROUP_KIND: u8 = 1;

/// Appends `value` to `result` in big-endian byte order.
///
/// Replaces a hand-rolled mask-and-shift implementation with the
/// equivalent `u32::to_be_bytes`, which produces the same four bytes.
#[inline]
fn append_u32(result: &mut Vec<u8>, value: u32) {
  result.extend_from_slice(&value.to_be_bytes());
}
/// Appends `value` as a big-endian u32; panics if it does not fit in 32 bits.
fn append_usize(result: &mut Vec<u8>, value: usize) {
  let value32: u32 = value.try_into().unwrap();
  append_u32(result, value32);
}
/// Interning table mapping each distinct string to a small numeric id.
/// Ids are handed out sequentially, so an entry's id equals its insertion
/// index in the `IndexMap`.
#[derive(Debug)]
pub struct StringTable {
  // next id to hand out (== table.len() at all times)
  id: usize,
  // string -> id; IndexMap preserves insertion (and therefore id) order
  table: IndexMap<String, usize>,
}
impl StringTable {
  pub fn new() -> Self {
    Self {
      id: 0,
      table: IndexMap::new(),
    }
  }

  /// Returns the id for `s`, interning it on first use.
  pub fn insert(&mut self, s: &str) -> usize {
    if let Some(id) = self.table.get(s) {
      return *id;
    }

    let id = self.id;
    self.id += 1;
    self.table.insert(s.to_string(), id);
    id
  }

  /// Serializes the table as `<count u32>` followed by, for each string in
  /// id order, `<byte length u32><utf-8 bytes>`.
  pub fn serialize(&mut self) -> Vec<u8> {
    let mut result: Vec<u8> = vec![];
    append_u32(&mut result, self.table.len() as u32);

    // IndexMap iterates in insertion order, i.e. ascending id
    for s in self.table.keys() {
      let bytes = s.as_bytes();
      append_u32(&mut result, bytes.len() as u32);
      // copy directly instead of allocating a temporary Vec per string
      result.extend_from_slice(bytes);
    }

    result
  }
}
/// Reference to a node that has been fully written (committed).
#[derive(Debug, Clone, Copy, PartialEq)]
pub struct NodeRef(pub Index);
/// Reference to a node that has been allocated but whose properties are
/// still being collected; turned into a `NodeRef` by `commit_node`.
#[derive(Debug, Clone, Copy, PartialEq)]
pub struct PendingRef(pub Index);
/// Implemented by language-specific serializers that can flush their state
/// into the final binary message.
pub trait AstBufSerializer {
  fn serialize(&mut self) -> Vec<u8>;
}
/// A single AST node as written to the buffer. Serialized layout:
///
/// <type u8>
/// <prop offset u32>
/// <child idx u32>
/// <next idx u32>
/// <parent idx u32>
#[derive(Debug)]
struct Node {
  // node kind id (key into kind_name_map)
  kind: u8,
  // offset into SerializeCtx::field_buf where this node's fields start
  prop_offset: u32,
  // index of the first child node (0 = none; node 0 is the placeholder)
  child: u32,
  // index of the next sibling node (0 = none)
  next: u32,
  // index of the parent node (0 = none / placeholder)
  parent: u32,
}
/// Whether a comment is a `// line` or `/* block */` comment; serialized as
/// 0 and 1 respectively (see `SerializeCtx::serialize`).
#[derive(Debug)]
pub enum CommentKind {
  Line,
  Block,
}
/// A source comment: its kind, interned text, and the index of its span in
/// the shared span list.
#[derive(Debug)]
struct Comment {
  kind: CommentKind,
  // id of the comment text in the string table
  str_id: usize,
  // index into SerializeCtx::spans (one entry per lo/hi pair)
  span_id: usize,
}
#[derive(Debug)]
pub struct SerializeCtx {
  // index of the root node, written at the very end of the message
  root_idx: Index,
  nodes: Vec<Node>,
  // one property buffer per node currently being built (stack of open nodes)
  prop_stack: Vec<Vec<u8>>,
  // number of fields written so far for each open node/object
  field_count: Vec<usize>,
  // flattened storage for all committed property data
  field_buf: Vec<u8>,
  // last committed child for each open node, used to link sibling nodes
  prev_sibling_stack: Vec<Index>,
  /// Vec of spans, stored as flat (lo, hi) u32 pairs
  spans: Vec<u32>,
  /// Maps string id to the actual string
  str_table: StringTable,
  /// Maps kind id to string id
  kind_name_map: Vec<usize>,
  /// Maps prop id to string id
  prop_name_map: Vec<usize>,
  /// Comments
  comments: Vec<Comment>,
}
/// This is the internal context used to allocate and fill the buffer. The point
/// is to be able to write absolute offsets directly in place.
///
/// The typical workflow is to reserve all necessary space for the current
/// node with placeholders for the offsets of the child nodes. Once child
/// nodes have been traversed, we know their offsets and can replace the
/// placeholder values with the actual ones.
impl SerializeCtx {
  /// Creates a context sized for `kind_len` node kinds and `prop_len`
  /// property names, reserving node 0 as the placeholder ("null") node and
  /// pre-interning the property names the JS side always expects.
  pub fn new(kind_len: u8, prop_len: u8) -> Self {
    let kind_size = kind_len as usize;
    let prop_size = prop_len as usize;
    let mut ctx = Self {
      spans: Vec::with_capacity(512),
      root_idx: 0,
      nodes: Vec::with_capacity(512),
      prop_stack: vec![vec![]],
      prev_sibling_stack: vec![0],
      field_count: vec![0],
      field_buf: Vec::with_capacity(1024),
      str_table: StringTable::new(),
      kind_name_map: vec![0; kind_size],
      prop_name_map: vec![0; prop_size],
      comments: vec![],
    };

    // empty string gets id 0 so that "no name" maps to it by default
    let empty_str = ctx.str_table.insert("");

    // Placeholder node is always 0
    ctx.append_node(0, &DUMMY_SP);
    // kind 1 is GROUP_KIND; both render with an empty name
    ctx.kind_name_map[0] = empty_str;
    ctx.kind_name_map[1] = empty_str;

    // Insert default props that are always present
    let type_str = ctx.str_table.insert("type");
    let parent_str = ctx.str_table.insert("parent");
    let range_str = ctx.str_table.insert("range");
    let length_str = ctx.str_table.insert("length");

    // These values are expected to be in this order on the JS side
    ctx.prop_name_map[0] = empty_str;
    ctx.prop_name_map[1] = type_str;
    ctx.prop_name_map[2] = parent_str;
    ctx.prop_name_map[3] = range_str;
    ctx.prop_name_map[4] = length_str;

    ctx
  }

  /// Records which node is the program root; written as the final u32 of
  /// the serialized message.
  pub fn set_root_idx(&mut self, idx: Index) {
    self.root_idx = idx;
  }

  /// Converts every stored UTF-8 offset into the corresponding UTF-16
  /// offset in place. Panics if an offset is not mappable.
  pub fn map_utf8_spans_to_utf16(&mut self, map: &Utf16Map) {
    for value in &mut self.spans {
      *value = map
        .utf8_to_utf16_offset((*value).into())
        .unwrap_or_else(|| panic!("Failed converting '{value}' to utf16."))
        .into();
    }
  }

  /// Writes a field header (prop id + flag byte) into the property buffer of
  /// the innermost open node/object, interning the prop name on first use.
  fn field_header<P>(&mut self, prop: P, prop_flags: PropFlags)
  where
    P: Into<u8> + Display + Clone,
  {
    let flags: u8 = prop_flags.into();
    let n: u8 = prop.clone().into();

    // lazily intern the property's display name the first time it's seen
    if let Some(v) = self.prop_name_map.get::<usize>(n.into())
      && *v == 0
    {
      let id = self.str_table.insert(&format!("{prop}"));
      self.prop_name_map[n as usize] = id;
    }

    // Increment field counter
    let idx = self.field_count.len() - 1;
    let count = self.field_count[idx];
    self.field_count[idx] = count + 1;

    let buf = self.prop_stack.last_mut().unwrap();
    buf.push(n);
    buf.push(flags);
  }

  fn get_node(&mut self, id: Index) -> &mut Node {
    self.nodes.get_mut(id as usize).unwrap()
  }

  fn set_parent(&mut self, child_id: Index, parent_id: Index) {
    let child = self.get_node(child_id);
    child.parent = parent_id;
  }

  fn set_child(&mut self, parent_id: Index, child_id: Index) {
    let parent = self.get_node(parent_id);
    parent.child = child_id;
  }

  fn set_next(&mut self, node_id: Index, next_id: Index) {
    let node = self.get_node(node_id);
    node.next = next_id;
  }

  /// Hooks `child_id` into the tree under `parent_id`: first child goes in
  /// the parent's `child` slot, later children are chained via the previous
  /// sibling's `next` slot.
  fn update_ref_links(&mut self, parent_id: Index, child_id: Index) {
    let last_idx = self.prev_sibling_stack.len() - 1;

    let parent = self.get_node(parent_id);
    if parent.child == 0 {
      parent.child = child_id;
    } else {
      let prev_id = self.prev_sibling_stack[last_idx];
      self.set_next(prev_id, child_id);
    }

    self.prev_sibling_stack[last_idx] = child_id;
    self.set_parent(child_id, parent_id);
  }

  /// Allocates a new node for `span`; properties are collected until the
  /// matching `commit_node` call.
  pub fn append_node<K>(&mut self, kind: K, span: &Span) -> PendingRef
  where
    K: Into<u8> + Display + Clone,
  {
    let (start, end) = span_to_value(span);
    self.append_inner(kind, start, end)
  }

  /// Like `append_node` but with explicit span bounds. Pushes a fresh
  /// property buffer / field counter / sibling tracker for the new node.
  pub fn append_inner<K>(
    &mut self,
    kind: K,
    span_lo: u32,
    span_hi: u32,
  ) -> PendingRef
  where
    K: Into<u8> + Display + Clone,
  {
    let kind_u8: u8 = kind.clone().into();

    let id: Index = self.nodes.len() as u32;
    self.nodes.push(Node {
      kind: kind_u8,
      prop_offset: 0,
      child: 0,
      next: 0,
      parent: 0,
    });

    // lazily intern the kind's display name the first time it's seen
    if let Some(v) = self.kind_name_map.get::<usize>(kind_u8.into())
      && *v == 0
    {
      let s_id = self.str_table.insert(&format!("{kind}"));
      self.kind_name_map[kind_u8 as usize] = s_id;
    }

    self.field_count.push(0);
    self.prop_stack.push(vec![]);
    self.prev_sibling_stack.push(0);

    // write spans
    self.spans.push(span_lo);
    self.spans.push(span_hi);

    PendingRef(id)
  }

  /// Finalizes a pending node: flushes its collected properties into
  /// `field_buf` (prefixed by the field count) and records that offset.
  pub fn commit_node(&mut self, id: PendingRef) -> NodeRef {
    let mut buf = self.prop_stack.pop().unwrap();
    let count = self.field_count.pop().unwrap();

    let offset = self.field_buf.len();

    // All nodes have <10 fields, so the count is assumed to fit in a u8
    // (a larger count would silently truncate here).
    self.field_buf.push(count as u8);
    self.field_buf.append(&mut buf);

    let node = self.nodes.get_mut(id.0 as usize).unwrap();
    node.prop_offset = offset as u32;

    self.prev_sibling_stack.pop();

    NodeRef(id.0)
  }

  // Begin collecting fields of a plain-object property value;
  // closed with `commit_obj`.
  pub fn open_obj(&mut self) {
    self.field_count.push(0);
    self.prop_stack.push(vec![]);
  }

  /// Closes the object opened by `open_obj`, storing its fields in
  /// `field_buf` (count as u32 here, unlike node field counts) and writing
  /// an Object field pointing at that offset.
  pub fn commit_obj<P>(&mut self, prop: P)
  where
    P: Into<u8> + Display + Clone,
  {
    let mut buf = self.prop_stack.pop().unwrap();
    let count = self.field_count.pop().unwrap();
    let offset = self.field_buf.len();
    append_usize(&mut self.field_buf, count);
    self.field_buf.append(&mut buf);

    self.field_header(prop, PropFlags::Object);
    let buf = self.prop_stack.last_mut().unwrap();
    append_usize(buf, offset);
  }

  /// Allocate a null field
  pub fn write_null<P>(&mut self, prop: P)
  where
    P: Into<u8> + Display + Clone,
  {
    self.field_header(prop, PropFlags::Null);

    let buf = self.prop_stack.last_mut().unwrap();
    append_u32(buf, 0);
  }

  /// Allocate an undefined field
  pub fn write_undefined<P>(&mut self, prop: P)
  where
    P: Into<u8> + Display + Clone,
  {
    self.field_header(prop, PropFlags::Undefined);

    let buf = self.prop_stack.last_mut().unwrap();
    append_u32(buf, 0);
  }

  /// Allocate a number field. The value is stored as its string
  /// representation via the string table.
  pub fn write_num<P>(&mut self, prop: P, value: &str)
  where
    P: Into<u8> + Display + Clone,
  {
    self.field_header(prop, PropFlags::Number);

    let id = self.str_table.insert(value);
    let buf = self.prop_stack.last_mut().unwrap();
    append_usize(buf, id);
  }

  /// Allocate a bigint field, stored as an interned string.
  pub fn write_bigint<P>(&mut self, prop: P, value: &str)
  where
    P: Into<u8> + Display + Clone,
  {
    self.field_header(prop, PropFlags::BigInt);

    let id = self.str_table.insert(value);
    let buf = self.prop_stack.last_mut().unwrap();
    append_usize(buf, id);
  }

  /// Allocate a RegExp field, stored as an interned string.
  pub fn write_regex<P>(&mut self, prop: P, value: &str)
  where
    P: Into<u8> + Display + Clone,
  {
    self.field_header(prop, PropFlags::Regex);

    let id = self.str_table.insert(value);
    let buf = self.prop_stack.last_mut().unwrap();
    append_usize(buf, id);
  }

  /// Store the string in our string table and save the id of the string
  /// in the current field.
  pub fn write_str<P>(&mut self, prop: P, value: &str)
  where
    P: Into<u8> + Display + Clone,
  {
    self.field_header(prop, PropFlags::String);

    let id = self.str_table.insert(value);
    let buf = self.prop_stack.last_mut().unwrap();
    append_usize(buf, id);
  }

  /// Write a bool to a field.
  pub fn write_bool<P>(&mut self, prop: P, value: bool)
  where
    P: Into<u8> + Display + Clone,
  {
    self.field_header(prop, PropFlags::Bool);

    let n = if value { 1 } else { 0 };
    let buf = self.prop_stack.last_mut().unwrap();
    append_u32(buf, n);
  }

  /// Replace the placeholder of a reference field with the actual offset
  /// to the node we want to point to. Also links `value` into the tree
  /// under `parent` (unless parent is the placeholder node 0).
  pub fn write_ref<P>(&mut self, prop: P, parent: &PendingRef, value: NodeRef)
  where
    P: Into<u8> + Display + Clone,
  {
    self.field_header(prop, PropFlags::Ref);
    let buf = self.prop_stack.last_mut().unwrap();
    append_u32(buf, value.0);

    if parent.0 > 0 {
      self.update_ref_links(parent.0, value.0);
    }
  }

  /// Helper for writing optional node offsets
  pub fn write_maybe_ref<P>(
    &mut self,
    prop: P,
    parent: &PendingRef,
    value: Option<NodeRef>,
  ) where
    P: Into<u8> + Display + Clone,
  {
    if let Some(v) = value {
      self.write_ref(prop, parent, v);
    } else {
      self.write_null(prop);
    };
  }

  /// Helper for writing optional node offsets with undefined as empty value
  pub fn write_maybe_undef_ref<P>(
    &mut self,
    prop: P,
    parent: &PendingRef,
    value: Option<NodeRef>,
  ) where
    P: Into<u8> + Display + Clone,
  {
    if let Some(v) = value {
      self.write_ref(prop, parent, v);
    } else {
      self.write_undefined(prop);
    };
  }

  /// Write a vec of node offsets into the property. The necessary space
  /// has been reserved earlier. A synthetic GROUP node is inserted as the
  /// tree parent of the items so traversal sees them as its children.
  pub fn write_ref_vec<P>(
    &mut self,
    prop: P,
    parent_ref: &PendingRef,
    value: Vec<NodeRef>,
  ) where
    P: Into<u8> + Display + Clone,
  {
    self.field_header(prop, PropFlags::RefArr);

    // the group node carries no fields; it only anchors the array items
    let group_id = self.append_node(GROUP_KIND, &DUMMY_SP);
    let group_id = self.commit_node(group_id).0;

    let buf = self.prop_stack.last_mut().unwrap();
    append_u32(buf, group_id);

    self.update_ref_links(parent_ref.0, group_id);

    // chain the items as children of the group node
    let mut prev_id = 0;
    for (i, item) in value.iter().enumerate() {
      self.set_parent(item.0, group_id);

      if i == 0 {
        self.set_child(group_id, item.0);
      } else {
        self.set_next(prev_id, item.0);
      }

      prev_id = item.0;
    }
  }

  /// Like `write_ref_vec` but writes nothing at all when `value` is `None`
  /// (the field is simply absent, not null).
  pub fn write_maybe_ref_vec_skip<P>(
    &mut self,
    prop: P,
    parent_ref: &PendingRef,
    value: Option<Vec<NodeRef>>,
  ) where
    P: Into<u8> + Display + Clone,
  {
    if let Some(value) = value {
      self.write_ref_vec(prop, parent_ref, value);
    }
  }

  /// Like `write_ref_vec` but treats `None` as an empty array.
  pub fn write_ref_vec_or_empty<P>(
    &mut self,
    prop: P,
    parent_ref: &PendingRef,
    value: Option<Vec<NodeRef>>,
  ) where
    P: Into<u8> + Display + Clone,
  {
    let actual = value.unwrap_or_default();
    self.write_ref_vec(prop, parent_ref, actual)
  }

  /// Records a comment: text goes into the string table, its span is
  /// appended to the shared span list (one span id per lo/hi pair).
  pub fn write_comment(&mut self, kind: CommentKind, value: &str, span: &Span) {
    let str_id = self.str_table.insert(value);
    let span_id = self.spans.len() / 2;
    let (span_lo, span_hi) = span_to_value(span);
    self.spans.push(span_lo);
    self.spans.push(span_hi);

    self.comments.push(Comment {
      kind,
      str_id,
      span_id,
    });
  }

  /// Serialize all information we have into a buffer that can be sent to JS.
  /// It has the following structure:
  ///
  /// <...ast>
  /// <string table>
  /// <node kind map> <- node kind id maps to string id
  /// <node prop map> <- node property id maps to string id
  /// <spans>  <- List of spans, rarely needed
  /// <offset spans>
  /// <offset kind map>
  /// <offset prop map>
  /// <offset str table>
  pub fn serialize(&mut self) -> Vec<u8> {
    let mut buf: Vec<u8> = vec![];

    // The buffer starts with the serialized AST first, because that
    // contains absolute offsets. By putting this at the start of the
    // message we don't have to waste time updating any offsets.
    for node in &self.nodes {
      buf.push(node.kind);
      append_u32(&mut buf, node.prop_offset);
      append_u32(&mut buf, node.child);
      append_u32(&mut buf, node.next);
      append_u32(&mut buf, node.parent);
    }

    // Next follows the string table. We'll keep track of the offset
    // in the message of where the string table begins
    let offset_str_table = buf.len();

    // Serialize string table
    buf.append(&mut self.str_table.serialize());

    // Next, serialize the mappings of kind -> string of encountered
    // nodes in the AST. We use this additional lookup table to compress
    // the message so that we can save space by using a u8 . All nodes of
    // JS, TS and JSX together are <200
    let offset_kind_map = buf.len();

    // Write the total number of entries in the kind -> str mapping table
    // TODO: make this a u8
    append_usize(&mut buf, self.kind_name_map.len());
    for v in &self.kind_name_map {
      append_usize(&mut buf, *v);
    }

    // Store offset to prop -> string map. It's the same as with node kind
    // as the total number of properties is <120 which allows us to store it
    // as u8.
    let offset_prop_map = buf.len();
    // Write the total number of entries in the kind -> str mapping table
    append_usize(&mut buf, self.prop_name_map.len());
    for v in &self.prop_name_map {
      append_usize(&mut buf, *v);
    }

    // Spans are rarely needed, so they're stored in a separate array.
    // They're indexed by the node id.
    let offset_spans = buf.len();
    for v in &self.spans {
      append_u32(&mut buf, *v);
    }

    // The field value table. They're detached from nodes as they're not
    // as frequently needed as the nodes themselves. The most common
    // operation is traversal and we can traverse nodes without knowing
    // about the fields.
    let offset_props = buf.len();
    buf.append(&mut self.field_buf);

    // Serialize comments
    let offset_comments = buf.len();
    append_usize(&mut buf, self.comments.len());
    for comment in &self.comments {
      let kind = match comment.kind {
        CommentKind::Line => 0,
        CommentKind::Block => 1,
      };
      buf.push(kind);
      append_usize(&mut buf, comment.span_id);
      append_usize(&mut buf, comment.str_id);
    }

    // Putting offsets of relevant parts of the buffer at the end. This
    // allows us to hop to the relevant part by merely looking at the last
    // few values in the message. Each value represents an offset into the
    // buffer.
    append_usize(&mut buf, offset_comments);
    append_usize(&mut buf, offset_props);
    append_usize(&mut buf, offset_spans);
    append_usize(&mut buf, offset_kind_map);
    append_usize(&mut buf, offset_prop_map);
    append_usize(&mut buf, offset_str_table);
    append_u32(&mut buf, self.root_idx);

    buf
  }
}
/// Converts a swc span into the 0-indexed (lo, hi) pair stored in the
/// buffer; the dummy span maps to (0, 0).
fn span_to_value(span: &Span) -> (u32, u32) {
  if *span == DUMMY_SP {
    return (0, 0);
  }
  // -1 is because swc stores spans 1-indexed
  (span.lo.0 - 1, span.hi.0 - 1)
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/tools/lint/rules/no_slow_types.rs | cli/tools/lint/rules/no_slow_types.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use deno_ast::ModuleSpecifier;
use deno_ast::diagnostics::Diagnostic;
use deno_graph::ModuleGraph;
use deno_graph::fast_check::FastCheckDiagnostic;
use deno_lint::diagnostic::LintDiagnostic;
use deno_lint::diagnostic::LintDiagnosticDetails;
use deno_lint::diagnostic::LintDiagnosticRange;
use deno_lint::diagnostic::LintDocsUrl;
use deno_lint::tags;
use super::PackageLintRule;
// Rule code reported in diagnostics and used for include/exclude matching.
const CODE: &str = "no-slow-types";

/// JSR-only lint rule that surfaces "slow types" (declarations that defeat
/// fast check) as lint diagnostics.
#[derive(Debug)]
pub struct NoSlowTypesRule;
impl PackageLintRule for NoSlowTypesRule {
  fn code(&self) -> &'static str {
    CODE
  }

  fn tags(&self) -> tags::Tags {
    &[tags::JSR]
  }

  fn help_docs_url(&self) -> Cow<'static, str> {
    Cow::Borrowed("https://jsr.io/docs/about-slow-types")
  }

  /// Runs the fast-check analysis over the package graph and converts each
  /// resulting fast-check diagnostic into the generic lint diagnostic shape.
  fn lint_package(
    &self,
    graph: &ModuleGraph,
    entrypoints: &[ModuleSpecifier],
  ) -> Vec<LintDiagnostic> {
    let mut diagnostics = Vec::new();
    for d in collect_no_slow_type_diagnostics(graph, entrypoints) {
      let range = d.range().map(|range| LintDiagnosticRange {
        text_info: range.text_info.clone(),
        range: range.range,
        description: d.range_description().map(|r| r.to_string()),
      });
      let info = d
        .info()
        .iter()
        .map(|info| Cow::Owned(info.to_string()))
        .collect();
      let custom_docs_url = match d.docs_url() {
        Some(u) => LintDocsUrl::Custom(u.into_owned()),
        None => Default::default(),
      };
      diagnostics.push(LintDiagnostic {
        specifier: d.specifier().clone(),
        range,
        details: LintDiagnosticDetails {
          message: d.message().to_string(),
          code: CODE.to_string(),
          hint: d.hint().map(|h| h.to_string()),
          info,
          fixes: vec![],
          custom_docs_url,
        },
      });
    }
    diagnostics
  }
}
/// Collects fast-check ("slow types") diagnostics from the module graph for
/// the given package's export URLs, sorted deterministically.
pub fn collect_no_slow_type_diagnostics(
  graph: &ModuleGraph,
  package_export_urls: &[ModuleSpecifier],
) -> Vec<FastCheckDiagnostic> {
  // fast check puts the same diagnostics in each entrypoint for the
  // package (since it's all or nothing), so only the first JS entrypoint
  // needs to be inspected
  let maybe_module = package_export_urls
    .iter()
    .find_map(|url| graph.get(url).and_then(|m| m.js()));
  let Some(module) = maybe_module else {
    // could happen if all the exports are JSON
    return Vec::new();
  };
  let Some(diagnostics) = module.fast_check_diagnostics() else {
    return Vec::new();
  };

  let mut diagnostics = diagnostics.clone();
  // stable ordering: by specifier, then range, then code
  diagnostics.sort_by_cached_key(|d| {
    (
      d.specifier().clone(),
      d.range().map(|r| r.range),
      d.code().to_string(),
    )
  });
  diagnostics
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/tools/lint/rules/mod.rs | cli/tools/lint/rules/mod.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::collections::HashSet;
use std::sync::Arc;
use deno_ast::ModuleSpecifier;
use deno_config::deno_json::LintRulesConfig;
use deno_config::workspace::WorkspaceDirectory;
use deno_core::anyhow::bail;
use deno_core::error::AnyError;
use deno_graph::ModuleGraph;
use deno_lint::diagnostic::LintDiagnostic;
use deno_lint::rules::LintRule;
use deno_lint::tags;
use deno_resolver::workspace::WorkspaceResolver;
use crate::sys::CliSys;
mod no_sloppy_imports;
mod no_slow_types;
// used for publishing
pub use no_slow_types::collect_no_slow_type_diagnostics;
/// A lint rule that operates on a whole package's module graph rather than
/// a single file (e.g. no-slow-types).
pub trait PackageLintRule: std::fmt::Debug + Send + Sync {
  fn code(&self) -> &'static str;

  fn tags(&self) -> tags::Tags {
    &[]
  }

  fn help_docs_url(&self) -> Cow<'static, str>;

  fn lint_package(
    &self,
    graph: &ModuleGraph,
    entrypoints: &[ModuleSpecifier],
  ) -> Vec<LintDiagnostic>;
}
/// A per-file `LintRule` with CLI-specific extras (cacheability and a custom
/// docs URL) that can still be downgraded to a plain `LintRule`.
pub(super) trait ExtendedLintRule: LintRule {
  /// If the rule supports the incremental cache.
  fn supports_incremental_cache(&self) -> bool;

  fn help_docs_url(&self) -> Cow<'static, str>;

  /// Converts into the underlying `deno_lint` rule trait object.
  fn into_base(self: Box<Self>) -> Box<dyn LintRule>;
}
/// The two execution scopes a rule can run at: a single file, or the whole
/// package graph.
pub enum FileOrPackageLintRule {
  File(Box<dyn LintRule>),
  Package(Box<dyn PackageLintRule>),
}
/// Internal classification of where a rule comes from: upstream deno_lint,
/// a CLI-extended per-file rule, or a package-level rule.
#[derive(Debug)]
enum CliLintRuleKind {
  DenoLint(Box<dyn LintRule>),
  Extended(Box<dyn ExtendedLintRule>),
  Package(Box<dyn PackageLintRule>),
}
/// Uniform wrapper over all lint rule flavors the CLI knows about.
#[derive(Debug)]
pub struct CliLintRule(CliLintRuleKind);
/// Rules are considered equal when their codes match (codes are unique).
impl PartialEq for CliLintRule {
  fn eq(&self, other: &Self) -> bool {
    self.code() == other.code()
  }
}
impl CliLintRule {
pub fn code(&self) -> &'static str {
use CliLintRuleKind::*;
match &self.0 {
DenoLint(rule) => rule.code(),
Extended(rule) => rule.code(),
Package(rule) => rule.code(),
}
}
pub fn tags(&self) -> tags::Tags {
use CliLintRuleKind::*;
match &self.0 {
DenoLint(rule) => rule.tags(),
Extended(rule) => rule.tags(),
Package(rule) => rule.tags(),
}
}
pub fn help_docs_url(&self) -> Cow<'static, str> {
use CliLintRuleKind::*;
match &self.0 {
DenoLint(rule) => {
Cow::Owned(format!("https://docs.deno.com/lint/rules/{}", rule.code()))
}
Extended(rule) => rule.help_docs_url(),
Package(rule) => rule.help_docs_url(),
}
}
pub fn supports_incremental_cache(&self) -> bool {
use CliLintRuleKind::*;
match &self.0 {
DenoLint(_) => true,
Extended(rule) => rule.supports_incremental_cache(),
// graph rules don't go through the incremental cache, so allow it
Package(_) => true,
}
}
pub fn into_file_or_pkg_rule(self) -> FileOrPackageLintRule {
use CliLintRuleKind::*;
match self.0 {
DenoLint(rule) => FileOrPackageLintRule::File(rule),
Extended(rule) => FileOrPackageLintRule::File(rule.into_base()),
Package(rule) => FileOrPackageLintRule::Package(rule),
}
}
}
/// The resolved rule configuration: the rules selected to run plus the set
/// of all known rule codes (used e.g. for validating config entries).
#[derive(Debug)]
pub struct ConfiguredRules {
  pub all_rule_codes: HashSet<Cow<'static, str>>,
  pub rules: Vec<CliLintRule>,
}
impl ConfiguredRules {
  /// The incremental cache is usable only when every selected rule
  /// supports it.
  pub fn supports_incremental_cache(&self) -> bool {
    for rule in &self.rules {
      if !rule.supports_incremental_cache() {
        return false;
      }
    }
    true
  }

  /// Hashable fingerprint of the configured rule set; changing the set of
  /// rules busts the incremental cache.
  pub fn incremental_cache_state(&self) -> impl std::hash::Hash + use<> {
    let mut codes: Vec<&'static str> = Vec::with_capacity(self.rules.len());
    for rule in &self.rules {
      codes.push(rule.code());
    }
    // ensure this is stable by sorting it
    codes.sort_unstable();
    Some(codes)
  }
}
/// Factory for the full rule set; holds the workspace resolver needed by
/// resolver-aware rules (e.g. no-sloppy-imports).
pub struct LintRuleProvider {
  // None when only listing rules (no workspace context needed)
  workspace_resolver: Option<Arc<WorkspaceResolver<CliSys>>>,
}
impl LintRuleProvider {
  pub fn new(
    workspace_resolver: Option<Arc<WorkspaceResolver<CliSys>>>,
  ) -> Self {
    Self { workspace_resolver }
  }

  /// Like `resolve_lint_rules`, but errors when the configuration selects
  /// no rules at all.
  pub fn resolve_lint_rules_err_empty(
    &self,
    rules: LintRulesConfig,
    maybe_workspace_dir: Option<&WorkspaceDirectory>,
  ) -> Result<ConfiguredRules, AnyError> {
    let lint_rules = self.resolve_lint_rules(rules, maybe_workspace_dir);
    if lint_rules.rules.is_empty() {
      bail!("No rules have been configured")
    }
    Ok(lint_rules)
  }

  /// All rules known to the CLI: every upstream deno_lint rule plus the
  /// CLI-specific file rule (no-sloppy-imports) and package rule
  /// (no-slow-types).
  pub fn all_rules(&self) -> Vec<CliLintRule> {
    let deno_lint_rules = deno_lint::rules::get_all_rules();
    let cli_lint_rules = vec![CliLintRule(CliLintRuleKind::Extended(
      Box::new(no_sloppy_imports::NoSloppyImportsRule::new(
        self.workspace_resolver.clone(),
      )),
    ))];
    let cli_graph_rules = vec![CliLintRule(CliLintRuleKind::Package(
      Box::new(no_slow_types::NoSlowTypesRule),
    ))];
    deno_lint_rules
      .into_iter()
      .map(|rule| CliLintRule(CliLintRuleKind::DenoLint(rule)))
      .chain(cli_lint_rules)
      .chain(cli_graph_rules)
      .collect()
  }

  /// Applies the user's tag/include/exclude configuration to the full rule
  /// set. When no tags are configured, defaults are derived from the
  /// workspace directory (see `get_default_tags`).
  pub fn resolve_lint_rules(
    &self,
    rules: LintRulesConfig,
    maybe_workspace_dir: Option<&WorkspaceDirectory>,
  ) -> ConfiguredRules {
    let all_rules = self.all_rules();
    let mut all_rule_names = HashSet::with_capacity(all_rules.len());
    for rule in &all_rules {
      all_rule_names.insert(rule.code().into());
    }
    let rules = filtered_rules(
      all_rules.into_iter(),
      rules
        .tags
        .or_else(|| Some(get_default_tags(maybe_workspace_dir))),
      rules.exclude,
      rules.include,
    );
    ConfiguredRules {
      rules,
      all_rule_codes: all_rule_names,
    }
  }
}
/// Computes the default tag set used when the config specifies none:
/// "recommended" always, "jsr" when the directory is a JSR package, and
/// "workspace" when any deno.json or package.json is present.
fn get_default_tags(
  maybe_workspace_dir: Option<&WorkspaceDirectory>,
) -> Vec<String> {
  // up to three tags can be pushed below, so reserve for all of them
  // (the previous capacity of 2 guaranteed a reallocation in that case)
  let mut tags = Vec::with_capacity(3);
  tags.push("recommended".to_string());
  if let Some(member_dir) = maybe_workspace_dir {
    let is_jsr_package = member_dir
      .member_or_root_deno_json()
      .map(|c| c.is_package())
      .unwrap_or(false);
    if is_jsr_package {
      tags.push("jsr".to_string());
    }
    if member_dir.member_or_root_deno_json().is_some()
      || member_dir.member_or_root_pkg_json().is_some()
    {
      tags.push("workspace".to_string());
    }
  }
  tags
}
/// Filters `all_rules` by tag membership, then forces explicitly included
/// rules in and explicitly excluded rules out (exclude wins over include).
/// The result is sorted by rule code.
fn filtered_rules(
  all_rules: impl Iterator<Item = CliLintRule>,
  maybe_tags: Option<Vec<String>>,
  maybe_exclude: Option<Vec<String>>,
  maybe_include: Option<Vec<String>>,
) -> Vec<CliLintRule> {
  let tags_set =
    maybe_tags.map(|tags| tags.into_iter().collect::<HashSet<_>>());

  let mut rules = all_rules
    .filter(|rule| {
      let code = rule.code();
      // no tag filter means every rule passes by default
      let mut passes = if let Some(tags_set) = &tags_set {
        rule
          .tags()
          .iter()
          .any(|t| tags_set.contains(&t.to_string()))
      } else {
        true
      };
      // compare without allocating a String per rule/list entry
      if let Some(includes) = &maybe_include
        && includes.iter().any(|c| c == code)
      {
        passes = true;
      }
      if let Some(excludes) = &maybe_exclude
        && excludes.iter().any(|c| c == code)
      {
        passes = false;
      }
      passes
    })
    .collect::<Vec<_>>();

  rules.sort_by_key(|r| r.code());

  rules
}
#[cfg(test)]
mod test {
  use super::*;
  use crate::args::LintRulesConfig;

  /// With no tags configured, resolution should fall back to the
  /// "recommended" tag set (minus any explicit excludes).
  #[test]
  fn recommended_rules_when_no_tags_in_config() {
    let rules_config = LintRulesConfig {
      exclude: Some(vec!["no-debugger".to_string()]),
      include: None,
      tags: None,
    };
    let rules_provider = LintRuleProvider::new(None);
    let rules = rules_provider.resolve_lint_rules(rules_config, None);
    let mut rule_names = rules
      .rules
      .into_iter()
      .map(|r| r.code().to_string())
      .collect::<Vec<_>>();
    rule_names.sort();

    // expected = every recommended rule except the excluded one
    let mut recommended_rule_names = rules_provider
      .resolve_lint_rules(Default::default(), None)
      .rules
      .into_iter()
      .filter(|r| r.tags().contains(&tags::RECOMMENDED))
      .map(|r| r.code().to_string())
      .filter(|n| n != "no-debugger")
      .collect::<Vec<_>>();
    recommended_rule_names.sort();
    assert_eq!(rule_names, recommended_rule_names);
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/tools/lint/rules/no_sloppy_imports.rs | cli/tools/lint/rules/no_sloppy_imports.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::cell::RefCell;
use std::collections::HashMap;
use std::sync::Arc;
use deno_ast::SourceRange;
use deno_error::JsErrorBox;
use deno_graph::Range;
use deno_graph::source::ResolutionKind;
use deno_graph::source::ResolveError;
use deno_lint::diagnostic::LintDiagnosticDetails;
use deno_lint::diagnostic::LintDiagnosticRange;
use deno_lint::diagnostic::LintDocsUrl;
use deno_lint::diagnostic::LintFix;
use deno_lint::diagnostic::LintFixChange;
use deno_lint::rules::LintRule;
use deno_lint::tags;
use deno_resolver::workspace::SloppyImportsResolutionReason;
use deno_resolver::workspace::WorkspaceResolver;
use text_lines::LineAndColumnIndex;
use super::ExtendedLintRule;
use crate::graph_util::CliJsrUrlProvider;
use crate::sys::CliSys;
/// Lint rule that flags imports which only resolve through sloppy-import
/// rules (missing extensions, directory imports, etc.) and offers the
/// explicit specifier as a quick fix.
#[derive(Debug)]
pub struct NoSloppyImportsRule {
  // None for making printing out the lint rules easy
  // (the rule is then inert; see lint_program_with_ast_view)
  workspace_resolver: Option<Arc<WorkspaceResolver<CliSys>>>,
}
impl NoSloppyImportsRule {
  /// Creates the rule; pass `None` when no workspace resolver is available
  /// (the rule will then report nothing).
  pub fn new(
    workspace_resolver: Option<Arc<WorkspaceResolver<CliSys>>>,
  ) -> Self {
    NoSloppyImportsRule { workspace_resolver }
  }
}
// Rule code reported in diagnostics and used for include/exclude matching.
const CODE: &str = "no-sloppy-imports";
// Help URL attached to every diagnostic produced by this rule.
const DOCS_URL: &str = "https://docs.deno.com/runtime/manual/tools/unstable_flags/#--unstable-sloppy-imports";
impl ExtendedLintRule for NoSloppyImportsRule {
  /// Sloppy import resolution depends on the surrounding files on disk, not
  /// just the file being linted, so incremental caching is only safe when
  /// that resolution is disabled.
  fn supports_incremental_cache(&self) -> bool {
    match &self.workspace_resolver {
      Some(workspace_resolver) => {
        !workspace_resolver.sloppy_imports_enabled()
          && !workspace_resolver.has_compiler_options_root_dirs()
      }
      // no resolver configured means no sloppy resolution happens at all
      None => true,
    }
  }

  fn help_docs_url(&self) -> Cow<'static, str> {
    Cow::Borrowed(DOCS_URL)
  }

  fn into_base(self: Box<Self>) -> Box<dyn LintRule> {
    self
  }
}
impl LintRule for NoSloppyImportsRule {
  /// Re-parses the module's imports through a capturing resolver and emits
  /// one diagnostic (with a quick fix where possible) per sloppy import.
  fn lint_program_with_ast_view<'view>(
    &self,
    context: &mut deno_lint::context::Context<'view>,
    _program: deno_lint::Program<'view>,
  ) {
    // without a workspace resolver the rule cannot resolve anything
    let Some(workspace_resolver) = &self.workspace_resolver else {
      return;
    };
    // sloppy imports only apply to local files
    if context.specifier().scheme() != "file" {
      return;
    }
    let resolver = SloppyImportCaptureResolver {
      workspace_resolver,
      captures: Default::default(),
    };

    // fill this and capture the sloppy imports in the resolver
    deno_graph::parse_module_from_ast(deno_graph::ParseModuleFromAstOptions {
      graph_kind: deno_graph::GraphKind::All,
      specifier: context.specifier().clone(),
      maybe_headers: None,
      mtime: None,
      parsed_source: context.parsed_source(),
      // ignore resolving dynamic imports like import(`./dir/${something}`)
      file_system: &deno_graph::source::NullFileSystem,
      jsr_url_provider: &CliJsrUrlProvider,
      maybe_resolver: Some(&resolver),
    });

    for (referrer, (specifier, sloppy_reason)) in
      resolver.captures.borrow_mut().drain()
    {
      // convert the referrer's line/column range back to source positions
      let start_range =
        context.text_info().loc_to_source_pos(LineAndColumnIndex {
          line_index: referrer.range.start.line,
          column_index: referrer.range.start.character,
        });
      let end_range =
        context.text_info().loc_to_source_pos(LineAndColumnIndex {
          line_index: referrer.range.end.line,
          column_index: referrer.range.end.character,
        });
      let source_range = SourceRange::new(start_range, end_range);
      context.add_diagnostic_details(
        Some(LintDiagnosticRange {
          range: source_range,
          description: None,
          text_info: context.text_info().clone(),
        }),
        LintDiagnosticDetails {
          message: "Sloppy imports are not allowed.".to_string(),
          code: CODE.to_string(),
          custom_docs_url: LintDocsUrl::Custom(DOCS_URL.to_string()),
          // quick fix: replace the import text with the explicit relative
          // specifier (only offered when a relative path can be computed)
          fixes: context
            .specifier()
            .make_relative(&specifier)
            .map(|relative| {
              vec![LintFix {
                description: Cow::Owned(
                  sloppy_reason.quick_fix_message_for_specifier(&specifier),
                ),
                changes: vec![LintFixChange {
                  new_text: Cow::Owned({
                    // same-dir specifiers need an explicit "./" prefix
                    let relative = if relative.starts_with("../") {
                      relative
                    } else {
                      format!("./{}", relative)
                    };
                    // preserve whichever quote style the source used
                    let current_text =
                      context.text_info().range_text(&source_range);
                    if current_text.starts_with('"') {
                      format!("\"{}\"", relative)
                    } else if current_text.starts_with('\'') {
                      format!("'{}'", relative)
                    } else {
                      relative
                    }
                  }),
                  range: source_range,
                }],
              }]
            })
            .unwrap_or_default(),
          hint: None,
          info: vec![],
        },
      );
    }
  }

  fn code(&self) -> &'static str {
    CODE
  }

  fn tags(&self) -> tags::Tags {
    &[tags::RECOMMENDED]
  }
}
/// A `deno_graph` resolver that records, per referrer range, every
/// resolution that succeeded only via sloppy-import rules.
#[derive(Debug)]
struct SloppyImportCaptureResolver<'a> {
  workspace_resolver: &'a WorkspaceResolver<CliSys>,
  // referrer range -> (resolved specifier, why it was considered sloppy)
  captures: RefCell<
    HashMap<Range, (deno_ast::ModuleSpecifier, SloppyImportsResolutionReason)>,
  >,
}
impl deno_graph::source::Resolver for SloppyImportCaptureResolver<'_> {
  /// Delegates to the workspace resolver and, when a resolution only
  /// succeeded with a sloppy-import reason, records it in `captures`.
  fn resolve(
    &self,
    specifier_text: &str,
    referrer_range: &Range,
    resolution_kind: ResolutionKind,
  ) -> Result<deno_ast::ModuleSpecifier, deno_graph::source::ResolveError> {
    // Map deno_graph's resolution kind onto the workspace resolver's
    // equivalent and run the real resolution.
    let resolution = self
      .workspace_resolver
      .resolve(
        specifier_text,
        &referrer_range.specifier,
        match resolution_kind {
          ResolutionKind::Execution => {
            deno_resolver::workspace::ResolutionKind::Execution
          }
          ResolutionKind::Types => {
            deno_resolver::workspace::ResolutionKind::Types
          }
        },
      )
      .map_err(|err| ResolveError::Other(JsErrorBox::from_err(err)))?;
    match resolution {
      deno_resolver::workspace::MappedResolution::Normal {
        specifier,
        sloppy_reason,
        ..
      } => {
        // A `Some` sloppy reason means the specifier resolved only via
        // sloppy-import handling; capture it keyed by the referrer range
        // (the first capture for a given range wins).
        if let Some(sloppy_reason) = sloppy_reason {
          self
            .captures
            .borrow_mut()
            .entry(referrer_range.clone())
            .or_insert_with(|| (specifier.clone(), sloppy_reason));
        }
        Ok(specifier)
      }
      deno_resolver::workspace::MappedResolution::WorkspaceJsrPackage {
        ..
      }
      | deno_resolver::workspace::MappedResolution::WorkspaceNpmPackage {
        ..
      }
      | deno_resolver::workspace::MappedResolution::PackageJson { .. }
      | deno_resolver::workspace::MappedResolution::PackageJsonImport {
        ..
      } => {
        // this error is ignored — package-style resolutions are irrelevant to
        // sloppy-import detection, so erroring simply stops graph traversal
        // down this edge.
        Err(ResolveError::Other(JsErrorBox::generic("")))
      }
    }
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/tools/bench/mod.rs | cli/tools/bench/mod.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::collections::HashSet;
use std::path::Path;
use std::sync::Arc;
use std::time::Duration;
use deno_config::glob::WalkEntry;
use deno_core::ModuleSpecifier;
use deno_core::PollEventLoopOptions;
use deno_core::anyhow::anyhow;
use deno_core::error::AnyError;
use deno_core::error::CoreErrorKind;
use deno_core::error::JsError;
use deno_core::futures::StreamExt;
use deno_core::futures::future;
use deno_core::futures::stream;
use deno_core::serde_v8;
use deno_core::unsync::spawn;
use deno_core::unsync::spawn_blocking;
use deno_core::v8;
use deno_error::JsErrorBox;
use deno_npm_installer::graph::NpmCachingStrategy;
use deno_runtime::WorkerExecutionMode;
use deno_runtime::deno_permissions::Permissions;
use deno_runtime::deno_permissions::PermissionsContainer;
use deno_runtime::permissions::RuntimePermissionDescriptorParser;
use deno_runtime::tokio_util::create_and_run_current_thread;
use indexmap::IndexMap;
use indexmap::IndexSet;
use log::Level;
use serde::Deserialize;
use serde::Serialize;
use tokio::sync::mpsc::UnboundedSender;
use tokio::sync::mpsc::unbounded_channel;
use crate::args::BenchFlags;
use crate::args::CliOptions;
use crate::args::Flags;
use crate::colors;
use crate::display::write_json_to_stdout;
use crate::factory::CliFactory;
use crate::graph_container::CheckSpecifiersOptions;
use crate::graph_util::has_graph_root_local_dependent_changed;
use crate::ops;
use crate::sys::CliSys;
use crate::tools::test::TestFilter;
use crate::tools::test::format_test_error;
use crate::util::file_watcher;
use crate::util::fs::CollectSpecifiersOptions;
use crate::util::fs::collect_specifiers;
use crate::util::path::is_script_ext;
use crate::util::path::matches_pattern_or_exact_path;
use crate::worker::CliMainWorkerFactory;
use crate::worker::CreateCustomWorkerError;
mod mitata;
mod reporters;
use reporters::BenchReporter;
use reporters::ConsoleReporter;
use reporters::JsonReporter;
/// Per-run options threaded down to each bench module execution.
#[derive(Debug, Clone)]
struct BenchSpecifierOptions {
  // Name filter selecting which benches actually run.
  filter: TestFilter,
  // When true, results are emitted as JSON instead of the console table.
  json: bool,
  // Current log level; used to decide whether bench output is shown.
  log_level: Option<log::Level>,
}
/// Announcement of the benches a single module is about to run,
/// deserialized from the JS side.
#[derive(Debug, Clone, Eq, PartialEq, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct BenchPlan {
  // Number of benches that will run for this origin.
  pub total: usize,
  // Specifier of the bench module.
  pub origin: String,
  // True when at least one bench used `only` (the run fails at the end).
  pub used_only: bool,
  // Names of all benches in run order.
  pub names: Vec<String>,
}
/// Events streamed from running bench modules to the reporter task.
#[derive(Debug, Clone, Deserialize)]
#[serde(rename_all = "camelCase")]
pub enum BenchEvent {
  // A module announced its plan.
  Plan(BenchPlan),
  // Output produced while benches run.
  Output(String),
  // A bench was registered (before execution).
  Register(BenchDescription),
  // The bench with this id is about to run.
  Wait(usize),
  // The bench with this id finished with the given result.
  Result(usize, BenchResult),
  // An error was thrown outside of any bench; (origin, error).
  UncaughtError(String, Box<JsError>),
}
/// Outcome of a single bench: measured stats or the error that failed it.
#[derive(Debug, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub enum BenchResult {
  Ok(BenchStats),
  Failed(Box<JsError>),
}
/// Aggregated results across all bench modules in a run.
#[derive(Debug, Clone)]
pub struct BenchReport {
  // Total number of planned benches.
  pub total: usize,
  // Count of failed benches plus uncaught errors.
  pub failed: usize,
  // Failed benches with their errors.
  pub failures: Vec<(BenchDescription, Box<JsError>)>,
  // Successful benches with their measured stats.
  pub measurements: Vec<(BenchDescription, BenchStats)>,
}
/// Metadata describing a registered bench, deserialized from the JS side.
#[derive(Debug, Clone, PartialEq, Deserialize, Eq, Hash)]
pub struct BenchDescription {
  // Id unique within the run; used to correlate Wait/Result events.
  pub id: usize,
  pub name: String,
  // Specifier of the module that registered this bench.
  pub origin: String,
  // True when this bench is the comparison baseline for its group.
  pub baseline: bool,
  pub group: Option<String>,
  pub ignore: bool,
  pub only: bool,
  // Warmup benches always run and are excluded from reports.
  pub warmup: bool,
}
/// Timing statistics for one bench. Times are reported by the JS side;
/// `n` is the number of measured iterations.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct BenchStats {
  pub n: u64,
  pub min: f64,
  pub max: f64,
  pub avg: f64,
  // Percentiles of the iteration times.
  pub p75: f64,
  pub p99: f64,
  pub p995: f64,
  pub p999: f64,
  // True when the high-precision timing path was used.
  pub high_precision: bool,
  // True when the bench called explicit start()/end() timers.
  pub used_explicit_timers: bool,
}
impl BenchReport {
pub fn new() -> Self {
Self {
total: 0,
failed: 0,
failures: Vec::new(),
measurements: Vec::new(),
}
}
}
/// Picks the reporter implementation for this run: JSON output when `json`
/// is set, otherwise the interactive console table.
fn create_reporter(
  show_output: bool,
  json: bool,
) -> Box<dyn BenchReporter + Send> {
  if json {
    Box::new(JsonReporter::new())
  } else {
    Box::new(ConsoleReporter::new(show_output))
  }
}
/// Run a single specifier as an executable bench module.
///
/// Wraps `bench_specifier_inner` so that an uncaught JS error terminates
/// only this module: the error is forwarded as a `BenchEvent::UncaughtError`
/// instead of aborting the whole bench run.
async fn bench_specifier(
  worker_factory: Arc<CliMainWorkerFactory>,
  permissions_container: PermissionsContainer,
  specifier: ModuleSpecifier,
  preload_modules: Vec<ModuleSpecifier>,
  require_modules: Vec<ModuleSpecifier>,
  sender: UnboundedSender<BenchEvent>,
  filter: TestFilter,
) -> Result<(), AnyError> {
  match bench_specifier_inner(
    worker_factory,
    permissions_container,
    specifier.clone(),
    preload_modules,
    require_modules,
    &sender,
    filter,
  )
  .await
  {
    Ok(()) => Ok(()),
    Err(CreateCustomWorkerError::Core(error)) => match error.into_kind() {
      // JS exceptions are reported as events; the run continues.
      CoreErrorKind::Js(error) => {
        sender.send(BenchEvent::UncaughtError(specifier.to_string(), error))?;
        Ok(())
      }
      // Any other core error is fatal for the run.
      error => Err(error.into_box().into()),
    },
    Err(e) => Err(e.into()),
  }
}
/// Run a single specifier as an executable bench module.
///
/// Creates a dedicated worker for `specifier`, evaluates the module to
/// collect its registered benches, then runs them sequentially, streaming
/// progress over `sender` as `BenchEvent`s.
async fn bench_specifier_inner(
  worker_factory: Arc<CliMainWorkerFactory>,
  permissions_container: PermissionsContainer,
  specifier: ModuleSpecifier,
  preload_modules: Vec<ModuleSpecifier>,
  require_modules: Vec<ModuleSpecifier>,
  sender: &UnboundedSender<BenchEvent>,
  filter: TestFilter,
) -> Result<(), CreateCustomWorkerError> {
  let mut worker = worker_factory
    .create_custom_worker(
      WorkerExecutionMode::Bench,
      specifier.clone(),
      preload_modules,
      require_modules,
      permissions_container,
      // The bench ops report registrations/events back over `sender`.
      vec![ops::bench::deno_bench::init(sender.clone())],
      Default::default(),
      None,
    )
    .await?;
  worker.execute_preload_modules().await?;
  // We execute the main module as a side module so that import.meta.main is not set.
  worker.execute_side_module().await?;
  let mut worker = worker.into_main_worker();
  // Ensure that there are no pending exceptions before we start running tests
  worker.run_up_to_duration(Duration::from_millis(0)).await?;
  worker
    .dispatch_load_event()
    .map_err(|e| CoreErrorKind::Js(e).into_box())?;
  // Take the benches that module evaluation registered via the bench ops.
  let benchmarks = {
    let state_rc = worker.js_runtime.op_state();
    let mut state = state_rc.borrow_mut();
    std::mem::take(&mut state.borrow_mut::<ops::bench::BenchContainer>().0)
  };
  // If any bench used `only`, run just those; `used_only` is reported so the
  // overall run can still be marked failed.
  let (only, no_only): (Vec<_>, Vec<_>) =
    benchmarks.into_iter().partition(|(d, _)| d.only);
  let used_only = !only.is_empty();
  let benchmarks = if used_only { only } else { no_only };
  // Warmup benches always run; others must match the filter and not be
  // ignored.
  let mut benchmarks = benchmarks
    .into_iter()
    .filter(|(d, _)| d.warmup || filter.includes(&d.name) && !d.ignore)
    .collect::<Vec<_>>();
  let mut groups = IndexSet::<Option<String>>::new();
  // make sure ungrouped benchmarks are placed above grouped
  groups.insert(None);
  for (desc, _) in &benchmarks {
    groups.insert(desc.group.clone());
  }
  // Order benches by the position their group first appeared in.
  benchmarks.sort_by(|(d1, _), (d2, _)| {
    groups
      .get_index_of(&d1.group)
      .unwrap()
      .partial_cmp(&groups.get_index_of(&d2.group).unwrap())
      .unwrap()
  });
  sender
    .send(BenchEvent::Plan(BenchPlan {
      origin: specifier.to_string(),
      total: benchmarks.len(),
      used_only,
      names: benchmarks.iter().map(|(d, _)| d.name.clone()).collect(),
    }))
    .map_err(JsErrorBox::from_err)
    .map_err(|e| CoreErrorKind::JsBox(e).into_box())?;
  for (desc, function) in benchmarks {
    sender
      .send(BenchEvent::Wait(desc.id))
      .map_err(JsErrorBox::from_err)
      .map_err(|e| CoreErrorKind::JsBox(e).into_box())?;
    // Invoke the bench function and drive the event loop until its returned
    // promise settles.
    let call = worker.js_runtime.call(&function);
    let result = worker
      .js_runtime
      .with_event_loop_promise(call, PollEventLoopOptions::default())
      .await?;
    // Deserialize the settled value into a BenchResult.
    deno_core::scope!(scope, &mut worker.js_runtime);
    let result = v8::Local::new(scope, result);
    let result = serde_v8::from_v8::<BenchResult>(scope, result)
      .map_err(JsErrorBox::from_err)
      .map_err(|e| CoreErrorKind::JsBox(e).into_box())?;
    sender
      .send(BenchEvent::Result(desc.id, result))
      .map_err(JsErrorBox::from_err)
      .map_err(|e| CoreErrorKind::JsBox(e).into_box())?;
  }
  // Ignore `defaultPrevented` of the `beforeunload` event. We don't allow the
  // event loop to continue beyond what's needed to await results.
  worker
    .dispatch_beforeunload_event()
    .map_err(|e| CoreErrorKind::Js(e).into_box())?;
  worker
    .dispatch_process_beforeexit_event()
    .map_err(|e| CoreErrorKind::Js(e).into_box())?;
  worker
    .dispatch_unload_event()
    .map_err(|e| CoreErrorKind::Js(e).into_box())?;
  worker
    .dispatch_process_exit_event()
    .map_err(|e| CoreErrorKind::Js(e).into_box())?;
  // Ensure the worker has settled so we can catch any remaining unhandled rejections. We don't
  // want to wait forever here.
  worker.run_up_to_duration(Duration::from_millis(0)).await?;
  Ok(())
}
/// Run benches for a collection of specifiers, one module at a time, while a
/// separate task consumes events, feeds the reporter, and builds the final
/// report. (The original header comment said "Test" — this runs benches.)
async fn bench_specifiers(
  worker_factory: Arc<CliMainWorkerFactory>,
  cli_options: &Arc<CliOptions>,
  permission_desc_parser: &Arc<RuntimePermissionDescriptorParser<CliSys>>,
  specifiers: Vec<ModuleSpecifier>,
  preload_modules: Vec<ModuleSpecifier>,
  require_modules: Vec<ModuleSpecifier>,
  options: BenchSpecifierOptions,
) -> Result<(), AnyError> {
  let (sender, mut receiver) = unbounded_channel::<BenchEvent>();
  let log_level = options.log_level;
  let option_for_handles = options.clone();
  let join_handles = specifiers.into_iter().map(move |specifier| {
    let worker_factory = worker_factory.clone();
    let specifier_dir = cli_options.workspace().resolve_member_dir(&specifier);
    let sender = sender.clone();
    let options = option_for_handles.clone();
    let preload_modules = preload_modules.clone();
    let require_modules = require_modules.clone();
    let cli_options = cli_options.clone();
    let permission_desc_parser = permission_desc_parser.clone();
    spawn_blocking(move || {
      // Various test files should not share the same permissions in terms of
      // `PermissionsContainer` - otherwise granting/revoking permissions in one
      // file would have impact on other files, which is undesirable.
      let permissions =
        cli_options.permissions_options_for_dir(&specifier_dir)?;
      let permissions_container = PermissionsContainer::new(
        permission_desc_parser.clone(),
        Permissions::from_options(
          permission_desc_parser.as_ref(),
          &permissions,
        )?,
      );
      let future = bench_specifier(
        worker_factory,
        permissions_container,
        specifier,
        preload_modules,
        require_modules,
        sender,
        options.filter,
      );
      // Each bench module runs on its own current-thread runtime.
      create_and_run_current_thread(future)
    })
  });
  // buffer_unordered(1) serializes module execution so measurements from
  // different modules don't interfere with each other.
  let join_stream = stream::iter(join_handles)
    .buffer_unordered(1)
    .collect::<Vec<Result<Result<(), AnyError>, tokio::task::JoinError>>>();
  // Consumer task: translate events into reporter calls and accumulate the
  // final report.
  let handler = {
    spawn(async move {
      let mut used_only = false;
      let mut report = BenchReport::new();
      let mut reporter =
        create_reporter(log_level != Some(Level::Error), options.json);
      let mut benches = IndexMap::new();
      while let Some(event) = receiver.recv().await {
        match event {
          BenchEvent::Plan(plan) => {
            report.total += plan.total;
            if plan.used_only {
              used_only = true;
            }
            reporter.report_plan(&plan);
          }
          BenchEvent::Register(desc) => {
            reporter.report_register(&desc);
            benches.insert(desc.id, desc);
          }
          BenchEvent::Wait(id) => {
            reporter.report_wait(benches.get(&id).unwrap());
          }
          BenchEvent::Output(output) => {
            reporter.report_output(&output);
          }
          BenchEvent::Result(id, result) => {
            let desc = benches.get(&id).unwrap();
            reporter.report_result(desc, &result);
            match result {
              BenchResult::Ok(stats) => {
                report.measurements.push((desc.clone(), stats));
              }
              BenchResult::Failed(failure) => {
                report.failed += 1;
                report.failures.push((desc.clone(), failure));
              }
            };
          }
          BenchEvent::UncaughtError(origin, error) => {
            report.failed += 1;
            reporter.report_uncaught_error(&origin, error);
          }
        }
      }
      reporter.report_end(&report);
      // Using `only` always fails the run so it can't slip into CI.
      if used_only {
        return Err(anyhow!(
          "Bench failed because the \"only\" option was used",
        ));
      }
      if report.failed > 0 {
        return Err(anyhow!("Bench failed"));
      }
      Ok(())
    })
  };
  let (join_results, result) = future::join(join_stream, handler).await;
  // propagate any errors
  for join_result in join_results {
    join_result??;
  }
  result??;
  Ok(())
}
/// Checks if the path has a basename and extension Deno supports for benches.
fn is_supported_bench_path(entry: WalkEntry) -> bool {
  // Must be a script file at all.
  if !is_script_ext(entry.path) {
    return false;
  }
  // Conventionally-named bench files are always accepted.
  if has_supported_bench_path_name(entry.path) {
    return true;
  }
  // Otherwise only accept files the user explicitly listed via include
  // patterns.
  match &entry.patterns.include {
    Some(include) => matches_pattern_or_exact_path(include, entry.path),
    None => false,
  }
}
/// Returns true when the file name (minus its final extension) marks the
/// file as a bench module: exactly `bench`, or ending in `_bench`/`.bench`.
fn has_supported_bench_path_name(path: &Path) -> bool {
  path.file_stem().is_some_and(|stem| {
    let stem = stem.to_string_lossy();
    stem == "bench" || stem.ends_with("_bench") || stem.ends_with(".bench")
  })
}
/// Entry point for `deno bench`: collects bench files across workspace
/// members, type-checks them, and runs them.
pub async fn run_benchmarks(
  flags: Arc<Flags>,
  bench_flags: BenchFlags,
) -> Result<(), AnyError> {
  let factory = CliFactory::from_flags(flags);
  let cli_options = factory.cli_options()?;
  let workspace_bench_options =
    cli_options.resolve_workspace_bench_options(&bench_flags);
  let members_with_bench_options =
    cli_options.resolve_bench_options_for_members(&bench_flags)?;
  // Gather bench file specifiers from every workspace member's file patterns.
  let specifiers = members_with_bench_options
    .iter()
    .map(|(_, bench_options)| {
      collect_specifiers(
        CollectSpecifiersOptions {
          file_patterns: bench_options.files.clone(),
          vendor_folder: cli_options.vendor_dir_path().map(ToOwned::to_owned),
          include_ignored_specified: false,
        },
        is_supported_bench_path,
      )
    })
    .collect::<Result<Vec<_>, _>>()?
    .into_iter()
    .flatten()
    .collect::<Vec<_>>();
  if !workspace_bench_options.permit_no_files && specifiers.is_empty() {
    return Err(anyhow!("No bench modules found"));
  }
  // Type-check all collected modules before running anything.
  let main_graph_container = factory.main_module_graph_container().await?;
  main_graph_container
    .check_specifiers(
      &specifiers,
      CheckSpecifiersOptions {
        ext_overwrite: cli_options.ext_flag().as_ref(),
        ..Default::default()
      },
    )
    .await?;
  // `--no-run` means check-only.
  if workspace_bench_options.no_run {
    return Ok(());
  }
  let preload_modules = cli_options.preload_modules()?;
  let require_modules = cli_options.require_modules()?;
  let log_level = cli_options.log_level();
  let worker_factory =
    Arc::new(factory.create_cli_main_worker_factory().await?);
  bench_specifiers(
    worker_factory,
    cli_options,
    factory.permission_desc_parser()?,
    specifiers,
    preload_modules,
    require_modules,
    BenchSpecifierOptions {
      filter: TestFilter::from_flag(&workspace_bench_options.filter),
      json: workspace_bench_options.json,
      log_level,
    },
  )
  .await?;
  Ok(())
}
// TODO(bartlomieju): heavy duplication of code with `cli/tools/test.rs`
/// Watch-mode variant of `run_benchmarks`: re-runs on file change, and only
/// re-executes the bench modules whose local dependencies actually changed.
pub async fn run_benchmarks_with_watch(
  flags: Arc<Flags>,
  bench_flags: BenchFlags,
) -> Result<(), AnyError> {
  file_watcher::watch_func(
    flags,
    file_watcher::PrintConfig::new(
      "Bench",
      bench_flags
        .watch
        .as_ref()
        .map(|w| !w.no_clear_screen)
        .unwrap_or(true),
    ),
    // Invoked for the initial run and again on every relevant change.
    move |flags, watcher_communicator, changed_paths| {
      let bench_flags = bench_flags.clone();
      watcher_communicator.show_path_changed(changed_paths.clone());
      Ok(async move {
        let factory = CliFactory::from_flags_for_watcher(
          flags,
          watcher_communicator.clone(),
        );
        let cli_options = factory.cli_options()?;
        let workspace_bench_options =
          cli_options.resolve_workspace_bench_options(&bench_flags);
        let _ = watcher_communicator.watch_paths(cli_options.watch_paths());
        let graph_kind = cli_options.type_check_mode().as_graph_kind();
        let module_graph_creator = factory.module_graph_creator().await?;
        let members_with_bench_options =
          cli_options.resolve_bench_options_for_members(&bench_flags)?;
        // Also watch the base paths of every member's include patterns.
        let watch_paths = members_with_bench_options
          .iter()
          .filter_map(|(_, bench_options)| {
            bench_options
              .files
              .include
              .as_ref()
              .map(|set| set.base_paths())
          })
          .flatten()
          .collect::<Vec<_>>();
        let _ = watcher_communicator.watch_paths(watch_paths);
        let collected_bench_modules = members_with_bench_options
          .iter()
          .map(|(_, bench_options)| {
            collect_specifiers(
              CollectSpecifiersOptions {
                file_patterns: bench_options.files.clone(),
                vendor_folder: cli_options
                  .vendor_dir_path()
                  .map(ToOwned::to_owned),
                include_ignored_specified: false,
              },
              is_supported_bench_path,
            )
          })
          .collect::<Result<Vec<_>, _>>()?
          .into_iter()
          .flatten()
          .collect::<Vec<_>>();
        // Build the module graph so we can tell which roots depend on the
        // changed files.
        let graph = module_graph_creator
          .create_graph(
            graph_kind,
            collected_bench_modules.clone(),
            NpmCachingStrategy::Eager,
          )
          .await?;
        module_graph_creator.graph_valid(&graph)?;
        let bench_modules = &graph.roots;
        // On a change event, keep only roots with a changed local dependency;
        // on the initial run (no changed paths), run everything.
        let bench_modules_to_reload = if let Some(changed_paths) = changed_paths
        {
          let changed_paths = changed_paths.into_iter().collect::<HashSet<_>>();
          let mut result = IndexSet::with_capacity(bench_modules.len());
          for bench_module_specifier in bench_modules {
            if has_graph_root_local_dependent_changed(
              &graph,
              bench_module_specifier,
              &changed_paths,
            ) {
              result.insert(bench_module_specifier.clone());
            }
          }
          result
        } else {
          bench_modules.clone()
        };
        let worker_factory =
          Arc::new(factory.create_cli_main_worker_factory().await?);
        let specifiers = collected_bench_modules
          .into_iter()
          .filter(|specifier| bench_modules_to_reload.contains(specifier))
          .collect::<Vec<ModuleSpecifier>>();
        factory
          .main_module_graph_container()
          .await?
          .check_specifiers(
            &specifiers,
            CheckSpecifiersOptions {
              ext_overwrite: cli_options.ext_flag().as_ref(),
              allow_unknown_media_types: false,
            },
          )
          .await?;
        if workspace_bench_options.no_run {
          return Ok(());
        }
        let log_level = cli_options.log_level();
        let preload_modules = cli_options.preload_modules()?;
        let require_modules = cli_options.require_modules()?;
        bench_specifiers(
          worker_factory,
          cli_options,
          factory.permission_desc_parser()?,
          specifiers,
          preload_modules,
          require_modules,
          BenchSpecifierOptions {
            filter: TestFilter::from_flag(&workspace_bench_options.filter),
            json: workspace_bench_options.json,
            log_level,
          },
        )
        .await?;
        Ok(())
      })
    },
  )
  .await?;
  Ok(())
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/tools/bench/mitata.rs | cli/tools/bench/mitata.rs | // Copyright 2022 evanwashere
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
use crate::colors;
/// Taken from https://stackoverflow.com/a/76572321
///
/// Rounds `x` to `decimals` significant digits (not decimal places).
/// Returns 0.0 for a zero input or zero requested digits.
fn precision_f64(x: f64, decimals: u32) -> f64 {
  if x == 0. || decimals == 0 {
    return 0.;
  }
  // Order of magnitude of `x`, e.g. 3 digits before the point -> 3.
  let magnitude = x.abs().log10().ceil() as i32;
  // Scale so the wanted significant digits land left of the point, round,
  // then scale back.
  let factor = 10_f64.powi(decimals as i32 - magnitude);
  (x * factor).round() / factor
}
/// Converts an average time-per-iteration (in nanoseconds) into a
/// human-readable iterations-per-second string, e.g. "17,910,000" or "15.4".
fn avg_to_iter_per_s(time: f64) -> String {
  // 1e9 ns per second, limited to 4 significant digits.
  let rate = precision_f64(1e9 / time, 4);
  let (whole, first_frac) = into_decimal_and_fractional_parts(rate);
  human_readable_decimal_with_fractional(whole, first_frac)
}
/// Return a tuple representing decimal part of provided float, as well as its
/// first fractional digit.
///
/// The fractional digit is rounded, so e.g. 10.99 carries over to (11, 0).
fn into_decimal_and_fractional_parts(num: f64) -> (i64, i64) {
  let mut decimal_part = num.floor() as i64;
  let fractional_part = {
    let decs = ((num - num.floor()) * 10.0).round();
    // A first fractional digit that rounds to 10 carries into the whole part.
    if decs == 10.0 {
      decimal_part += 1;
      0
    } else {
      decs as i64
    }
  };
  (decimal_part, fractional_part)
}
/// Formats `decimal` with thousands separators; when the separated string is
/// shorter than 4 characters, appends `.{fractional}` instead
/// (e.g. (17910000, 0) -> "17,910,000", (15, 4) -> "15.4").
fn human_readable_decimal_with_fractional(
  decimal: i64,
  fractional: i64,
) -> String {
  let digits = decimal.to_string();
  // Insert a comma before every group of three digits counted from the right.
  let mut grouped = String::with_capacity(digits.len() + digits.len() / 3);
  for (i, ch) in digits.chars().enumerate() {
    if i != 0 && (digits.len() - i) % 3 == 0 {
      grouped.push(',');
    }
    grouped.push(ch);
  }
  if grouped.len() >= 4 {
    grouped
  } else {
    format!("{}.{}", grouped, fractional)
  }
}
/// Formats a duration given in nanoseconds with an auto-selected unit
/// (ps, ns, µs, ms, s, m, h), one decimal place.
pub fn fmt_duration(time: f64) -> String {
  // Sub-nanosecond values are shown in picoseconds.
  if time < 1e0 {
    return format!("{:.1} ps", time * 1e3);
  }
  // (upper bound in ns, divisor, unit), checked smallest to largest.
  const SCALES: [(f64, f64, &str); 5] = [
    (1e3, 1e0, "ns"),
    (1e6, 1e3, "µs"),
    (1e9, 1e6, "ms"),
    (1e12, 1e9, "s"),
    (36e11, 60e9, "m"),
  ];
  for (limit, divisor, unit) in SCALES {
    if time < limit {
      return format!("{:.1} {}", time / divisor, unit);
    }
  }
  // Anything of an hour (3.6e12 ns) or more.
  format!("{:.1} h", time / 36e11)
}
/// Best-effort retrieval of the CPU model name for display in bench output.
pub mod cpu {
  #![allow(dead_code)]

  /// Returns the CPU name for the current platform, or "unknown" when it
  /// cannot be determined.
  pub fn name() -> String {
    #[cfg(target_os = "linux")]
    return linux();
    #[cfg(target_os = "macos")]
    return macos();
    #[cfg(target_os = "windows")]
    return windows();
    // Fallback for any other target OS.
    #[allow(unreachable_code)]
    {
      "unknown".to_string()
    }
  }

  /// Queries `sysctl -n machdep.cpu.brand_string`; "unknown" on failure.
  pub fn macos() -> String {
    let mut sysctl = std::process::Command::new("sysctl");
    sysctl.arg("-n");
    sysctl.arg("machdep.cpu.brand_string");
    std::str::from_utf8(
      &sysctl
        .output()
        .map(|x| x.stdout)
        .unwrap_or(Vec::from("unknown")),
    )
    .unwrap()
    .trim()
    .to_string()
  }

  /// Queries `wmic cpu get name` and takes the second output line;
  /// "unknown" on failure.
  pub fn windows() -> String {
    let mut wmi = std::process::Command::new("wmic");
    wmi.arg("cpu");
    wmi.arg("get");
    wmi.arg("name");
    match wmi.output() {
      Err(_) => String::from("unknown"),
      Ok(x) => {
        let x = String::from_utf8_lossy(&x.stdout);
        x.lines().nth(1).unwrap_or("unknown").trim().to_string()
      }
    }
  }

  /// Scans /proc/cpuinfo for the first line whose key looks like a CPU model
  /// field; "unknown" when none matches or the file can't be read.
  pub fn linux() -> String {
    let info = std::fs::read_to_string("/proc/cpuinfo").unwrap_or_default();
    for line in info.lines() {
      let mut iter = line.split(':');
      let key = iter.next().unwrap_or("");
      // Key names vary by architecture/vendor; accept the common ones.
      if key.contains("Hardware")
        || key.contains("Processor")
        || key.contains("chip type")
        || key.contains("model name")
        || key.starts_with("cpu type")
        || key.starts_with("cpu model")
      {
        return iter.next().unwrap_or("unknown").trim().to_string();
      }
    }
    String::from("unknown")
  }
}
/// Console-table rendering for bench results (ported from mitata).
pub mod reporter {
  use super::*;

  /// A bench failure to render: message plus optional stack trace.
  #[derive(Clone, PartialEq)]
  pub struct Error {
    pub message: String,
    pub stack: Option<String>,
  }

  /// Timing stats for one rendered table row (nanoseconds).
  #[derive(Clone, PartialEq)]
  pub struct BenchmarkStats {
    pub avg: f64,
    pub min: f64,
    pub max: f64,
    pub p75: f64,
    pub p99: f64,
    pub p995: f64,
  }

  /// A bench within a group, used for the end-of-group summary.
  #[derive(Clone, PartialEq)]
  pub struct GroupBenchmark {
    pub name: String,
    pub group: String,
    pub baseline: bool,
    pub stats: BenchmarkStats,
  }

  /// Which table columns to render and how wide the name column is.
  #[derive(Clone, PartialEq)]
  pub struct Options {
    size: usize,
    pub avg: bool,
    pub min_max: bool,
    pub percentiles: bool,
  }

  impl Options {
    /// All columns enabled; name column sized to the longest bench name.
    pub fn new(names: &[&str]) -> Options {
      Options {
        avg: true,
        min_max: true,
        size: size(names),
        percentiles: true,
      }
    }
  }

  /// Width of the name column: longest name (min 9) plus 2 padding.
  pub fn size(names: &[&str]) -> usize {
    let mut max = 9;
    for name in names {
      if max < name.len() {
        max = name.len();
      }
    }
    2 + max
  }

  /// Renders the horizontal rule under the header, matching the enabled
  /// columns' widths.
  pub fn br(options: &Options) -> String {
    let mut s = String::new();
    s.push_str(&format!("| {} |", "-".repeat(options.size)));
    if options.avg {
      s.push_str(&format!(" {} | {} |", "-".repeat(15), "-".repeat(13)));
    }
    if options.min_max {
      s.push_str(&format!(" {} |", "-".repeat(21)));
    }
    if options.percentiles {
      s.push_str(&format!(
        " {} | {} | {} |",
        "-".repeat(8),
        "-".repeat(8),
        "-".repeat(8)
      ));
    }
    s
  }

  /// Renders a failed bench: padded name, red "error" label, message, and
  /// the stack trace (grayed) when available.
  pub fn benchmark_error(n: &str, e: &Error, options: &Options) -> String {
    let size = options.size;
    let mut s = String::new();
    s.push_str(&format!("{:<size$}", n));
    s.push_str(&format!(" {}: {}", colors::red("error"), e.message));
    if let Some(ref stack) = e.stack {
      s.push('\n');
      s.push_str(&colors::gray(stack).to_string());
    }
    s
  }

  /// Renders the table header row for the enabled columns.
  pub fn header(options: &Options) -> String {
    let size = options.size;
    let mut s = String::new();
    s.push_str(&format!("| {:<size$} |", "benchmark"));
    if options.avg {
      s.push_str(&format!(" {:<15} |", "time/iter (avg)"));
      s.push_str(&format!(" {:>13} |", "iter/s"));
    }
    if options.min_max {
      s.push_str(&format!(" {:^21} |", "(min … max)"));
    }
    if options.percentiles {
      s.push_str(&format!(" {:>8} | {:>8} | {:>8} |", "p75", "p99", "p995"));
    }
    s
  }

  /// Renders one successful bench as a table row.
  pub fn benchmark(
    name: &str,
    stats: &BenchmarkStats,
    options: &Options,
  ) -> String {
    let size = options.size;
    let mut s = String::new();
    s.push_str(&format!("| {:<size$} |", name));
    if options.avg {
      s.push_str(&format!(
        " {} |",
        colors::yellow(&format!("{:>15}", fmt_duration(stats.avg)))
      ));
      s.push_str(&format!(" {:>13} |", &avg_to_iter_per_s(stats.avg)));
    }
    if options.min_max {
      s.push_str(&format!(
        " ({} … {}) |",
        colors::cyan(format!("{:>8}", fmt_duration(stats.min))),
        colors::magenta(format!("{:>8}", fmt_duration(stats.max)))
      ));
    }
    if options.percentiles {
      s.push_str(
        &colors::magenta(format!(
          " {:>8} | {:>8} | {:>8} |",
          fmt_duration(stats.p75),
          fmt_duration(stats.p99),
          fmt_duration(stats.p995)
        ))
        .to_string(),
      );
    }
    s
  }

  /// Renders the group summary: picks the baseline (explicitly marked, or
  /// the fastest) and shows how many times faster/slower it is than each of
  /// the other benches.
  pub fn summary(benchmarks: &[GroupBenchmark]) -> String {
    let mut s = String::new();
    let mut benchmarks = benchmarks.to_owned();
    // Sort fastest first so index 0 is the default baseline.
    benchmarks.sort_by(|a, b| a.stats.avg.partial_cmp(&b.stats.avg).unwrap());
    let baseline = benchmarks
      .iter()
      .find(|b| b.baseline)
      .unwrap_or(&benchmarks[0]);
    s.push_str(&format!(
      "{}\n {}",
      colors::gray("summary"),
      colors::cyan_bold(&baseline.name)
    ));
    for b in benchmarks.iter().filter(|b| *b != baseline) {
      // `faster` means the baseline is at least as fast as `b`.
      let faster = b.stats.avg >= baseline.stats.avg;
      let x_faster = precision_f64(
        if faster {
          b.stats.avg / baseline.stats.avg
        } else {
          baseline.stats.avg / b.stats.avg
        },
        4,
      );
      // Drop decimals on very large ratios for readability.
      let diff = if x_faster > 1000. {
        &format!("{:>9.0}", x_faster)
      } else {
        &format!("{:>9.2}", x_faster)
      };
      s.push_str(&format!(
        "\n{}x {} than {}",
        if faster {
          colors::green(diff)
        } else {
          colors::red(diff)
        },
        if faster { "faster" } else { "slower" },
        colors::cyan_bold(&b.name)
      ));
    }
    s
  }
}
#[cfg(test)]
mod tests {
  use super::*;

  #[test]
  fn test_into_decimal_and_fractional_parts() {
    assert_eq!(into_decimal_and_fractional_parts(10.0), (10, 0));
    assert_eq!(into_decimal_and_fractional_parts(10.1), (10, 1));
    assert_eq!(into_decimal_and_fractional_parts(10.2), (10, 2));
    assert_eq!(into_decimal_and_fractional_parts(10.3), (10, 3));
    assert_eq!(into_decimal_and_fractional_parts(10.4), (10, 4));
    assert_eq!(into_decimal_and_fractional_parts(10.5), (10, 5));
    assert_eq!(into_decimal_and_fractional_parts(10.6), (10, 6));
    assert_eq!(into_decimal_and_fractional_parts(10.7), (10, 7));
    assert_eq!(into_decimal_and_fractional_parts(10.8), (10, 8));
    assert_eq!(into_decimal_and_fractional_parts(10.9), (10, 9));
    // A rounded-up fractional digit carries into the whole part.
    assert_eq!(into_decimal_and_fractional_parts(10.99), (11, 0));
  }

  #[test]
  fn test_avg_to_iter_per_s() {
    // Inputs are avg ns/iter; expected outputs are formatted iter/s.
    assert_eq!(avg_to_iter_per_s(55.85), "17,910,000");
    assert_eq!(avg_to_iter_per_s(64_870_000.0), "15.4");
    assert_eq!(avg_to_iter_per_s(104_370_000.0), "9.6");
    assert_eq!(avg_to_iter_per_s(640_000.0), "1,563");
    assert_eq!(avg_to_iter_per_s(6_400_000.0), "156.3");
    assert_eq!(avg_to_iter_per_s(46_890_000.0), "21.3");
    assert_eq!(avg_to_iter_per_s(100_000_000.0), "10.0");
    assert_eq!(avg_to_iter_per_s(1_000_000_000.0), "1.0");
    assert_eq!(avg_to_iter_per_s(5_920_000_000.0), "0.2");
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/tools/bench/reporters.rs | cli/tools/bench/reporters.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use deno_lib::version::DENO_VERSION_INFO;
use serde::Serialize;
use super::*;
use crate::tools::test::TestFailureFormatOptions;
/// Receives bench lifecycle events and renders them for the user.
pub trait BenchReporter {
  /// Called when a group of benches finishes, to emit a comparison summary.
  fn report_group_summary(&mut self);
  /// Called once per bench module with its planned benches.
  fn report_plan(&mut self, plan: &BenchPlan);
  /// Called once after every bench finished, with the aggregated report.
  fn report_end(&mut self, report: &BenchReport);
  /// Called when a bench is registered, before any bench runs.
  fn report_register(&mut self, desc: &BenchDescription);
  /// Called right before a bench starts executing.
  fn report_wait(&mut self, desc: &BenchDescription);
  /// Called with output captured while benches run.
  fn report_output(&mut self, output: &str);
  /// Called with the result of a single bench.
  fn report_result(&mut self, desc: &BenchDescription, result: &BenchResult);
  /// Called for an error thrown outside of any bench in `origin`.
  fn report_uncaught_error(&mut self, origin: &str, error: Box<JsError>);
}
// Version of the JSON output schema below; bump when its shape changes.
const JSON_SCHEMA_VERSION: u8 = 1;

/// Top-level shape of the JSON bench output.
#[derive(Debug, Serialize)]
struct JsonReporterOutput {
  // Schema version (JSON_SCHEMA_VERSION).
  version: u8,
  // Runtime description: user agent plus compile-time target triple.
  runtime: String,
  // CPU model name, best-effort.
  cpu: String,
  benches: Vec<JsonReporterBench>,
}
impl Default for JsonReporterOutput {
  fn default() -> Self {
    Self {
      version: JSON_SCHEMA_VERSION,
      // User agent plus the target triple baked in at compile time.
      runtime: format!("{} {}", DENO_VERSION_INFO.user_agent, env!("TARGET")),
      cpu: mitata::cpu::name(),
      benches: vec![],
    }
  }
}
/// One bench in the JSON output; `results` accumulates the result of every
/// run of the same (origin, group, name, baseline) bench.
#[derive(Debug, Serialize)]
struct JsonReporterBench {
  origin: String,
  group: Option<String>,
  name: String,
  baseline: bool,
  results: Vec<BenchResult>,
}
/// Reporter that collects results and prints the whole run as one JSON
/// document at the end.
#[derive(Debug, Serialize)]
pub struct JsonReporter(JsonReporterOutput);

impl JsonReporter {
  pub fn new() -> Self {
    Self(Default::default())
  }
}
#[allow(clippy::print_stdout)]
impl BenchReporter for JsonReporter {
  // JSON output has no incremental rendering; most events are no-ops and
  // everything is emitted at the end.
  fn report_group_summary(&mut self) {}

  #[cold]
  fn report_plan(&mut self, _plan: &BenchPlan) {}

  fn report_end(&mut self, _report: &BenchReport) {
    // Print the accumulated document; on serialization failure, print the
    // error instead.
    match write_json_to_stdout(self) {
      Ok(_) => (),
      Err(e) => println!("{}", e),
    }
  }

  fn report_register(&mut self, _desc: &BenchDescription) {}

  fn report_wait(&mut self, _desc: &BenchDescription) {}

  fn report_output(&mut self, _output: &str) {}

  fn report_result(&mut self, desc: &BenchDescription, result: &BenchResult) {
    // Warmup runs are excluded from the output.
    if desc.warmup {
      return;
    }
    // Append to the existing entry for this bench, or create one.
    let maybe_bench = self.0.benches.iter_mut().find(|bench| {
      bench.origin == desc.origin
        && bench.group == desc.group
        && bench.name == desc.name
        && bench.baseline == desc.baseline
    });
    if let Some(bench) = maybe_bench {
      bench.results.push(result.clone());
    } else {
      self.0.benches.push(JsonReporterBench {
        origin: desc.origin.clone(),
        group: desc.group.clone(),
        name: desc.name.clone(),
        baseline: desc.baseline,
        results: vec![result.clone()],
      });
    }
  }

  fn report_uncaught_error(&mut self, _origin: &str, _error: Box<JsError>) {}
}
/// Reporter that renders benches incrementally as a console table, with
/// per-group comparison summaries.
pub struct ConsoleReporter {
  // Name of the bench currently running (used to label its output).
  name: String,
  // Whether to echo output produced while benches run.
  show_output: bool,
  // Group of the bench currently running, if any.
  group: Option<String>,
  // True once a baseline bench has been seen in the current group.
  baseline: bool,
  // Results collected for the current group, for the summary.
  group_measurements: Vec<(BenchDescription, BenchStats)>,
  // Table layout options; None until the first plan arrives.
  options: Option<mitata::reporter::Options>,
}

impl ConsoleReporter {
  pub fn new(show_output: bool) -> Self {
    Self {
      show_output,
      group: None,
      options: None,
      baseline: false,
      name: String::new(),
      group_measurements: Vec::new(),
    }
  }
}
#[allow(clippy::print_stdout)]
impl BenchReporter for ConsoleReporter {
#[cold]
fn report_plan(&mut self, plan: &BenchPlan) {
use std::sync::atomic::AtomicBool;
use std::sync::atomic::Ordering;
static FIRST_PLAN: AtomicBool = AtomicBool::new(true);
self.report_group_summary();
self.group = None;
self.baseline = false;
self.name = String::new();
self.group_measurements.clear();
self.options = Some(mitata::reporter::Options::new(
&plan.names.iter().map(|x| x.as_str()).collect::<Vec<&str>>(),
));
let options = self.options.as_mut().unwrap();
options.percentiles = true;
if FIRST_PLAN
.compare_exchange(true, false, Ordering::SeqCst, Ordering::SeqCst)
.is_ok()
{
println!(
"{}",
colors::gray(format!(" CPU | {}", mitata::cpu::name()))
);
println!(
"{}\n",
colors::gray(format!(
"Runtime | Deno {} ({})",
DENO_VERSION_INFO.deno,
env!("TARGET")
))
);
} else {
println!();
}
println!(
"{}\n\n{}\n{}",
colors::gray(&plan.origin),
mitata::reporter::header(options),
mitata::reporter::br(options)
);
}
fn report_register(&mut self, _desc: &BenchDescription) {}
/// Called right before a bench starts running.
///
/// Remembers the bench name (used to prefix forwarded output) and, when the
/// bench belongs to a different group than the previous one, flushes the
/// previous group's summary and prints the new group header.
fn report_wait(&mut self, desc: &BenchDescription) {
  self.name.clone_from(&desc.name);
  if let Some(group) = &desc.group {
    // Compare against the current group without the is_none()/unwrap()
    // dance; Option equality covers the "no group yet" case too.
    if self.group.as_ref() != Some(group) {
      self.report_group_summary();
      println!("{} {}", colors::gray("group"), colors::green(group));
    }
    self.group = Some(group.clone());
  }
}
/// Forwards output printed by the currently running bench, prefixed with
/// the bench name — but only when output forwarding is enabled.
fn report_output(&mut self, output: &str) {
  if !self.show_output {
    return;
  }
  print!("{} {}", colors::gray(format!("{}:", self.name)), output)
}
/// Prints a single bench's result row (or its error) and records the
/// measurement for the eventual group summary.
fn report_result(&mut self, desc: &BenchDescription, result: &BenchResult) {
// warmup iterations are not reported
if desc.warmup {
return;
}
let options = self.options.as_ref().unwrap();
match result {
BenchResult::Ok(stats) => {
let mut desc = desc.clone();
// Only the first baseline bench in a group keeps its baseline flag;
// later ones are demoted so the summary has a single baseline.
if desc.baseline && !self.baseline {
self.baseline = true;
} else {
desc.baseline = false;
}
println!(
"{}",
mitata::reporter::benchmark(
&desc.name,
&mitata::reporter::BenchmarkStats {
avg: stats.avg,
min: stats.min,
max: stats.max,
p75: stats.p75,
p99: stats.p99,
p995: stats.p995,
},
options
)
);
// Explicit start()/end() timers are ignored for very fast benches
// (no high-precision measurement); warn the user about it.
if !stats.high_precision && stats.used_explicit_timers {
println!(
"{}",
colors::yellow(format!(
"Warning: start() and end() calls in \"{}\" are ignored because it averages less\nthan 10µs per iteration. Remove them for better results.",
&desc.name
))
);
}
self.group_measurements.push((desc, stats.clone()));
}
BenchResult::Failed(js_error) => {
println!(
"{}",
mitata::reporter::benchmark_error(
&desc.name,
&mitata::reporter::Error {
stack: None,
message: format_test_error(
js_error,
&TestFailureFormatOptions::default()
),
},
options
)
)
}
};
}
/// Prints the comparison summary for the measurements collected so far and
/// resets the per-group state. A summary is only rendered when there are at
/// least two measurements and either a named group or a baseline to compare
/// against.
fn report_group_summary(&mut self) {
// No plan reported yet — nothing to summarize.
if self.options.is_none() {
return;
}
if 2 <= self.group_measurements.len()
&& (self.group.is_some() || (self.group.is_none() && self.baseline))
{
println!(
"\n{}",
mitata::reporter::summary(
&self
.group_measurements
.iter()
.map(|(d, s)| mitata::reporter::GroupBenchmark {
name: d.name.clone(),
baseline: d.baseline,
group: d.group.as_deref().unwrap_or("").to_owned(),
stats: mitata::reporter::BenchmarkStats {
avg: s.avg,
min: s.min,
max: s.max,
p75: s.p75,
p99: s.p99,
p995: s.p995,
},
})
.collect::<Vec<mitata::reporter::GroupBenchmark>>(),
)
);
}
println!();
self.baseline = false;
self.group_measurements.clear();
}
/// Called when the whole bench run finished; flushes the last group summary.
fn report_end(&mut self, _: &BenchReport) {
self.report_group_summary();
}
/// Prints an error that escaped the bench harness (e.g. from a dangling
/// promise or top-level module code) along with an explanation.
fn report_uncaught_error(&mut self, _origin: &str, error: Box<JsError>) {
println!(
"{}: {}",
colors::red_bold("error"),
format_test_error(&error, &TestFailureFormatOptions::default())
);
println!(
"This error was not caught from a benchmark and caused the bench runner to fail on the referenced module."
);
println!(
"It most likely originated from a dangling promise, event/timeout handler or top-level code."
);
println!();
}
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/tools/repl/session.rs | cli/tools/repl/session.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::collections::HashMap;
use std::sync::Arc;
use std::sync::atomic::AtomicI32;
use deno_ast::ImportsNotUsedAsValues;
use deno_ast::JsxAutomaticOptions;
use deno_ast::JsxClassicOptions;
use deno_ast::ModuleKind;
use deno_ast::ModuleSpecifier;
use deno_ast::ParseDiagnosticsError;
use deno_ast::ParsedSource;
use deno_ast::SourcePos;
use deno_ast::SourceRangedForSpanned;
use deno_ast::SourceTextInfo;
use deno_ast::diagnostics::Diagnostic;
use deno_ast::swc::ast as swc_ast;
use deno_ast::swc::atoms::Wtf8Atom;
use deno_ast::swc::common::comments::CommentKind;
use deno_ast::swc::ecma_visit::Visit;
use deno_ast::swc::ecma_visit::VisitWith;
use deno_ast::swc::ecma_visit::noop_visit_type;
use deno_core::LocalInspectorSession;
use deno_core::PollEventLoopOptions;
use deno_core::anyhow::anyhow;
use deno_core::error::AnyError;
use deno_core::error::CoreError;
use deno_core::futures::FutureExt;
use deno_core::futures::StreamExt;
use deno_core::futures::channel::mpsc::UnboundedReceiver;
use deno_core::futures::channel::mpsc::UnboundedSender;
use deno_core::futures::channel::mpsc::unbounded;
use deno_core::parking_lot::Mutex as SyncMutex;
use deno_core::serde_json;
use deno_core::serde_json::Value;
use deno_core::unsync::spawn;
use deno_core::url::Url;
use deno_error::JsErrorBox;
use deno_graph::Position;
use deno_graph::PositionRange;
use deno_graph::analysis::SpecifierWithRange;
use deno_lib::util::result::any_and_jserrorbox_downcast_ref;
use deno_resolver::deno_json::CompilerOptionsResolver;
use deno_runtime::worker::MainWorker;
use deno_semver::npm::NpmPackageReqReference;
use node_resolver::NodeResolutionKind;
use node_resolver::ResolutionMode;
use once_cell::sync::Lazy;
use regex::Match;
use regex::Regex;
use tokio::sync::Mutex;
use tokio::sync::oneshot;
use crate::args::CliOptions;
use crate::cdp;
use crate::cdp::RemoteObjectId;
use crate::colors;
use crate::lsp::ReplLanguageServer;
use crate::npm::CliNpmInstaller;
use crate::resolver::CliResolver;
use crate::tools::test::TestEventReceiver;
use crate::tools::test::TestEventTracker;
use crate::tools::test::TestFailureFormatOptions;
use crate::tools::test::report_tests;
use crate::tools::test::reporters::PrettyTestReporter;
use crate::tools::test::reporters::TestReporter;
use crate::tools::test::run_tests_for_worker;
use crate::tools::test::worker_has_tests;
/// Converts a regex match inside a comment's text into a `PositionRange`
/// within the whole source file.
///
/// `comment_start` is the position of the comment's opening token; `m` is a
/// match relative to the comment text. For quoted specifiers the range is
/// widened by one character on each side to include the quotes; jsx import
/// sources are written without quotes, so no widening is applied.
fn comment_source_to_position_range(
  comment_start: SourcePos,
  m: &Match,
  text_info: &SourceTextInfo,
  is_jsx_import_source: bool,
) -> PositionRange {
  // The comment text begins after the `//` or `/*`, hence the +2 offset.
  let text_start = comment_start + 2;
  let quote_padding = if is_jsx_import_source { 0 } else { 1 };
  let start = Position::from_source_pos(
    text_start + m.start() - quote_padding,
    text_info,
  );
  let end = Position::from_source_pos(
    text_start + m.end() + quote_padding,
    text_info,
  );
  PositionRange { start, end }
}
/// Returns the JavaScript prelude that is evaluated once when the REPL
/// session starts. It builds the `repl_internal` helper object (captured via
/// its remote object id) and installs the `_`, `_error`, and `clear`
/// globals. The JS source must stay byte-for-byte stable since later CDP
/// calls rely on the returned object's shape.
fn get_prelude() -> String {
r#"(() => {
const repl_internal = {
String,
lastEvalResult: undefined,
lastThrownError: undefined,
inspectArgs: Deno[Deno.internal].inspectArgs,
noColor: Deno.noColor,
get closed() {
try {
return typeof globalThis.closed === 'undefined' ? false : globalThis.closed;
} catch {
return false;
}
}
};
Object.defineProperty(globalThis, "_", {
configurable: true,
get: () => repl_internal.lastEvalResult,
set: (value) => {
Object.defineProperty(globalThis, "_", {
value: value,
writable: true,
enumerable: true,
configurable: true,
});
console.log("Last evaluation result is no longer saved to _.");
},
});
Object.defineProperty(globalThis, "_error", {
configurable: true,
get: () => repl_internal.lastThrownError,
set: (value) => {
Object.defineProperty(globalThis, "_error", {
value: value,
writable: true,
enumerable: true,
configurable: true,
});
console.log("Last thrown error is no longer saved to _error.");
},
});
globalThis.clear = console.clear.bind(console);
return repl_internal;
})()"#.to_string()
}
/// The REPL-facing outcome of evaluating a line: either a successfully
/// rendered value or an error message. Both variants carry text that is
/// already formatted for display.
pub enum EvaluationOutput {
  Value(String),
  Error(String),
}

impl std::fmt::Display for EvaluationOutput {
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    // Both variants hold pre-formatted text, so a single or-pattern suffices.
    match self {
      EvaluationOutput::Value(text) | EvaluationOutput::Error(text) => {
        f.write_str(text)
      }
    }
  }
}
/// Flattens an evaluation result into displayable output, rendering an
/// error as a red `error:`-prefixed message.
pub fn result_to_evaluation_output(
  r: Result<EvaluationOutput, AnyError>,
) -> EvaluationOutput {
  r.unwrap_or_else(|err| {
    EvaluationOutput::Error(format!("{} {:#}", colors::red("error:"), err))
  })
}
/// The TypeScript source that was evaluated together with the CDP response
/// produced by evaluating its transpiled form.
#[derive(Debug)]
pub struct TsEvaluateResponse {
pub ts_code: String,
pub value: cdp::EvaluateResponse,
}
/// A live REPL session: the worker, its inspector session, and the state
/// needed to evaluate lines and report test results.
pub struct ReplSession {
// remote object id of the injected `repl_internal` prelude object
internal_object_id: Option<RemoteObjectId>,
npm_installer: Option<Arc<CliNpmInstaller>>,
resolver: Arc<CliResolver>,
// NB: `session` and `state` must come before Worker, so that relevant V8 objects
// are dropped before the isolate is dropped with `worker`.
session: LocalInspectorSession,
state: ReplSessionState,
pub worker: MainWorker,
pub context_id: u64,
pub language_server: ReplLanguageServer,
// inspector notifications (non-response messages) forwarded from the callback
pub notifications: Arc<Mutex<UnboundedReceiver<Value>>>,
// pseudo-specifier used as the referrer when resolving REPL imports
referrer: ModuleSpecifier,
main_module: ModuleSpecifier,
test_reporter_factory: Box<dyn Fn() -> Box<dyn TestReporter>>,
/// This is only optional because it's temporarily taken when evaluating.
test_event_receiver: Option<TestEventReceiver>,
jsx: deno_ast::JsxRuntime,
decorators: deno_ast::DecoratorsTranspileOption,
}
// TODO: duplicated in `cli/tools/run/hmr.rs`
/// Per-message rendezvous state: either the response arrived before anyone
/// awaited it, or a waiter registered a oneshot sender first.
#[derive(Debug)]
enum InspectorMessageState {
Ready(serde_json::Value),
WaitingFor(oneshot::Sender<serde_json::Value>),
}
/// Mutable state shared between the inspector callback and response waiters.
#[derive(Debug)]
pub struct ReplSessionInner {
messages: HashMap<i32, InspectorMessageState>,
notification_tx: UnboundedSender<serde_json::Value>,
}
/// Cheaply clonable handle to the shared inspector message state.
#[derive(Clone, Debug)]
pub struct ReplSessionState(Arc<SyncMutex<ReplSessionInner>>);
impl ReplSessionState {
/// Creates the shared state; `notification_tx` receives inspector
/// notifications (messages without a response id).
pub fn new(notification_tx: UnboundedSender<serde_json::Value>) -> Self {
Self(Arc::new(SyncMutex::new(ReplSessionInner {
messages: HashMap::new(),
notification_tx,
})))
}
/// Invoked by the inspector for every outgoing message. Notifications are
/// forwarded on the notification channel; responses are handed to the
/// waiter registered for their id (or stored until one registers).
fn callback(&self, msg: deno_core::InspectorMsg) {
let deno_core::InspectorMsgKind::Message(msg_id) = msg.kind else {
if let Ok(value) = serde_json::from_str(&msg.content) {
let _ = self.0.lock().notification_tx.unbounded_send(value);
}
return;
};
let message: serde_json::Value = match serde_json::from_str(&msg.content) {
Ok(v) => v,
// A syntax error here is synthesized into an "Unterminated string
// literal" response so the REPL surfaces a readable error instead of
// panicking; any other parse failure is a bug.
Err(error) => match error.classify() {
serde_json::error::Category::Syntax => serde_json::json!({
"id": msg_id,
"result": {
"result": {
"type": "error",
"description": "Unterminated string literal",
"value": "Unterminated string literal",
},
"exceptionDetails": {
"exceptionId": 0,
"text": "Unterminated string literal",
"lineNumber": 0,
"columnNumber": 0
},
},
}),
_ => panic!("Could not parse inspector message"),
},
};
// If no waiter registered yet, park the response; otherwise wake the
// waiter with it.
let mut state = self.0.lock();
let Some(message_state) = state.messages.remove(&msg_id) else {
state
.messages
.insert(msg_id, InspectorMessageState::Ready(message));
return;
};
let InspectorMessageState::WaitingFor(sender) = message_state else {
return;
};
let _ = sender.send(message);
}
/// Waits for the response to `msg_id`, returning its `result` payload.
/// Fast path: the response already arrived and was parked as `Ready`.
async fn wait_for_response(&self, msg_id: i32) -> serde_json::Value {
if let Some(message_state) = self.0.lock().messages.remove(&msg_id) {
let InspectorMessageState::Ready(mut value) = message_state else {
// ids are unique, so a second waiter for the same id is impossible
unreachable!();
};
return value["result"].take();
}
let (tx, rx) = oneshot::channel();
self
.0
.lock()
.messages
.insert(msg_id, InspectorMessageState::WaitingFor(tx));
let mut value = rx.await.unwrap();
value["result"].take()
}
}
/// Process-wide counter used to assign a unique id to every inspector
/// message sent from the REPL.
static NEXT_MSG_ID: AtomicI32 = AtomicI32::new(0);

/// Returns a fresh, monotonically increasing inspector message id.
fn next_msg_id() -> i32 {
  use std::sync::atomic::Ordering;
  NEXT_MSG_ID.fetch_add(1, Ordering::Relaxed)
}
impl ReplSession {
#[allow(clippy::too_many_arguments)]
/// Creates a REPL session: starts the REPL language server, opens an
/// inspector session on the worker, waits for the default execution context,
/// and injects the JS prelude whose remote object id is kept for later
/// `callFunctionOn` calls.
pub async fn initialize(
cli_options: &CliOptions,
npm_installer: Option<Arc<CliNpmInstaller>>,
resolver: Arc<CliResolver>,
compiler_options_resolver: &CompilerOptionsResolver,
mut worker: MainWorker,
main_module: ModuleSpecifier,
test_event_receiver: TestEventReceiver,
) -> Result<Self, AnyError> {
let language_server = ReplLanguageServer::new_initialized().await?;
let (notification_tx, mut notification_rx) = unbounded();
let repl_session_state = ReplSessionState::new(notification_tx);
let state = repl_session_state.clone();
let callback =
Box::new(move |message| repl_session_state.callback(message));
let mut session = worker.create_inspector_session(callback);
session.post_message::<()>(next_msg_id(), "Runtime.enable", None);
// Enabling the runtime domain will always trigger one executionContextCreated for each
// context the inspector knows about so we grab the execution context from that since
// our inspector does not support a default context (0 is an invalid context id).
let context_id: u64;
loop {
let notification = notification_rx.next().await.unwrap();
let notification =
serde_json::from_value::<cdp::Notification>(notification)?;
if notification.method == "Runtime.executionContextCreated" {
let execution_context_created = serde_json::from_value::<
cdp::ExecutionContextCreated,
>(notification.params)?;
assert!(
execution_context_created
.context
.aux_data
.get("isDefault")
.unwrap()
.as_bool()
.unwrap()
);
context_id = execution_context_created.context.id;
break;
}
}
assert_ne!(context_id, 0);
// Pseudo-module used as the referrer when resolving imports typed in the REPL.
let referrer =
deno_core::resolve_path("./$deno$repl.mts", cli_options.initial_cwd())
.unwrap();
let cwd_url =
Url::from_directory_path(cli_options.initial_cwd()).map_err(|_| {
anyhow!(
"Unable to construct URL from the path of cwd: {}",
cli_options.initial_cwd().to_string_lossy(),
)
})?;
let transpile_options = &compiler_options_resolver
.for_specifier(&cwd_url)
.transpile_options()?
.transpile;
let mut repl_session = ReplSession {
internal_object_id: None,
npm_installer,
resolver,
worker,
session,
state,
context_id,
language_server,
referrer,
notifications: Arc::new(Mutex::new(notification_rx)),
test_reporter_factory: Box::new(move || {
Box::new(PrettyTestReporter::new(
false,
true,
false,
true,
cwd_url.clone(),
TestFailureFormatOptions::default(),
))
}),
main_module,
test_event_receiver: Some(test_event_receiver),
jsx: transpile_options.jsx.clone().unwrap_or_default(),
decorators: transpile_options.decorators.clone(),
};
// inject prelude
let evaluated = repl_session.evaluate_expression(&get_prelude()).await?;
repl_session.internal_object_id = evaluated.result.object_id;
Ok(repl_session)
}
/// Replaces the factory used to construct a test reporter for `Deno.test`
/// calls evaluated inside the REPL.
pub fn set_test_reporter_factory(
&mut self,
f: Box<dyn Fn() -> Box<dyn TestReporter>>,
) {
self.test_reporter_factory = f;
}
/// Asks the evaluated context whether `globalThis.closed` is set, i.e.
/// whether the REPL should exit.
pub async fn closing(&mut self) -> Result<bool, AnyError> {
let result = self
.call_function_on_repl_internal_obj(
r#"function () { return this.closed; }"#.to_string(),
&[],
)
.await?
.result;
let closed = result
.value
.ok_or_else(|| anyhow!(result.description.unwrap()))?
.as_bool()
.unwrap();
Ok(closed)
}
/// Posts a CDP message and drives the worker's event loop until the
/// matching response arrives, returning its `result` payload.
pub async fn post_message_with_event_loop<T: serde::Serialize>(
&mut self,
method: &str,
params: Option<T>,
) -> Value {
let msg_id = next_msg_id();
self.session.post_message(msg_id, method, params);
let fut = self
.state
.wait_for_response(msg_id)
.map(Ok::<_, ()>)
.boxed_local();
self
.worker
.js_runtime
.with_event_loop_future(
fut,
PollEventLoopOptions {
// NOTE(bartlomieju): this is an important bit; we don't want to pump V8
// message loop here, so that GC won't run. Otherwise, the resulting
// object might be GC'ed before we have a chance to inspect it.
pump_v8_message_loop: false,
..Default::default()
},
)
.await
.unwrap()
}
/// Runs the worker's event loop to completion (waiting for inspector too).
pub async fn run_event_loop(&mut self) -> Result<(), CoreError> {
self.worker.run_event_loop(true).await
}
/// Evaluates a REPL line and converts the outcome (value, thrown exception,
/// or parse diagnostics) into displayable output. Never returns an error;
/// failures become `EvaluationOutput::Error`.
pub async fn evaluate_line_and_get_output(
&mut self,
line: &str,
) -> EvaluationOutput {
// Renders a single parse diagnostic with its line/column position.
fn format_diagnostic(diagnostic: &deno_ast::ParseDiagnostic) -> String {
let display_position = diagnostic.display_position();
format!(
"{}: {} at {}:{}",
colors::red("parse error"),
diagnostic.message(),
display_position.line_number,
display_position.column_number,
)
}
async fn inner(
session: &mut ReplSession,
line: &str,
) -> Result<EvaluationOutput, AnyError> {
match session.evaluate_line_with_object_wrapping(line).await {
Ok(evaluate_response) => {
let cdp::EvaluateResponse {
result,
exception_details,
} = evaluate_response.value;
Ok(if let Some(exception_details) = exception_details {
// The line threw: remember it as `_error` and render the message.
session.set_last_thrown_error(&result).await?;
let description = match exception_details.exception {
Some(exception) => {
if let Some(description) = exception.description {
description
} else if let Some(value) = exception.value {
value.to_string()
} else {
"undefined".to_string()
}
}
None => "Unknown exception".to_string(),
};
EvaluationOutput::Error(format!(
"{} {}",
exception_details.text, description
))
} else {
// Success: commit the source to the language server (for later
// completions), remember the result as `_`, and inspect it.
session
.language_server
.commit_text(&evaluate_response.ts_code)
.await;
session.set_last_eval_result(&result).await?;
let value = session.get_eval_value(&result).await?;
EvaluationOutput::Value(value)
})
}
Err(err) => {
// handle a parsing diagnostic
match any_and_jserrorbox_downcast_ref::<deno_ast::ParseDiagnostic>(
&err,
) {
Some(diagnostic) => {
Ok(EvaluationOutput::Error(format_diagnostic(diagnostic)))
}
None => {
match any_and_jserrorbox_downcast_ref::<ParseDiagnosticsError>(
&err,
) {
Some(diagnostics) => Ok(EvaluationOutput::Error(
diagnostics
.0
.iter()
.map(format_diagnostic)
.collect::<Vec<_>>()
.join("\n\n"),
)),
None => Err(err),
}
}
}
}
}
}
let result = inner(self, line).await;
result_to_evaluation_output(result)
}
/// Evaluates a line, first trying it wrapped in parentheses when it looks
/// like an object literal, and afterwards runs any tests the line may have
/// registered via `Deno.test`.
pub async fn evaluate_line_with_object_wrapping(
&mut self,
line: &str,
) -> Result<TsEvaluateResponse, AnyError> {
// Expressions like { "foo": "bar" } are interpreted as block expressions at the
// statement level rather than an object literal so we interpret it as an expression statement
// to match the behavior found in a typical prompt including browser developer tools.
let wrapped_line = if line.trim_start().starts_with('{')
&& !line.trim_end().ends_with(';')
{
format!("({})", &line)
} else {
line.to_string()
};
let evaluate_response = self.evaluate_ts_expression(&wrapped_line).await;
// If that fails, we retry it without wrapping in parens letting the error bubble up to the
// user if it is still an error.
let result = if wrapped_line != line
&& (evaluate_response.is_err()
|| evaluate_response
.as_ref()
.unwrap()
.value
.exception_details
.is_some())
{
self.evaluate_ts_expression(line).await
} else {
evaluate_response
};
// If the evaluated line registered tests, run them now and report them
// with the configured reporter; the receiver is temporarily taken and
// restored once reporting completes.
if worker_has_tests(&mut self.worker) {
let report_tests_handle = spawn(report_tests(
self.test_event_receiver.take().unwrap(),
(self.test_reporter_factory)(),
));
let event_tracker =
TestEventTracker::new(self.worker.js_runtime.op_state());
run_tests_for_worker(
&mut self.worker,
&self.main_module,
&Default::default(),
&Default::default(),
&event_tracker,
)
.await
.unwrap();
event_tracker.force_end_report().unwrap();
self.test_event_receiver = Some(report_tests_handle.await.unwrap().1);
}
result
}
/// Stores the thrown error on the prelude object so the `_error` global
/// reflects the most recent exception.
pub async fn set_last_thrown_error(
&mut self,
error: &cdp::RemoteObject,
) -> Result<(), AnyError> {
self
.post_message_with_event_loop(
"Runtime.callFunctionOn",
Some(cdp::CallFunctionOnArgs {
function_declaration:
r#"function (object) { this.lastThrownError = object; }"#
.to_string(),
object_id: self.internal_object_id.clone(),
arguments: Some(vec![error.into()]),
silent: None,
return_by_value: None,
generate_preview: None,
user_gesture: None,
await_promise: None,
execution_context_id: None,
object_group: None,
throw_on_side_effect: None,
}),
)
.await;
Ok(())
}
/// Stores the evaluation result on the prelude object so the `_` global
/// reflects the most recent value.
pub async fn set_last_eval_result(
&mut self,
evaluate_result: &cdp::RemoteObject,
) -> Result<(), AnyError> {
self
.post_message_with_event_loop(
"Runtime.callFunctionOn",
Some(cdp::CallFunctionOnArgs {
function_declaration: r#"function (object) { this.lastEvalResult = object; }"#.to_string(),
object_id: self.internal_object_id.clone(),
arguments: Some(vec![evaluate_result.into()]),
silent: None,
return_by_value: None,
generate_preview: None,
user_gesture: None,
await_promise: None,
execution_context_id: None,
object_group: None,
throw_on_side_effect: None,
}),
)
.await;
Ok(())
}
/// Calls a JS function in the REPL's execution context with the given
/// remote-object arguments (no `this` binding).
pub async fn call_function_on_args(
&mut self,
function_declaration: String,
args: &[cdp::RemoteObject],
) -> Result<cdp::CallFunctionOnResponse, AnyError> {
let arguments: Option<Vec<cdp::CallArgument>> = if args.is_empty() {
None
} else {
Some(args.iter().map(|a| a.into()).collect())
};
let inspect_response = self
.post_message_with_event_loop(
"Runtime.callFunctionOn",
Some(cdp::CallFunctionOnArgs {
function_declaration,
object_id: None,
arguments,
silent: None,
return_by_value: None,
generate_preview: None,
user_gesture: None,
await_promise: None,
execution_context_id: Some(self.context_id),
object_group: None,
throw_on_side_effect: None,
}),
)
.await;
let response: cdp::CallFunctionOnResponse =
serde_json::from_value(inspect_response)?;
Ok(response)
}
/// Calls a JS function with `this` bound to the injected `repl_internal`
/// prelude object.
pub async fn call_function_on_repl_internal_obj(
&mut self,
function_declaration: String,
args: &[cdp::RemoteObject],
) -> Result<cdp::CallFunctionOnResponse, AnyError> {
let arguments: Option<Vec<cdp::CallArgument>> = if args.is_empty() {
None
} else {
Some(args.iter().map(|a| a.into()).collect())
};
let inspect_response = self
.post_message_with_event_loop(
"Runtime.callFunctionOn",
Some(cdp::CallFunctionOnArgs {
function_declaration,
object_id: self.internal_object_id.clone(),
arguments,
silent: None,
return_by_value: None,
generate_preview: None,
user_gesture: None,
await_promise: None,
execution_context_id: None,
object_group: None,
throw_on_side_effect: None,
}),
)
.await;
let response: cdp::CallFunctionOnResponse =
serde_json::from_value(inspect_response)?;
Ok(response)
}
/// Renders an evaluation result to a display string by calling the
/// prelude's `inspectArgs` inside the evaluated context.
pub async fn get_eval_value(
&mut self,
evaluate_result: &cdp::RemoteObject,
) -> Result<String, AnyError> {
// TODO(caspervonb) we should investigate using previews here but to keep things
// consistent with the previous implementation we just get the preview result from
// Deno.inspectArgs.
let response = self
.call_function_on_repl_internal_obj(
r#"function (object) {
try {
return this.inspectArgs(["%o", object], { colors: !this.noColor });
} catch (err) {
return this.inspectArgs(["%o", err]);
}
}"#
.to_string(),
std::slice::from_ref(evaluate_result),
)
.await?;
let s = response
.result
.value
.map(|v| v.as_str().unwrap().to_string())
.or(response.result.description)
.ok_or_else(|| anyhow!("failed to evaluate expression"))?;
Ok(s)
}
/// Parses the expression (TSX first, then plain TypeScript), installs any
/// referenced npm packages, applies JSX pragmas, transpiles, and evaluates
/// the resulting JavaScript.
async fn evaluate_ts_expression(
&mut self,
expression: &str,
) -> Result<TsEvaluateResponse, AnyError> {
// Prefer TSX so JSX syntax works; fall back to TypeScript because some
// valid TS (e.g. `<T>value` casts) does not parse as TSX. On double
// failure the original TSX error is returned.
let parsed_source =
match parse_source_as(expression.to_string(), deno_ast::MediaType::Tsx) {
Ok(parsed) => parsed,
Err(err) => {
match parse_source_as(
expression.to_string(),
deno_ast::MediaType::TypeScript,
) {
Ok(parsed) => parsed,
_ => {
return Err(err);
}
}
}
};
self
.check_for_npm_or_node_imports(&parsed_source.program())
.await?;
self.analyze_and_handle_jsx(&parsed_source);
let transpiled_src = parsed_source
.transpile(
&deno_ast::TranspileOptions {
decorators: self.decorators.clone(),
imports_not_used_as_values: ImportsNotUsedAsValues::Preserve,
jsx: Some(self.jsx.clone()),
var_decl_imports: true,
verbatim_module_syntax: false,
},
&deno_ast::TranspileModuleOptions {
module_kind: Some(ModuleKind::Esm),
},
&deno_ast::EmitOptions {
source_map: deno_ast::SourceMapOption::None,
source_map_base: None,
source_map_file: None,
inline_sources: false,
remove_comments: false,
},
)?
.into_source()
.text;
let value = self
.evaluate_expression(&format!("'use strict'; void 0;{transpiled_src}"))
.await?;
Ok(TsEvaluateResponse {
ts_code: expression.to_string(),
value,
})
}
/// Updates the session's JSX transpile configuration from any
/// `@jsx`/`@jsxFrag`/`@jsxImportSource` pragmas found in the parsed line,
/// switching between classic and automatic runtimes as needed.
fn analyze_and_handle_jsx(&mut self, parsed_source: &ParsedSource) {
let Some(analyzed_pragmas) = analyze_jsx_pragmas(parsed_source) else {
return;
};
if !analyzed_pragmas.has_any() {
return;
}
// `@jsx` selects the classic runtime's factory function.
if let Some(jsx) = analyzed_pragmas.jsx {
match &mut self.jsx {
deno_ast::JsxRuntime::Classic(jsx_classic_options) => {
jsx_classic_options.factory = jsx.text;
}
deno_ast::JsxRuntime::Automatic(_)
| deno_ast::JsxRuntime::Precompile(_) => {
self.jsx = deno_ast::JsxRuntime::Classic(JsxClassicOptions {
factory: jsx.text,
..Default::default()
});
}
}
}
// `@jsxFrag` selects the classic runtime's fragment factory.
if let Some(jsx_frag) = analyzed_pragmas.jsx_fragment {
match &mut self.jsx {
deno_ast::JsxRuntime::Classic(jsx_classic_options) => {
jsx_classic_options.fragment_factory = jsx_frag.text;
}
deno_ast::JsxRuntime::Automatic(_)
| deno_ast::JsxRuntime::Precompile(_) => {
self.jsx = deno_ast::JsxRuntime::Classic(JsxClassicOptions {
fragment_factory: jsx_frag.text,
..Default::default()
});
}
}
}
// `@jsxImportSource` switches to (or updates) the automatic runtime.
if let Some(jsx_import_source) = analyzed_pragmas.jsx_import_source {
match &mut self.jsx {
deno_ast::JsxRuntime::Classic(_) => {
self.jsx = deno_ast::JsxRuntime::Automatic(JsxAutomaticOptions {
import_source: Some(jsx_import_source.text),
development: false,
});
}
deno_ast::JsxRuntime::Automatic(automatic)
| deno_ast::JsxRuntime::Precompile(deno_ast::JsxPrecompileOptions {
automatic,
..
}) => {
automatic.import_source = Some(jsx_import_source.text);
}
}
}
}
/// Collects import specifiers from the parsed program, resolves them, and
/// installs/caches any npm packages they reference before evaluation.
async fn check_for_npm_or_node_imports(
&mut self,
program: &swc_ast::Program,
) -> Result<(), AnyError> {
// Without an npm installer (e.g. no package management) there is nothing to do.
let Some(npm_installer) = &self.npm_installer else {
return Ok(());
};
let mut collector = ImportCollector::new();
program.visit_with(&mut collector);
let resolved_imports = collector
.imports
.iter()
.flat_map(|i| {
let specifier = i.to_string_lossy();
self
.resolver
.resolve(
&specifier,
&self.referrer,
deno_graph::Position::zeroed(),
ResolutionMode::Import,
NodeResolutionKind::Execution,
)
.ok()
.or_else(|| ModuleSpecifier::parse(&specifier).ok())
})
.collect::<Vec<_>>();
let npm_imports = resolved_imports
.iter()
.flat_map(|url| NpmPackageReqReference::from_specifier(url).ok())
.map(|r| r.into_inner().req)
.collect::<Vec<_>>();
if !npm_imports.is_empty() {
npm_installer
.add_and_cache_package_reqs(&npm_imports)
.await?;
}
Ok(())
}
/// Evaluates raw JavaScript in the session's context via `Runtime.evaluate`
/// with REPL mode enabled (allows redeclaring `let`/`const`).
async fn evaluate_expression(
&mut self,
expression: &str,
) -> Result<cdp::EvaluateResponse, JsErrorBox> {
let res = self
.post_message_with_event_loop(
"Runtime.evaluate",
Some(cdp::EvaluateArgs {
expression: expression.to_string(),
object_group: None,
include_command_line_api: None,
silent: None,
context_id: Some(self.context_id),
return_by_value: None,
generate_preview: None,
user_gesture: None,
await_promise: None,
throw_on_side_effect: None,
timeout: None,
disable_breaks: None,
repl_mode: Some(true),
allow_unsafe_eval_blocked_by_csp: None,
unique_context_id: None,
}),
)
.await;
serde_json::from_value(res).map_err(JsErrorBox::from_err)
}
}
/// Walk an AST and get all import specifiers for analysis if any of them is
/// an npm specifier.
struct ImportCollector {
// raw specifier strings gathered from static imports, re-exports, and dynamic import() calls
pub imports: Vec<Wtf8Atom>,
}
impl ImportCollector {
  /// Creates a collector with no specifiers recorded yet.
  pub fn new() -> Self {
    Self {
      imports: Vec::new(),
    }
  }
}
impl Visit for ImportCollector {
noop_visit_type!();
/// Records the specifier of dynamic `import("...")` calls whose argument
/// is a string literal; non-literal arguments cannot be analyzed statically.
fn visit_call_expr(&mut self, call_expr: &swc_ast::CallExpr) {
if !matches!(call_expr.callee, swc_ast::Callee::Import(_)) {
return;
}
if !call_expr.args.is_empty() {
let arg = &call_expr.args[0];
if let swc_ast::Expr::Lit(swc_ast::Lit::Str(str_lit)) = &*arg.expr {
self.imports.push(str_lit.value.clone());
}
}
}
/// Records specifiers of static imports and re-exports; type-only imports
/// are skipped because they are erased at runtime.
fn visit_module_decl(&mut self, module_decl: &swc_ast::ModuleDecl) {
use deno_ast::swc::ast::*;
match module_decl {
ModuleDecl::Import(import_decl) => {
if import_decl.type_only {
return;
}
self.imports.push(import_decl.src.value.clone());
}
ModuleDecl::ExportAll(export_all) => {
self.imports.push(export_all.src.value.clone());
}
ModuleDecl::ExportNamed(export_named) => {
if let Some(src) = &export_named.src {
self.imports.push(src.value.clone());
}
}
_ => {}
}
}
}
/// Parses REPL input as a module with the given media type, using a
/// matching synthetic `file:///repl.*` specifier so diagnostics reference a
/// stable name.
fn parse_source_as(
  source: String,
  media_type: deno_ast::MediaType,
) -> Result<deno_ast::ParsedSource, AnyError> {
  let specifier = match media_type {
    deno_ast::MediaType::Tsx => {
      ModuleSpecifier::parse("file:///repl.tsx").unwrap()
    }
    _ => ModuleSpecifier::parse("file:///repl.ts").unwrap(),
  };
  // Tokens are captured so later pragma analysis can inspect comments.
  let parsed = deno_ast::parse_module(deno_ast::ParseParams {
    specifier,
    text: source.into(),
    media_type,
    capture_tokens: true,
    maybe_syntax: None,
    scope_analysis: false,
  })?;
  Ok(parsed)
}
// TODO(bartlomieju): remove these and use regexes from `deno_graph`
/// Matches the `@jsxImportSource` pragma (case-insensitive; allows leading
/// whitespace and block-comment asterisks).
static JSX_IMPORT_SOURCE_RE: Lazy<Regex> =
Lazy::new(|| Regex::new(r"(?i)^[\s*]*@jsxImportSource\s+(\S+)").unwrap());
/// Matches the `@jsx` pragma.
static JSX_RE: Lazy<Regex> =
Lazy::new(|| Regex::new(r"(?i)^[\s*]*@jsx\s+(\S+)").unwrap());
/// Matches the `@jsxFrag` pragma.
static JSX_FRAG_RE: Lazy<Regex> =
Lazy::new(|| Regex::new(r"(?i)^[\s*]*@jsxFrag\s+(\S+)").unwrap());
/// JSX pragmas extracted from the leading comments of a parsed source.
#[derive(Default, Debug)]
struct AnalyzedJsxPragmas {
/// Information about `@jsxImportSource` pragma.
jsx_import_source: Option<SpecifierWithRange>,
/// Information about the `@jsx` pragma.
jsx: Option<SpecifierWithRange>,
/// Information about the `@jsxFrag` pragma.
jsx_fragment: Option<SpecifierWithRange>,
}
impl AnalyzedJsxPragmas {
  /// Returns true when at least one JSX pragma was found.
  fn has_any(&self) -> bool {
    !(self.jsx_import_source.is_none()
      && self.jsx.is_none()
      && self.jsx_fragment.is_none())
  }
}
/// Analyze provided source and return information about various pragmas
/// used to configure the JSX transforms.
///
/// Returns `None` when the media type cannot contain JSX at all; otherwise
/// returns the (possibly empty) set of pragmas found in leading block
/// comments. When a pragma appears in multiple comments, the last one wins.
fn analyze_jsx_pragmas(
parsed_source: &ParsedSource,
) -> Option<AnalyzedJsxPragmas> {
if !matches!(
parsed_source.media_type(),
deno_ast::MediaType::Jsx | deno_ast::MediaType::Tsx
) {
return None;
}
let mut analyzed_pragmas = AnalyzedJsxPragmas::default();
for c in parsed_source.get_leading_comments()?.iter() {
// Pragmas are only honored in `/* ... */` block comments.
if c.kind != CommentKind::Block {
continue; // invalid
}
if let Some(captures) = JSX_IMPORT_SOURCE_RE.captures(&c.text)
&& let Some(m) = captures.get(1)
{
analyzed_pragmas.jsx_import_source = Some(SpecifierWithRange {
text: m.as_str().to_string(),
range: comment_source_to_position_range(
c.start(),
&m,
parsed_source.text_info_lazy(),
true,
),
});
}
if let Some(captures) = JSX_RE.captures(&c.text)
&& let Some(m) = captures.get(1)
{
analyzed_pragmas.jsx = Some(SpecifierWithRange {
text: m.as_str().to_string(),
range: comment_source_to_position_range(
c.start(),
&m,
parsed_source.text_info_lazy(),
false,
),
});
}
if let Some(captures) = JSX_FRAG_RE.captures(&c.text)
&& let Some(m) = captures.get(1)
{
analyzed_pragmas.jsx_fragment = Some(SpecifierWithRange {
text: m.as_str().to_string(),
range: comment_source_to_position_range(
c.start(),
&m,
parsed_source.text_info_lazy(),
false,
),
});
}
}
Some(analyzed_pragmas)
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/tools/repl/editor.rs | cli/tools/repl/editor.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::path::PathBuf;
use std::sync::Arc;
use std::sync::atomic::AtomicBool;
use std::sync::atomic::Ordering::Relaxed;
use deno_ast::swc::parser::error::SyntaxError;
use deno_ast::swc::parser::token::BinOpToken;
use deno_ast::swc::parser::token::Token;
use deno_ast::swc::parser::token::Word;
use deno_ast::view::AssignOp;
use deno_core::anyhow::Context as _;
use deno_core::error::AnyError;
use deno_core::parking_lot::Mutex;
use deno_core::serde_json;
use rustyline::Cmd;
use rustyline::CompletionType;
use rustyline::ConditionalEventHandler;
use rustyline::Config;
use rustyline::Context;
use rustyline::Editor;
use rustyline::Event;
use rustyline::EventContext;
use rustyline::EventHandler;
use rustyline::KeyCode;
use rustyline::KeyEvent;
use rustyline::Modifiers;
use rustyline::RepeatCount;
use rustyline::completion::Completer;
use rustyline::error::ReadlineError;
use rustyline::highlight::Highlighter;
use rustyline::validate::ValidationContext;
use rustyline::validate::ValidationResult;
use rustyline::validate::Validator;
use rustyline_derive::Helper;
use rustyline_derive::Hinter;
use super::channel::RustylineSyncMessageSender;
use crate::cdp;
use crate::colors;
// Provides helpers to the editor like validation for multi-line edits, completion candidates for
// tab completion.
#[derive(Helper, Hinter)]
pub struct EditorHelper {
// id of the REPL's V8 execution context, passed along with CDP requests
pub context_id: u64,
// synchronous channel used to post CDP messages to the REPL session thread
pub sync_sender: RustylineSyncMessageSender,
}
impl EditorHelper {
/// Returns the names of global `let`/`const`/`class` bindings in the REPL
/// context, used as tab-completion candidates.
pub fn get_global_lexical_scope_names(&self) -> Vec<String> {
let evaluate_response = self
.sync_sender
.post_message(
"Runtime.globalLexicalScopeNames",
Some(cdp::GlobalLexicalScopeNamesArgs {
execution_context_id: Some(self.context_id),
}),
)
.unwrap();
let evaluate_response: cdp::GlobalLexicalScopeNamesResponse =
serde_json::from_value(evaluate_response).unwrap();
evaluate_response.names
}
/// Returns property names of the value `expr` evaluates to, for use as
/// completion candidates after a `.`. Falls back to the matching
/// prototype's properties for primitive values.
pub fn get_expression_property_names(&self, expr: &str) -> Vec<String> {
// try to get the properties from the expression
if let Some(properties) = self.get_object_expr_properties(expr) {
return properties;
}
// otherwise fall back to the prototype
let expr_type = self.get_expression_type(expr);
let object_expr = match expr_type.as_deref() {
// possibilities: https://chromedevtools.github.io/devtools-protocol/v8/Runtime/#type-RemoteObject
Some("object") => "Object.prototype",
Some("function") => "Function.prototype",
Some("string") => "String.prototype",
Some("boolean") => "Boolean.prototype",
Some("bigint") => "BigInt.prototype",
Some("number") => "Number.prototype",
_ => return Vec::new(), // undefined, symbol, and unhandled
};
self
.get_object_expr_properties(object_expr)
.unwrap_or_default()
}
fn get_expression_type(&self, expr: &str) -> Option<String> {
self.evaluate_expression(expr).map(|res| res.result.kind)
}
fn get_object_expr_properties(
&self,
object_expr: &str,
) -> Option<Vec<String>> {
let evaluate_result = self.evaluate_expression(object_expr)?;
let object_id = evaluate_result.result.object_id?;
let get_properties_response = self
.sync_sender
.post_message(
"Runtime.getProperties",
Some(cdp::GetPropertiesArgs {
object_id,
own_properties: None,
accessor_properties_only: None,
generate_preview: None,
non_indexed_properties_only: Some(true),
}),
)
.ok()?;
let get_properties_response: cdp::GetPropertiesResponse =
serde_json::from_value(get_properties_response).ok()?;
Some(
get_properties_response
.result
.into_iter()
.map(|prop| prop.name)
.collect(),
)
}
fn evaluate_expression(&self, expr: &str) -> Option<cdp::EvaluateResponse> {
let evaluate_response = self
.sync_sender
.post_message(
"Runtime.evaluate",
Some(cdp::EvaluateArgs {
expression: expr.to_string(),
object_group: None,
include_command_line_api: None,
silent: None,
context_id: Some(self.context_id),
return_by_value: None,
generate_preview: None,
user_gesture: None,
await_promise: None,
throw_on_side_effect: Some(true),
timeout: Some(200),
disable_breaks: None,
repl_mode: None,
allow_unsafe_eval_blocked_by_csp: None,
unique_context_id: None,
}),
)
.ok()?;
let evaluate_response: cdp::EvaluateResponse =
serde_json::from_value(evaluate_response).ok()?;
if evaluate_response.exception_details.is_some() {
None
} else {
Some(evaluate_response)
}
}
}
/// Reports whether `c` terminates an identifier-like word for REPL
/// completion purposes: whitespace and ASCII punctuation are boundaries,
/// except `.`, `_` and `$`, which may appear inside a member expression.
fn is_word_boundary(c: char) -> bool {
  match c {
    '.' | '_' | '$' => false,
    _ => c.is_ascii_whitespace() || c.is_ascii_punctuation(),
  }
}
/// Extracts the identifier-like expression (e.g. `obj.prop`) surrounding
/// byte position `cursor_pos` in `line`, delimited by [`is_word_boundary`].
fn get_expr_from_line_at_pos(line: &str, cursor_pos: usize) -> &str {
  // Last boundary before the cursor (or start of line).
  let start = line[..cursor_pos].rfind(is_word_boundary).unwrap_or(0);
  // NOTE(review): `rfind` on the *post-cursor* slice extends the word to the
  // last boundary after the cursor rather than the first one — presumably
  // `find` was intended. In practice completion is requested with the cursor
  // at the end of the typed word, where both agree. TODO confirm.
  let end = line[cursor_pos..]
    .rfind(is_word_boundary)
    .map(|i| cursor_pos + i)
    .unwrap_or(cursor_pos);

  let word = &line[start..end];
  // `start` may sit on the boundary character itself; trim one boundary
  // char from each side.
  let word = word.strip_prefix(is_word_boundary).unwrap_or(word);
  (word.strip_suffix(is_word_boundary).unwrap_or(word)) as _
}
impl Completer for EditorHelper {
  type Candidate = String;

  /// Produces tab-completion candidates at byte position `pos` in `line`,
  /// returning the start position the candidates replace from. LSP
  /// completions win; otherwise candidates come from inspector property
  /// enumeration of the expression under the cursor.
  fn complete(
    &self,
    line: &str,
    pos: usize,
    _ctx: &Context<'_>,
  ) -> Result<(usize, Vec<String>), ReadlineError> {
    let lsp_completions = self.sync_sender.lsp_completions(line, pos);
    if !lsp_completions.is_empty() {
      // assumes all lsp completions have the same start position
      return Ok((
        lsp_completions[0].range.start,
        lsp_completions.into_iter().map(|c| c.new_text).collect(),
      ));
    }

    let expr = get_expr_from_line_at_pos(line, pos);

    // check if the expression is in the form `obj.prop`
    if let Some(index) = expr.rfind('.') {
      let sub_expr = &expr[..index];
      let prop_name = &expr[index + 1..];
      // Symbol-keyed properties are not completable as plain identifiers.
      let candidates = self
        .get_expression_property_names(sub_expr)
        .into_iter()
        .filter(|n| !n.starts_with("Symbol(") && n.starts_with(prop_name))
        .collect();

      Ok((pos - prop_name.len(), candidates))
    } else {
      // combine results of declarations and globalThis properties
      let mut candidates = self
        .get_expression_property_names("globalThis")
        .into_iter()
        .chain(self.get_global_lexical_scope_names())
        .filter(|n| n.starts_with(expr))
        .collect::<Vec<_>>();

      // sort and remove duplicates
      candidates.sort();
      candidates.dedup(); // make sure to sort first

      Ok((pos - expr.len(), candidates))
    }
  }
}
impl Validator for EditorHelper {
  // Delegates to the free `validate` function (unit-tested below) so
  // rustyline can tell complete entries from in-progress multi-line input.
  fn validate(
    &self,
    ctx: &mut ValidationContext,
  ) -> Result<ValidationResult, ReadlineError> {
    Ok(validate(ctx.input()))
  }
}
/// Classifies a REPL entry as complete (`Valid`), still being typed
/// (`Incomplete` — unclosed brackets or template literal), or malformed
/// (`Invalid`) by lexing it as TypeScript and tracking bracket nesting.
/// Anything the lexer can't make sense of is left for V8 to report.
fn validate(input: &str) -> ValidationResult {
  let line_info = text_lines::TextLines::new(input);
  let mut stack: Vec<Token> = Vec::new();
  let mut in_template = false;
  let mut div_token_count_on_current_line = 0;
  let mut last_line_index = 0;
  // Mismatched-pair errors are queued and only surfaced after the line they
  // occurred on has been fully examined (a second `/` on that line may mean
  // the tokens were really an untokenized regex literal).
  let mut queued_validation_error = None;
  let tokens = deno_ast::lex(input, deno_ast::MediaType::TypeScript)
    .into_iter()
    .filter_map(|item| match item.inner {
      deno_ast::TokenOrComment::Token(token) => Some((token, item.range)),
      deno_ast::TokenOrComment::Comment { .. } => None,
    });
  for (token, range) in tokens {
    let current_line_index = line_info.line_index(range.start);
    if current_line_index != last_line_index {
      // New line: reset the `/` counter and flush any queued error.
      div_token_count_on_current_line = 0;
      last_line_index = current_line_index;
      if let Some(error) = queued_validation_error {
        return error;
      }
    }
    match token {
      Token::BinOp(BinOpToken::Div) | Token::AssignOp(AssignOp::DivAssign) => {
        // it's too complicated to write code to detect regular expression literals
        // which are no longer tokenized, so if a `/` or `/=` happens twice on the same
        // line, then we bail
        div_token_count_on_current_line += 1;
        if div_token_count_on_current_line >= 2 {
          return ValidationResult::Valid(None);
        }
      }
      Token::BackQuote => in_template = !in_template,
      Token::LParen | Token::LBracket | Token::LBrace | Token::DollarLBrace => {
        stack.push(token)
      }
      Token::RParen | Token::RBracket | Token::RBrace => {
        match (stack.pop(), token) {
          // Properly matched pair: nothing to do.
          (Some(Token::LParen), Token::RParen)
          | (Some(Token::LBracket), Token::RBracket)
          | (Some(Token::LBrace), Token::RBrace)
          | (Some(Token::DollarLBrace), Token::RBrace) => {}
          (Some(left), _) => {
            // queue up a validation error to surface once we've finished examining the current line
            queued_validation_error = Some(ValidationResult::Invalid(Some(
              format!("Mismatched pairs: {left:?} is not properly closed"),
            )));
          }
          (None, _) => {
            // While technically invalid when unpaired, it should be V8's task to output error instead.
            // Thus marked as valid with no info.
            return ValidationResult::Valid(None);
          }
        }
      }
      Token::Error(error) => {
        match error.kind() {
          // If there is unterminated template, it continues to read input.
          SyntaxError::UnterminatedTpl => {}
          _ => {
            // If it failed parsing, it should be V8's task to output error instead.
            // Thus marked as valid with no info.
            return ValidationResult::Valid(None);
          }
        }
      }
      _ => {}
    }
  }

  if let Some(error) = queued_validation_error {
    error
  } else if !stack.is_empty() || in_template {
    ValidationResult::Incomplete
  } else {
    ValidationResult::Valid(None)
  }
}
impl Highlighter for EditorHelper {
  fn highlight_hint<'h>(&self, hint: &'h str) -> Cow<'h, str> {
    hint.into()
  }

  fn highlight_candidate<'c>(
    &self,
    candidate: &'c str,
    completion: rustyline::CompletionType,
  ) -> Cow<'c, str> {
    // List-style completion menus are shown plain; anything else is
    // syntax-highlighted like normal input.
    if completion == CompletionType::List {
      candidate.into()
    } else {
      self.highlight(candidate, 0)
    }
  }

  fn highlight_char(&self, line: &str, _: usize, _: bool) -> bool {
    !line.is_empty()
  }

  /// Syntax-highlights `line` by lexing it as TypeScript and wrapping each
  /// token's span with an ANSI color, rewriting spans in place.
  fn highlight<'l>(&self, line: &'l str, _: usize) -> Cow<'l, str> {
    let mut out_line = String::from(line);

    let mut lexed_items = deno_ast::lex(line, deno_ast::MediaType::TypeScript)
      .into_iter()
      .peekable();
    while let Some(item) = lexed_items.next() {
      // Adding color adds more bytes to the string,
      // so an offset is needed to stop spans falling out of sync.
      let offset = out_line.len() - line.len();
      let range = item.range;
      out_line.replace_range(
        range.start + offset..range.end + offset,
        &match item.inner {
          deno_ast::TokenOrComment::Token(token) => match token {
            Token::Str { .. } | Token::Template { .. } | Token::BackQuote => {
              colors::green(&line[range]).to_string()
            }
            Token::Regex(_, _) => colors::red(&line[range]).to_string(),
            Token::Num { .. } | Token::BigInt { .. } => {
              colors::yellow(&line[range]).to_string()
            }
            Token::Word(word) => match word {
              Word::True | Word::False | Word::Null => {
                colors::yellow(&line[range]).to_string()
              }
              Word::Keyword(_) => colors::cyan(&line[range]).to_string(),
              Word::Ident(ident) => {
                match ident.as_ref() {
                  "undefined" => colors::gray(&line[range]).to_string(),
                  "Infinity" | "NaN" => {
                    colors::yellow(&line[range]).to_string()
                  }
                  "async" | "of" => colors::cyan(&line[range]).to_string(),
                  _ => {
                    let next = lexed_items.peek().map(|item| &item.inner);
                    if matches!(
                      next,
                      Some(deno_ast::TokenOrComment::Token(Token::LParen))
                    ) {
                      // We're looking for something that looks like a function
                      // We use a simple heuristic: 'ident' followed by 'LParen'
                      colors::intense_blue(&line[range]).to_string()
                    } else if ident.as_ref() == "from"
                      && matches!(
                        next,
                        Some(deno_ast::TokenOrComment::Token(
                          Token::Str { .. }
                        ))
                      )
                    {
                      // When ident 'from' is followed by a string literal, highlight it
                      // E.g. "export * from 'something'" or "import a from 'something'"
                      colors::cyan(&line[range]).to_string()
                    } else {
                      line[range].to_string()
                    }
                  }
                }
              }
            },
            _ => line[range].to_string(),
          },
          deno_ast::TokenOrComment::Comment { .. } => {
            colors::gray(&line[range]).to_string()
          }
        },
      );
    }

    out_line.into()
  }
}
/// Cloneable handle to the rustyline editor plus REPL history state.
#[derive(Clone)]
pub struct ReplEditor {
  inner: Arc<Mutex<Editor<EditorHelper, rustyline::history::FileHistory>>>,
  // Where history is persisted, if anywhere.
  history_file_path: Option<PathBuf>,
  // Latch so a failing history write is only warned about once.
  errored_on_history_save: Arc<AtomicBool>,
  // Whether the next ctrl+c should exit the REPL (set after a first ctrl+c).
  should_exit_on_interrupt: Arc<AtomicBool>,
}
impl ReplEditor {
  /// Creates the rustyline editor with list-style completion, loads prior
  /// history (best-effort), binds ctrl+s (newline), tab (conditional
  /// complete/insert), and ctrl+r (reverse search), and ensures the history
  /// file's directory exists.
  ///
  /// # Errors
  /// Returns an error when the history file's parent directory cannot be
  /// created.
  pub fn new(
    helper: EditorHelper,
    history_file_path: Option<PathBuf>,
  ) -> Result<Self, AnyError> {
    let editor_config = Config::builder()
      .completion_type(CompletionType::List)
      .build();

    let mut editor =
      Editor::with_config(editor_config).expect("Failed to create editor.");
    editor.set_helper(Some(helper));
    if let Some(history_file_path) = &history_file_path {
      // Best-effort: a missing or unreadable history file must not prevent
      // the REPL from starting.
      editor.load_history(history_file_path).unwrap_or(());
    }
    editor.bind_sequence(
      KeyEvent(KeyCode::Char('s'), Modifiers::CTRL),
      EventHandler::Simple(Cmd::Newline),
    );
    editor.bind_sequence(
      KeyEvent(KeyCode::Tab, Modifiers::NONE),
      EventHandler::Conditional(Box::new(TabEventHandler)),
    );
    let should_exit_on_interrupt = Arc::new(AtomicBool::new(false));
    editor.bind_sequence(
      KeyEvent(KeyCode::Char('r'), Modifiers::CTRL),
      EventHandler::Conditional(Box::new(ReverseSearchHistoryEventHandler {
        should_exit_on_interrupt: should_exit_on_interrupt.clone(),
      })),
    );

    if let Some(history_file_path) = &history_file_path {
      let history_file_dir = history_file_path.parent().unwrap();
      std::fs::create_dir_all(history_file_dir).with_context(|| {
        format!(
          "Unable to create directory for the history file: {}",
          history_file_dir.display()
        )
      })?;
    }

    Ok(ReplEditor {
      inner: Arc::new(Mutex::new(editor)),
      history_file_path,
      errored_on_history_save: Arc::new(AtomicBool::new(false)),
      should_exit_on_interrupt,
    })
  }

  /// Reads one (possibly multi-line) entry from the user.
  pub fn readline(&self) -> Result<String, ReadlineError> {
    self.inner.lock().readline("> ")
  }

  /// Records `entry` in the in-memory history and appends it to the history
  /// file, warning at most once if the file cannot be written.
  pub fn update_history(&self, entry: String) {
    // Acquire the editor lock once for both the in-memory update and the
    // file append, instead of locking twice in a row.
    let mut inner = self.inner.lock();
    let _ = inner.add_history_entry(entry);
    if let Some(history_file_path) = &self.history_file_path
      && let Err(e) = inner.append_history(history_file_path)
    {
      // Only surface the warning the first time saving fails.
      if self.errored_on_history_save.load(Relaxed) {
        return;
      }

      self.errored_on_history_save.store(true, Relaxed);
      log::warn!("Unable to save history file: {}", e);
    }
  }

  /// Whether a pending ctrl+c should terminate the REPL.
  pub fn should_exit_on_interrupt(&self) -> bool {
    self.should_exit_on_interrupt.load(Relaxed)
  }

  pub fn set_should_exit_on_interrupt(&self, yes: bool) {
    self.should_exit_on_interrupt.store(yes, Relaxed);
  }
}
/// Command to reverse search history, same as rustyline's default ctrl+r,
/// except it also resets the REPL's should_exit flag to false.
struct ReverseSearchHistoryEventHandler {
  should_exit_on_interrupt: Arc<AtomicBool>,
}
impl ConditionalEventHandler for ReverseSearchHistoryEventHandler {
  fn handle(
    &self,
    _: &Event,
    _: RepeatCount,
    _: bool,
    _: &EventContext,
  ) -> Option<Cmd> {
    // Entering reverse search cancels a pending "press ctrl+c again to exit".
    self.should_exit_on_interrupt.store(false, Relaxed);
    Some(Cmd::ReverseSearchHistory)
  }
}
/// A custom tab key event handler
/// It uses a heuristic to determine if the user is requesting completion or if they want to insert an actual tab
/// The heuristic goes like this:
///   - If the last character before the cursor is whitespace, the user wants to insert a tab
///   - Else the user is requesting completion
struct TabEventHandler;
impl ConditionalEventHandler for TabEventHandler {
  fn handle(
    &self,
    evt: &Event,
    n: RepeatCount,
    _: bool,
    ctx: &EventContext,
  ) -> Option<Cmd> {
    // This handler is only ever bound to the plain Tab key.
    debug_assert_eq!(
      *evt,
      Event::from(KeyEvent(KeyCode::Tab, Modifiers::NONE))
    );
    if ctx.line().is_empty()
      || ctx.line()[..ctx.pos()]
        .chars()
        .next_back()
        .filter(|c| c.is_whitespace())
        .is_some()
    {
      if cfg!(target_os = "windows") {
        // Inserting a tab is broken in windows with rustyline
        // use 4 spaces as a workaround for now
        Some(Cmd::Insert(n, "    ".into()))
      } else {
        Some(Cmd::Insert(n, "\t".into()))
      }
    } else {
      None // default complete
    }
  }
}
#[cfg(test)]
mod test {
  use rustyline::validate::ValidationResult;

  use super::validate;

  #[test]
  fn validate_only_one_forward_slash_per_line() {
    // Two `/` tokens on a single line would make `validate` bail early; here
    // each line has at most one, so the open braces keep it Incomplete.
    let code = r#"function test(arr){
if( arr.length <= 1) return arr.map(a => a / 2)
let left = test( arr.slice( 0 , arr.length/2 ) )"#;
    assert!(matches!(validate(code), ValidationResult::Incomplete));
  }

  #[test]
  fn validate_regex_looking_code() {
    let code = r#"/testing/;"#;
    assert!(matches!(validate(code), ValidationResult::Valid(_)));
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/tools/repl/mod.rs | cli/tools/repl/mod.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::io;
use std::io::Write;
use std::sync::Arc;
use deno_core::error::AnyError;
use deno_core::futures::StreamExt;
use deno_core::serde_json;
use deno_core::unsync::spawn_blocking;
use deno_lib::version::DENO_VERSION_INFO;
use deno_runtime::WorkerExecutionMode;
use rustyline::error::ReadlineError;
use tokio_util::sync::CancellationToken;
use crate::args::CliOptions;
use crate::args::Flags;
use crate::args::ReplFlags;
use crate::cdp;
use crate::colors;
use crate::factory::CliFactory;
use crate::file_fetcher::CliFileFetcher;
use crate::file_fetcher::TextDecodedFile;
mod channel;
mod editor;
mod session;
use channel::RustylineSyncMessage;
use channel::RustylineSyncMessageHandler;
use channel::RustylineSyncResponse;
use channel::rustyline_channel;
use editor::EditorHelper;
use editor::ReplEditor;
pub use session::EvaluationOutput;
pub use session::ReplSession;
pub use session::TsEvaluateResponse;
use super::test::create_single_test_event_channel;
/// Bundles the pieces of a running REPL: the inspector-backed evaluation
/// session, the rustyline editor, and the handler servicing the editor's
/// synchronous requests from async code.
struct Repl {
  session: ReplSession,
  editor: ReplEditor,
  message_handler: RustylineSyncMessageHandler,
}
#[allow(clippy::print_stdout)]
impl Repl {
  /// Main read-eval-print loop. Returns when the user exits via ctrl+d, a
  /// second consecutive ctrl+c, a call to close(), or a read error.
  async fn run(&mut self) -> Result<(), AnyError> {
    loop {
      let line = read_line_and_poll(
        &mut self.session,
        &mut self.message_handler,
        self.editor.clone(),
      )
      .await;
      match line {
        Ok(line) => {
          // A successful read resets the double-ctrl+c exit latch.
          self.editor.set_should_exit_on_interrupt(false);
          self.editor.update_history(line.clone());
          let output = self.session.evaluate_line_and_get_output(&line).await;

          // We check for close and break here instead of making it a loop condition to get
          // consistent behavior in when the user evaluates a call to close().
          match self.session.closing().await {
            Ok(closing) if closing => break,
            Ok(_) => {}
            Err(err) => {
              println!("Error: {:?}", err)
            }
          };

          println!("{}", output);
        }
        Err(ReadlineError::Interrupted) => {
          // First ctrl+c warns; a second consecutive one exits.
          if self.editor.should_exit_on_interrupt() {
            break;
          }
          self.editor.set_should_exit_on_interrupt(true);
          println!("press ctrl+c again to exit");
          continue;
        }
        Err(ReadlineError::Eof) => {
          break;
        }
        Err(err) => {
          println!("Error: {:?}", err);
          break;
        }
      }
    }

    Ok(())
  }
}
#[allow(clippy::print_stdout)]
/// Reads a line on a blocking thread while concurrently servicing inspector
/// requests, LSP completion requests, runtime notifications, and the
/// worker's event loop, so the runtime stays responsive during input.
async fn read_line_and_poll(
  repl_session: &mut ReplSession,
  message_handler: &mut RustylineSyncMessageHandler,
  editor: ReplEditor,
) -> Result<String, ReadlineError> {
  // rustyline blocks, so run it off the async runtime.
  let mut line_fut = spawn_blocking(move || editor.readline());
  let mut poll_worker = true;
  let notifications_rc = repl_session.notifications.clone();
  let mut notifications = notifications_rc.lock().await;

  loop {
    tokio::select! {
      result = &mut line_fut => {
        return result.unwrap();
      }
      result = message_handler.recv() => {
        match result {
          Some(RustylineSyncMessage::PostMessage { method, params }) => {
            let result = repl_session
              .post_message_with_event_loop(&method, params)
              .await;
            message_handler.send(RustylineSyncResponse::PostMessage(result)).unwrap();
          },
          Some(RustylineSyncMessage::LspCompletions {
            line_text,
            position,
          }) => {
            let result = repl_session.language_server.completions(
              &line_text,
              position,
              CancellationToken::new(),
            ).await;
            message_handler.send(RustylineSyncResponse::LspCompletions(result)).unwrap();
          }
          None => {}, // channel closed
        }

        poll_worker = true;
      }
      message = notifications.next() => {
        if let Some(message) = message {
          let notification: cdp::Notification = serde_json::from_value(message).unwrap();
          // Surface uncaught exceptions thrown while waiting for input.
          if notification.method == "Runtime.exceptionThrown" {
            let exception_thrown: cdp::ExceptionThrown = serde_json::from_value(notification.params).unwrap();
            let (message, description) = exception_thrown.exception_details.get_message_and_description();
            println!("{} {}", message, description);
          }
        }
      }
      _ = repl_session.run_event_loop(), if poll_worker => {
        // Event loop went idle; don't re-poll until new work arrives.
        poll_worker = false;
      }
    }
  }
}
/// Resolves `eval_file` (URL or path, relative to the initial cwd), fetches
/// it bypassing permission checks, and returns its decoded source text.
async fn read_eval_file(
  cli_options: &CliOptions,
  file_fetcher: &CliFileFetcher,
  eval_file: &str,
) -> Result<Arc<str>, AnyError> {
  let specifier =
    deno_path_util::resolve_url_or_path(eval_file, cli_options.initial_cwd())?;

  let file = file_fetcher.fetch_bypass_permissions(&specifier).await?;

  Ok(TextDecodedFile::decode(file)?.source)
}
#[allow(clippy::print_stdout)]
/// Entry point for `deno repl`: constructs the worker and REPL session, runs
/// any `--eval-file` and `--eval` inputs, prints the banner, and drives the
/// interactive loop. Returns the worker's exit code.
pub async fn run(
  flags: Arc<Flags>,
  repl_flags: ReplFlags,
) -> Result<i32, AnyError> {
  let factory = CliFactory::from_flags(flags);
  let cli_options = factory.cli_options()?;
  let main_module = cli_options.resolve_main_module()?;
  let permissions = factory.root_permissions_container()?;
  let npm_installer = factory.npm_installer_if_managed().await?.cloned();
  let resolver = factory.resolver().await?.clone();
  let file_fetcher = factory.file_fetcher()?;
  let compiler_options_resolver = factory.compiler_options_resolver()?;
  let worker_factory = factory.create_cli_main_worker_factory().await?;
  let (worker, test_event_receiver) = create_single_test_event_channel();
  let test_event_sender = worker.sender;
  let mut worker = worker_factory
    .create_custom_worker(
      WorkerExecutionMode::Repl,
      main_module.clone(),
      // `deno repl` doesn't support preloading modules
      vec![],
      // `deno repl` doesn't support require modules
      vec![],
      permissions.clone(),
      vec![crate::ops::testing::deno_test::init(test_event_sender)],
      Default::default(),
      None,
    )
    .await?;
  worker.setup_repl().await?;
  let worker = worker.into_main_worker();
  let session = ReplSession::initialize(
    cli_options,
    npm_installer,
    resolver,
    compiler_options_resolver,
    worker,
    main_module.clone(),
    test_event_receiver,
  )
  .await?;

  // JSON mode (unix only) speaks a length-prefixed protocol over fd 3
  // instead of the interactive editor.
  #[cfg(unix)]
  if repl_flags.json {
    return run_json(session).await;
  }

  let rustyline_channel = rustyline_channel();
  let helper = EditorHelper {
    context_id: session.context_id,
    sync_sender: rustyline_channel.0,
  };

  let history_file_path = factory
    .deno_dir()
    .ok()
    .and_then(|dir| dir.repl_history_file_path());
  let editor = ReplEditor::new(helper, history_file_path)?;

  let mut repl = Repl {
    session,
    editor,
    message_handler: rustyline_channel.1,
  };

  if let Some(eval_files) = repl_flags.eval_files {
    for eval_file in eval_files {
      match read_eval_file(cli_options, file_fetcher, &eval_file).await {
        Ok(eval_source) => {
          let output = repl
            .session
            .evaluate_line_and_get_output(&eval_source)
            .await;
          // only output errors
          if let EvaluationOutput::Error(error_text) = output {
            println!("Error in --eval-file file \"{eval_file}\": {error_text}");
          }
        }
        Err(e) => {
          println!("Error in --eval-file file \"{eval_file}\": {e}");
        }
      }
    }
  }

  if let Some(eval) = repl_flags.eval {
    let output = repl.session.evaluate_line_and_get_output(&eval).await;
    // only output errors
    if let EvaluationOutput::Error(error_text) = output {
      println!("Error in --eval flag: {error_text}");
    }
  }

  // Doing this manually, instead of using `log::info!` because these messages
  // are supposed to go to stdout, not stderr.
  // Using writeln, because println panics in certain cases
  // (eg: broken pipes - https://github.com/denoland/deno/issues/21861)
  if !cli_options.is_quiet() {
    let mut handle = io::stdout().lock();
    writeln!(handle, "Deno {}", DENO_VERSION_INFO.deno)?;
    writeln!(handle, "exit using ctrl+d, ctrl+c, or close()")?;
    if repl_flags.is_default_command {
      writeln!(
        handle,
        "{}",
        colors::yellow("REPL is running with all permissions allowed.")
      )?;
      writeln!(
        handle,
        "To specify permissions, run `deno repl` with allow flags."
      )?;
    }
  }

  repl.run().await?;

  Ok(repl.session.worker.exit_code())
}
#[cfg(unix)]
/// JSON-protocol REPL used by `--json`: reads length-prefixed (`u32` LE)
/// JSON `ReplMessage::Run` requests from fd 3 and writes length-prefixed
/// responses back, evaluating each request in the REPL session. Returns the
/// worker's exit code once a request closes the session.
async fn run_json(mut repl_session: ReplSession) -> Result<i32, AnyError> {
  use bytes::Buf;
  use bytes::Bytes;
  use deno_runtime::deno_io::BiPipe;
  use tokio::io::AsyncReadExt;
  use tokio::io::AsyncWriteExt;
  use tokio::io::BufReader;

  // Wire format messages; `type`-tagged JSON.
  #[derive(serde::Serialize, serde::Deserialize, Debug)]
  #[serde(tag = "type")]
  enum ReplMessage {
    Run { code: String, output: bool },
    RunSuccess { output: Option<String> },
    RunFailure { text: String },
    Error { error: String },
  }

  let (receiver, mut sender) = BiPipe::from_raw(3)?.split();
  let mut receiver = BufReader::new(receiver);

  loop {
    // Each message is a little-endian u32 length followed by that many
    // bytes of JSON.
    let mut line_fut = std::pin::pin!(async {
      let len = receiver.read_u32_le().await?;
      let mut buf = vec![0; len as _];
      receiver.read_exact(&mut buf).await?;
      Ok::<_, AnyError>(buf)
    });
    let mut poll_worker = true;
    // Keep the worker's event loop turning while waiting for the next message.
    let line = loop {
      tokio::select! {
        line = &mut line_fut => break line?,
        _ = repl_session.run_event_loop(), if poll_worker => {
          poll_worker = false;
          continue;
        }
      }
    };
    let command: ReplMessage = serde_json::from_slice(&line)?;
    if let ReplMessage::Run { code, output } = command {
      let result = repl_session.evaluate_line_with_object_wrapping(&code).await;
      // We check for close and break here instead of making it a loop condition to get
      // consistent behavior in when the user evaluates a call to close().
      match repl_session.closing().await {
        Ok(closing) if closing => break,
        Ok(_) => {}
        Err(err) => {
          let buf = serde_json::to_vec(&ReplMessage::Error {
            error: format!("{}", err),
          })?;
          sender
            .write_all_buf(
              &mut Bytes::from_owner((buf.len() as u32).to_le_bytes())
                .chain(Bytes::from(buf)),
            )
            .await?;
        }
      };
      match result {
        Ok(evaluate_response) => {
          let cdp::EvaluateResponse {
            result,
            exception_details,
          } = evaluate_response.value;

          let msg = if let Some(exception_details) = exception_details {
            repl_session.set_last_thrown_error(&result).await?;
            ReplMessage::RunFailure {
              text: exception_details.text,
            }
          } else {
            repl_session
              .language_server
              .commit_text(&evaluate_response.ts_code)
              .await;

            repl_session.set_last_eval_result(&result).await?;

            let output = if output {
              // Stringify the result inside the runtime for display.
              let response = repl_session
                .call_function_on_repl_internal_obj(
                  "function (object) { return this.String(object); }".into(),
                  &[result],
                )
                .await?;
              let output = response
                .result
                .value
                .map(|v| v.as_str().unwrap().to_string())
                .or(response.result.description)
                .unwrap_or_else(|| "something went wrong".into());
              Some(output)
            } else {
              None
            };

            ReplMessage::RunSuccess { output }
          };
          let buf = serde_json::to_vec(&msg)?;
          sender
            .write_all_buf(
              &mut Bytes::from_owner((buf.len() as u32).to_le_bytes())
                .chain(Bytes::from(buf)),
            )
            .await?;
        }
        Err(err) => {
          let buf = serde_json::to_vec(&ReplMessage::Error {
            error: format!("{}", err),
          })?;
          sender
            .write_all_buf(
              &mut Bytes::from_owner((buf.len() as u32).to_le_bytes())
                .chain(Bytes::from(buf)),
            )
            .await?;
        }
      }
    }
  }

  Ok(repl_session.worker.exit_code())
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/tools/repl/channel.rs | cli/tools/repl/channel.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::cell::RefCell;
use deno_core::anyhow::anyhow;
use deno_core::error::AnyError;
use deno_core::serde_json;
use deno_core::serde_json::Value;
use deno_error::JsErrorBox;
use tokio::sync::mpsc::Receiver;
use tokio::sync::mpsc::Sender;
use tokio::sync::mpsc::UnboundedReceiver;
use tokio::sync::mpsc::UnboundedSender;
use tokio::sync::mpsc::channel;
use tokio::sync::mpsc::unbounded_channel;
use crate::lsp::ReplCompletionItem;
/// Rustyline uses synchronous methods in its interfaces, but we need to call
/// async methods. To get around this, we communicate with async code by using
/// a channel and blocking on the result.
pub fn rustyline_channel()
-> (RustylineSyncMessageSender, RustylineSyncMessageHandler) {
let (message_tx, message_rx) = channel(1);
let (response_tx, response_rx) = unbounded_channel();
(
RustylineSyncMessageSender {
message_tx,
response_rx: RefCell::new(response_rx),
},
RustylineSyncMessageHandler {
response_tx,
message_rx,
},
)
}
/// Requests sent from rustyline's synchronous callbacks to the async side.
pub enum RustylineSyncMessage {
  // An inspector protocol message: method name plus optional JSON params.
  PostMessage {
    method: String,
    params: Option<Value>,
  },
  // A request for LSP completions at `position` within `line_text`.
  LspCompletions {
    line_text: String,
    position: usize,
  },
}
/// Responses paired one-to-one with [`RustylineSyncMessage`] requests.
pub enum RustylineSyncResponse {
  PostMessage(Value),
  LspCompletions(Vec<ReplCompletionItem>),
}
/// Blocking (rustyline-side) half of the channel pair.
pub struct RustylineSyncMessageSender {
  message_tx: Sender<RustylineSyncMessage>,
  // RefCell so responses can be received through a shared `&self`.
  response_rx: RefCell<UnboundedReceiver<RustylineSyncResponse>>,
}
impl RustylineSyncMessageSender {
  /// Sends an inspector protocol message to the async side and blocks until
  /// the response arrives.
  #[allow(clippy::result_large_err)]
  pub fn post_message<T: serde::Serialize>(
    &self,
    method: &str,
    params: Option<T>,
  ) -> Result<Value, JsErrorBox> {
    match self
      .message_tx
      .blocking_send(RustylineSyncMessage::PostMessage {
        method: method.to_string(),
        params: params
          .map(|params| serde_json::to_value(params))
          .transpose()
          .map_err(JsErrorBox::from_err)?,
      }) {
      Err(err) => Err(JsErrorBox::from_err(err)),
      _ => match self.response_rx.borrow_mut().blocking_recv().unwrap() {
        RustylineSyncResponse::PostMessage(result) => Ok(result),
        // Requests and responses are strictly paired, so a completions
        // response can never answer a post_message request.
        RustylineSyncResponse::LspCompletions(_) => unreachable!(),
      },
    }
  }

  /// Requests LSP completions for `line_text` at `position`, blocking for
  /// the result; returns an empty list if the async side has gone away.
  pub fn lsp_completions(
    &self,
    line_text: &str,
    position: usize,
  ) -> Vec<ReplCompletionItem> {
    if self
      .message_tx
      .blocking_send(RustylineSyncMessage::LspCompletions {
        line_text: line_text.to_string(),
        position,
      })
      .is_err()
    {
      Vec::new()
    } else {
      match self.response_rx.borrow_mut().blocking_recv().unwrap() {
        RustylineSyncResponse::LspCompletions(result) => result,
        RustylineSyncResponse::PostMessage(_) => unreachable!(),
      }
    }
  }
}
/// Async half of the channel pair: receives requests, sends responses back.
pub struct RustylineSyncMessageHandler {
  message_rx: Receiver<RustylineSyncMessage>,
  response_tx: UnboundedSender<RustylineSyncResponse>,
}
impl RustylineSyncMessageHandler {
  /// Awaits the next request from the sync side; `None` when it hung up.
  pub async fn recv(&mut self) -> Option<RustylineSyncMessage> {
    self.message_rx.recv().await
  }

  /// Sends a response back to the blocked synchronous caller.
  pub fn send(&self, response: RustylineSyncResponse) -> Result<(), AnyError> {
    self
      .response_tx
      .send(response)
      .map_err(|err| anyhow!("{}", err))
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/tools/coverage/merge.rs | cli/tools/coverage/merge.rs | // Copyright 2018-2025 the Deno authors. MIT license.
//
// Forked from https://github.com/demurgos/v8-coverage/tree/d0ca18da8740198681e0bc68971b0a6cdb11db3e/rust
// Copyright 2021 Charles Samborski. All rights reserved. MIT license.
use std::collections::BTreeMap;
use std::collections::BTreeSet;
use std::collections::HashMap;
use std::iter::Peekable;
use super::range_tree::RangeTree;
use super::range_tree::RangeTreeArena;
use crate::cdp;
/// Coverage collected from a single process: one entry per covered script.
#[derive(Eq, PartialEq, Clone, Debug)]
pub struct ProcessCoverage {
  pub result: Vec<cdp::ScriptCoverage>,
}
/// Merges coverage reports from multiple processes into one report.
///
/// Scripts are grouped by URL (in `BTreeMap` order, so output is
/// deterministic), each group is merged with [`merge_scripts`], and the
/// merged scripts are re-numbered with fresh sequential script ids.
/// Returns `None` only when `processes` is empty.
pub fn merge_processes(
  mut processes: Vec<ProcessCoverage>,
) -> Option<ProcessCoverage> {
  // Fast path: zero or one report needs no merging.
  if processes.len() <= 1 {
    return processes.pop();
  }
  let mut url_to_scripts: BTreeMap<String, Vec<cdp::ScriptCoverage>> =
    BTreeMap::new();
  for process_cov in processes {
    for script_cov in process_cov.result {
      url_to_scripts
        .entry(script_cov.url.clone())
        .or_default()
        .push(script_cov);
    }
  }

  let result: Vec<cdp::ScriptCoverage> = url_to_scripts
    .into_values()
    .enumerate()
    .map(|(script_id, scripts)| {
      // `scripts` is already owned here, so it is passed straight through;
      // the previous `.to_vec()` cloned the entire vector needlessly.
      // Each group is non-empty by construction, so `unwrap()` is safe.
      let mut merged: cdp::ScriptCoverage = merge_scripts(scripts).unwrap();
      merged.script_id = script_id.to_string();
      merged
    })
    .collect();

  Some(ProcessCoverage { result })
}
/// Merges multiple coverage reports for the same script (URL) into one.
///
/// Function coverages are grouped by the character range of their first
/// coverage range, then each group is merged with [`merge_functions`]. The
/// script id and URL of the first report are kept.
pub fn merge_scripts(
  mut scripts: Vec<cdp::ScriptCoverage>,
) -> Option<cdp::ScriptCoverage> {
  if scripts.len() <= 1 {
    return scripts.pop();
  }
  let (script_id, url) = {
    let first: &cdp::ScriptCoverage = &scripts[0];
    (first.script_id.clone(), first.url.clone())
  };
  let mut range_to_funcs: BTreeMap<CharRange, Vec<cdp::FunctionCoverage>> =
    BTreeMap::new();
  for script_cov in scripts {
    for func_cov in script_cov.functions {
      let root_range = {
        // assumes `ranges` is non-empty with the root (whole-function)
        // range first — TODO confirm against the producer of this data
        let root_range_cov: &cdp::CoverageRange = &func_cov.ranges[0];
        CharRange {
          start: root_range_cov.start_char_offset,
          end: root_range_cov.end_char_offset,
        }
      };
      range_to_funcs.entry(root_range).or_default().push(func_cov);
    }
  }

  let functions: Vec<cdp::FunctionCoverage> = range_to_funcs
    .into_values()
    .map(|funcs| merge_functions(funcs).unwrap())
    .collect();

  Some(cdp::ScriptCoverage {
    script_id,
    url,
    functions,
  })
}
/// Half-open character-offset range used to key function coverage groups.
#[derive(Eq, PartialEq, Hash, Copy, Clone, Debug)]
struct CharRange {
  start: usize,
  end: usize,
}

impl Ord for CharRange {
  fn cmp(&self, other: &Self) -> std::cmp::Ordering {
    // Ascending by start; ties broken by *descending* end so that wider
    // (enclosing) ranges sort before the ranges they contain.
    self
      .start
      .cmp(&other.start)
      .then_with(|| other.end.cmp(&self.end))
  }
}

impl PartialOrd for CharRange {
  fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
    Some(self.cmp(other))
  }
}
/// Merges coverage for the same function (same root range) across reports.
///
/// Builds a [`RangeTree`] per report, merges them, and normalizes the
/// result back into a flat list of coverage ranges.
pub fn merge_functions(
  mut funcs: Vec<cdp::FunctionCoverage>,
) -> Option<cdp::FunctionCoverage> {
  if funcs.len() <= 1 {
    return funcs.pop();
  }
  let function_name = funcs[0].function_name.clone();
  // Size the arena to hold every input range so tree nodes allocate once.
  let rta_capacity: usize =
    funcs.iter().fold(0, |acc, func| acc + func.ranges.len());
  let rta = RangeTreeArena::with_capacity(rta_capacity);
  let mut trees: Vec<&mut RangeTree> = Vec::new();
  for func in funcs {
    if let Some(tree) = RangeTree::from_sorted_ranges(&rta, &func.ranges) {
      trees.push(tree);
    }
  }
  let merged = RangeTree::normalize(merge_range_trees(&rta, trees).unwrap());
  let ranges = merged.to_ranges();
  // A single zero-count range means the function never ran, which is
  // function-granularity (not block-granularity) coverage.
  let is_block_coverage: bool = !(ranges.len() == 1 && ranges[0].count == 0);

  Some(cdp::FunctionCoverage {
    function_name,
    ranges,
    is_block_coverage,
  })
}
/// Merges range trees covering the same span into a single tree whose delta
/// is the sum of the inputs' deltas and whose children are merged
/// recursively.
fn merge_range_trees<'a>(
  rta: &'a RangeTreeArena<'a>,
  mut trees: Vec<&'a mut RangeTree<'a>>,
) -> Option<&'a mut RangeTree<'a>> {
  if trees.len() <= 1 {
    return trees.pop();
  }
  // All inputs share the same root span; take it from the first tree.
  let (start, end) = {
    let first = &trees[0];
    (first.start, first.end)
  };
  let delta: i64 = trees.iter().fold(0, |acc, tree| acc + tree.delta);
  let children = merge_range_tree_children(rta, trees);

  Some(rta.alloc(RangeTree::new(start, end, delta, children)))
}
// All child trees (tagged with the index of the parent they came from)
// that begin at `offset`.
struct StartEvent<'a> {
  offset: usize,
  trees: Vec<(usize, &'a mut RangeTree<'a>)>,
}
/// Drains the children of every parent tree and groups them by start
/// offset, producing start events sorted ascending (`BTreeMap` order); each
/// child keeps the index of the parent it was drained from.
fn into_start_events<'a>(
  trees: Vec<&'a mut RangeTree<'a>>,
) -> Vec<StartEvent<'a>> {
  let mut result: BTreeMap<usize, Vec<(usize, &'a mut RangeTree<'a>)>> =
    BTreeMap::new();
  for (parent_index, tree) in trees.into_iter().enumerate() {
    for child in tree.children.drain(..) {
      result
        .entry(child.start)
        .or_default()
        .push((parent_index, child));
    }
  }
  result
    .into_iter()
    .map(|(offset, trees)| StartEvent { offset, trees })
    .collect()
}
// Sorted stream of start events plus one optional out-of-band "pending"
// event accumulated during processing (e.g. right halves of split trees).
struct StartEventQueue<'a> {
  pending: Option<StartEvent<'a>>,
  queue: Peekable<::std::vec::IntoIter<StartEvent<'a>>>,
}
impl<'a> StartEventQueue<'a> {
  pub fn new(queue: Vec<StartEvent<'a>>) -> StartEventQueue<'a> {
    StartEventQueue {
      pending: None,
      queue: queue.into_iter().peekable(),
    }
  }

  /// Begins collecting an out-of-band event at `offset`; trees added via
  /// [`Self::push_pending_tree`] will be yielded in order relative to the
  /// sorted queue.
  pub fn set_pending_offset(&mut self, offset: usize) {
    self.pending = Some(StartEvent {
      offset,
      trees: Vec::new(),
    });
  }

  /// Adds a tree to the pending event; a no-op if no pending offset is set.
  pub fn push_pending_tree(&mut self, tree: (usize, &'a mut RangeTree<'a>)) {
    self.pending = self.pending.take().map(|mut start_event| {
      start_event.trees.push(tree);
      start_event
    });
  }
}
impl<'a> Iterator for StartEventQueue<'a> {
  type Item = StartEvent<'a>;

  /// Yields start events in ascending offset order, merging the pending
  /// event into the queued one when their offsets coincide.
  fn next(&mut self) -> Option<<Self as Iterator>::Item> {
    // The pending event only participates if it actually holds trees.
    let pending_offset: Option<usize> = match &self.pending {
      Some(start_event) if !start_event.trees.is_empty() => {
        Some(start_event.offset)
      }
      _ => None,
    };
    match pending_offset {
      Some(pending_offset) => {
        let queue_offset =
          self.queue.peek().map(|start_event| start_event.offset);
        match queue_offset {
          None => self.pending.take(),
          Some(queue_offset) => {
            if pending_offset < queue_offset {
              self.pending.take()
            } else {
              let mut result = self.queue.next().unwrap();
              // Equal offsets: fold the pending trees into the queued event.
              if pending_offset == queue_offset {
                let pending_trees = self.pending.take().unwrap().trees;
                result.trees.extend(pending_trees)
              }
              Some(result)
            }
          }
        }
      }
      None => self.queue.next(),
    }
  }
}
/// Merges the child trees of several parent trees (all covering the same
/// span) into one aligned set of child forests, then recursively merges the
/// groups that start at the same offset.
///
/// Children from different parents may only partially overlap, so the
/// function sweeps over child start events in offset order while maintaining
/// an `open_range`; trees extending past the open range are split at its end
/// and re-queued, and partially-covering trees are wrapped in synthetic
/// zero-delta trees so every parent contributes children with identical
/// boundaries.
fn merge_range_tree_children<'a>(
  rta: &'a RangeTreeArena<'a>,
  parent_trees: Vec<&'a mut RangeTree<'a>>,
) -> Vec<&'a mut RangeTree<'a>> {
  // Per parent: children that already exactly span an open range.
  let mut flat_children: Vec<Vec<&'a mut RangeTree<'a>>> =
    Vec::with_capacity(parent_trees.len());
  // Per parent: synthetic wrapper trees holding partially-covering children.
  let mut wrapped_children: Vec<Vec<&'a mut RangeTree<'a>>> =
    Vec::with_capacity(parent_trees.len());
  let mut open_range: Option<CharRange> = None;
  for _parent_tree in parent_trees.iter() {
    flat_children.push(Vec::new());
    wrapped_children.push(Vec::new());
  }
  let mut start_event_queue =
    StartEventQueue::new(into_start_events(parent_trees));
  // Children collected inside the current open range, keyed by parent index.
  let mut parent_to_nested: HashMap<usize, Vec<&'a mut RangeTree<'a>>> =
    HashMap::new();
  while let Some(event) = start_event_queue.next() {
    // Close the open range when the next event starts at/after its end,
    // wrapping each parent's nested children in a zero-delta tree.
    open_range = if let Some(open_range) = open_range {
      if open_range.end <= event.offset {
        for (parent_index, nested) in parent_to_nested {
          wrapped_children[parent_index].push(rta.alloc(RangeTree::new(
            open_range.start,
            open_range.end,
            0,
            nested,
          )));
        }
        parent_to_nested = HashMap::new();
        None
      } else {
        Some(open_range)
      }
    } else {
      None
    };
    match open_range {
      Some(open_range) => {
        // Event falls inside the open range: clip each tree to the range,
        // re-queueing the right half of any tree that extends past it.
        for (parent_index, tree) in event.trees {
          let child = if tree.end > open_range.end {
            let (left, right) = RangeTree::split(rta, tree, open_range.end);
            start_event_queue.push_pending_tree((parent_index, right));
            left
          } else {
            tree
          };
          parent_to_nested
            .entry(parent_index)
            .or_default()
            .push(child);
        }
      }
      None => {
        // No open range: start one spanning from this offset to the
        // furthest end among the event's trees.
        let mut open_range_end: usize = event.offset + 1;
        for (_, tree) in &event.trees {
          open_range_end = if tree.end > open_range_end {
            tree.end
          } else {
            open_range_end
          };
        }
        for (parent_index, tree) in event.trees {
          if tree.end == open_range_end {
            // Tree exactly reaches the range end: no wrapping needed.
            flat_children[parent_index].push(tree);
            continue;
          }
          parent_to_nested.entry(parent_index).or_default().push(tree);
        }
        // Re-visit the range end so the range is closed/wrapped there.
        start_event_queue.set_pending_offset(open_range_end);
        open_range = Some(CharRange {
          start: event.offset,
          end: open_range_end,
        });
      }
    }
  }
  // Close any range still open after the last event.
  if let Some(open_range) = open_range {
    for (parent_index, nested) in parent_to_nested {
      wrapped_children[parent_index].push(rta.alloc(RangeTree::new(
        open_range.start,
        open_range.end,
        0,
        nested,
      )));
    }
  }
  // Per parent, merge its flat and wrapped children back into one sorted
  // forest.
  let child_forests: Vec<Vec<&'a mut RangeTree<'a>>> = flat_children
    .into_iter()
    .zip(wrapped_children)
    .map(|(flat, wrapped)| merge_children_lists(flat, wrapped))
    .collect();
  let events = get_child_events_from_forests(&child_forests);
  let mut child_forests: Vec<
    Peekable<::std::vec::IntoIter<&'a mut RangeTree<'a>>>,
  > = child_forests
    .into_iter()
    .map(|forest| forest.into_iter().peekable())
    .collect();
  let mut result: Vec<&'a mut RangeTree<'a>> = Vec::new();
  // For each boundary offset, gather the tree starting there from every
  // parent's forest and merge that group recursively.
  for event in events.iter() {
    let mut matching_trees: Vec<&'a mut RangeTree<'a>> = Vec::new();
    for children in child_forests.iter_mut() {
      let next_tree: Option<&'a mut RangeTree<'a>> = {
        if children
          .peek()
          .map(|tree| tree.start == *event)
          .unwrap_or(false)
        {
          children.next()
        } else {
          None
        }
      };
      if let Some(next_tree) = next_tree {
        matching_trees.push(next_tree);
      }
    }
    if let Some(merged) = merge_range_trees(rta, matching_trees) {
      result.push(merged);
    }
  }
  result
}
/// Collects the set of all start and end offsets across every child tree in
/// every forest; the `BTreeSet` keeps them sorted and de-duplicated.
fn get_child_events_from_forests<'a>(
  forests: &[Vec<&'a mut RangeTree<'a>>],
) -> BTreeSet<usize> {
  forests
    .iter()
    .flat_map(|forest| forest.iter())
    .flat_map(|tree| [tree.start, tree.end])
    .collect()
}
// TODO: itertools?
// https://play.integer32.com/?gist=ad2cd20d628e647a5dbdd82e68a15cb6&version=stable&mode=debug&edition=2015
/// Merges two lists of trees, each already sorted by start offset, into one
/// sorted list. On equal start offsets, the item from `b` is emitted first
/// (preserving the original tie-break order).
fn merge_children_lists<'a>(
  a: Vec<&'a mut RangeTree<'a>>,
  b: Vec<&'a mut RangeTree<'a>>,
) -> Vec<&'a mut RangeTree<'a>> {
  let mut merged: Vec<&'a mut RangeTree<'a>> =
    Vec::with_capacity(a.len() + b.len());
  let mut a = a.into_iter().peekable();
  let mut b = b.into_iter().peekable();
  loop {
    // Decide which side the next element comes from; stop when both are
    // exhausted.
    let take_from_a = match (a.peek(), b.peek()) {
      (Some(tree_a), Some(tree_b)) => tree_a.start < tree_b.start,
      (Some(_), None) => true,
      (None, Some(_)) => false,
      (None, None) => break,
    };
    let next = if take_from_a { a.next() } else { b.next() };
    merged.push(next.unwrap());
  }
  merged
}
#[cfg(test)]
mod tests {
use super::*;
// use test_generator::test_resources;
#[test]
fn empty() {
let inputs: Vec<ProcessCoverage> = Vec::new();
let expected: Option<ProcessCoverage> = None;
assert_eq!(merge_processes(inputs), expected);
}
#[test]
fn two_flat_trees() {
let inputs: Vec<ProcessCoverage> = vec![
ProcessCoverage {
result: vec![cdp::ScriptCoverage {
script_id: String::from("0"),
url: String::from("/lib.js"),
functions: vec![cdp::FunctionCoverage {
function_name: String::from("lib"),
is_block_coverage: true,
ranges: vec![cdp::CoverageRange {
start_char_offset: 0,
end_char_offset: 9,
count: 1,
}],
}],
}],
},
ProcessCoverage {
result: vec![cdp::ScriptCoverage {
script_id: String::from("0"),
url: String::from("/lib.js"),
functions: vec![cdp::FunctionCoverage {
function_name: String::from("lib"),
is_block_coverage: true,
ranges: vec![cdp::CoverageRange {
start_char_offset: 0,
end_char_offset: 9,
count: 2,
}],
}],
}],
},
];
let expected: Option<ProcessCoverage> = Some(ProcessCoverage {
result: vec![cdp::ScriptCoverage {
script_id: String::from("0"),
url: String::from("/lib.js"),
functions: vec![cdp::FunctionCoverage {
function_name: String::from("lib"),
is_block_coverage: true,
ranges: vec![cdp::CoverageRange {
start_char_offset: 0,
end_char_offset: 9,
count: 3,
}],
}],
}],
});
assert_eq!(merge_processes(inputs), expected);
}
#[test]
fn two_trees_with_matching_children() {
let inputs: Vec<ProcessCoverage> = vec![
ProcessCoverage {
result: vec![cdp::ScriptCoverage {
script_id: String::from("0"),
url: String::from("/lib.js"),
functions: vec![cdp::FunctionCoverage {
function_name: String::from("lib"),
is_block_coverage: true,
ranges: vec![
cdp::CoverageRange {
start_char_offset: 0,
end_char_offset: 9,
count: 10,
},
cdp::CoverageRange {
start_char_offset: 3,
end_char_offset: 6,
count: 1,
},
],
}],
}],
},
ProcessCoverage {
result: vec![cdp::ScriptCoverage {
script_id: String::from("0"),
url: String::from("/lib.js"),
functions: vec![cdp::FunctionCoverage {
function_name: String::from("lib"),
is_block_coverage: true,
ranges: vec![
cdp::CoverageRange {
start_char_offset: 0,
end_char_offset: 9,
count: 20,
},
cdp::CoverageRange {
start_char_offset: 3,
end_char_offset: 6,
count: 2,
},
],
}],
}],
},
];
let expected: Option<ProcessCoverage> = Some(ProcessCoverage {
result: vec![cdp::ScriptCoverage {
script_id: String::from("0"),
url: String::from("/lib.js"),
functions: vec![cdp::FunctionCoverage {
function_name: String::from("lib"),
is_block_coverage: true,
ranges: vec![
cdp::CoverageRange {
start_char_offset: 0,
end_char_offset: 9,
count: 30,
},
cdp::CoverageRange {
start_char_offset: 3,
end_char_offset: 6,
count: 3,
},
],
}],
}],
});
assert_eq!(merge_processes(inputs), expected);
}
#[test]
fn two_trees_with_partially_overlapping_children() {
let inputs: Vec<ProcessCoverage> = vec![
ProcessCoverage {
result: vec![cdp::ScriptCoverage {
script_id: String::from("0"),
url: String::from("/lib.js"),
functions: vec![cdp::FunctionCoverage {
function_name: String::from("lib"),
is_block_coverage: true,
ranges: vec![
cdp::CoverageRange {
start_char_offset: 0,
end_char_offset: 9,
count: 10,
},
cdp::CoverageRange {
start_char_offset: 2,
end_char_offset: 5,
count: 1,
},
],
}],
}],
},
ProcessCoverage {
result: vec![cdp::ScriptCoverage {
script_id: String::from("0"),
url: String::from("/lib.js"),
functions: vec![cdp::FunctionCoverage {
function_name: String::from("lib"),
is_block_coverage: true,
ranges: vec![
cdp::CoverageRange {
start_char_offset: 0,
end_char_offset: 9,
count: 20,
},
cdp::CoverageRange {
start_char_offset: 4,
end_char_offset: 7,
count: 2,
},
],
}],
}],
},
];
let expected: Option<ProcessCoverage> = Some(ProcessCoverage {
result: vec![cdp::ScriptCoverage {
script_id: String::from("0"),
url: String::from("/lib.js"),
functions: vec![cdp::FunctionCoverage {
function_name: String::from("lib"),
is_block_coverage: true,
ranges: vec![
cdp::CoverageRange {
start_char_offset: 0,
end_char_offset: 9,
count: 30,
},
cdp::CoverageRange {
start_char_offset: 2,
end_char_offset: 5,
count: 21,
},
cdp::CoverageRange {
start_char_offset: 4,
end_char_offset: 5,
count: 3,
},
cdp::CoverageRange {
start_char_offset: 5,
end_char_offset: 7,
count: 12,
},
],
}],
}],
});
assert_eq!(merge_processes(inputs), expected);
}
#[test]
fn two_trees_with_with_complementary_children_summing_to_the_same_count() {
let inputs: Vec<ProcessCoverage> = vec![
ProcessCoverage {
result: vec![cdp::ScriptCoverage {
script_id: String::from("0"),
url: String::from("/lib.js"),
functions: vec![cdp::FunctionCoverage {
function_name: String::from("lib"),
is_block_coverage: true,
ranges: vec![
cdp::CoverageRange {
start_char_offset: 0,
end_char_offset: 9,
count: 1,
},
cdp::CoverageRange {
start_char_offset: 1,
end_char_offset: 8,
count: 6,
},
cdp::CoverageRange {
start_char_offset: 1,
end_char_offset: 5,
count: 5,
},
cdp::CoverageRange {
start_char_offset: 5,
end_char_offset: 8,
count: 7,
},
],
}],
}],
},
ProcessCoverage {
result: vec![cdp::ScriptCoverage {
script_id: String::from("0"),
url: String::from("/lib.js"),
functions: vec![cdp::FunctionCoverage {
function_name: String::from("lib"),
is_block_coverage: true,
ranges: vec![
cdp::CoverageRange {
start_char_offset: 0,
end_char_offset: 9,
count: 4,
},
cdp::CoverageRange {
start_char_offset: 1,
end_char_offset: 8,
count: 8,
},
cdp::CoverageRange {
start_char_offset: 1,
end_char_offset: 5,
count: 9,
},
cdp::CoverageRange {
start_char_offset: 5,
end_char_offset: 8,
count: 7,
},
],
}],
}],
},
];
let expected: Option<ProcessCoverage> = Some(ProcessCoverage {
result: vec![cdp::ScriptCoverage {
script_id: String::from("0"),
url: String::from("/lib.js"),
functions: vec![cdp::FunctionCoverage {
function_name: String::from("lib"),
is_block_coverage: true,
ranges: vec![
cdp::CoverageRange {
start_char_offset: 0,
end_char_offset: 9,
count: 5,
},
cdp::CoverageRange {
start_char_offset: 1,
end_char_offset: 8,
count: 14,
},
],
}],
}],
});
assert_eq!(merge_processes(inputs), expected);
}
#[test]
fn merges_a_similar_sliding_chain_a_bc() {
let inputs: Vec<ProcessCoverage> = vec![
ProcessCoverage {
result: vec![cdp::ScriptCoverage {
script_id: String::from("0"),
url: String::from("/lib.js"),
functions: vec![cdp::FunctionCoverage {
function_name: String::from("lib"),
is_block_coverage: true,
ranges: vec![
cdp::CoverageRange {
start_char_offset: 0,
end_char_offset: 7,
count: 10,
},
cdp::CoverageRange {
start_char_offset: 0,
end_char_offset: 4,
count: 1,
},
],
}],
}],
},
ProcessCoverage {
result: vec![cdp::ScriptCoverage {
script_id: String::from("0"),
url: String::from("/lib.js"),
functions: vec![cdp::FunctionCoverage {
function_name: String::from("lib"),
is_block_coverage: true,
ranges: vec![
cdp::CoverageRange {
start_char_offset: 0,
end_char_offset: 7,
count: 20,
},
cdp::CoverageRange {
start_char_offset: 1,
end_char_offset: 6,
count: 11,
},
cdp::CoverageRange {
start_char_offset: 2,
end_char_offset: 5,
count: 2,
},
],
}],
}],
},
];
let expected: Option<ProcessCoverage> = Some(ProcessCoverage {
result: vec![cdp::ScriptCoverage {
script_id: String::from("0"),
url: String::from("/lib.js"),
functions: vec![cdp::FunctionCoverage {
function_name: String::from("lib"),
is_block_coverage: true,
ranges: vec![
cdp::CoverageRange {
start_char_offset: 0,
end_char_offset: 7,
count: 30,
},
cdp::CoverageRange {
start_char_offset: 0,
end_char_offset: 6,
count: 21,
},
cdp::CoverageRange {
start_char_offset: 1,
end_char_offset: 5,
count: 12,
},
cdp::CoverageRange {
start_char_offset: 2,
end_char_offset: 4,
count: 3,
},
],
}],
}],
});
assert_eq!(merge_processes(inputs), expected);
}
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/tools/coverage/range_tree.rs | cli/tools/coverage/range_tree.rs | // Copyright 2018-2025 the Deno authors. MIT license.
//
// Forked from https://github.com/demurgos/v8-coverage/tree/d0ca18da8740198681e0bc68971b0a6cdb11db3e/rust
// Copyright 2021 Charles Samborski. All rights reserved. MIT license.
use std::iter::Peekable;
use typed_arena::Arena;
use crate::cdp;
/// Arena allocator that owns every `RangeTree` node, letting the merged
/// trees freely hold `&'a mut` references to one another under one lifetime.
pub struct RangeTreeArena<'a>(Arena<RangeTree<'a>>);
impl<'a> RangeTreeArena<'a> {
#[cfg(test)]
pub fn new() -> Self {
RangeTreeArena(Arena::new())
}
pub fn with_capacity(n: usize) -> Self {
RangeTreeArena(Arena::with_capacity(n))
}
#[allow(clippy::mut_from_ref)]
pub fn alloc(&'a self, value: RangeTree<'a>) -> &'a mut RangeTree<'a> {
self.0.alloc(value)
}
}
/// A node in a coverage range tree: a character span `[start, end)` plus
/// `delta`, the difference between this range's execution count and its
/// parent's count.
#[derive(Eq, PartialEq, Debug)]
pub struct RangeTree<'a> {
  pub start: usize,
  pub end: usize,
  pub delta: i64,
  pub children: Vec<&'a mut RangeTree<'a>>,
}
impl RangeTree<'_> {
  /// Creates a node covering `[start, end)` with the given count `delta`
  /// relative to its parent.
  pub fn new<'a>(
    start: usize,
    end: usize,
    delta: i64,
    children: Vec<&'a mut RangeTree<'a>>,
  ) -> RangeTree<'a> {
    RangeTree {
      start,
      end,
      delta,
      children,
    }
  }

  /// Splits `tree` at offset `value` into two arena-allocated nodes sharing
  /// the same delta; children straddling the split point are recursively
  /// split as well.
  pub fn split<'a>(
    rta: &'a RangeTreeArena<'a>,
    tree: &'a mut RangeTree<'a>,
    value: usize,
  ) -> (&'a mut RangeTree<'a>, &'a mut RangeTree<'a>) {
    let mut left_children: Vec<&'a mut RangeTree<'a>> = Vec::new();
    let mut right_children: Vec<&'a mut RangeTree<'a>> = Vec::new();
    for child in tree.children.iter_mut() {
      if child.end <= value {
        left_children.push(child);
      } else if value <= child.start {
        right_children.push(child);
      } else {
        // Child straddles the split point: split it recursively.
        let (left_child, right_child) = Self::split(rta, child, value);
        left_children.push(left_child);
        right_children.push(right_child);
      }
    }
    let left = RangeTree::new(tree.start, value, tree.delta, left_children);
    let right = RangeTree::new(value, tree.end, tree.delta, right_children);
    (rta.alloc(left), rta.alloc(right))
  }

  /// Drains `chain` — a run of siblings with identical deltas where each
  /// tree starts exactly where the previous one ends — merging them into the
  /// first tree (extending its end and re-parenting the later trees'
  /// children with rebased deltas), then pushes the normalized result onto
  /// `children`. No-op for an empty chain.
  ///
  /// (This was previously duplicated inline in `normalize`.)
  fn flush_chain<'a>(
    chain: &mut Vec<&'a mut RangeTree<'a>>,
    children: &mut Vec<&'a mut RangeTree<'a>>,
  ) {
    let mut chain_iter = chain.drain(..);
    let Some(head) = chain_iter.next() else {
      return;
    };
    for tree in chain_iter {
      head.end = tree.end;
      for sub_child in tree.children.drain(..) {
        // Deltas are relative to the parent; moving a child under `head`
        // requires rebasing its delta from `tree` to `head`.
        sub_child.delta += tree.delta - head.delta;
        head.children.push(sub_child);
      }
    }
    children.push(RangeTree::normalize(head));
  }

  /// Canonicalizes a tree in place: merges runs of adjacent sibling ranges
  /// that share the same delta, and collapses a sole child spanning its
  /// parent exactly into the parent. Returns the normalized node (which may
  /// be the child that replaced `tree`).
  pub fn normalize<'a>(tree: &'a mut RangeTree<'a>) -> &'a mut RangeTree<'a> {
    tree.children = {
      let mut children: Vec<&'a mut RangeTree<'a>> = Vec::new();
      // Current run of adjacent siblings sharing a delta.
      let mut chain: Vec<&'a mut RangeTree<'a>> = Vec::new();
      for child in tree.children.drain(..) {
        // The chain ends when the next child doesn't continue it with the
        // same delta starting exactly at the chain's end offset.
        let is_chain_end: bool =
          match chain.last().map(|tree| (tree.delta, tree.end)) {
            Some((delta, chain_end)) => {
              (delta, chain_end) != (child.delta, child.start)
            }
            None => false,
          };
        if is_chain_end {
          Self::flush_chain(&mut chain, &mut children);
        }
        chain.push(child)
      }
      // Flush the trailing chain, if any.
      Self::flush_chain(&mut chain, &mut children);
      // A single child spanning the whole parent replaces the parent,
      // absorbing its delta.
      if children.len() == 1
        && children[0].start == tree.start
        && children[0].end == tree.end
      {
        let normalized = children.remove(0);
        normalized.delta += tree.delta;
        return normalized;
      }
      children
    };
    tree
  }

  /// Flattens the tree into CDP coverage ranges in pre-order, converting
  /// each node's delta back into an absolute execution count.
  pub fn to_ranges(&self) -> Vec<cdp::CoverageRange> {
    let mut ranges: Vec<cdp::CoverageRange> = Vec::new();
    // Explicit stack of (node, parent's absolute count).
    let mut stack: Vec<(&RangeTree, i64)> = vec![(self, 0)];
    while let Some((cur, parent_count)) = stack.pop() {
      let count: i64 = parent_count + cur.delta;
      ranges.push(cdp::CoverageRange {
        start_char_offset: cur.start,
        end_char_offset: cur.end,
        count,
      });
      // Push in reverse so children pop in source order.
      for child in cur.children.iter().rev() {
        stack.push((child, count))
      }
    }
    ranges
  }

  /// Builds a tree from CDP ranges sorted in pre-order (each range followed
  /// by the ranges nested inside it). Returns `None` for an empty slice.
  pub fn from_sorted_ranges<'a>(
    rta: &'a RangeTreeArena<'a>,
    ranges: &[cdp::CoverageRange],
  ) -> Option<&'a mut RangeTree<'a>> {
    Self::from_sorted_ranges_inner(
      rta,
      &mut ranges.iter().peekable(),
      usize::MAX,
      0,
    )
  }

  /// Recursive worker for `from_sorted_ranges`: consumes the next range if
  /// it starts before `parent_end`, then greedily consumes nested ranges as
  /// its children. `parent_count` is used to derive this node's delta.
  fn from_sorted_ranges_inner<'a, 'b, 'c: 'b>(
    rta: &'a RangeTreeArena<'a>,
    ranges: &'b mut Peekable<impl Iterator<Item = &'c cdp::CoverageRange>>,
    parent_end: usize,
    parent_count: i64,
  ) -> Option<&'a mut RangeTree<'a>> {
    let has_range: bool = match ranges.peek() {
      None => false,
      Some(range) => range.start_char_offset < parent_end,
    };
    if !has_range {
      return None;
    }
    let range = ranges.next().unwrap();
    let start: usize = range.start_char_offset;
    let end: usize = range.end_char_offset;
    let count: i64 = range.count;
    // Store the count relative to the parent.
    let delta: i64 = count - parent_count;
    let mut children: Vec<&mut RangeTree> = Vec::new();
    while let Some(child) =
      Self::from_sorted_ranges_inner(rta, ranges, end, count)
    {
      children.push(child);
    }
    Some(rta.alloc(RangeTree::new(start, end, delta, children)))
  }
}
#[cfg(test)]
mod tests {
  use super::*;

  // Despite the name, this exercises the simplest non-empty input: one
  // range becomes one childless tree.
  #[test]
  fn from_sorted_ranges_empty() {
    let rta = RangeTreeArena::new();
    let inputs: Vec<cdp::CoverageRange> = vec![cdp::CoverageRange {
      start_char_offset: 0,
      end_char_offset: 9,
      count: 1,
    }];
    let actual: Option<&mut RangeTree> =
      RangeTree::from_sorted_ranges(&rta, &inputs);
    let expected: Option<&mut RangeTree> =
      Some(rta.alloc(RangeTree::new(0, 9, 1, Vec::new())));
    assert_eq!(actual, expected);
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/tools/coverage/ignore_directives.rs | cli/tools/coverage/ignore_directives.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::collections::HashSet;
use deno_ast::MediaType;
use deno_ast::TextLines;
use deno_ast::swc::common::comments::CommentKind;
use deno_core::url::Url;
// Comment directives recognized in source files to exclude code from
// coverage: a start/stop pair ignores a region, the bare directive ignores
// the next line, and the file directive ignores the whole file.
static COVERAGE_IGNORE_START_DIRECTIVE: &str = "deno-coverage-ignore-start";
static COVERAGE_IGNORE_STOP_DIRECTIVE: &str = "deno-coverage-ignore-stop";
static COVERAGE_IGNORE_NEXT_DIRECTIVE: &str = "deno-coverage-ignore";
static COVERAGE_IGNORE_FILE_DIRECTIVE: &str = "deno-coverage-ignore-file";
/// Inclusive line-index span excluded from coverage by a matched
/// start/stop directive pair.
pub struct RangeIgnoreDirective {
  pub start_line_index: usize,
  pub stop_line_index: usize,
}
/// A single comment lexed from a script: its kind (line vs. block), its
/// text, and its byte range within the source.
pub struct CoverageComment {
  pub kind: CommentKind,
  pub text: deno_ast::swc::atoms::Atom,
  pub range: std::ops::Range<usize>,
}
/// All comments in a script plus the range of the first non-shebang token,
/// used to tell whether a comment precedes any code.
pub struct CoverageComments {
  pub comments: Vec<CoverageComment>,
  pub first_token: Option<std::ops::Range<usize>>,
}
/// Tokenizes `source` and collects every comment plus the range of the
/// first non-shebang token (used to decide whether a comment precedes any
/// code in the file).
pub fn lex_comments(source: &str, media_type: MediaType) -> CoverageComments {
  let mut first_token = None;
  let mut comments = Vec::new();
  for token in deno_ast::lex(source, media_type) {
    match token.inner {
      deno_ast::TokenOrComment::Token(inner) => {
        // Remember the first real token; a shebang doesn't count as code.
        let is_shebang =
          matches!(inner, deno_ast::swc::parser::token::Token::Shebang(..));
        if first_token.is_none() && !is_shebang {
          first_token = Some(token.range);
        }
      }
      deno_ast::TokenOrComment::Comment { kind, text } => {
        let comment = CoverageComment {
          kind,
          text,
          range: token.range,
        };
        comments.push(comment);
      }
    }
  }
  CoverageComments {
    first_token,
    comments,
  }
}
/// Scans `comments` for `deno-coverage-ignore-start` / `-stop` pairs and
/// returns the line spans they delimit.
///
/// Pairs may nest; only the outermost pair produces a directive. Stray stop
/// directives and unterminated start directives produce no directive and
/// are reported (at `script_module_specifier`) when warn logging is
/// enabled.
pub fn parse_range_ignore_directives(
  script_module_specifier: &Url,
  comments: &[CoverageComment],
  text_lines: &TextLines,
) -> Vec<RangeIgnoreDirective> {
  // Current nesting depth of start directives.
  let mut depth: usize = 0;
  let mut directives = Vec::<RangeIgnoreDirective>::new();
  // Byte range of the outermost start directive currently open.
  let mut current_range: Option<std::ops::Range<usize>> = None;
  for comment in comments {
    // Only line comments can carry directives.
    if comment.kind != CommentKind::Line {
      continue;
    }
    // A directive must be the first whitespace-separated word.
    let comment_text = comment.text.trim();
    if let Some(prefix) = comment_text.split_whitespace().next() {
      if prefix == COVERAGE_IGNORE_START_DIRECTIVE {
        if log::log_enabled!(log::Level::Warn) && depth > 0 {
          // Nested start: warn that the earlier start is still open.
          let unterminated_loc = text_lines
            .line_and_column_display(current_range.as_ref().unwrap().start);
          let loc = text_lines.line_and_column_display(comment.range.start);
          log::warn!(
            "WARNING: Nested {} comment at {}:{}:{}. A previous {} comment at {}:{}:{} is unterminated.",
            COVERAGE_IGNORE_START_DIRECTIVE,
            script_module_specifier,
            loc.line_number,
            loc.column_number,
            COVERAGE_IGNORE_START_DIRECTIVE,
            script_module_specifier,
            unterminated_loc.line_number,
            unterminated_loc.column_number,
          );
        }
        depth += 1;
        // Only the outermost start defines the range's beginning.
        if current_range.is_none() {
          current_range = Some(comment.range.clone());
        }
      } else if depth > 0 && prefix == COVERAGE_IGNORE_STOP_DIRECTIVE {
        depth -= 1;
        if depth == 0 {
          // Outermost pair closed: record the covered line span.
          let start_line_index =
            text_lines.line_index(current_range.take().unwrap().start);
          let stop_line_index = text_lines.line_index(comment.range.end);
          directives.push(RangeIgnoreDirective {
            start_line_index,
            stop_line_index,
          });
          current_range = None;
        }
      } else if log::log_enabled!(log::Level::Warn)
        && depth == 0
        && prefix == COVERAGE_IGNORE_STOP_DIRECTIVE
      {
        // Stop with no matching start: warn and ignore it.
        let loc = text_lines.line_and_column_display(comment.range.start);
        log::warn!(
          "WARNING: {} comment with no corresponding {} comment at {}:{}:{} will be ignored.",
          COVERAGE_IGNORE_STOP_DIRECTIVE,
          COVERAGE_IGNORE_START_DIRECTIVE,
          script_module_specifier,
          loc.line_number,
          loc.column_number,
        );
      }
    }
  }
  // If the coverage ignore start directive has no corresponding close directive
  // then log a warning and ignore the directive.
  if let Some(range) = current_range.take()
    && log::log_enabled!(log::Level::Warn)
  {
    let loc = text_lines.line_and_column_display(range.start);
    log::warn!(
      "WARNING: Unterminated {} comment at {}:{}:{} will be ignored.",
      COVERAGE_IGNORE_START_DIRECTIVE,
      script_module_specifier,
      loc.line_number,
      loc.column_number,
    );
  }
  directives
}
/// Returns the set of line indices containing a bare `deno-coverage-ignore`
/// comment (each suppresses coverage reporting for the following line).
pub fn parse_next_ignore_directives(
  comments: &[CoverageComment],
  text_lines: &TextLines,
) -> HashSet<usize> {
  let mut lines = HashSet::new();
  for comment in comments {
    if is_ignore_comment(COVERAGE_IGNORE_NEXT_DIRECTIVE, comment) {
      lines.insert(text_lines.line_index(comment.range.start));
    }
  }
  lines
}
pub fn has_file_ignore_directive(comments: &CoverageComments) -> bool {
// We want to find the files first comment before the code starts. There are
// three cases:
// 1. No comments. There are no comments in the file, and therefore no
// coverage directives.
// 2. No code. There is at least one comment in the file, but no code. We can
// try to parse this as a file ignore directive.
// 3. Comments and code. There are comments and code in the file. We need to
// check if the first comment comes before the first line of code. If it
// does, we can try and parse it as a file ignore directive. Otherwise,
// there is no valid file ignore directive.
let first_comment = comments.comments.first();
let first_module_item = &comments.first_token;
match (first_comment, first_module_item) {
(None, _) => false,
(Some(first_comment), None) => {
is_ignore_comment(COVERAGE_IGNORE_FILE_DIRECTIVE, first_comment)
}
(Some(first_comment), Some(first_module_item)) => {
if first_comment.range.end <= first_module_item.start {
is_ignore_comment(COVERAGE_IGNORE_FILE_DIRECTIVE, first_comment)
} else {
false
}
}
}
}
/// Returns `true` when `comment` is a line comment whose first
/// whitespace-separated word equals `ignore_diagnostic_directive`
/// (trailing words, e.g. `-- reason`, are allowed).
fn is_ignore_comment(
  ignore_diagnostic_directive: &str,
  comment: &CoverageComment,
) -> bool {
  comment.kind == CommentKind::Line
    && comment
      .text
      .trim()
      .split_whitespace()
      .next()
      .is_some_and(|prefix| prefix == ignore_diagnostic_directive)
}
#[cfg(test)]
mod tests {
  use std::str::FromStr;

  use deno_ast::MediaType;
  use deno_ast::TextLines;

  use super::*;

  const TEST_FILE_NAME: &str = "file:///coverage_test.ts";

  // Lexes TypeScript source into its comments + first-token info.
  fn parse(source_code: &str) -> CoverageComments {
    lex_comments(source_code, MediaType::TypeScript)
  }

  // Same as `parse`, but also builds the line index for the source.
  fn parse_with_text_lines(source_code: &str) -> (CoverageComments, TextLines) {
    let comments = parse(source_code);
    let text_lines = TextLines::new(source_code);
    (comments, text_lines)
  }

  mod coverage_ignore_range {
    use super::*;

    // Two disjoint start/stop pairs produce two line-range directives.
    #[test]
    fn test_parse_range_ignore_comments() {
      let source_code = r#"
      // deno-coverage-ignore-start
      function foo(): any {}
      // deno-coverage-ignore-stop

      function bar(): any {
        // deno-coverage-ignore-start
        foo();
        // deno-coverage-ignore-stop
      }
      "#;
      let (comments, text_line) = parse_with_text_lines(source_code);
      let range_directives = parse_range_ignore_directives(
        &Url::from_str(TEST_FILE_NAME).unwrap(),
        &comments.comments,
        &text_line,
      );
      assert_eq!(range_directives.len(), 2);
      assert_eq!(range_directives[0].start_line_index, 1);
      assert_eq!(range_directives[0].stop_line_index, 3);
      assert_eq!(range_directives[1].start_line_index, 6);
      assert_eq!(range_directives[1].stop_line_index, 8);
    }

    // A start with no stop yields no directives (only a warning is logged).
    #[test]
    fn test_parse_range_ignore_comments_unterminated() {
      let source_code = r#"
      // deno-coverage-ignore-start
      function foo(): any {}

      function bar(): any {
        foo();
      }
      "#;
      let (comments, text_lines) = parse_with_text_lines(source_code);
      let range_directives = parse_range_ignore_directives(
        &Url::from_str(TEST_FILE_NAME).unwrap(),
        &comments.comments,
        &text_lines,
      );
      assert!(range_directives.is_empty());
    }

    // Nested pairs collapse into a single outermost directive.
    #[test]
    fn test_parse_range_ignore_comments_nested() {
      let source_code = r#"
      // deno-coverage-ignore-start
      function foo(): any {}

      function bar(): any {
        // deno-coverage-ignore-start
        foo();
        // deno-coverage-ignore-stop
      }
      // deno-coverage-ignore-stop
      "#;
      let (comments, text_lines) = parse_with_text_lines(source_code);
      let range_directives = parse_range_ignore_directives(
        &Url::from_str(TEST_FILE_NAME).unwrap(),
        &comments.comments,
        &text_lines,
      );
      assert_eq!(range_directives.len(), 1);
      assert_eq!(range_directives[0].start_line_index, 1);
      assert_eq!(range_directives[0].stop_line_index, 9);
    }
  }

  mod coverage_ignore_next {
    use super::*;

    // Each bare directive marks the line index it appears on.
    #[test]
    fn test_parse_next_ignore_comments() {
      let source_code = r#"
      // deno-coverage-ignore
      function foo(): any {}

      function bar(): any {
        // deno-coverage-ignore
        foo();
      }
      "#;
      let (comments, text_lines) = parse_with_text_lines(source_code);
      let line_directives =
        parse_next_ignore_directives(&comments.comments, &text_lines);
      assert_eq!(line_directives.len(), 2);
      assert!(line_directives.contains(&1));
      assert!(line_directives.contains(&5));
    }
  }

  mod coverage_ignore_file {
    use super::*;

    #[test]
    fn test_parse_global_ignore_directives() {
      let comments = parse("// deno-coverage-ignore-file");
      assert!(has_file_ignore_directive(&comments));
    }

    // Trailing `--` explanations don't affect directive detection.
    #[test]
    fn test_parse_global_ignore_directives_with_explanation() {
      let comments =
        parse("// deno-coverage-ignore-file -- reason for ignoring");
      assert!(has_file_ignore_directive(&comments));
    }

    #[test]
    fn test_parse_global_ignore_directives_argument_and_explanation() {
      let comments =
        parse("// deno-coverage-ignore-file foo -- reason for ignoring");
      assert!(has_file_ignore_directive(&comments));
    }

    // The directive only counts when it is the file's first comment.
    #[test]
    fn test_parse_global_ignore_directives_not_first_comment() {
      let comments = parse(
        r#"
        // The coverage ignore file comment must be first
        // deno-coverage-ignore-file
        const x = 42;
      "#,
      );
      assert!(!has_file_ignore_directive(&comments));
    }

    // The directive is ignored when any code precedes it.
    #[test]
    fn test_parse_global_ignore_directives_not_before_code() {
      let comments = parse(
        r#"
        const x = 42;
        // deno-coverage-ignore-file
      "#,
      );
      assert!(!has_file_ignore_directive(&comments));
    }

    // A shebang does not count as code before the directive.
    #[test]
    fn test_parse_global_ignore_directives_shebang() {
      let comments = parse(
        r#"
#!/usr/bin/env -S deno run
// deno-coverage-ignore-file
const x = 42;
      "#
        .trim_start(),
      );
      assert!(has_file_ignore_directive(&comments));
    }

    #[test]
    fn test_parse_global_ignore_directives_shebang_no_code() {
      let comments = parse(
        r#"
#!/usr/bin/env -S deno run
// deno-coverage-ignore-file
      "#
        .trim_start(),
      );
      assert!(has_file_ignore_directive(&comments));
    }
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/tools/coverage/util.rs | cli/tools/coverage/util.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use deno_core::url::Url;
/// Finds the deepest common "directory" URL shared by all of `urls`, or
/// `None` when the list is empty (or the prefix doesn't parse as a URL).
///
/// The common prefix is computed character-wise and then truncated at its
/// last `/` so a partially-matching basename is dropped.
pub fn find_root(urls: Vec<&Url>) -> Option<Url> {
  let (first, rest) = urls.split_first()?;
  // Length (in bytes) of the longest prefix of `first` shared by all urls.
  // Comparing char-by-char with `zip` keeps this linear; the previous
  // `chars().nth(i)` approach rescanned every url from the start for each
  // character (accidentally quadratic).
  let mut prefix_len = first.as_ref().len();
  for url in rest {
    let common_len: usize = first
      .as_ref()
      .chars()
      .zip(url.as_ref().chars())
      .take_while(|(a, b)| a == b)
      .map(|(a, _)| a.len_utf8())
      .sum();
    prefix_len = prefix_len.min(common_len);
  }
  // Safe to slice: `prefix_len` is a sum of whole-char lengths.
  let root = &first.as_ref()[..prefix_len];
  if let Some(index) = root.rfind('/') {
    // Removes the basename part if exists.
    Url::parse(&root[..index + 1]).ok()
  } else {
    Url::parse(root).ok()
  }
}
/// Maps a coverage percentage to its display rating class:
/// below 50% is "low", 50–79.99…% is "medium", everything else "high".
pub fn percent_to_class(percent: f32) -> &'static str {
  if percent < 50.0 {
    "low"
  } else if percent < 80.0 {
    "medium"
  } else {
    "high"
  }
}
/// Computes display values for a hit/miss pair: the total count, the hit
/// percentage, and the rating class for that percentage.
pub fn calc_coverage_display_info(
  hit: usize,
  miss: usize,
) -> (usize, f32, &'static str) {
  let total = hit + miss;
  // Nothing to cover counts as fully covered. (Division/multiplication
  // order is kept as-is to preserve float rounding behavior.)
  let percent = if total == 0 {
    100.0
  } else {
    (hit as f32 / total as f32) * 100.0
  };
  (total, percent, percent_to_class(percent))
}
#[cfg(test)]
mod tests {
  use super::*;

  // Urls sharing a directory: the root is that directory.
  #[test]
  fn test_find_root() {
    let urls = [
      Url::parse("file:///a/b/c/d/e.ts").unwrap(),
      Url::parse("file:///a/b/c/d/f.ts").unwrap(),
      Url::parse("file:///a/b/c/d/g.ts").unwrap(),
    ];
    let urls = urls.iter().collect();
    assert_eq!(find_root(urls), Url::parse("file:///a/b/c/d/").ok());
  }

  #[test]
  fn test_find_root_empty() {
    let urls = vec![];
    assert_eq!(find_root(urls), None);
  }

  // A shared filename prefix ("foo") must not leak into the root.
  #[test]
  fn test_find_root_with_similar_filenames() {
    let urls = [
      Url::parse("file:///a/b/c/d/foo0.ts").unwrap(),
      Url::parse("file:///a/b/c/d/foo1.ts").unwrap(),
      Url::parse("file:///a/b/c/d/foo2.ts").unwrap(),
    ];
    let urls = urls.iter().collect();
    assert_eq!(find_root(urls), Url::parse("file:///a/b/c/d/").ok());
  }

  // A shared directory-name prefix must not leak into the root either.
  #[test]
  fn test_find_root_with_similar_dirnames() {
    let urls = [
      Url::parse("file:///a/b/c/foo0/mod.ts").unwrap(),
      Url::parse("file:///a/b/c/foo1/mod.ts").unwrap(),
      Url::parse("file:///a/b/c/foo2/mod.ts").unwrap(),
    ];
    let urls = urls.iter().collect();
    assert_eq!(find_root(urls), Url::parse("file:///a/b/c/").ok());
  }

  // Boundary behavior: 50% and 80% are the inclusive lower bounds of
  // "medium" and "high".
  #[test]
  fn test_percent_to_class() {
    assert_eq!(percent_to_class(0.0), "low");
    assert_eq!(percent_to_class(49.9), "low");
    assert_eq!(percent_to_class(50.0), "medium");
    assert_eq!(percent_to_class(79.9), "medium");
    assert_eq!(percent_to_class(80.0), "high");
    assert_eq!(percent_to_class(100.0), "high");
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/tools/coverage/mod.rs | cli/tools/coverage/mod.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::fs;
use std::fs::File;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use deno_ast::MediaType;
use deno_ast::ModuleKind;
use deno_ast::ModuleSpecifier;
use deno_config::glob::FileCollector;
use deno_config::glob::FilePatterns;
use deno_config::glob::PathOrPattern;
use deno_config::glob::PathOrPatternSet;
use deno_core::anyhow::Context;
use deno_core::anyhow::anyhow;
use deno_core::error::AnyError;
use deno_core::serde_json;
use deno_core::sourcemap::SourceMap;
use deno_core::url::Url;
use deno_resolver::npm::DenoInNpmPackageChecker;
use node_resolver::InNpmPackageChecker;
use regex::Regex;
use reporter::CoverageReporter;
use text_lines::TextLines;
use self::ignore_directives::has_file_ignore_directive;
use self::ignore_directives::lex_comments;
use self::ignore_directives::parse_next_ignore_directives;
use self::ignore_directives::parse_range_ignore_directives;
use crate::args::CliOptions;
use crate::args::FileFlags;
use crate::args::Flags;
use crate::cdp;
use crate::factory::CliFactory;
use crate::file_fetcher::TextDecodedFile;
use crate::sys::CliSys;
use crate::tools::test::is_supported_test_path;
use crate::util::text_encoding::source_map_from_code;
mod ignore_directives;
mod merge;
mod range_tree;
pub mod reporter;
mod util;
use merge::ProcessCoverage;
/// Coverage info for one branch: the line it sits on, its block/branch
/// numbers, and whether/how often it was taken.
#[derive(Debug, Clone)]
struct BranchCoverageItem {
  line_index: usize,
  block_number: usize,
  branch_number: usize,
  // `None` when no taken-count is available for this branch.
  taken: Option<i64>,
  is_hit: bool,
}
/// Coverage info for one named function: its name, the line it starts on,
/// and how many times it executed.
#[derive(Debug, Clone)]
struct FunctionCoverageItem {
  name: String,
  line_index: usize,
  execution_count: i64,
}
/// Per-script coverage summary consumed by the coverage reporters.
#[derive(Debug, Clone)]
pub struct CoverageReport {
  url: ModuleSpecifier,
  named_functions: Vec<FunctionCoverageItem>,
  branches: Vec<BranchCoverageItem>,
  /// (line_index, number_of_hits)
  found_lines: Vec<(usize, i64)>,
  // Destination path for the report, when one was specified.
  output: Option<PathBuf>,
}
/// Inputs for [`generate_coverage_report`].
struct GenerateCoverageReportOptions<'a> {
  script_module_specifier: Url,
  script_media_type: MediaType,
  // Raw V8 coverage for this script.
  script_coverage: &'a cdp::ScriptCoverage,
  // Source as authored (possibly TypeScript/JSX).
  script_original_source: String,
  // Source as executed (the transpiled output when applicable).
  script_runtime_source: String,
  // Raw source map bytes produced by transpilation, if any.
  maybe_source_map: &'a Option<Vec<u8>>,
  output: &'a Option<PathBuf>,
}
/// Builds a [`CoverageReport`] for one script from its raw V8 coverage,
/// applying coverage-ignore directives parsed from the original source and
/// mapping runtime positions back to original lines via the source map
/// when one is present.
fn generate_coverage_report(
  options: GenerateCoverageReportOptions,
) -> Result<CoverageReport, AnyError> {
  let original_comments =
    lex_comments(&options.script_original_source, options.script_media_type);
  let url = Url::parse(&options.script_coverage.url).unwrap();
  // A file-level ignore directive yields an empty (but present) report.
  if has_file_ignore_directive(&original_comments) {
    return Ok(CoverageReport {
      url,
      named_functions: Vec::new(),
      branches: Vec::new(),
      found_lines: Vec::new(),
      output: options.output.clone(),
    });
  }
  let maybe_source_map = options
    .maybe_source_map
    .as_ref()
    .map(|source_map| SourceMap::from_slice(source_map).unwrap());
  let mut coverage_report = CoverageReport {
    url,
    named_functions: Vec::with_capacity(
      options
        .script_coverage
        .functions
        .iter()
        .filter(|f| !f.function_name.is_empty())
        .count(),
    ),
    branches: Vec::new(),
    found_lines: Vec::new(),
    output: options.output.clone(),
  };
  // Ignore directives are authored in the original source, so resolve
  // their positions against the original text, not the runtime text.
  let original_text_lines = TextLines::new(&options.script_original_source);
  let coverage_ignore_next_directives = parse_next_ignore_directives(
    &original_comments.comments,
    &original_text_lines,
  );
  let coverage_ignore_range_directives = parse_range_ignore_directives(
    &options.script_module_specifier,
    &original_comments.comments,
    &original_text_lines,
  );
  // The runtime source is always JavaScript by the time V8 executed it.
  let runtime_comments =
    lex_comments(&options.script_runtime_source, MediaType::JavaScript);
  let runtime_text_lines = TextLines::new(&options.script_runtime_source);
  // Collect named-function coverage; anonymous functions are skipped.
  // ranges[0] holds the function-level count.
  for function in &options.script_coverage.functions {
    if function.function_name.is_empty() {
      continue;
    }
    let line_index = range_to_src_line_index(
      &function.ranges[0],
      &runtime_text_lines,
      &maybe_source_map,
    );
    // Skip functions suppressed by an ignore-next directive on the
    // preceding line, or falling inside an ignore range.
    if line_index > 0
      && coverage_ignore_next_directives.contains(&(line_index - 1_usize))
    {
      continue;
    }
    if coverage_ignore_range_directives.iter().any(|range| {
      range.start_line_index <= line_index
        && range.stop_line_index >= line_index
    }) {
      continue;
    }
    coverage_report.named_functions.push(FunctionCoverageItem {
      name: function.function_name.clone(),
      line_index,
      execution_count: function.ranges[0].count,
    });
  }
  // Collect branch coverage: every range after ranges[0] is treated as a
  // branch of that function's block.
  for (block_number, function) in
    options.script_coverage.functions.iter().enumerate()
  {
    let block_hits = function.ranges[0].count;
    for (branch_number, range) in function.ranges[1..].iter().enumerate() {
      let line_index =
        range_to_src_line_index(range, &runtime_text_lines, &maybe_source_map);
      if line_index > 0
        && coverage_ignore_next_directives.contains(&(line_index - 1_usize))
      {
        continue;
      }
      if coverage_ignore_range_directives.iter().any(|range| {
        range.start_line_index <= line_index
          && range.stop_line_index >= line_index
      }) {
        continue;
      }
      // From https://manpages.debian.org/unstable/lcov/geninfo.1.en.html:
      //
      // Block number and branch number are gcc internal IDs for the branch. Taken is either '-'
      // if the basic block containing the branch was never executed or a number indicating how
      // often that branch was taken.
      //
      // However with the data we get from v8 coverage profiles it seems we can't actually hit
      // this as appears it won't consider any nested branches it hasn't seen but its here for
      // the sake of accuracy.
      let taken = if block_hits > 0 {
        Some(range.count)
      } else {
        None
      };
      coverage_report.branches.push(BranchCoverageItem {
        line_index,
        block_number,
        branch_number,
        taken,
        is_hit: range.count > 0,
      })
    }
  }
  // TODO(caspervonb): collect uncovered ranges on the lines so that we can highlight specific
  // parts of a line in color (word diff style) instead of the entire line.
  // Compute a hit count for every runtime line.
  let mut line_counts = Vec::with_capacity(runtime_text_lines.lines_count());
  for line_index in 0..runtime_text_lines.lines_count() {
    let (line_start_byte_offset, line_end_byte_offset) =
      runtime_text_lines.line_range(line_index);
    let line_start_char_offset =
      runtime_text_lines.char_index(line_start_byte_offset);
    let line_end_char_offset =
      runtime_text_lines.char_index(line_end_byte_offset);
    // Comment-only and blank lines are treated as trivially covered.
    let ignore = runtime_comments.comments.iter().any(|comment| {
      comment.range.start <= line_start_byte_offset
        && comment.range.end >= line_end_byte_offset
    }) || options.script_runtime_source
      [line_start_byte_offset..line_end_byte_offset]
      .trim()
      .is_empty();
    let mut count = 0;
    if ignore {
      count = 1;
    } else {
      // Count the hits of ranges that include the entire line which will always be at-least one
      // as long as the code has been evaluated.
      for function in &options.script_coverage.functions {
        for range in &function.ranges {
          if range.start_char_offset <= line_start_char_offset
            && range.end_char_offset >= line_end_char_offset
          {
            count += range.count;
          }
        }
      }
      // We reset the count if any block with a zero count overlaps with the line range.
      for function in &options.script_coverage.functions {
        for range in &function.ranges {
          if range.count > 0 {
            continue;
          }
          let overlaps = range.start_char_offset < line_end_char_offset
            && range.end_char_offset > line_start_char_offset;
          if overlaps {
            count = 0;
          }
        }
      }
    }
    line_counts.push(count);
  }
  // Drops lines suppressed by a range directive, lines holding an
  // ignore-next directive, and the line immediately following one.
  let found_lines_coverage_filter = |(line, _): &(usize, i64)| -> bool {
    if coverage_ignore_range_directives.iter().any(|range| {
      range.start_line_index <= *line && range.stop_line_index >= *line
    }) {
      return false;
    }
    if coverage_ignore_next_directives.contains(line) {
      return false;
    }
    if *line == 0_usize {
      return true;
    }
    if coverage_ignore_next_directives.contains(&(line - 1_usize)) {
      return false;
    }
    true
  };
  coverage_report.found_lines =
    if let Some(source_map) = maybe_source_map.as_ref() {
      let script_runtime_source_lines =
        options.script_runtime_source.lines().collect::<Vec<_>>();
      let mut found_lines = line_counts
        .iter()
        .enumerate()
        .flat_map(|(index, count)| {
          // get all the mappings from this destination line to a different src line
          let mut results = source_map
            .tokens()
            .filter(|token| {
              let dst_line = token.get_dst_line() as usize;
              dst_line == index && {
                // Drop tokens pointing at nothing or at comment markers.
                let dst_col = token.get_dst_col() as usize;
                let content = script_runtime_source_lines
                  .get(dst_line)
                  .and_then(|line| {
                    line.get(dst_col..std::cmp::min(dst_col + 2, line.len()))
                  })
                  .unwrap_or("");
                !content.is_empty()
                  && content != "/*"
                  && content != "*/"
                  && content != "//"
              }
            })
            .map(move |token| (token.get_src_line() as usize, *count))
            .collect::<Vec<_>>();
          // only keep the results that point at different src lines
          results.sort_unstable_by_key(|(index, _)| *index);
          results.dedup_by_key(|(index, _)| *index);
          results.into_iter()
        })
        .filter(found_lines_coverage_filter)
        .collect::<Vec<(usize, i64)>>();
      found_lines.sort_unstable_by_key(|(index, _)| *index);
      // combine duplicated lines
      for i in (1..found_lines.len()).rev() {
        if found_lines[i].0 == found_lines[i - 1].0 {
          found_lines[i - 1].1 += found_lines[i].1;
          found_lines.remove(i);
        }
      }
      found_lines
    } else {
      // No source map: runtime lines are the original lines.
      line_counts
        .into_iter()
        .enumerate()
        .filter(found_lines_coverage_filter)
        .collect::<Vec<(usize, i64)>>()
    };
  Ok(coverage_report)
}
/// Maps the start of a V8 coverage range to a zero-based line index in the
/// original source, consulting the source map when one is available.
fn range_to_src_line_index(
  range: &cdp::CoverageRange,
  text_lines: &TextLines,
  maybe_source_map: &Option<SourceMap>,
) -> usize {
  // V8 reports character offsets; convert to a byte offset first, then to
  // a line/column pair in the runtime source.
  let byte_index =
    text_lines.byte_index_from_char_index(range.start_char_offset);
  let runtime_lc = text_lines.line_and_column_index(byte_index);
  match maybe_source_map {
    // Translate the runtime position back to an original source line,
    // falling back to line 0 when no mapping exists for it.
    Some(source_map) => source_map
      .lookup_token(
        runtime_lc.line_index as u32,
        runtime_lc.column_index as u32,
      )
      .map_or(0, |token| token.get_src_line() as usize),
    None => runtime_lc.line_index,
  }
}
/// Reads every `*.json` V8 coverage profile under the include paths
/// (relative to `initial_cwd`), drops the ones whose *script URL* matches
/// an ignore pattern, and returns the parsed coverages sorted by URL.
fn collect_coverages(
  cli_options: &CliOptions,
  files: FileFlags,
  initial_cwd: &Path,
) -> Result<Vec<cdp::ScriptCoverage>, AnyError> {
  let mut coverages: Vec<cdp::ScriptCoverage> = Vec::new();
  // Default to scanning the current working directory when no include
  // paths were given.
  let file_patterns = FilePatterns {
    base: initial_cwd.to_path_buf(),
    include: Some({
      if files.include.is_empty() {
        PathOrPatternSet::new(vec![PathOrPattern::Path(
          initial_cwd.to_path_buf(),
        )])
      } else {
        PathOrPatternSet::from_include_relative_path_or_patterns(
          initial_cwd,
          &files.include,
        )?
      }
    }),
    exclude: PathOrPatternSet::new(vec![]),
  };
  // Coverage profiles are stored as one JSON file per script.
  let file_paths = FileCollector::new(|e| {
    e.path.extension().map(|ext| ext == "json").unwrap_or(false)
  })
  .ignore_git_folder()
  .ignore_node_modules()
  .set_vendor_folder(cli_options.vendor_dir_path().map(ToOwned::to_owned))
  .collect_file_patterns(&CliSys::default(), &file_patterns);
  // Ignore patterns are matched against the script URL recorded inside
  // each coverage file, not against the coverage file's own path.
  let coverage_patterns = FilePatterns {
    base: initial_cwd.to_path_buf(),
    include: None,
    exclude: PathOrPatternSet::from_exclude_relative_path_or_patterns(
      initial_cwd,
      &files.ignore,
    )
    .context("Invalid ignore pattern.")?,
  };
  for file_path in file_paths {
    let new_coverage = fs::read_to_string(file_path.as_path())
      .map_err(AnyError::from)
      .and_then(|json| {
        serde_json::from_str::<cdp::ScriptCoverage>(&json)
          .map_err(AnyError::from)
      })
      .with_context(|| format!("Failed reading '{}'", file_path.display()))?;
    let url = Url::parse(&new_coverage.url)?;
    if coverage_patterns.matches_specifier(&url) {
      coverages.push(new_coverage);
    }
  }
  // Sort for deterministic report ordering.
  coverages.sort_by_key(|k| k.url.clone());
  Ok(coverages)
}
/// Filters out internal/virtual scripts and applies the user-supplied
/// include/exclude regex patterns to the collected coverages.
fn filter_coverages(
  coverages: Vec<cdp::ScriptCoverage>,
  include: Vec<String>,
  exclude: Vec<String>,
  in_npm_pkg_checker: &DenoInNpmPackageChecker,
) -> Vec<cdp::ScriptCoverage> {
  let compile_patterns = |patterns: &[String]| -> Vec<Regex> {
    patterns.iter().map(|p| Regex::new(p).unwrap()).collect()
  };
  let include = compile_patterns(&include);
  let exclude = compile_patterns(&exclude);
  // Matches virtual file paths for doc testing
  // e.g. file:///path/to/mod.ts$23-29.ts
  let doc_test_re =
    Regex::new(r"\$\d+-\d+\.(js|mjs|cjs|jsx|ts|mts|cts|tsx)$").unwrap();
  // Scripts that should never appear in a user-facing report: runtime
  // internals, inline/virtual modules, test files, doc-test snippets, and
  // anything inside an npm package.
  let is_internal = |url: &str| -> bool {
    url.starts_with("ext:")
      || url.starts_with("data:")
      || url.starts_with("blob:")
      || url.ends_with("__anonymous__")
      || url.ends_with("$deno$test.mjs")
      || url.contains("/$deno$stdin.")
      || url.ends_with(".snap")
      || is_supported_test_path(Path::new(url))
      || doc_test_re.is_match(url)
      || Url::parse(url)
        .ok()
        .map(|url| in_npm_pkg_checker.in_npm_package(&url))
        .unwrap_or(false)
  };
  coverages
    .into_iter()
    .filter(|coverage| {
      let url = coverage.url.as_str();
      if is_internal(url) {
        return false;
      }
      if exclude.iter().any(|p| p.is_match(url)) {
        return false;
      }
      // With no include patterns, everything that survived is kept.
      include.is_empty() || include.iter().any(|p| p.is_match(url))
    })
    .collect::<Vec<cdp::ScriptCoverage>>()
}
/// Entry point for `deno coverage`: loads coverage profiles, filters and
/// merges them, regenerates a per-file report against the cached sources,
/// and hands the results to the requested reporters.
pub fn cover_files(
  flags: Arc<Flags>,
  files_include: Vec<String>,
  files_ignore: Vec<String>,
  include: Vec<String>,
  exclude: Vec<String>,
  output: Option<String>,
  reporters: &[&dyn CoverageReporter],
) -> Result<(), AnyError> {
  if files_include.is_empty() {
    return Err(anyhow!("No matching coverage profiles found"));
  }
  let factory = CliFactory::from_flags(flags);
  let cli_options = factory.cli_options()?;
  let in_npm_pkg_checker = factory.in_npm_pkg_checker()?;
  let file_fetcher = factory.file_fetcher()?;
  let emitter = factory.emitter()?;
  let cjs_tracker = factory.cjs_tracker()?;
  // Use the first include path as the default output path.
  let coverage_root = cli_options.initial_cwd().join(&files_include[0]);
  let script_coverages = collect_coverages(
    cli_options,
    FileFlags {
      include: files_include,
      ignore: files_ignore,
    },
    cli_options.initial_cwd(),
  )?;
  if script_coverages.is_empty() {
    return Err(anyhow!("No coverage files found"));
  }
  let script_coverages =
    filter_coverages(script_coverages, include, exclude, in_npm_pkg_checker);
  if script_coverages.is_empty() {
    return Err(anyhow!("No covered files included in the report"));
  }
  // Merge multiple profiles for the same script (e.g. from several test
  // runs) into a single coverage set per script.
  let proc_coverages: Vec<_> = script_coverages
    .into_iter()
    .map(|cov| ProcessCoverage { result: vec![cov] })
    .collect();
  let script_coverages = if let Some(c) = merge::merge_processes(proc_coverages)
  {
    c.result
  } else {
    vec![]
  };
  // Create (truncating) the output file up front so reporters can append
  // to it later.
  let out_mode = match output {
    Some(ref path) => match File::create(path) {
      Ok(_) => Some(PathBuf::from(path)),
      Err(e) => {
        return Err(anyhow!("Failed to create output file: {}", e));
      }
    },
    None => None,
  };
  let get_message = |specifier: &ModuleSpecifier| -> String {
    format!(
      "Failed to fetch \"{}\" from cache. Before generating coverage report, run `deno test --coverage` to ensure consistent state.",
      specifier,
    )
  };
  let mut file_reports = Vec::with_capacity(script_coverages.len());
  for script_coverage in script_coverages {
    let module_specifier = deno_path_util::resolve_url_or_path(
      &script_coverage.url,
      cli_options.initial_cwd(),
    )?;
    // Sources must come from the cache populated by the earlier test run
    // so line offsets match what actually executed.
    let maybe_file_result =
      file_fetcher.get_cached_source_or_local(&module_specifier);
    let file = match maybe_file_result {
      Ok(Some(file)) => TextDecodedFile::decode(file)?,
      Ok(None) => return Err(anyhow!("{}", get_message(&module_specifier))),
      Err(err) => return Err(err).context(get_message(&module_specifier)),
    };
    let original_source = file.source.clone();
    // Check if file was transpiled
    let transpiled_code = match file.media_type {
      // Executed as-is: runtime offsets already match the original source.
      MediaType::JavaScript
      | MediaType::Unknown
      | MediaType::Css
      | MediaType::Html
      | MediaType::Sql
      | MediaType::Wasm
      | MediaType::Cjs
      | MediaType::Mjs
      | MediaType::Json
      | MediaType::Jsonc
      | MediaType::Json5 => None,
      // Declaration files produce no runtime code.
      MediaType::Dts | MediaType::Dmts | MediaType::Dcts => Some(String::new()),
      MediaType::TypeScript
      | MediaType::Jsx
      | MediaType::Mts
      | MediaType::Cts
      | MediaType::Tsx => {
        let module_kind = ModuleKind::from_is_cjs(
          cjs_tracker.is_maybe_cjs(&file.specifier, file.media_type)?,
        );
        // Only a previously cached emit is accepted; transpiling here
        // could diverge from the code the coverage was collected against.
        Some(match emitter.maybe_cached_emit(&file.specifier, module_kind, &file.source)? {
          Some(code) => code,
          None => {
            return Err(anyhow!(
              "Missing transpiled source code for: \"{}\".
Before generating coverage report, run `deno test --coverage` to ensure consistent state.",
              file.specifier,
            ))
          }
        })
      }
      MediaType::SourceMap => {
        unreachable!()
      }
    };
    let runtime_code: String = match transpiled_code {
      Some(code) => code,
      None => original_source.to_string(),
    };
    let source_map = source_map_from_code(runtime_code.as_bytes());
    let coverage_report =
      generate_coverage_report(GenerateCoverageReportOptions {
        script_module_specifier: module_specifier.clone(),
        script_media_type: file.media_type,
        script_coverage: &script_coverage,
        script_original_source: original_source.to_string(),
        script_runtime_source: runtime_code.as_str().to_owned(),
        maybe_source_map: &source_map,
        output: &out_mode,
      })
      .with_context(|| {
        format!(
          "Failed to generate coverage report for file ({module_specifier})"
        )
      })?;
    if !coverage_report.found_lines.is_empty() {
      file_reports.push((coverage_report, original_source.to_string()));
    }
  }
  // All covered files, might have had ignore directive and we can end up
  // with no reports at this point.
  if file_reports.is_empty() {
    return Err(anyhow!("No covered files included in the report"));
  }
  for reporter in reporters {
    reporter.done(&coverage_root, &file_reports);
  }
  Ok(())
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/tools/coverage/reporter.rs | cli/tools/coverage/reporter.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::collections::HashMap;
use std::fs;
use std::fs::File;
use std::io::Error;
use std::io::Write;
use std::io::{self};
use std::path::Path;
use std::path::PathBuf;
use deno_core::error::AnyError;
use deno_core::url::Url;
use deno_lib::version::DENO_VERSION_INFO;
use super::CoverageReport;
use super::util;
use crate::args::CoverageType;
use crate::colors;
/// Aggregated hit/miss counters for a file or directory node of the
/// coverage summary tree.
#[derive(Default, Debug)]
pub struct CoverageStats<'a> {
  pub line_hit: usize,
  pub line_miss: usize,
  pub branch_hit: usize,
  pub branch_miss: usize,
  // Path of the parent node; `Some("")` for top-level entries, `None`
  // only for the root itself.
  pub parent: Option<String>,
  // Full source text; populated for file nodes, `None` for directories.
  pub file_text: Option<String>,
  // A report that contributed to this node (set when the entry is first
  // created in `collect_summary`).
  pub report: Option<&'a CoverageReport>,
}
/// Summary entries keyed by relative file/directory path; the empty
/// string keys the root entry.
type CoverageSummary<'a> = HashMap<String, CoverageStats<'a>>;
/// Instantiates the reporter implementation for the requested
/// `--coverage` output type.
pub fn create(kind: CoverageType) -> Box<dyn CoverageReporter + Send> {
  match kind {
    CoverageType::Summary => Box::new(SummaryCoverageReporter::new()),
    CoverageType::Lcov => Box::new(LcovCoverageReporter::new()),
    CoverageType::Detailed => Box::new(DetailedCoverageReporter::new()),
    CoverageType::Html => Box::new(HtmlCoverageReporter::new()),
  }
}
/// Common interface implemented by every coverage output format.
pub trait CoverageReporter {
  /// Renders the final report for all covered files.
  fn done(
    &self,
    coverage_root: &Path,
    file_reports: &[(CoverageReport, String)],
  );
  /// Collects the coverage summary of each file or directory.
  fn collect_summary<'a>(
    &'a self,
    file_reports: &'a [(CoverageReport, String)],
  ) -> CoverageSummary<'a> {
    let urls = file_reports.iter().map(|rep| &rep.0.url).collect();
    // Summary paths are relative to the common root of all reported
    // files; with no resolvable root, return an empty summary.
    let root = match util::find_root(urls)
      .and_then(|root_path| root_path.to_file_path().ok())
    {
      Some(path) => path,
      None => return HashMap::new(),
    };
    // summary by file or directory
    // tuple of (line hit, line miss, branch hit, branch miss, parent)
    let mut summary = HashMap::new();
    summary.insert("".to_string(), CoverageStats::default()); // root entry
    for (report, file_text) in file_reports {
      let path = report.url.to_file_path().unwrap();
      let relative_path = path.strip_prefix(&root).unwrap();
      let mut file_text = Some(file_text.to_string());
      let mut summary_path = Some(relative_path);
      // From leaf to root, adds up the coverage stats
      while let Some(path) = summary_path {
        let path_str = path.to_str().unwrap().to_string();
        let parent = path
          .parent()
          .and_then(|p| p.to_str())
          .map(|p| p.to_string());
        let stats = summary.entry(path_str).or_insert(CoverageStats {
          parent,
          file_text,
          report: Some(report),
          ..CoverageStats::default()
        });
        stats.line_hit += report
          .found_lines
          .iter()
          .filter(|(_, count)| *count > 0)
          .count();
        stats.line_miss += report
          .found_lines
          .iter()
          .filter(|(_, count)| *count == 0)
          .count();
        stats.branch_hit += report.branches.iter().filter(|b| b.is_hit).count();
        stats.branch_miss +=
          report.branches.iter().filter(|b| !b.is_hit).count();
        // Only the leaf (file) entry carries the source text; ancestor
        // directory entries created afterwards get `None`.
        file_text = None;
        summary_path = path.parent();
      }
    }
    summary
  }
}
/// Prints a per-file table of branch/line percentages to stdout.
pub struct SummaryCoverageReporter {}
#[allow(clippy::print_stdout)]
impl SummaryCoverageReporter {
  pub fn new() -> SummaryCoverageReporter {
    SummaryCoverageReporter {}
  }
  /// Prints one table row; the file name is colored by the line-coverage
  /// class and each percentage cell by its own class.
  fn print_coverage_line(
    &self,
    node: &str,
    node_max: usize,
    stats: &CoverageStats,
  ) {
    let CoverageStats {
      line_hit,
      line_miss,
      branch_hit,
      branch_miss,
      ..
    } = stats;
    let (_, line_percent, line_class) =
      util::calc_coverage_display_info(*line_hit, *line_miss);
    let (_, branch_percent, branch_class) =
      util::calc_coverage_display_info(*branch_hit, *branch_miss);
    // Normalize Windows path separators for display and pad to the
    // widest column.
    let file_name = format!(
      "{node:node_max$}",
      node = node.replace('\\', "/"),
      node_max = node_max
    );
    let file_name = if line_class == "high" {
      format!("{}", colors::green(&file_name))
    } else if line_class == "medium" {
      format!("{}", colors::yellow(&file_name))
    } else {
      format!("{}", colors::red(&file_name))
    };
    let branch_percent = if branch_class == "high" {
      format!("{}", colors::green(&format!("{:>8.1}", branch_percent)))
    } else if branch_class == "medium" {
      format!("{}", colors::yellow(&format!("{:>8.1}", branch_percent)))
    } else {
      format!("{}", colors::red(&format!("{:>8.1}", branch_percent)))
    };
    let line_percent = if line_class == "high" {
      format!("{}", colors::green(&format!("{:>6.1}", line_percent)))
    } else if line_class == "medium" {
      format!("{}", colors::yellow(&format!("{:>6.1}", line_percent)))
    } else {
      format!("{}", colors::red(&format!("{:>6.1}", line_percent)))
    };
    println!(
      "| {file_name} | {branch_percent} | {line_percent} |",
      file_name = file_name,
      branch_percent = branch_percent,
      line_percent = line_percent,
    );
  }
}
#[allow(clippy::print_stdout)]
impl CoverageReporter for SummaryCoverageReporter {
  fn done(
    &self,
    _coverage_root: &Path,
    file_reports: &[(CoverageReport, String)],
  ) {
    let summary = self.collect_summary(file_reports);
    // The root entry (key "") aggregates all files and is printed last.
    let root_stats = summary.get("").unwrap();
    // Only file entries (those carrying source text) get their own row.
    let mut entries = summary
      .iter()
      .filter(|(_, stats)| stats.file_text.is_some())
      .collect::<Vec<_>>();
    entries.sort_by_key(|(node, _)| node.to_owned());
    // Pad the file column to the widest name, at least as wide as the
    // trailing "All files" row.
    let node_max = entries
      .iter()
      .map(|(node, _)| node.len())
      .max()
      .unwrap()
      .max("All files".len());
    let header =
      format!("| {node:node_max$} | Branch % | Line % |", node = "File");
    let separator = format!(
      "| {} | {} | {} |",
      "-".repeat(node_max),
      "-".repeat(8),
      "-".repeat(6)
    );
    println!("{}", header);
    println!("{}", separator);
    entries.iter().for_each(|(node, stats)| {
      self.print_coverage_line(node, node_max, stats);
    });
    self.print_coverage_line("All files", node_max, root_stats);
  }
}
/// Writes an LCOV tracefile, either to stdout or to the `--output` file.
pub struct LcovCoverageReporter {}
impl CoverageReporter for LcovCoverageReporter {
  fn done(
    &self,
    _coverage_root: &Path,
    file_reports: &[(CoverageReport, String)],
  ) {
    file_reports.iter().for_each(|(report, file_text)| {
      self.report(report, file_text).unwrap();
    });
    // Every report carries the same `output` path, so inspecting the
    // first one suffices to know whether a file was written.
    if let Some((report, _)) = file_reports.first()
      && let Some(ref output) = report.output
    {
      if let Ok(path) = output.canonicalize() {
        let url = Url::from_file_path(path).unwrap();
        log::info!("Lcov coverage report has been generated at {}", url);
      } else {
        log::error!(
          "Failed to resolve the output path of Lcov report: {}",
          output.display()
        );
      }
    }
  }
}
impl LcovCoverageReporter {
  pub fn new() -> LcovCoverageReporter {
    LcovCoverageReporter {}
  }
  /// Writes one LCOV record block (`SF:` through `end_of_record`) for a
  /// single file. LCOV line numbers are 1-based, hence the `+ 1` offsets.
  fn report(
    &self,
    coverage_report: &CoverageReport,
    _file_text: &str,
  ) -> Result<(), AnyError> {
    // pipes output to stdout if no file is specified
    let out_mode: Result<Box<dyn Write>, Error> = match coverage_report.output {
      // only append to the file as the file should be created already
      Some(ref path) => File::options()
        .append(true)
        .open(path)
        .map(|f| Box::new(f) as Box<dyn Write>),
      None => Ok(Box::new(io::stdout())),
    };
    let mut out_writer = out_mode?;
    // Prefer a plain filesystem path; fall back to the URL string for
    // non-file specifiers.
    let file_path = coverage_report
      .url
      .to_file_path()
      .ok()
      .and_then(|p| p.to_str().map(|p| p.to_string()))
      .unwrap_or_else(|| coverage_report.url.to_string());
    writeln!(out_writer, "SF:{file_path}")?;
    // FN: start line and name of each named function.
    for function in &coverage_report.named_functions {
      writeln!(
        out_writer,
        "FN:{},{}",
        function.line_index + 1,
        function.name
      )?;
    }
    // FNDA: execution count per named function.
    for function in &coverage_report.named_functions {
      writeln!(
        out_writer,
        "FNDA:{},{}",
        function.execution_count, function.name
      )?;
    }
    // FNF/FNH: functions found / functions hit.
    let functions_found = coverage_report.named_functions.len();
    writeln!(out_writer, "FNF:{functions_found}")?;
    let functions_hit = coverage_report
      .named_functions
      .iter()
      .filter(|f| f.execution_count > 0)
      .count();
    writeln!(out_writer, "FNH:{functions_hit}")?;
    // BRDA: line, block id, branch id, taken count ('-' when the
    // enclosing block never ran).
    for branch in &coverage_report.branches {
      let taken = if let Some(taken) = &branch.taken {
        taken.to_string()
      } else {
        "-".to_string()
      };
      writeln!(
        out_writer,
        "BRDA:{},{},{},{}",
        branch.line_index + 1,
        branch.block_number,
        branch.branch_number,
        taken
      )?;
    }
    // BRF/BRH: branches found / branches hit.
    let branches_found = coverage_report.branches.len();
    writeln!(out_writer, "BRF:{branches_found}")?;
    let branches_hit =
      coverage_report.branches.iter().filter(|b| b.is_hit).count();
    writeln!(out_writer, "BRH:{branches_hit}")?;
    // DA: per-line execution counts.
    for (index, count) in &coverage_report.found_lines {
      writeln!(out_writer, "DA:{},{}", index + 1, count)?;
    }
    // LH/LF: lines hit / lines found.
    let lines_hit = coverage_report
      .found_lines
      .iter()
      .filter(|(_, count)| *count != 0)
      .count();
    writeln!(out_writer, "LH:{lines_hit}")?;
    let lines_found = coverage_report.found_lines.len();
    writeln!(out_writer, "LF:{lines_found}")?;
    writeln!(out_writer, "end_of_record")?;
    Ok(())
  }
}
/// Prints each file's coverage ratio plus its missed lines to stdout.
struct DetailedCoverageReporter {}
impl CoverageReporter for DetailedCoverageReporter {
  fn done(
    &self,
    _coverage_root: &Path,
    file_reports: &[(CoverageReport, String)],
  ) {
    file_reports.iter().for_each(|(report, file_text)| {
      self.report(report, file_text).unwrap();
    });
  }
}
#[allow(clippy::print_stdout)]
impl DetailedCoverageReporter {
  pub fn new() -> DetailedCoverageReporter {
    DetailedCoverageReporter {}
  }
  /// Prints the file's line-coverage ratio, then echoes every missed
  /// line, with a dashed separator between disjoint runs of lines.
  fn report(
    &self,
    coverage_report: &CoverageReport,
    file_text: &str,
  ) -> Result<(), AnyError> {
    let lines = file_text.split('\n').collect::<Vec<_>>();
    print!("cover {} ... ", coverage_report.url);
    let hit_lines = coverage_report
      .found_lines
      .iter()
      .filter(|(_, count)| *count > 0)
      .map(|(index, _)| *index);
    let missed_lines = coverage_report
      .found_lines
      .iter()
      .filter(|(_, count)| *count == 0)
      .map(|(index, _)| *index);
    let lines_found = coverage_report.found_lines.len();
    let lines_hit = hit_lines.count();
    let line_ratio = lines_hit as f32 / lines_found as f32;
    let line_coverage =
      format!("{:.3}% ({}/{})", line_ratio * 100.0, lines_hit, lines_found);
    // Color thresholds: >= 90% green, >= 75% yellow, otherwise red.
    if line_ratio >= 0.9 {
      println!("{}", colors::green(&line_coverage));
    } else if line_ratio >= 0.75 {
      println!("{}", colors::yellow(&line_coverage));
    } else {
      println!("{}", colors::red(&line_coverage));
    }
    let mut last_line = None;
    for line_index in missed_lines {
      const WIDTH: usize = 4;
      const SEPARATOR: &str = "|";
      // Put a horizontal separator between disjoint runs of lines
      if let Some(last_line) = last_line
        && last_line + 1 != line_index
      {
        let dash = colors::gray("-".repeat(WIDTH + 1));
        println!("{}{}{}", dash, colors::gray(SEPARATOR), dash);
      }
      println!(
        "{:width$} {} {}",
        line_index + 1,
        colors::gray(SEPARATOR),
        colors::red(&lines[line_index]),
        width = WIDTH
      );
      last_line = Some(line_index);
    }
    Ok(())
  }
}
/// Generates a static HTML report tree under `<coverage_root>/html/`.
pub struct HtmlCoverageReporter {}
impl CoverageReporter for HtmlCoverageReporter {
  fn done(
    &self,
    coverage_root: &Path,
    file_reports: &[(CoverageReport, String)],
  ) {
    let summary = self.collect_summary(file_reports);
    let now = chrono::Utc::now().to_rfc2822();
    for (node, stats) in &summary {
      // Directory nodes (no file text) render an index page; file nodes
      // render an annotated source page.
      let report_path =
        self.get_report_path(coverage_root, node, stats.file_text.is_none());
      let main_content = if let Some(file_text) = &stats.file_text {
        self.create_html_code_table(file_text, stats.report.unwrap())
      } else {
        self.create_html_summary_table(node, &summary)
      };
      let is_dir = stats.file_text.is_none();
      let html = self.create_html(node, is_dir, stats, &now, &main_content);
      fs::create_dir_all(report_path.parent().unwrap()).unwrap();
      fs::write(report_path, html).unwrap();
    }
    let root_report = Url::from_file_path(
      coverage_root
        .join("html")
        .join("index.html")
        .canonicalize()
        .unwrap(),
    )
    .unwrap();
    log::info!("HTML coverage report has been generated at {}", root_report);
  }
}
impl HtmlCoverageReporter {
pub fn new() -> HtmlCoverageReporter {
HtmlCoverageReporter {}
}
/// Gets the report path for a single file
pub fn get_report_path(
&self,
coverage_root: &Path,
node: &str,
is_dir: bool,
) -> PathBuf {
if is_dir {
// e.g. /path/to/coverage/html/src/index.html
coverage_root.join("html").join(node).join("index.html")
} else {
// e.g. /path/to/coverage/html/src/main.ts.html
Path::new(&format!(
"{}.html",
coverage_root.join("html").join(node).to_str().unwrap()
))
.to_path_buf()
}
}
/// Creates single page of html report.
pub fn create_html(
&self,
node: &str,
is_dir: bool,
stats: &CoverageStats,
timestamp: &str,
main_content: &str,
) -> String {
let title = if node.is_empty() {
"Coverage report for all files".to_string()
} else {
let node = if is_dir {
format!("{}/", node)
} else {
node.to_string()
};
format!("Coverage report for {node}")
};
let title = title.replace(std::path::MAIN_SEPARATOR, "/");
let breadcrumbs_parts = node
.split(std::path::MAIN_SEPARATOR)
.filter(|s| !s.is_empty())
.collect::<Vec<_>>();
let head = self.create_html_head(&title);
let breadcrumb_navigation =
self.create_breadcrumbs_navigation(&breadcrumbs_parts, is_dir);
let header = self.create_html_header(&breadcrumb_navigation, stats);
let footer = self.create_html_footer(timestamp);
format!(
"<!doctype html>
<html lang='en-US'>
{head}
<body>
<div class='wrapper'>
{header}
<div class='pad1 overflow-auto'>
{main_content}
</div>
<div class='push'></div>
</div>
{footer}
</body>
</html>"
)
}
/// Creates <head> tag for html report.
pub fn create_html_head(&self, title: &str) -> String {
let style_css = include_str!("style.css");
let script = include_str!("script.js");
format!(
"
<head>
<meta charset='utf-8'>
<title>{title}</title>
<style>{style_css}</style>
<script>{script}</script>
<meta name='viewport' content='width=device-width, initial-scale=1'>
</head>"
)
}
/// Creates header part of the contents for html report.
pub fn create_html_header(
&self,
breadcrumb_navigation: &str,
stats: &CoverageStats,
) -> String {
let CoverageStats {
line_hit,
line_miss,
branch_hit,
branch_miss,
..
} = stats;
let (line_total, line_percent, line_class) =
util::calc_coverage_display_info(*line_hit, *line_miss);
let (branch_total, branch_percent, _) =
util::calc_coverage_display_info(*branch_hit, *branch_miss);
let moon_svg = include_str!("moon.svg");
let sun_svg = include_str!("sun.svg");
format!(
r#"
<div class='pad1 flex-header'>
<div>
<h1>{breadcrumb_navigation}</h1>
<div class='clearfix'>
<div class='fl pad1y space-right2'>
<span class='strong'>{branch_percent:.2}%</span>
<span class='quiet'>Branches</span>
<span class='fraction'>{branch_hit}/{branch_total}</span>
</div>
<div class='fl pad1y space-right2'>
<span class='strong'>{line_percent:.2}%</span>
<span class='quiet'>Lines</span>
<span class='fraction'>{line_hit}/{line_total}</span>
</div>
</div>
</div>
<button id="theme-toggle" type="button" aria-label="Toggle dark mode" style="display: none;">
{moon_svg}
{sun_svg}
</button>
</div>
<div class='status-line {line_class}'></div>"#
)
}
/// Creates footer part of the contents for html report.
pub fn create_html_footer(&self, now: &str) -> String {
let version = DENO_VERSION_INFO.deno;
format!(
"
<div class='footer quiet pad2 space-top1 center small'>
Code coverage generated by
<a href='https://deno.com/' target='_blank'>Deno v{version}</a>
at {now}
</div>"
)
}
/// Creates <table> of summary for html report.
pub fn create_html_summary_table(
&self,
node: &String,
summary: &CoverageSummary,
) -> String {
let mut children = summary
.iter()
.filter(|(_, stats)| stats.parent.as_ref() == Some(node))
.map(|(k, stats)| (stats.file_text.is_some(), k.clone()))
.collect::<Vec<_>>();
// Sort directories first, then files
children.sort();
let table_rows: Vec<String> = children.iter().map(|(is_file, c)| {
let CoverageStats { line_hit, line_miss, branch_hit, branch_miss, .. } =
summary.get(c).unwrap();
let (line_total, line_percent, line_class) =
util::calc_coverage_display_info(*line_hit, *line_miss);
let (branch_total, branch_percent, branch_class) =
util::calc_coverage_display_info(*branch_hit, *branch_miss);
let path = Path::new(c.strip_prefix(&format!("{node}{}", std::path::MAIN_SEPARATOR)).unwrap_or(c)).to_str().unwrap();
let path = path.replace(std::path::MAIN_SEPARATOR, "/");
let path_label = if *is_file { path.to_string() } else { format!("{}/", path) };
let path_link = if *is_file { format!("{}.html", path) } else { format!("{}index.html", path_label) };
format!("
<tr>
<td class='file {line_class}'><a href='{path_link}'>{path_label}</a></td>
<td class='pic {line_class}'>
<div class='chart'>
<div class='cover-fill' style='width: {line_percent:.1}%'></div><div class='cover-empty' style='width: calc(100% - {line_percent:.1}%)'></div>
</div>
</td>
<td class='pct {branch_class}'>{branch_percent:.2}%</td>
<td class='abs {branch_class}'>{branch_hit}/{branch_total}</td>
<td class='pct {line_class}'>{line_percent:.2}%</td>
<td class='abs {line_class}'>{line_hit}/{line_total}</td>
</tr>")}).collect();
let table_rows = table_rows.join("\n");
format!(
"
<table class='coverage-summary'>
<thead>
<tr>
<th class='file'>File</th>
<th class='pic'></th>
<th class='pct'>Branches</th>
<th class='abs'></th>
<th class='pct'>Lines</th>
<th class='abs'></th>
</tr>
</thead>
<tbody>
{table_rows}
</tbody>
</table>"
)
}
/// Creates <table> of single file code coverage.
pub fn create_html_code_table(
  &self,
  file_text: &str,
  report: &CoverageReport,
) -> String {
  let line_num = file_text.lines().count();
  // Line-number gutter: one self-linking anchor per source line so rows can
  // be deep-linked via `#L<n>`.
  let line_count = (1..line_num + 1)
    .map(|i| format!("<a href='#L{i}' id='L{i}'>{i}</a>"))
    .collect::<Vec<_>>()
    .join("\n");
  // Per-line hit markers: covered (with hit count), uncovered, or neutral
  // when the line was not instrumented. `found_lines` holds zero-based
  // line indices paired with hit counts.
  let line_coverage = (0..line_num)
    .map(|i| {
      if let Some((_, count)) =
        report.found_lines.iter().find(|(line, _)| i == *line)
      {
        if *count == 0 {
          "<span class='cline-any cline-no'> </span>".to_string()
        } else {
          format!("<span class='cline-any cline-yes' title='This line is covered {count} time{}'>x{count}</span>", if *count > 1 { "s" } else { "" })
        }
      } else {
        "<span class='cline-any cline-neutral'> </span>".to_string()
      }
    })
    .collect::<Vec<_>>()
    .join("\n");
  // Per-line branch markers: an `I` badge on every line that has at least
  // one missed branch condition.
  let branch_coverage = (0..line_num)
    .map(|i| {
      let branch_is_missed = report.branches.iter().any(|b| b.line_index == i && !b.is_hit);
      if branch_is_missed {
        "<span class='missing-if-branch' title='branch condition is missed in this line'>I</span>".to_string()
      } else {
        "".to_string()
      }
    })
    .collect::<Vec<_>>()
    .join("\n");
  // Escape HTML metacharacters so the source renders as literal text inside
  // the <pre> block. `&` must be escaped first, otherwise the `&` introduced
  // by the other entities would be double-escaped.
  let file_text = file_text
    .replace('&', "&amp;")
    .replace('<', "&lt;")
    .replace('>', "&gt;");
  // TODO(kt3k): Add syntax highlight to source code
  format!(
    "<table class='coverage'>
<tr>
<td class='line-count quiet'><pre>{line_count}</pre></td>
<td class='line-coverage quiet'><pre>{line_coverage}</pre></td>
<td class='branch-coverage quiet'><pre>{branch_coverage}</pre></td>
<td class='text'><pre class='prettyprint'>{file_text}</pre></td>
</tr>
</table>"
  )
}
/// Builds the "All files / dir / file" breadcrumb navigation for a coverage
/// report page.
///
/// `breadcrumbs_parts` are the path segments from the report root to the
/// current page; `is_dir` indicates a directory index page (which lives one
/// level deeper relative to the root than a file page with the same number
/// of segments).
pub fn create_breadcrumbs_navigation(
  &self,
  breadcrumbs_parts: &[&str],
  is_dir: bool,
) -> String {
  // Handle the root page first: there is nothing to link, and the
  // `len() - 1` below would underflow (panic in debug builds) for an empty
  // file page. The original code performed this check only after the
  // subtraction.
  if breadcrumbs_parts.is_empty() {
    return String::from("All files");
  }
  let mut breadcrumbs_html = Vec::new();
  // A directory index sits inside its own directory, so it needs one more
  // `../` than a file page to reach the report root.
  let root_repeats = if is_dir {
    breadcrumbs_parts.len()
  } else {
    breadcrumbs_parts.len() - 1
  };
  let mut root_url = "../".repeat(root_repeats);
  root_url += "index.html";
  breadcrumbs_html.push(format!("<a href='{root_url}'>All files</a>"));
  for (index, breadcrumb) in breadcrumbs_parts.iter().enumerate() {
    // The final segment is the current page: render as plain text, no link.
    if index == breadcrumbs_parts.len() - 1 {
      breadcrumbs_html.push(breadcrumb.to_string());
      continue;
    }
    let mut full_url = "../".repeat(breadcrumbs_parts.len() - (index + 1));
    if is_dir {
      full_url += "index.html";
    } else {
      // Intermediate segments link to that directory's index page. (The
      // original guarded the `/index.html` suffix with a last-index check
      // that was always true here, since the last index `continue`s above.)
      full_url += breadcrumb;
      full_url += "/index.html";
    }
    breadcrumbs_html.push(format!("<a href='{full_url}'>{breadcrumb}</a>"))
  }
  breadcrumbs_html.join(" / ")
}
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/tools/test/sanitizers.rs | cli/tools/test/sanitizers.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::collections::HashMap;
use deno_core::error::CoreError;
use deno_core::stats::RuntimeActivity;
use deno_core::stats::RuntimeActivityDiff;
use deno_core::stats::RuntimeActivityStats;
use deno_core::stats::RuntimeActivityStatsFactory;
use deno_core::stats::RuntimeActivityStatsFilter;
use deno_core::stats::RuntimeActivityType;
use deno_runtime::worker::MainWorker;
use super::poll_event_loop;
/// How many times we're allowed to spin the event loop before considering something a leak.
const MAX_SANITIZER_LOOP_SPINS: usize = 16;

/// Counts, per (activity type, name) pair, how many runtime activities
/// (async ops, resources, timers, intervals) were already alive before any
/// test ran. Used to exempt "replaced" top-level activity from leak checks.
#[derive(Default)]
struct TopLevelSanitizerStats {
  map: HashMap<(RuntimeActivityType, Cow<'static, str>), usize>,
}

/// Converts an owned activity into the (type, name) key used by
/// `TopLevelSanitizerStats`. Timers and intervals carry no name, so they
/// key on the empty string.
fn get_sanitizer_item(
  activity: RuntimeActivity,
) -> (RuntimeActivityType, Cow<'static, str>) {
  let activity_type = activity.activity();
  match activity {
    RuntimeActivity::AsyncOp(_, _, name) => (activity_type, name.into()),
    RuntimeActivity::Resource(_, _, name) => (activity_type, name.into()),
    RuntimeActivity::Interval(_, _) => (activity_type, "".into()),
    RuntimeActivity::Timer(_, _) => (activity_type, "".into()),
  }
}

/// Borrowing variant of `get_sanitizer_item`, for diff entries we don't own.
fn get_sanitizer_item_ref(
  activity: &RuntimeActivity,
) -> (RuntimeActivityType, Cow<'_, str>) {
  let activity_type = activity.activity();
  match activity {
    // `(*name)` copies the name reference out of the borrowed activity.
    RuntimeActivity::AsyncOp(_, _, name) => (activity_type, (*name).into()),
    RuntimeActivity::Resource(_, _, name) => (activity_type, name.into()),
    RuntimeActivity::Interval(_, _) => (activity_type, "".into()),
    RuntimeActivity::Timer(_, _) => (activity_type, "".into()),
  }
}
/// Bundles everything needed to take runtime-activity snapshots around a
/// test: the stats factory, the configured filter, and the baseline of
/// activity that existed at the top level before tests started.
pub struct TestSanitizerHelper {
  activity_stats: RuntimeActivityStatsFactory,
  activity_filter: RuntimeActivityStatsFilter,
  top_level_sanitizer_stats: TopLevelSanitizerStats,
}

impl TestSanitizerHelper {
  /// Takes a snapshot of current runtime activity, restricted by the
  /// configured filter.
  pub fn capture_stats(&self) -> RuntimeActivityStats {
    self.activity_stats.clone().capture(&self.activity_filter)
  }
}
/// Prepares the sanitizer machinery for a test worker: builds the activity
/// filter (resources + ops + timers, minus worker-messaging ops) and records
/// the activities already alive at the top level so that later diffs can
/// ignore top-level work that merely completed and restarted.
pub fn create_test_sanitizer_helper(
  worker: &mut MainWorker,
) -> TestSanitizerHelper {
  let stats = worker.js_runtime.runtime_activity_stats_factory();
  let ops = worker.js_runtime.op_names();
  // These particular ops may start and stop independently of tests, so we just filter them out
  // completely.
  // NOTE(review): the `unwrap()`s assume both ops are always registered in a
  // MainWorker's op table — confirm against the runtime's extension list.
  let op_id_host_recv_message = ops
    .iter()
    .position(|op| *op == "op_host_recv_message")
    .unwrap();
  let op_id_host_recv_ctrl = ops
    .iter()
    .position(|op| *op == "op_host_recv_ctrl")
    .unwrap();
  // For consistency between tests with and without sanitizers, we _always_ include
  // the actual sanitizer capture before and after a test, but a test that ignores resource
  // or op sanitization simply doesn't throw if one of these constraints is violated.
  let mut filter = RuntimeActivityStatsFilter::default();
  filter = filter.with_resources();
  filter = filter.with_ops();
  filter = filter.with_timers();
  filter = filter.omit_op(op_id_host_recv_ctrl as _);
  filter = filter.omit_op(op_id_host_recv_message as _);
  // Count the top-level stats so we can filter them out if they complete and restart within
  // a test.
  let top_level_stats = stats.clone().capture(&filter);
  let mut top_level = TopLevelSanitizerStats::default();
  for activity in top_level_stats.dump().active {
    top_level
      .map
      .entry(get_sanitizer_item(activity))
      .and_modify(|n| *n += 1)
      .or_insert(1);
  }
  TestSanitizerHelper {
    activity_stats: stats,
    activity_filter: filter,
    top_level_sanitizer_stats: top_level,
  }
}
/// The sanitizer must ignore ops, resources and timers that were started at the top-level, but
/// completed and restarted, replacing themselves with the same "thing". For example, if you run a
/// `Deno.serve` server at the top level and make fetch requests to it during the test, those ops
/// should not count as completed during the test because they are immediately replaced.
///
/// Returns `true` when `diff` is either literally empty or consists solely of
/// such top-level replacements (every appearance is matched by a
/// disappearance of the same (type, name), within the top-level budget).
fn is_empty(
  top_level: &TopLevelSanitizerStats,
  diff: &RuntimeActivityDiff,
) -> bool {
  // If the diff is empty, return empty
  if diff.is_empty() {
    return true;
  }
  // If the # of appeared != # of disappeared, we can exit fast with not empty
  if diff.appeared.len() != diff.disappeared.len() {
    return false;
  }
  // If there are no top-level ops and !diff.is_empty(), we can exit fast with not empty
  if top_level.map.is_empty() {
    return false;
  }
  // Otherwise we need to calculate replacement for top-level stats. Sanitizers will not fire
  // if an op, resource or timer is replaced and has a corresponding top-level op.
  let mut map = HashMap::new();
  for item in &diff.appeared {
    let item = get_sanitizer_item_ref(item);
    // An appearance with no top-level counterpart is a genuine leak.
    let Some(n1) = top_level.map.get(&item) else {
      return false;
    };
    // `n2` tallies appearances of this (type, name) seen so far in the diff.
    let n2 = map.entry(item).and_modify(|n| *n += 1).or_insert(1);
    // If more ops appeared than were created at the top-level, return false
    if *n2 > *n1 {
      return false;
    }
  }
  // We know that we replaced no more things than were created at the top-level. So now we just want
  // to make sure that whatever thing was created has a corresponding disappearance record.
  for item in &diff.disappeared {
    let item = get_sanitizer_item_ref(item);
    // If more things of this type disappeared than appeared, return false
    let Some(n1) = map.get_mut(&item) else {
      return false;
    };
    *n1 -= 1;
    if *n1 == 0 {
      map.remove(&item);
    }
  }
  // If everything is accounted for, we are empty
  map.is_empty()
}
/// Spins the event loop (up to `MAX_SANITIZER_LOOP_SPINS` times) waiting for
/// the runtime-activity diff relative to `before_test_stats` to settle to
/// "empty" in the `is_empty` sense.
///
/// Returns `Ok(None)` when nothing leaked, or `Ok(Some(diff))` containing the
/// residual activity — filtered according to `sanitize_ops` /
/// `sanitize_resources` — for the caller to report.
pub async fn wait_for_activity_to_stabilize(
  worker: &mut MainWorker,
  helper: &TestSanitizerHelper,
  before_test_stats: RuntimeActivityStats,
  sanitize_ops: bool,
  sanitize_resources: bool,
) -> Result<Option<RuntimeActivityDiff>, CoreError> {
  // First, check to see if there's any diff at all. If not, just continue.
  let after_test_stats = helper.capture_stats();
  let mut diff =
    RuntimeActivityStats::diff(&before_test_stats, &after_test_stats);
  if is_empty(&helper.top_level_sanitizer_stats, &diff) {
    // No activity, so we return early
    return Ok(None);
  }
  // We allow for up to MAX_SANITIZER_LOOP_SPINS to get to a point where there is no difference.
  // TODO(mmastrac): We could be much smarter about this if we had the concept of "progress" in
  // an event loop tick. Ideally we'd be able to tell if we were spinning and doing nothing, or
  // spinning and resolving ops.
  for _ in 0..MAX_SANITIZER_LOOP_SPINS {
    // There was a diff, so let the event loop run once
    poll_event_loop(worker).await?;
    let after_test_stats = helper.capture_stats();
    diff = RuntimeActivityStats::diff(&before_test_stats, &after_test_stats);
    if is_empty(&helper.top_level_sanitizer_stats, &diff) {
      return Ok(None);
    }
  }
  // Op sanitization disabled: drop async-op records from both sides of the diff.
  if !sanitize_ops {
    diff
      .appeared
      .retain(|activity| !matches!(activity, RuntimeActivity::AsyncOp(..)));
    diff
      .disappeared
      .retain(|activity| !matches!(activity, RuntimeActivity::AsyncOp(..)));
  }
  // Resource sanitization disabled: drop resource records likewise.
  if !sanitize_resources {
    diff
      .appeared
      .retain(|activity| !matches!(activity, RuntimeActivity::Resource(..)));
    diff
      .disappeared
      .retain(|activity| !matches!(activity, RuntimeActivity::Resource(..)));
  }
  // Since we don't have an option to disable timer sanitization, we use sanitize_ops == false &&
  // sanitize_resources == false to disable those.
  if !sanitize_ops && !sanitize_resources {
    diff.appeared.retain(|activity| {
      !matches!(
        activity,
        RuntimeActivity::Timer(..) | RuntimeActivity::Interval(..)
      )
    });
    diff.disappeared.retain(|activity| {
      !matches!(
        activity,
        RuntimeActivity::Timer(..) | RuntimeActivity::Interval(..)
      )
    });
  }
  // Re-check after filtering: what remains (if anything) is the leak report.
  Ok(if is_empty(&helper.top_level_sanitizer_stats, &diff) {
    None
  } else {
    Some(diff)
  })
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/cli/tools/test/mod.rs | cli/tools/test/mod.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::cell::RefCell;
use std::collections::BTreeMap;
use std::collections::BTreeSet;
use std::collections::HashMap;
use std::collections::HashSet;
use std::env;
use std::fmt::Write as _;
use std::future::poll_fn;
use std::io::Write;
use std::num::NonZeroUsize;
use std::path::Path;
use std::path::PathBuf;
use std::rc::Rc;
use std::sync::Arc;
use std::sync::LazyLock;
use std::sync::atomic::AtomicBool;
use std::sync::atomic::AtomicUsize;
use std::sync::atomic::Ordering;
use std::task::Poll;
use std::time::Duration;
use std::time::Instant;
use deno_ast::MediaType;
use deno_cache_dir::file_fetcher::File;
use deno_config::glob::FilePatterns;
use deno_config::glob::WalkEntry;
use deno_core::ModuleSpecifier;
use deno_core::OpState;
use deno_core::PollEventLoopOptions;
use deno_core::anyhow;
use deno_core::anyhow::anyhow;
use deno_core::error::AnyError;
use deno_core::error::CoreError;
use deno_core::error::CoreErrorKind;
use deno_core::error::JsError;
use deno_core::futures::StreamExt;
use deno_core::futures::future;
use deno_core::futures::stream;
use deno_core::located_script_name;
use deno_core::serde_v8;
use deno_core::unsync::spawn;
use deno_core::unsync::spawn_blocking;
use deno_core::url::Url;
use deno_core::v8;
use deno_error::JsErrorBox;
use deno_npm_installer::graph::NpmCachingStrategy;
use deno_runtime::WorkerExecutionMode;
use deno_runtime::coverage::CoverageCollector;
use deno_runtime::deno_io::Stdio;
use deno_runtime::deno_io::StdioPipe;
use deno_runtime::deno_permissions::Permissions;
use deno_runtime::deno_permissions::PermissionsContainer;
use deno_runtime::permissions::RuntimePermissionDescriptorParser;
use deno_runtime::tokio_util::create_and_run_current_thread;
use deno_runtime::worker::MainWorker;
use indexmap::IndexMap;
use indexmap::IndexSet;
use log::Level;
use rand::SeedableRng;
use rand::rngs::SmallRng;
use rand::seq::SliceRandom;
use regex::Regex;
use serde::Deserialize;
use tokio::sync::mpsc::UnboundedSender;
use crate::args::CliOptions;
use crate::args::Flags;
use crate::args::TestFlags;
use crate::args::TestReporterConfig;
use crate::colors;
use crate::display;
use crate::factory::CliFactory;
use crate::file_fetcher::CliFileFetcher;
use crate::graph_container::CheckSpecifiersOptions;
use crate::graph_util::has_graph_root_local_dependent_changed;
use crate::ops;
use crate::sys::CliSys;
use crate::util::extract::extract_doc_tests;
use crate::util::file_watcher;
use crate::util::fs::CollectSpecifiersOptions;
use crate::util::fs::collect_specifiers;
use crate::util::path::get_extension;
use crate::util::path::is_script_ext;
use crate::util::path::matches_pattern_or_exact_path;
use crate::worker::CliMainWorkerFactory;
use crate::worker::CreateCustomWorkerError;
mod channel;
pub mod fmt;
pub mod reporters;
mod sanitizers;
pub use channel::TestEventReceiver;
pub use channel::TestEventSender;
pub use channel::TestEventWorkerSender;
pub use channel::create_single_test_event_channel;
pub use channel::create_test_event_channel;
use fmt::format_sanitizer_diff;
pub use fmt::format_test_error;
use reporters::CompoundTestReporter;
use reporters::DotTestReporter;
use reporters::JunitTestReporter;
use reporters::PrettyTestReporter;
use reporters::TapTestReporter;
use reporters::TestReporter;
use crate::tools::coverage::cover_files;
use crate::tools::coverage::reporter;
use crate::tools::test::channel::ChannelClosedError;
/// Seconds before a running test triggers the first "slow test" warning.
/// Overridable via the `DENO_SLOW_TEST_TIMEOUT` env var; falls back to 60
/// when unset or unparsable, and is clamped to at least 1.
static SLOW_TEST_TIMEOUT: LazyLock<u64> = LazyLock::new(|| {
  let base_timeout = env::var("DENO_SLOW_TEST_TIMEOUT").unwrap_or_default();
  base_timeout.parse().unwrap_or(60).max(1)
});
/// The test mode is used to determine how a specifier is to be tested.
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum TestMode {
  /// Test as documentation, type-checking fenced code blocks.
  Documentation,
  /// Test as an executable module, loading the module into the isolate and running each test it
  /// defines.
  Executable,
  /// Test as both documentation and an executable module.
  Both,
}

impl TestMode {
  /// Returns `true` if the test mode indicates that code snippet extraction is
  /// needed.
  fn needs_test_extraction(&self) -> bool {
    matches!(self, Self::Documentation | Self::Both)
  }

  /// Returns `true` if the test mode indicates that the test should be
  /// type-checked and run.
  fn needs_test_run(&self) -> bool {
    matches!(self, Self::Executable | Self::Both)
  }
}
/// Filter deciding which registered tests run, assembled from the `--filter`
/// flag and include/exclude lists. All configured criteria must pass.
#[derive(Clone, Debug, Default)]
pub struct TestFilter {
  // Name must contain this substring (from a non-`/.../` `--filter` value).
  pub substring: Option<String>,
  // Name must match this regex (from a `/.../` `--filter` value).
  pub regex: Option<Regex>,
  // If set, the name must appear verbatim in this list.
  pub include: Option<Vec<String>>,
  // Names listed here are always rejected.
  pub exclude: Vec<String>,
}
impl TestFilter {
  /// Returns `true` when `name` satisfies every configured criterion:
  /// substring, regex, include list, and exclude list.
  pub fn includes(&self, name: &String) -> bool {
    let substring_ok = self
      .substring
      .as_ref()
      .map_or(true, |needle| name.contains(needle));
    let regex_ok = self.regex.as_ref().map_or(true, |re| re.is_match(name));
    let include_ok = self
      .include
      .as_ref()
      .map_or(true, |names| names.contains(name));
    substring_ok && regex_ok && include_ok && !self.exclude.contains(name)
  }

  /// Builds a filter from the `--filter` CLI flag. A value wrapped in
  /// slashes (`/.../`) is compiled as a regex — an invalid pattern becomes
  /// `$^`, which matches nothing. Any other value is a plain substring.
  pub fn from_flag(flag: &Option<String>) -> Self {
    let (substring, regex) = match flag {
      Some(f) if f.starts_with('/') && f.ends_with('/') => {
        let pattern = f.trim_start_matches('/').trim_end_matches('/');
        let compiled =
          Regex::new(pattern).unwrap_or_else(|_| Regex::new("$^").unwrap());
        (None, Some(compiled))
      }
      Some(f) => (Some(f.clone()), None),
      None => (None, None),
    };
    Self {
      substring,
      regex,
      ..Default::default()
    }
  }
}
/// Source position (file, 1-based line/column as reported by V8) where a
/// test or step was registered or where a failure occurred.
#[derive(Debug, Clone, PartialEq, Deserialize, Eq, Hash)]
#[serde(rename_all = "camelCase")]
pub struct TestLocation {
  pub file_name: String,
  pub line_number: u32,
  pub column_number: u32,
}
/// Per-worker registry of tests and lifecycle hooks accumulated by the
/// `Deno.test` ops before a run starts. Drained by `run_tests_for_worker`.
#[derive(Default)]
pub(crate) struct TestContainer {
  descriptions: TestDescriptions,
  // Parallel to `descriptions` insertion order: the JS test functions.
  test_functions: Vec<v8::Global<v8::Function>>,
  test_hooks: TestHooks,
}

/// beforeAll/beforeEach/afterEach/afterAll hook functions, in registration
/// order.
#[derive(Default)]
pub(crate) struct TestHooks {
  pub before_all: Vec<v8::Global<v8::Function>>,
  pub before_each: Vec<v8::Global<v8::Function>>,
  pub after_each: Vec<v8::Global<v8::Function>>,
  pub after_all: Vec<v8::Global<v8::Function>>,
}

impl TestContainer {
  /// Registers a test description together with its JS function.
  pub fn register(
    &mut self,
    description: TestDescription,
    function: v8::Global<v8::Function>,
  ) {
    self.descriptions.tests.insert(description.id, description);
    self.test_functions.push(function)
  }

  /// Registers a lifecycle hook by name; unknown hook names are silently
  /// ignored.
  pub fn register_hook(
    &mut self,
    hook_type: String,
    function: v8::Global<v8::Function>,
  ) {
    match hook_type.as_str() {
      "beforeAll" => self.test_hooks.before_all.push(function),
      "beforeEach" => self.test_hooks.before_each.push(function),
      "afterEach" => self.test_hooks.after_each.push(function),
      "afterAll" => self.test_hooks.after_all.push(function),
      _ => {}
    }
  }

  /// `true` when no tests have been registered.
  pub fn is_empty(&self) -> bool {
    self.test_functions.is_empty()
  }
}
/// Ordered map of test id → description for all tests registered in a
/// worker; iterates in registration order.
#[derive(Default, Debug)]
pub struct TestDescriptions {
  tests: IndexMap<usize, TestDescription>,
}

impl TestDescriptions {
  pub fn len(&self) -> usize {
    self.tests.len()
  }

  pub fn is_empty(&self) -> bool {
    self.tests.is_empty()
  }
}

/// Allows `for (id, desc) in &descriptions` iteration.
impl<'a> IntoIterator for &'a TestDescriptions {
  type Item = <&'a IndexMap<usize, TestDescription> as IntoIterator>::Item;
  type IntoIter =
    <&'a IndexMap<usize, TestDescription> as IntoIterator>::IntoIter;
  fn into_iter(self) -> Self::IntoIter {
    (&self.tests).into_iter()
  }
}
/// Metadata for a single registered test (deserialized from the JS side).
#[derive(Debug, Clone, PartialEq, Deserialize, Eq, Hash)]
#[serde(rename_all = "camelCase")]
pub struct TestDescription {
  pub id: usize,
  pub name: String,
  // `Deno.test({ ignore: true })` / `only: true` flags.
  pub ignore: bool,
  pub only: bool,
  // Specifier of the module that registered the test.
  pub origin: String,
  pub location: TestLocation,
  pub sanitize_ops: bool,
  pub sanitize_resources: bool,
}
/// May represent a failure of a test or test step.
#[derive(Debug, Clone, PartialEq, Deserialize, Eq, Hash)]
#[serde(rename_all = "camelCase")]
pub struct TestFailureDescription {
  pub id: usize,
  pub name: String,
  pub origin: String,
  pub location: TestLocation,
}

impl From<&TestDescription> for TestFailureDescription {
  fn from(value: &TestDescription) -> Self {
    Self {
      id: value.id,
      name: value.name.clone(),
      origin: value.origin.clone(),
      location: value.location.clone(),
    }
  }
}

/// Options controlling how a `TestFailure` is rendered as text.
#[derive(Debug, Default, Clone, PartialEq)]
pub struct TestFailureFormatOptions {
  // Omit JS stack traces from formatted errors.
  pub hide_stacktraces: bool,
  // Strip ANSI color codes (used by the JUnit reporter; see get_test_reporter).
  pub strip_ascii_color: bool,
  // Presumably used to relativize file paths in output — confirm in fmt module.
  pub initial_cwd: Option<Url>,
}
/// The reason a test or test step failed.
#[allow(clippy::derive_partial_eq_without_eq)]
#[derive(Debug, Clone, PartialEq, Deserialize)]
#[serde(rename_all = "camelCase")]
pub enum TestFailure {
  JsError(Box<JsError>),
  FailedSteps(usize),
  IncompleteSteps,
  Leaked(Vec<String>, Vec<String>), // Details, trailer notes
  // The rest are for steps only.
  Incomplete,
  OverlapsWithSanitizers(IndexSet<String>), // Long names of overlapped tests
  HasSanitizersAndOverlaps(IndexSet<String>), // Long names of overlapped tests
}
impl TestFailure {
  /// Renders the full, multi-line failure message for reporters.
  pub fn format(
    &self,
    options: &TestFailureFormatOptions,
  ) -> Cow<'static, str> {
    match self {
      TestFailure::JsError(js_error) => {
        Cow::Owned(format_test_error(js_error, options))
      }
      TestFailure::FailedSteps(1) => Cow::Borrowed("1 test step failed."),
      TestFailure::FailedSteps(n) => {
        Cow::Owned(format!("{} test steps failed.", n))
      }
      TestFailure::IncompleteSteps => Cow::Borrowed(
        "Completed while steps were still running. Ensure all steps are awaited with `await t.step(...)`.",
      ),
      TestFailure::Incomplete => Cow::Borrowed(
        "Didn't complete before parent. Await step with `await t.step(...)`.",
      ),
      TestFailure::Leaked(details, trailer_notes) => {
        let mut f = String::new();
        write!(f, "Leaks detected:").ok();
        for detail in details {
          write!(f, "\n - {}", detail).ok();
        }
        for trailer in trailer_notes {
          write!(f, "\n{}", trailer).ok();
        }
        Cow::Owned(f)
      }
      TestFailure::OverlapsWithSanitizers(long_names) => {
        let mut f = String::new();
        write!(f, "Started test step while another test step with sanitizers was running:").ok();
        for long_name in long_names {
          write!(f, "\n * {}", long_name).ok();
        }
        Cow::Owned(f)
      }
      TestFailure::HasSanitizersAndOverlaps(long_names) => {
        let mut f = String::new();
        write!(f, "Started test step with sanitizers while another test step was running:").ok();
        for long_name in long_names {
          write!(f, "\n * {}", long_name).ok();
        }
        Cow::Owned(f)
      }
    }
  }

  /// One-line summary of the failure (no details, no stack traces).
  pub fn overview(&self) -> String {
    match self {
      TestFailure::JsError(js_error) => js_error.exception_message.clone(),
      TestFailure::FailedSteps(1) => "1 test step failed".to_string(),
      TestFailure::FailedSteps(n) => format!("{n} test steps failed"),
      TestFailure::IncompleteSteps => {
        "Completed while steps were still running".to_string()
      }
      TestFailure::Incomplete => "Didn't complete before parent".to_string(),
      TestFailure::Leaked(_, _) => "Leaks detected".to_string(),
      TestFailure::OverlapsWithSanitizers(_) => {
        "Started test step while another test step with sanitizers was running"
          .to_string()
      }
      TestFailure::HasSanitizersAndOverlaps(_) => {
        "Started test step with sanitizers while another test step was running"
          .to_string()
      }
    }
  }

  /// Best-effort source location of a JS failure: the innermost user-code
  /// frame (a `file:` URL outside node_modules) below the test runner frame.
  /// Returns `None` for non-JS failures or when no such frame is found.
  pub fn error_location(&self) -> Option<TestLocation> {
    let TestFailure::JsError(js_error) = self else {
      return None;
    };
    // The first line of user code comes above the test file.
    // The call stack usually contains the top 10 frames, and cuts off after that.
    // We need to explicitly check for the test runner here.
    // - Checking for a `ext:` is not enough, since other Deno `ext:`s can appear in the call stack.
    // - This check guarantees that the next frame is inside of the Deno.test(),
    //   and not somewhere else.
    const TEST_RUNNER: &str = "ext:cli/40_test.js";
    let runner_frame_index = js_error
      .frames
      .iter()
      .position(|f| f.file_name.as_deref() == Some(TEST_RUNNER))?;
    let frame = js_error
      .frames
      .split_at(runner_frame_index)
      .0
      .iter()
      .rfind(|f| {
        f.file_name.as_ref().is_some_and(|f| {
          f.starts_with("file:") && !f.contains("node_modules")
        })
      })?;
    let file_name = frame.file_name.as_ref()?.clone();
    // Turn into zero based indices
    let line_number = frame.line_number.map(|v| v - 1)? as u32;
    let column_number = frame.column_number.map(|v| v - 1).unwrap_or(0) as u32;
    Some(TestLocation {
      file_name,
      line_number,
      column_number,
    })
  }

  /// Colored status label shown next to the test name.
  fn format_label(&self) -> String {
    match self {
      TestFailure::Incomplete => colors::gray("INCOMPLETE").to_string(),
      _ => colors::red("FAILED").to_string(),
    }
  }

  /// Optional short parenthetical shown inline after the label.
  fn format_inline_summary(&self) -> Option<String> {
    match self {
      TestFailure::FailedSteps(1) => Some("due to 1 failed step".to_string()),
      TestFailure::FailedSteps(n) => Some(format!("due to {} failed steps", n)),
      TestFailure::IncompleteSteps => {
        Some("due to incomplete steps".to_string())
      }
      _ => None,
    }
  }

  fn hide_in_summary(&self) -> bool {
    // These failure variants are hidden in summaries because they are caused
    // by child errors that will be summarized separately.
    matches!(
      self,
      TestFailure::FailedSteps(_) | TestFailure::IncompleteSteps
    )
  }
}
/// Outcome of a single top-level test.
#[allow(clippy::derive_partial_eq_without_eq)]
#[derive(Debug, Clone, PartialEq, Deserialize)]
#[serde(rename_all = "camelCase")]
pub enum TestResult {
  Ok,
  Ignored,
  Failed(TestFailure),
  Cancelled,
}

/// Metadata for a `t.step(...)` within a test, including its position in the
/// step tree (parent/root ids).
#[derive(Debug, Clone, Eq, PartialEq, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct TestStepDescription {
  pub id: usize,
  pub name: String,
  pub origin: String,
  pub location: TestLocation,
  // Nesting depth of this step (steps can contain steps).
  pub level: usize,
  pub parent_id: usize,
  pub root_id: usize,
  pub root_name: String,
}

/// Outcome of a single test step.
#[allow(clippy::derive_partial_eq_without_eq)]
#[derive(Debug, Clone, PartialEq, Deserialize)]
#[serde(rename_all = "camelCase")]
pub enum TestStepResult {
  Ok,
  Ignored,
  Failed(TestFailure),
}

/// Per-origin plan announced before tests run: how many will run, how many
/// were filtered out, and whether `only: true` was used anywhere.
#[derive(Debug, Clone, Eq, PartialEq, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct TestPlan {
  pub origin: String,
  pub total: usize,
  pub filtered_out: usize,
  pub used_only: bool,
}
// TODO(bartlomieju): in Rust 1.90 some structs started getting flagged as not used
#[allow(dead_code)]
#[derive(Debug, Copy, Clone, Eq, PartialEq, Deserialize)]
pub enum TestStdioStream {
  Stdout,
  Stderr,
}

/// Events emitted by test workers over the test event channel and consumed
/// by the reporters.
#[derive(Debug)]
pub enum TestEvent {
  Register(Arc<TestDescriptions>),
  Plan(TestPlan),
  Wait(usize),
  Output(Vec<u8>),
  // (test id, elapsed) — the slow-test watchdog fired.
  Slow(usize, u64),
  // (test id, result, elapsed).
  Result(usize, TestResult, u64),
  UncaughtError(String, Box<JsError>),
  StepRegister(TestStepDescription),
  StepWait(usize),
  StepResult(usize, TestStepResult, u64),
  /// Indicates that this worker has completed running tests.
  Completed,
  /// Indicates that the user has cancelled the test run with Ctrl+C and
  /// the run should be aborted.
  Sigint,
  /// Used by the REPL to force a report to end without closing the worker
  /// or receiver.
  ForceEndReport,
}

impl TestEvent {
  // Certain messages require us to ensure that all output has been drained to ensure proper
  // interleaving of output messages.
  pub fn requires_stdio_sync(&self) -> bool {
    matches!(
      self,
      TestEvent::Plan(..)
        | TestEvent::Result(..)
        | TestEvent::StepWait(..)
        | TestEvent::StepResult(..)
        | TestEvent::UncaughtError(..)
        | TestEvent::ForceEndReport
        | TestEvent::Completed
    )
  }
}
/// Aggregated counters and failure records for an entire test run.
#[derive(Debug, Clone, Deserialize)]
pub struct TestSummary {
  pub total: usize,
  pub passed: usize,
  pub failed: usize,
  pub ignored: usize,
  pub passed_steps: usize,
  pub failed_steps: usize,
  pub ignored_steps: usize,
  pub filtered_out: usize,
  pub measured: usize,
  pub failures: Vec<(TestFailureDescription, TestFailure)>,
  // (origin, error) pairs for errors thrown outside any test.
  pub uncaught_errors: Vec<(String, Box<JsError>)>,
}

/// Run-wide options resolved from CLI flags, shared across all specifiers.
#[derive(Debug, Clone)]
struct TestSpecifiersOptions {
  cwd: Url,
  concurrent_jobs: NonZeroUsize,
  // Stop the run after this many failures, if set.
  fail_fast: Option<NonZeroUsize>,
  log_level: Option<log::Level>,
  // Whether a name filter was supplied on the command line.
  filter: bool,
  specifier: TestSpecifierOptions,
  reporter: TestReporterConfig,
  junit_path: Option<String>,
  hide_stacktraces: bool,
}

/// Per-specifier options: shuffle seed, name filter, and leak tracing.
#[derive(Debug, Default, Clone)]
pub struct TestSpecifierOptions {
  pub shuffle: Option<u64>,
  pub filter: TestFilter,
  pub trace_leaks: bool,
}
impl TestSummary {
  /// Creates a summary with all counters zeroed.
  pub fn new() -> TestSummary {
    TestSummary {
      total: 0,
      passed: 0,
      failed: 0,
      ignored: 0,
      passed_steps: 0,
      failed_steps: 0,
      ignored_steps: 0,
      filtered_out: 0,
      measured: 0,
      failures: Vec::new(),
      uncaught_errors: Vec::new(),
    }
  }

  /// `true` if any test failed or any failure was recorded.
  fn has_failed(&self) -> bool {
    self.failed > 0 || !self.failures.is_empty()
  }
}
/// Builds the reporter chain from run options: the primary reporter
/// (dot/pretty/junit-to-stdout/tap), optionally combined with a file-writing
/// JUnit reporter when `--junit-path` is set.
fn get_test_reporter(options: &TestSpecifiersOptions) -> Box<dyn TestReporter> {
  let parallel = options.concurrent_jobs.get() > 1;
  let failure_format_options = TestFailureFormatOptions {
    hide_stacktraces: options.hide_stacktraces,
    strip_ascii_color: false,
    initial_cwd: Some(options.cwd.clone()),
  };
  let reporter: Box<dyn TestReporter> = match &options.reporter {
    TestReporterConfig::Dot => Box::new(DotTestReporter::new(
      options.cwd.clone(),
      failure_format_options,
    )),
    TestReporterConfig::Pretty => Box::new(PrettyTestReporter::new(
      parallel,
      options.log_level != Some(Level::Error),
      options.filter,
      false,
      options.cwd.clone(),
      failure_format_options,
    )),
    // "-" writes the JUnit report to stdout.
    TestReporterConfig::Junit => Box::new(JunitTestReporter::new(
      options.cwd.clone(),
      "-".to_string(),
      TestFailureFormatOptions {
        strip_ascii_color: true,
        ..failure_format_options
      },
    )),
    TestReporterConfig::Tap => Box::new(TapTestReporter::new(
      options.cwd.clone(),
      options.concurrent_jobs > NonZeroUsize::new(1).unwrap(),
      failure_format_options,
    )),
  };
  // A secondary JUnit reporter writes to the requested file alongside
  // whatever primary reporter was chosen.
  if let Some(junit_path) = &options.junit_path {
    let junit = Box::new(JunitTestReporter::new(
      options.cwd.clone(),
      junit_path.to_string(),
      TestFailureFormatOptions {
        hide_stacktraces: options.hide_stacktraces,
        strip_ascii_color: true,
        initial_cwd: Some(options.cwd.clone()),
      },
    ));
    return Box::new(CompoundTestReporter::new(vec![reporter, junit]));
  }
  reporter
}
/// Creates and configures a test worker for `specifier`: registers the
/// test/lint/jupyter test ops, pipes stdout/stderr through the test event
/// channel, optionally enables leak tracing and coverage collection, then
/// evaluates preload modules and the test module. Uncaught JS errors during
/// evaluation are reported as test events instead of propagating.
#[allow(clippy::too_many_arguments)]
async fn configure_main_worker(
  worker_factory: Arc<CliMainWorkerFactory>,
  specifier: &Url,
  preload_modules: Vec<Url>,
  require_modules: Vec<Url>,
  permissions_container: PermissionsContainer,
  worker_sender: TestEventWorkerSender,
  options: &TestSpecifierOptions,
  sender: UnboundedSender<jupyter_protocol::messaging::StreamContent>,
) -> Result<(Option<CoverageCollector>, MainWorker), CreateCustomWorkerError> {
  let mut worker = worker_factory
    .create_custom_worker(
      WorkerExecutionMode::Test,
      specifier.clone(),
      preload_modules,
      require_modules,
      permissions_container,
      vec![
        ops::testing::deno_test::init(worker_sender.sender),
        ops::lint::deno_lint_ext_for_test::init(),
        ops::jupyter::deno_jupyter_for_test::init(sender),
      ],
      // Test output is routed through the event channel, not the real tty.
      Stdio {
        stdin: StdioPipe::inherit(),
        stdout: StdioPipe::file(worker_sender.stdout),
        stderr: StdioPipe::file(worker_sender.stderr),
      },
      None,
    )
    .await?;
  let coverage_collector = worker.maybe_setup_coverage_collector();
  if options.trace_leaks {
    worker
      .execute_script_static(
        located_script_name!(),
        "Deno[Deno.internal].core.setLeakTracingEnabled(true);",
      )
      .map_err(|e| CoreErrorKind::Js(e).into_box())?;
  }
  let op_state = worker.op_state();
  // Convert a JS evaluation error into an UncaughtError test event so the
  // reporter shows it; other core errors still propagate.
  let check_res =
    |res: Result<(), CoreError>| match res.map_err(|err| err.into_kind()) {
      Ok(()) => Ok(()),
      Err(CoreErrorKind::Js(err)) => TestEventTracker::new(op_state.clone())
        .uncaught_error(specifier.to_string(), err)
        .map_err(|e| CoreErrorKind::JsBox(JsErrorBox::from_err(e)).into_box()),
      Err(err) => Err(err.into_box()),
    };
  check_res(worker.execute_preload_modules().await)?;
  check_res(worker.execute_side_module().await)?;
  let worker = worker.into_main_worker();
  Ok((coverage_collector, worker))
}
/// Test a single specifier as documentation containing test programs, an executable test module or
/// both.
///
/// Creates a dedicated worker for the specifier, runs its tests, and maps an
/// uncaught JS error into an `UncaughtError` event (returning `Ok`) rather
/// than failing the whole run.
#[allow(clippy::too_many_arguments)]
pub async fn test_specifier(
  worker_factory: Arc<CliMainWorkerFactory>,
  permissions_container: PermissionsContainer,
  specifier: ModuleSpecifier,
  preload_modules: Vec<ModuleSpecifier>,
  require_modules: Vec<ModuleSpecifier>,
  worker_sender: TestEventWorkerSender,
  fail_fast_tracker: FailFastTracker,
  options: TestSpecifierOptions,
) -> Result<(), AnyError> {
  // Skip spinning up a worker entirely once fail-fast has tripped.
  if fail_fast_tracker.should_stop() {
    return Ok(());
  }
  let jupyter_channel = tokio::sync::mpsc::unbounded_channel();
  let (coverage_collector, mut worker) = configure_main_worker(
    worker_factory,
    &specifier,
    preload_modules,
    require_modules,
    permissions_container,
    worker_sender,
    &options,
    jupyter_channel.0,
  )
  .await?;
  let event_tracker = TestEventTracker::new(worker.js_runtime.op_state());
  match test_specifier_inner(
    &mut worker,
    coverage_collector,
    specifier.clone(),
    fail_fast_tracker,
    &event_tracker,
    options,
  )
  .await
  {
    Ok(()) => Ok(()),
    // An uncaught JS error is reported as a test event, not a run failure.
    Err(TestSpecifierError::Core(err)) => match err.into_kind() {
      CoreErrorKind::Js(err) => {
        event_tracker.uncaught_error(specifier.to_string(), err)?;
        Ok(())
      }
      err => Err(err.into_box().into()),
    },
    Err(e) => Err(e.into()),
  }
}
/// Errors surfaced by `test_specifier_inner`.
#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum TestSpecifierError {
  #[class(inherit)]
  #[error(transparent)]
  Core(#[from] CoreError),
  #[class(inherit)]
  #[error(transparent)]
  RunTestsForWorker(#[from] RunTestsForWorkerErr),
}
/// Test a single specifier as documentation containing test programs, an executable test module or
/// both.
///
/// Drives the full per-worker lifecycle: `load` event → tests → `beforeunload`
/// / `unload` events → flush output → final settle → stop coverage.
async fn test_specifier_inner(
  worker: &mut MainWorker,
  mut coverage_collector: Option<CoverageCollector>,
  specifier: ModuleSpecifier,
  fail_fast_tracker: FailFastTracker,
  event_tracker: &TestEventTracker,
  options: TestSpecifierOptions,
) -> Result<(), TestSpecifierError> {
  // Ensure that there are no pending exceptions before we start running tests
  worker.run_up_to_duration(Duration::from_millis(0)).await?;
  worker
    .dispatch_load_event()
    .map_err(|e| CoreErrorKind::Js(e).into_box())?;
  run_tests_for_worker(
    worker,
    &specifier,
    &options,
    &fail_fast_tracker,
    event_tracker,
  )
  .await?;
  // Ignore `defaultPrevented` of the `beforeunload` event. We don't allow the
  // event loop to continue beyond what's needed to await results.
  worker
    .dispatch_beforeunload_event()
    .map_err(|e| CoreErrorKind::Js(e).into_box())?;
  worker
    .dispatch_unload_event()
    .map_err(|e| CoreErrorKind::Js(e).into_box())?;
  // Ensure all output has been flushed
  _ = worker
    .js_runtime
    .op_state()
    .borrow_mut()
    .borrow_mut::<TestEventSender>()
    .flush();
  // Ensure the worker has settled so we can catch any remaining unhandled rejections. We don't
  // want to wait forever here.
  worker.run_up_to_duration(Duration::from_millis(0)).await?;
  if let Some(coverage_collector) = &mut coverage_collector {
    coverage_collector.stop_collecting()?;
  }
  Ok(())
}
/// Reports whether any `Deno.test(...)` registrations are pending in the
/// worker's `TestContainer`.
pub fn worker_has_tests(worker: &mut MainWorker) -> bool {
  let op_state = worker.js_runtime.op_state();
  let guard = op_state.borrow();
  let container = guard.borrow::<TestContainer>();
  !container.is_empty()
}
// Each test needs a fresh reqwest connection pool to avoid inter-test weirdness with connections
// failing. If we don't do this, a connection to a test server we just tore down might be re-used in
// the next test.
// TODO(mmastrac): this should be some sort of callback that we can implement for any subsystem
pub fn worker_prepare_for_test(worker: &mut MainWorker) {
  // Dropping the cached fetch client forces the next fetch to build a fresh
  // connection pool; `try_take` is a no-op if no client was created yet.
  worker
    .js_runtime
    .op_state()
    .borrow_mut()
    .try_take::<deno_runtime::deno_fetch::Client>();
}
/// Yields to tokio to allow async work to process, and then polls
/// the event loop once.
///
/// Note: intentionally returns `Ready(Ok(()))` even when the runtime still
/// has pending work — callers (e.g. the sanitizer stabilization loop) call
/// this repeatedly.
#[must_use = "The event loop result should be checked"]
pub async fn poll_event_loop(worker: &mut MainWorker) -> Result<(), CoreError> {
  // Allow any ops that to do work in the tokio event loop to do so
  tokio::task::yield_now().await;
  // Spin the event loop once
  poll_fn(|cx| {
    if let Poll::Ready(Err(err)) = worker
      .js_runtime
      .poll_event_loop(cx, PollEventLoopOptions::default())
    {
      return Poll::Ready(Err(err));
    }
    Poll::Ready(Ok(()))
  })
  .await
}
/// Errors that can occur while running the tests registered on a worker.
#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum RunTestsForWorkerErr {
  /// The test event channel was closed while reporting an event
  /// (presumably the receiving side shut down — confirm against sender impl).
  #[class(inherit)]
  #[error(transparent)]
  ChannelClosed(#[from] ChannelClosedError),
  /// An error bubbled up from the JS runtime / event loop.
  #[class(inherit)]
  #[error(transparent)]
  Core(#[from] CoreError),
  /// Failed to deserialize a value received from V8 (e.g. a `TestResult`).
  #[class(inherit)]
  #[error(transparent)]
  SerdeV8(#[from] serde_v8::Error),
}
/// Emits a "slow test" warning at `DENO_SLOW_TEST_TIMEOUT * 2**n` seconds
/// (so for a base of 60s: at 60s, 120s, 240s, ...), until sending a warning
/// fails — which happens when the test finished and the event channel closed.
async fn slow_test_watchdog(event_tracker: TestEventTracker, test_id: usize) {
  let base_timeout = *SLOW_TEST_TIMEOUT;
  // Total seconds slept so far, and the next threshold at which to warn.
  let mut slept = 0;
  let mut next_warning = base_timeout;
  loop {
    // Sleep only the remaining time until the next threshold.
    // NOTE: the previous `base_timeout * (multiplier - elapsed)` formula
    // drifted off the 2**n schedule starting with the fourth warning
    // (cumulative sleeps were 1,2,4,9,21,... x base instead of 1,2,4,8,16,...).
    tokio::time::sleep(Duration::from_secs(next_warning - slept)).await;
    slept = next_warning;
    if event_tracker
      .slow(test_id, Duration::from_secs(next_warning))
      .is_err()
    {
      // Channel closed: the test is done, stop the watchdog.
      break;
    }
    next_warning *= 2;
  }
}
/// Runs all tests that have been registered on `worker`, reporting
/// progress through `event_tracker`.
pub async fn run_tests_for_worker(
  worker: &mut MainWorker,
  specifier: &ModuleSpecifier,
  options: &TestSpecifierOptions,
  fail_fast_tracker: &FailFastTracker,
  event_tracker: &TestEventTracker,
) -> Result<(), RunTestsForWorkerErr> {
  // Take whatever tests have been registered, leaving an empty container
  // behind in the op state.
  let container = {
    let state_rc = worker.js_runtime.op_state();
    let mut state = state_rc.borrow_mut();
    std::mem::take(&mut *state.borrow_mut::<TestContainer>())
  };
  let descriptions = Arc::new(container.descriptions);
  event_tracker.register(Arc::clone(&descriptions))?;
  run_tests_for_worker_inner(
    worker,
    specifier,
    descriptions,
    container.test_functions,
    container.test_hooks,
    options,
    event_tracker,
    fail_fast_tracker,
  )
  .await
}
/// Selects the tests to run, applying `filter` and the `only: true` flag.
///
/// Returns the selected `(description, function)` pairs along with a flag
/// indicating whether any test used `only: true`.
fn compute_tests_to_run(
  descs: &TestDescriptions,
  test_functions: Vec<v8::Global<v8::Function>>,
  filter: TestFilter,
) -> (Vec<(&TestDescription, v8::Global<v8::Function>)>, bool) {
  let mut selected = Vec::with_capacity(descs.len());
  let mut saw_only = false;
  for ((_, desc), func) in descs.tests.iter().zip(test_functions) {
    if !filter.includes(&desc.name) {
      continue;
    }
    if desc.only {
      // The first `only: true` test discards everything selected so far,
      // since those earlier tests were all `only: false`.
      if !saw_only {
        saw_only = true;
        selected.clear();
      }
    } else if saw_only {
      // Once an `only: true` test has been seen, only other `only: true`
      // tests may be added.
      continue;
    }
    selected.push((desc, func));
  }
  (selected, saw_only)
}
/// Calls each hook function in order, driving the event loop until the
/// hook's promise settles. On the first error, `error_handler` decides
/// whether to abort; either way the remaining hooks are skipped.
async fn call_hooks<H>(
  worker: &mut MainWorker,
  hook_fns: impl Iterator<Item = &v8::Global<v8::Function>>,
  mut error_handler: H,
) -> Result<(), RunTestsForWorkerErr>
where
  H: FnMut(CoreErrorKind) -> Result<(), RunTestsForWorkerErr>,
{
  for hook_fn in hook_fns {
    let call = worker.js_runtime.call(hook_fn);
    let result = worker
      .js_runtime
      .with_event_loop_promise(call, PollEventLoopOptions::default())
      .await;
    if let Err(err) = result {
      // The handler may swallow the error (returning Ok) or propagate it;
      // in both cases no further hooks run.
      error_handler(err.into_kind())?;
      return Ok(());
    }
  }
  Ok(())
}
#[allow(clippy::too_many_arguments)]
async fn run_tests_for_worker_inner(
worker: &mut MainWorker,
specifier: &ModuleSpecifier,
descs: Arc<TestDescriptions>,
test_functions: Vec<v8::Global<v8::Function>>,
test_hooks: TestHooks,
options: &TestSpecifierOptions,
event_tracker: &TestEventTracker,
fail_fast_tracker: &FailFastTracker,
) -> Result<(), RunTestsForWorkerErr> {
let unfiltered = descs.len();
let (mut tests_to_run, used_only) =
compute_tests_to_run(&descs, test_functions, options.filter.clone());
if let Some(seed) = options.shuffle {
tests_to_run.shuffle(&mut SmallRng::seed_from_u64(seed));
}
event_tracker.plan(TestPlan {
origin: specifier.to_string(),
total: tests_to_run.len(),
filtered_out: unfiltered - tests_to_run.len(),
used_only,
})?;
let mut had_uncaught_error = false;
let sanitizer_helper = sanitizers::create_test_sanitizer_helper(worker);
// Execute beforeAll hooks (FIFO order)
call_hooks(worker, test_hooks.before_all.iter(), |core_error| {
tests_to_run = vec![];
match core_error {
CoreErrorKind::Js(err) => {
event_tracker.uncaught_error(specifier.to_string(), err)?;
Ok(())
}
err => Err(err.into_box().into()),
}
})
.await?;
for (desc, function) in tests_to_run.into_iter() {
worker_prepare_for_test(worker);
if fail_fast_tracker.should_stop() {
break;
}
if desc.ignore {
event_tracker.ignored(desc)?;
continue;
}
if had_uncaught_error {
event_tracker.cancelled(desc)?;
continue;
}
event_tracker.wait(desc)?;
// Poll event loop once, to allow all ops that are already resolved, but haven't
// responded to settle.
// TODO(mmastrac): we should provide an API to poll the event loop until no further
// progress is made.
poll_event_loop(worker).await?;
// We always capture stats, regardless of sanitization state
let before_test_stats = sanitizer_helper.capture_stats();
let earlier = Instant::now();
// Execute beforeEach hooks (FIFO order)
let mut before_each_hook_errored = false;
call_hooks(worker, test_hooks.before_each.iter(), |core_error| {
match core_error {
CoreErrorKind::Js(err) => {
before_each_hook_errored = true;
let test_result = TestResult::Failed(TestFailure::JsError(err));
fail_fast_tracker.add_failure();
event_tracker.result(desc, test_result, earlier.elapsed())?;
Ok(())
}
err => Err(err.into_box().into()),
}
})
.await?;
// TODO(bartlomieju): this whole block/binding could be reworked into something better
let result = if !before_each_hook_errored {
let call = worker.js_runtime.call(&function);
let slow_test_warning =
spawn(slow_test_watchdog(event_tracker.clone(), desc.id));
let result = worker
.js_runtime
.with_event_loop_promise(call, PollEventLoopOptions::default())
.await;
slow_test_warning.abort();
let result = match result {
Ok(r) => r,
Err(error) => match error.into_kind() {
CoreErrorKind::Js(js_error) => {
event_tracker.uncaught_error(specifier.to_string(), js_error)?;
fail_fast_tracker.add_failure();
event_tracker.cancelled(desc)?;
had_uncaught_error = true;
continue;
}
err => return Err(err.into_box().into()),
},
};
// Check the result before we check for leaks
deno_core::scope!(scope, &mut worker.js_runtime);
let result = v8::Local::new(scope, result);
serde_v8::from_v8::<TestResult>(scope, result)?
} else {
TestResult::Ignored
};
if matches!(result, TestResult::Failed(_)) {
fail_fast_tracker.add_failure();
event_tracker.result(desc, result.clone(), earlier.elapsed())?;
}
// Execute afterEach hooks (LIFO order)
call_hooks(worker, test_hooks.after_each.iter().rev(), |core_error| {
match core_error {
CoreErrorKind::Js(err) => {
let test_result = TestResult::Failed(TestFailure::JsError(err));
fail_fast_tracker.add_failure();
event_tracker.result(desc, test_result, earlier.elapsed())?;
Ok(())
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.