repo stringlengths 6 65 | file_url stringlengths 81 311 | file_path stringlengths 6 227 | content stringlengths 0 32.8k | language stringclasses 1 value | license stringclasses 7 values | commit_sha stringlengths 40 40 | retrieved_at stringdate 2026-01-04 15:31:58 2026-01-04 20:25:31 | truncated bool 2 classes |
|---|---|---|---|---|---|---|---|---|
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/npm_installer/package_json.rs | libs/npm_installer/package_json.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::path::PathBuf;
use std::sync::Arc;
use deno_config::workspace::Workspace;
use deno_package_json::PackageJsonDepValue;
use deno_package_json::PackageJsonDepValueParseError;
use deno_package_json::PackageJsonDepWorkspaceReq;
use deno_semver::StackString;
use deno_semver::Version;
use deno_semver::VersionReq;
use deno_semver::npm::NpmPackageReqReference;
use deno_semver::package::PackageName;
use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq;
use serde_json;
use thiserror::Error;
use url::Url;
#[derive(Debug)]
pub struct InstallNpmRemotePkg {
pub alias: Option<StackString>,
pub base_dir: PathBuf,
pub req: PackageReq,
}
#[derive(Debug)]
pub struct InstallLocalPkg {
pub alias: Option<StackString>,
pub target_dir: PathBuf,
}
#[derive(Debug)]
pub struct InstallPatchPkg {
pub nv: PackageNv,
pub target_dir: PathBuf,
}
#[derive(Debug, Error, Clone)]
#[error("Failed to install '{}'\n at {}", alias, location)]
pub struct PackageJsonDepValueParseWithLocationError {
pub location: Url,
pub alias: StackString,
#[source]
pub source: PackageJsonDepValueParseError,
}
#[derive(Debug, Default)]
pub struct NpmInstallDepsProvider {
remote_pkgs: Vec<InstallNpmRemotePkg>,
local_pkgs: Vec<InstallLocalPkg>,
patch_pkgs: Vec<InstallPatchPkg>,
pkg_json_dep_errors: Vec<PackageJsonDepValueParseWithLocationError>,
}
impl NpmInstallDepsProvider {
pub fn empty() -> Self {
Self::default()
}
pub fn from_workspace(workspace: &Arc<Workspace>) -> Self {
// todo(dsherret): estimate capacity?
let mut local_pkgs = Vec::new();
let mut remote_pkgs = Vec::new();
let mut patch_pkgs = Vec::new();
let mut pkg_json_dep_errors = Vec::new();
let workspace_npm_pkgs = workspace.npm_packages();
for (_, folder) in workspace.config_folders() {
// deal with the deno.json first because it takes precedence during resolution
if let Some(deno_json) = &folder.deno_json {
// don't bother with externally referenced import maps as users
// should inline their import map to get this behaviour
if let Some(serde_json::Value::Object(obj)) = &deno_json.json.imports {
let mut pkg_pkgs = Vec::with_capacity(obj.len());
for (_alias, value) in obj {
let serde_json::Value::String(specifier) = value else {
continue;
};
let Ok(npm_req_ref) = NpmPackageReqReference::from_str(specifier)
else {
continue;
};
let pkg_req = npm_req_ref.into_inner().req;
let workspace_pkg = workspace_npm_pkgs
.iter()
.find(|pkg| pkg.matches_req(&pkg_req));
if let Some(pkg) = workspace_pkg {
local_pkgs.push(InstallLocalPkg {
alias: None,
target_dir: pkg.pkg_json.dir_path().to_path_buf(),
});
} else {
pkg_pkgs.push(InstallNpmRemotePkg {
alias: None,
base_dir: deno_json.dir_path(),
req: pkg_req,
});
}
}
// sort within each package (more like npm resolution)
pkg_pkgs.sort_by(|a, b| a.req.cmp(&b.req));
remote_pkgs.extend(pkg_pkgs);
}
}
if let Some(pkg_json) = &folder.pkg_json {
let deps = pkg_json.resolve_local_package_json_deps();
let mut pkg_pkgs = Vec::with_capacity(
deps.dependencies.len() + deps.dev_dependencies.len(),
);
for (alias, dep) in
deps.dependencies.iter().chain(deps.dev_dependencies.iter())
{
let dep = match dep {
Ok(dep) => dep,
Err(err) => {
pkg_json_dep_errors.push(
PackageJsonDepValueParseWithLocationError {
location: pkg_json.specifier(),
alias: alias.clone(),
source: err.clone(),
},
);
continue;
}
};
match dep {
PackageJsonDepValue::File(specifier) => {
local_pkgs.push(InstallLocalPkg {
alias: Some(alias.clone()),
target_dir: pkg_json.dir_path().join(specifier),
})
}
PackageJsonDepValue::Req(pkg_req) => {
let workspace_pkg = workspace_npm_pkgs.iter().find(|pkg| {
pkg.matches_req(pkg_req)
// do not resolve to the current package
&& pkg.pkg_json.path != pkg_json.path
});
if let Some(pkg) = workspace_pkg {
local_pkgs.push(InstallLocalPkg {
alias: Some(alias.clone()),
target_dir: pkg.pkg_json.dir_path().to_path_buf(),
});
} else {
pkg_pkgs.push(InstallNpmRemotePkg {
alias: Some(alias.clone()),
base_dir: pkg_json.dir_path().to_path_buf(),
req: pkg_req.clone(),
});
}
}
PackageJsonDepValue::Workspace(workspace_version_req) => {
let version_req = match workspace_version_req {
PackageJsonDepWorkspaceReq::VersionReq(version_req) => {
version_req.clone()
}
PackageJsonDepWorkspaceReq::Tilde
| PackageJsonDepWorkspaceReq::Caret => {
VersionReq::parse_from_npm("*").unwrap()
}
};
if let Some(pkg) = workspace_npm_pkgs.iter().find(|pkg| {
pkg.matches_name_and_version_req(alias, &version_req)
}) {
local_pkgs.push(InstallLocalPkg {
alias: Some(alias.clone()),
target_dir: pkg.pkg_json.dir_path().to_path_buf(),
});
}
}
PackageJsonDepValue::JsrReq(_) => {
pkg_json_dep_errors.push(
PackageJsonDepValueParseWithLocationError {
location: pkg_json.specifier(),
alias: alias.clone(),
source: PackageJsonDepValueParseError(Box::new(
deno_package_json::PackageJsonDepValueParseErrorKind::Unsupported {
scheme: "jsr".to_string(),
},
)),
},
);
}
}
}
// sort within each package as npm does
pkg_pkgs.sort_by(|a, b| a.alias.cmp(&b.alias));
remote_pkgs.extend(pkg_pkgs);
}
}
for pkg in workspace.link_pkg_jsons() {
let Some(name) = pkg.name.as_ref() else {
continue;
};
let Some(version) = pkg
.version
.as_ref()
.and_then(|v| Version::parse_from_npm(v).ok())
else {
continue;
};
patch_pkgs.push(InstallPatchPkg {
nv: PackageNv {
name: PackageName::from_str(name),
version,
},
target_dir: pkg.dir_path().to_path_buf(),
})
}
remote_pkgs.shrink_to_fit();
local_pkgs.shrink_to_fit();
patch_pkgs.shrink_to_fit();
Self {
remote_pkgs,
local_pkgs,
patch_pkgs,
pkg_json_dep_errors,
}
}
pub fn remote_pkgs(&self) -> &[InstallNpmRemotePkg] {
&self.remote_pkgs
}
pub fn local_pkgs(&self) -> &[InstallLocalPkg] {
&self.local_pkgs
}
pub fn patch_pkgs(&self) -> &[InstallPatchPkg] {
&self.patch_pkgs
}
pub fn pkg_json_dep_errors(
&self,
) -> &[PackageJsonDepValueParseWithLocationError] {
&self.pkg_json_dep_errors
}
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/npm_installer/lifecycle_scripts.rs | libs/npm_installer/lifecycle_scripts.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::collections::HashMap;
use std::path::Path;
use std::path::PathBuf;
use anyhow::Error as AnyError;
use deno_error::JsErrorBox;
use deno_npm::NpmPackageExtraInfo;
use deno_npm::NpmResolutionPackage;
use deno_npm::resolution::NpmResolutionSnapshot;
use deno_semver::SmallStackString;
use deno_semver::Version;
use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq;
use sys_traits::FsMetadata;
use crate::CachedNpmPackageExtraInfoProvider;
use crate::LifecycleScriptsConfig;
use crate::PackagesAllowedScripts;
pub struct PackageWithScript<'a> {
pub package: &'a NpmResolutionPackage,
pub scripts: HashMap<SmallStackString, String>,
pub package_folder: PathBuf,
}
pub struct LifecycleScriptsExecutorOptions<'a> {
pub init_cwd: &'a Path,
pub process_state: &'a str,
pub root_node_modules_dir_path: &'a Path,
pub on_ran_pkg_scripts:
&'a dyn Fn(&NpmResolutionPackage) -> Result<(), JsErrorBox>,
pub snapshot: &'a NpmResolutionSnapshot,
pub system_packages: &'a [NpmResolutionPackage],
pub packages_with_scripts: &'a [PackageWithScript<'a>],
pub extra_info_provider: &'a CachedNpmPackageExtraInfoProvider,
}
pub struct LifecycleScriptsWarning {
message: String,
did_warn_fn: DidWarnFn,
}
impl std::fmt::Debug for LifecycleScriptsWarning {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("LifecycleScriptsWarning")
.field("message", &self.message)
.finish()
}
}
type DidWarnFn =
Box<dyn FnOnce(&dyn sys_traits::boxed::FsOpenBoxed) + Send + Sync>;
impl LifecycleScriptsWarning {
pub(crate) fn new(message: String, did_warn_fn: DidWarnFn) -> Self {
Self {
message,
did_warn_fn,
}
}
pub fn into_message(
self,
sys: &dyn sys_traits::boxed::FsOpenBoxed,
) -> String {
(self.did_warn_fn)(sys);
self.message
}
}
#[derive(Debug)]
pub struct NullLifecycleScriptsExecutor;
#[async_trait::async_trait(?Send)]
impl LifecycleScriptsExecutor for NullLifecycleScriptsExecutor {
async fn execute(
&self,
_options: LifecycleScriptsExecutorOptions<'_>,
) -> Result<(), AnyError> {
Ok(())
}
}
#[async_trait::async_trait(?Send)]
pub trait LifecycleScriptsExecutor: Sync + Send {
async fn execute(
&self,
options: LifecycleScriptsExecutorOptions<'_>,
) -> Result<(), AnyError>;
}
pub trait LifecycleScriptsStrategy {
fn can_run_scripts(&self) -> bool {
true
}
fn warn_on_scripts_not_run(
&self,
packages: &[(&NpmResolutionPackage, PathBuf)],
) -> Result<(), std::io::Error>;
fn has_warned(&self, package: &NpmResolutionPackage) -> bool;
fn has_run(&self, package: &NpmResolutionPackage) -> bool;
}
pub fn has_lifecycle_scripts(
sys: &impl FsMetadata,
extra: &NpmPackageExtraInfo,
package_path: &Path,
) -> bool {
if let Some(install) = extra.scripts.get("install") {
{
// default script
if !is_broken_default_install_script(sys, install, package_path) {
return true;
}
}
}
extra.scripts.contains_key("preinstall")
|| extra.scripts.contains_key("postinstall")
}
// npm defaults to running `node-gyp rebuild` if there is a `binding.gyp` file
// but it always fails if the package excludes the `binding.gyp` file when they publish.
// (for example, `fsevents` hits this)
pub fn is_broken_default_install_script(
sys: &impl FsMetadata,
script: &str,
package_path: &Path,
) -> bool {
script == "node-gyp rebuild"
&& !sys.fs_exists_no_err(package_path.join("binding.gyp"))
}
pub struct LifecycleScripts<'a, TSys: FsMetadata> {
sys: &'a TSys,
packages_with_scripts: Vec<PackageWithScript<'a>>,
packages_with_scripts_not_run: Vec<(&'a NpmResolutionPackage, PathBuf)>,
config: &'a LifecycleScriptsConfig,
strategy: Box<dyn LifecycleScriptsStrategy + 'a>,
}
impl<'a, TSys: FsMetadata> LifecycleScripts<'a, TSys> {
pub fn new<TLifecycleScriptsStrategy: LifecycleScriptsStrategy + 'a>(
sys: &'a TSys,
config: &'a LifecycleScriptsConfig,
strategy: TLifecycleScriptsStrategy,
) -> Self {
Self {
sys,
config,
packages_with_scripts: Vec::new(),
packages_with_scripts_not_run: Vec::new(),
strategy: Box::new(strategy),
}
}
pub fn can_run_scripts(&self, package_nv: &PackageNv) -> bool {
fn matches_nv(req: &PackageReq, package_nv: &PackageNv) -> bool {
// we shouldn't support this being a tag because it's too complicated
debug_assert!(req.version_req.tag().is_none());
package_nv.name == req.name
&& req.version_req.matches(&package_nv.version)
}
if !self.strategy.can_run_scripts() {
return false;
}
let matches_allowed = match &self.config.allowed {
PackagesAllowedScripts::All => true,
PackagesAllowedScripts::Some(allow_list) => {
allow_list.iter().any(|req| matches_nv(req, package_nv))
}
PackagesAllowedScripts::None => false,
};
matches_allowed
&& !self
.config
.denied
.iter()
.any(|req| matches_nv(req, package_nv))
}
pub fn has_run_scripts(&self, package: &NpmResolutionPackage) -> bool {
self.strategy.has_run(package)
}
/// Register a package for running lifecycle scripts, if applicable.
///
/// `package_path` is the path containing the package's code (its root dir).
/// `package_meta_path` is the path to serve as the base directory for lifecycle
/// script-related metadata (e.g. to store whether the scripts have been run already)
pub fn add(
&mut self,
package: &'a NpmResolutionPackage,
extra: &NpmPackageExtraInfo,
package_path: Cow<'_, Path>,
) {
if has_lifecycle_scripts(self.sys, extra, &package_path) {
if self.can_run_scripts(&package.id.nv) {
if !self.has_run_scripts(package) {
self.packages_with_scripts.push(PackageWithScript {
package,
scripts: extra.scripts.clone(),
package_folder: package_path.into_owned(),
});
}
} else if !self.has_run_scripts(package)
&& (self.config.explicit_install || !self.strategy.has_warned(package))
&& !(self.config.denied.iter().any(|d| {
package.id.nv.name == d.name
&& d.version_req.matches(&package.id.nv.version)
}))
{
// Skip adding `esbuild` as it is known that it can work properly without lifecycle script
// being run, and it's also very popular - any project using Vite would raise warnings.
{
let nv = &package.id.nv;
if nv.name == "esbuild"
&& nv.version >= Version::parse_standard("0.18.0").unwrap()
{
return;
}
}
self
.packages_with_scripts_not_run
.push((package, package_path.into_owned()));
}
}
}
pub fn warn_not_run_scripts(&self) -> Result<(), std::io::Error> {
if !self.packages_with_scripts_not_run.is_empty() {
self
.strategy
.warn_on_scripts_not_run(&self.packages_with_scripts_not_run)?;
}
Ok(())
}
pub fn packages_with_scripts(&self) -> &[PackageWithScript<'a>] {
&self.packages_with_scripts
}
}
pub static LIFECYCLE_SCRIPTS_RUNNING_ENV_VAR: &str =
"DENO_INTERNAL_IS_LIFECYCLE_SCRIPT";
pub fn is_running_lifecycle_script(sys: &impl sys_traits::EnvVar) -> bool {
sys.env_var(LIFECYCLE_SCRIPTS_RUNNING_ENV_VAR).is_ok()
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/npm_installer/fs.rs | libs/npm_installer/fs.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::io::Error;
use std::io::ErrorKind;
use std::path::Path;
use std::path::PathBuf;
use sys_traits::FsCreateDirAll;
use sys_traits::FsDirEntry;
use sys_traits::FsSymlinkDir;
#[sys_traits::auto_impl]
pub trait CloneDirRecursiveSys:
CopyDirRecursiveSys
+ sys_traits::FsCreateDirAll
+ sys_traits::FsRemoveFile
+ sys_traits::FsRemoveDirAll
+ sys_traits::ThreadSleep
{
}
/// Clones a directory to another directory. The exact method
/// is not guaranteed - it may be a hardlink, copy, or other platform-specific
/// operation.
///
/// Note: Does not handle symlinks.
pub fn clone_dir_recursive<TSys: CloneDirRecursiveSys>(
sys: &TSys,
from: &Path,
to: &Path,
) -> Result<(), CopyDirRecursiveError> {
if cfg!(target_vendor = "apple") {
if let Some(parent) = to.parent() {
sys.fs_create_dir_all(parent)?;
}
// Try to clone the whole directory
if let Err(err) = sys.fs_clone_file(from, to) {
if !matches!(
err.kind(),
std::io::ErrorKind::AlreadyExists | std::io::ErrorKind::Unsupported
) {
log::debug!(
"Failed to clone dir {:?} to {:?} via clonefile: {}",
from,
to,
err
);
}
// clonefile won't overwrite existing files, so if the dir exists
// we need to handle it recursively.
copy_dir_recursive(sys, from, to)?;
}
} else if let Err(e) = deno_npm_cache::hard_link_dir_recursive(sys, from, to)
{
log::debug!("Failed to hard link dir {:?} to {:?}: {}", from, to, e);
copy_dir_recursive(sys, from, to)?;
}
Ok(())
}
#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum CopyDirRecursiveError {
#[class(inherit)]
#[error("Creating {path}")]
Creating {
path: PathBuf,
#[source]
#[inherit]
source: Error,
},
#[class(inherit)]
#[error("Reading {path}")]
Reading {
path: PathBuf,
#[source]
#[inherit]
source: Error,
},
#[class(inherit)]
#[error("Dir {from} to {to}")]
Dir {
from: PathBuf,
to: PathBuf,
#[source]
#[inherit]
source: Box<Self>,
},
#[class(inherit)]
#[error("Copying {from} to {to}")]
Copying {
from: PathBuf,
to: PathBuf,
#[source]
#[inherit]
source: Error,
},
#[class(inherit)]
#[error(transparent)]
Other(#[from] Error),
}
#[sys_traits::auto_impl]
pub trait CopyDirRecursiveSys:
sys_traits::FsCopy
+ sys_traits::FsCloneFile
+ sys_traits::FsCreateDir
+ sys_traits::FsHardLink
+ sys_traits::FsReadDir
{
}
/// Copies a directory to another directory.
///
/// Note: Does not handle symlinks.
pub fn copy_dir_recursive<TSys: CopyDirRecursiveSys>(
sys: &TSys,
from: &Path,
to: &Path,
) -> Result<(), CopyDirRecursiveError> {
sys.fs_create_dir_all(to).map_err(|source| {
CopyDirRecursiveError::Creating {
path: to.to_path_buf(),
source,
}
})?;
let read_dir =
sys
.fs_read_dir(from)
.map_err(|source| CopyDirRecursiveError::Reading {
path: from.to_path_buf(),
source,
})?;
for entry in read_dir {
let entry = entry?;
let file_type = entry.file_type()?;
let new_from = from.join(entry.file_name());
let new_to = to.join(entry.file_name());
if file_type.is_dir() {
copy_dir_recursive(sys, &new_from, &new_to).map_err(|source| {
CopyDirRecursiveError::Dir {
from: new_from.to_path_buf(),
to: new_to.to_path_buf(),
source: Box::new(source),
}
})?;
} else if file_type.is_file() {
sys.fs_copy(&new_from, &new_to).map_err(|source| {
CopyDirRecursiveError::Copying {
from: new_from.to_path_buf(),
to: new_to.to_path_buf(),
source,
}
})?;
}
}
Ok(())
}
pub fn symlink_dir<TSys: sys_traits::BaseFsSymlinkDir>(
sys: &TSys,
oldpath: &Path,
newpath: &Path,
) -> Result<(), Error> {
let err_mapper = |err: Error, kind: Option<ErrorKind>| {
Error::new(
kind.unwrap_or_else(|| err.kind()),
format!(
"{}, symlink '{}' -> '{}'",
err,
oldpath.display(),
newpath.display()
),
)
};
sys.fs_symlink_dir(oldpath, newpath).map_err(|err| {
#[cfg(windows)]
if let Some(code) = err.raw_os_error()
&& (code as u32 == winapi::shared::winerror::ERROR_PRIVILEGE_NOT_HELD
|| code as u32 == winapi::shared::winerror::ERROR_INVALID_FUNCTION)
{
return err_mapper(err, Some(ErrorKind::PermissionDenied));
}
err_mapper(err, None)
})
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/npm_installer/resolution.rs | libs/npm_installer/resolution.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::cell::RefCell;
use std::collections::BTreeMap;
use std::rc::Rc;
use std::sync::Arc;
use capacity_builder::StringBuilder;
use deno_error::JsErrorBox;
use deno_lockfile::NpmPackageDependencyLockfileInfo;
use deno_lockfile::NpmPackageLockfileInfo;
use deno_npm::NpmResolutionPackage;
use deno_npm::registry::NpmPackageInfo;
use deno_npm::registry::NpmRegistryApi;
use deno_npm::registry::NpmRegistryPackageInfoLoadError;
use deno_npm::resolution::AddPkgReqsOptions;
use deno_npm::resolution::DefaultTarballUrlProvider;
use deno_npm::resolution::NpmResolutionError;
use deno_npm::resolution::NpmResolutionSnapshot;
use deno_npm::resolution::UnmetPeerDepDiagnostic;
use deno_npm_cache::NpmCacheHttpClient;
use deno_npm_cache::NpmCacheSys;
use deno_npm_cache::RegistryInfoProvider;
use deno_resolver::display::DisplayTreeNode;
use deno_resolver::factory::NpmVersionResolverRc;
use deno_resolver::lockfile::LockfileLock;
use deno_resolver::lockfile::LockfileSys;
use deno_resolver::npm::managed::NpmResolutionCell;
use deno_semver::SmallStackString;
use deno_semver::StackString;
use deno_semver::jsr::JsrDepPackageReq;
use deno_semver::package::PackageKind;
use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq;
use deno_terminal::colors;
use deno_unsync::sync::AtomicFlag;
use deno_unsync::sync::TaskQueue;
pub struct AddPkgReqsResult {
/// Results from adding the individual packages.
///
/// The indexes of the results correspond to the indexes of the provided
/// package requirements.
pub results: Vec<Result<PackageNv, NpmResolutionError>>,
/// The final result of resolving and caching all the package requirements.
pub dependencies_result: Result<(), JsErrorBox>,
}
pub type HasJsExecutionStartedFlagRc = Arc<HasJsExecutionStartedFlag>;
/// A flag that indicates if JS execution has started, which
/// will tell the npm resolution to not do a deduplication pass
/// and instead npm resolution should only be additive.
#[derive(Debug, Default)]
pub struct HasJsExecutionStartedFlag(AtomicFlag);
impl HasJsExecutionStartedFlag {
#[inline(always)]
pub fn raise(&self) -> bool {
self.0.raise()
}
#[inline(always)]
pub fn is_raised(&self) -> bool {
self.0.is_raised()
}
}
#[sys_traits::auto_impl]
pub trait NpmResolutionInstallerSys: LockfileSys + NpmCacheSys {}
/// Updates the npm resolution with the provided package requirements.
#[derive(Debug)]
pub struct NpmResolutionInstaller<
TNpmCacheHttpClient: NpmCacheHttpClient,
TSys: NpmResolutionInstallerSys,
> {
has_js_execution_started_flag: HasJsExecutionStartedFlagRc,
npm_version_resolver: NpmVersionResolverRc,
registry_info_provider: Arc<RegistryInfoProvider<TNpmCacheHttpClient, TSys>>,
reporter: Option<Arc<dyn deno_npm::resolution::Reporter>>,
resolution: Arc<NpmResolutionCell>,
maybe_lockfile: Option<Arc<LockfileLock<TSys>>>,
update_queue: TaskQueue,
}
impl<TNpmCacheHttpClient: NpmCacheHttpClient, TSys: NpmResolutionInstallerSys>
NpmResolutionInstaller<TNpmCacheHttpClient, TSys>
{
pub fn new(
has_js_execution_started_flag: HasJsExecutionStartedFlagRc,
npm_version_resolver: NpmVersionResolverRc,
registry_info_provider: Arc<
RegistryInfoProvider<TNpmCacheHttpClient, TSys>,
>,
reporter: Option<Arc<dyn deno_npm::resolution::Reporter>>,
resolution: Arc<NpmResolutionCell>,
maybe_lockfile: Option<Arc<LockfileLock<TSys>>>,
) -> Self {
Self {
has_js_execution_started_flag,
npm_version_resolver,
registry_info_provider,
reporter,
resolution,
maybe_lockfile,
update_queue: Default::default(),
}
}
pub async fn cache_package_info(
&self,
package_name: &str,
) -> Result<Arc<NpmPackageInfo>, NpmRegistryPackageInfoLoadError> {
// this will internally cache the package information
self.registry_info_provider.package_info(package_name).await
}
/// Run a resolution install if the npm snapshot is in a pending state
/// due to a config file change.
pub async fn install_if_pending(&self) -> Result<(), NpmResolutionError> {
self.add_package_reqs_inner(&[]).await.1
}
pub async fn add_package_reqs(
&self,
package_reqs: &[PackageReq],
) -> AddPkgReqsResult {
let (results, dependencies_result) =
self.add_package_reqs_inner(package_reqs).await;
AddPkgReqsResult {
results,
dependencies_result: dependencies_result.map_err(JsErrorBox::from_err),
}
}
async fn add_package_reqs_inner(
&self,
package_reqs: &[PackageReq],
) -> (
Vec<Result<PackageNv, NpmResolutionError>>,
Result<(), NpmResolutionError>,
) {
// only allow one thread in here at a time
let _snapshot_lock = self.update_queue.acquire().await;
let result = self.add_package_reqs_to_snapshot(package_reqs).await;
(
result.results,
result.dep_graph_result.map(|snapshot| {
self.resolution.mark_not_pending();
self.resolution.set_snapshot(snapshot);
}),
)
}
async fn add_package_reqs_to_snapshot(
&self,
package_reqs: &[PackageReq],
) -> deno_npm::resolution::AddPkgReqsResult {
let snapshot = self.resolution.snapshot();
if !self.resolution.is_pending()
&& package_reqs
.iter()
.all(|req| snapshot.package_reqs().contains_key(req))
{
log::debug!("Snapshot already up to date. Skipping npm resolution.");
return deno_npm::resolution::AddPkgReqsResult {
results: package_reqs
.iter()
.map(|req| Ok(snapshot.package_reqs().get(req).unwrap().clone()))
.collect(),
dep_graph_result: Ok(snapshot),
unmet_peer_diagnostics: Default::default(),
};
}
log::debug!(
/* this string is used in tests */
"Running npm resolution."
);
let should_dedup = !self.has_js_execution_started_flag.is_raised();
let result = snapshot
.add_pkg_reqs(
self.registry_info_provider.as_ref(),
AddPkgReqsOptions {
package_reqs,
should_dedup,
version_resolver: &self.npm_version_resolver,
},
self.reporter.as_deref(),
)
.await;
let result = match &result.dep_graph_result {
Err(NpmResolutionError::Resolution(err))
if self.registry_info_provider.mark_force_reload() =>
{
log::debug!("{err:#}");
log::debug!("npm resolution failed. Trying again...");
// try again with forced reloading
let snapshot = self.resolution.snapshot();
snapshot
.add_pkg_reqs(
self.registry_info_provider.as_ref(),
AddPkgReqsOptions {
package_reqs,
should_dedup,
version_resolver: &self.npm_version_resolver,
},
self.reporter.as_deref(),
)
.await
}
_ => result,
};
self.registry_info_provider.clear_memory_cache();
if !result.unmet_peer_diagnostics.is_empty()
&& log::log_enabled!(log::Level::Warn)
{
let root_node =
peer_dep_diagnostics_to_display_tree(&result.unmet_peer_diagnostics);
let mut text = String::new();
_ = root_node.print(&mut text);
log::warn!("{}", text);
}
if let Ok(snapshot) = &result.dep_graph_result {
self.populate_lockfile_from_snapshot(snapshot);
}
result
}
fn populate_lockfile_from_snapshot(&self, snapshot: &NpmResolutionSnapshot) {
fn npm_package_to_lockfile_info(
pkg: &NpmResolutionPackage,
) -> NpmPackageLockfileInfo {
let dependencies = pkg
.dependencies
.iter()
.filter_map(|(name, id)| {
if pkg.optional_dependencies.contains(name) {
None
} else {
Some(NpmPackageDependencyLockfileInfo {
name: name.clone(),
id: id.as_serialized(),
})
}
})
.collect();
let optional_dependencies = pkg
.optional_dependencies
.iter()
.filter_map(|name| {
let id = pkg.dependencies.get(name)?;
Some(NpmPackageDependencyLockfileInfo {
name: name.clone(),
id: id.as_serialized(),
})
})
.collect();
let optional_peers = pkg
.optional_peer_dependencies
.iter()
.filter_map(|name| {
let id = pkg.dependencies.get(name)?;
Some(NpmPackageDependencyLockfileInfo {
name: name.clone(),
id: id.as_serialized(),
})
})
.collect();
NpmPackageLockfileInfo {
serialized_id: pkg.id.as_serialized(),
integrity: pkg.dist.as_ref().and_then(|dist| {
dist.integrity().for_lockfile().map(|s| s.into_owned())
}),
dependencies,
optional_dependencies,
os: pkg.system.os.clone(),
cpu: pkg.system.cpu.clone(),
tarball: pkg.dist.as_ref().and_then(|dist| {
// Omit the tarball URL if it's the standard NPM registry URL
let tarbal_url_provider =
deno_npm::resolution::NpmRegistryDefaultTarballUrlProvider;
if dist.tarball == tarbal_url_provider.default_tarball_url(&pkg.id.nv)
{
None
} else {
Some(StackString::from_str(&dist.tarball))
}
}),
deprecated: pkg.is_deprecated,
bin: pkg.has_bin,
scripts: pkg.has_scripts,
optional_peers,
}
}
let Some(lockfile) = &self.maybe_lockfile else {
return;
};
let mut lockfile = lockfile.lock();
lockfile.content.packages.npm.clear();
lockfile
.content
.packages
.specifiers
.retain(|req, _| match req.kind {
PackageKind::Npm => false,
PackageKind::Jsr => true,
});
for (package_req, nv) in snapshot.package_reqs() {
let id = &snapshot.resolve_package_from_deno_module(nv).unwrap().id;
lockfile.insert_package_specifier(
JsrDepPackageReq::npm(package_req.clone()),
{
StringBuilder::<SmallStackString>::build(|builder| {
builder.append(&id.nv.version);
builder.append(&id.peer_dependencies);
})
.unwrap()
},
);
}
for package in snapshot.all_packages_for_every_system() {
lockfile.insert_npm_package(npm_package_to_lockfile_info(package));
}
}
}
fn peer_dep_diagnostics_to_display_tree(
diagnostics: &[UnmetPeerDepDiagnostic],
) -> DisplayTreeNode {
struct MergedNode {
text: Rc<String>,
children: RefCell<Vec<Rc<MergedNode>>>,
}
// combine the nodes into a unified tree
let mut nodes: BTreeMap<Rc<String>, Rc<MergedNode>> = BTreeMap::new();
let mut top_level_nodes = Vec::new();
for diagnostic in diagnostics {
let text = Rc::new(format!(
"peer {}: resolved to {}",
diagnostic.dependency, diagnostic.resolved
));
let mut node = Rc::new(MergedNode {
text: text.clone(),
children: Default::default(),
});
let mut found_ancestor = false;
for ancestor in &diagnostic.ancestors {
let nv_string = Rc::new(ancestor.to_string());
if let Some(current_node) = nodes.get(&nv_string) {
{
let mut children = current_node.children.borrow_mut();
if let Err(insert_index) =
children.binary_search_by(|n| n.text.cmp(&node.text))
{
children.insert(insert_index, node);
}
}
node = current_node.clone();
found_ancestor = true;
break;
} else {
let current_node = Rc::new(MergedNode {
text: nv_string.clone(),
children: RefCell::new(vec![node]),
});
nodes.insert(nv_string.clone(), current_node.clone());
node = current_node;
}
}
if !found_ancestor {
top_level_nodes.push(node);
}
}
// now output it
let mut root_node = DisplayTreeNode {
text: format!(
"{} The following peer dependency issues were found:",
colors::yellow("Warning")
),
children: Vec::new(),
};
fn convert_node(node: &Rc<MergedNode>) -> DisplayTreeNode {
DisplayTreeNode {
text: node.text.to_string(),
children: node.children.borrow().iter().map(convert_node).collect(),
}
}
for top_level_node in top_level_nodes {
root_node.children.push(convert_node(&top_level_node));
}
root_node
}
#[cfg(test)]
mod test {
use deno_semver::Version;
use deno_semver::package::PackageNv;
use super::*;
#[test]
fn same_ancestor_peer_dep_message() {
let peer_deps = Vec::from([
UnmetPeerDepDiagnostic {
ancestors: vec![PackageNv::from_str("a@1.0.0").unwrap()],
dependency: PackageReq::from_str("b@*").unwrap(),
resolved: Version::parse_standard("1.0.0").unwrap(),
},
UnmetPeerDepDiagnostic {
// same ancestor as above
ancestors: vec![PackageNv::from_str("a@1.0.0").unwrap()],
dependency: PackageReq::from_str("c@*").unwrap(),
resolved: Version::parse_standard("1.0.0").unwrap(),
},
]);
let display_tree = peer_dep_diagnostics_to_display_tree(&peer_deps);
assert_eq!(display_tree.children.len(), 1);
assert_eq!(display_tree.children[0].children.len(), 2);
}
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/npm_installer/flag.rs | libs/npm_installer/flag.rs | // Copyright 2018-2025 the Deno authors. MIT license.
pub use inner::LaxSingleProcessFsFlag;
pub use inner::LaxSingleProcessFsFlagSys;
#[cfg(not(target_arch = "wasm32"))]
mod inner {
use std::path::PathBuf;
use std::sync::Arc;
use std::time::Duration;
use parking_lot::Mutex;
use sys_traits::FsFileLock;
use sys_traits::FsMetadataValue;
use crate::Reporter;
#[sys_traits::auto_impl]
pub trait LaxSingleProcessFsFlagSys:
sys_traits::FsOpen
+ sys_traits::FsMetadata
+ sys_traits::FsRemoveFile
+ sys_traits::FsWrite
+ sys_traits::ThreadSleep
+ sys_traits::SystemTimeNow
+ Clone
+ Send
+ Sync
+ 'static
{
}
struct PollFile<TSys: LaxSingleProcessFsFlagSys> {
sys: TSys,
file_path: PathBuf,
count: usize,
}
impl<TSys: LaxSingleProcessFsFlagSys> Drop for PollFile<TSys> {
fn drop(&mut self) {
// cleanup the poll file so the node_modules folder is more
// deterministic and so it doesn't end up in `deno compile`
_ = self.sys.fs_remove_file(&self.file_path);
}
}
impl<TSys: LaxSingleProcessFsFlagSys> PollFile<TSys> {
pub fn new(sys: TSys, file_path: PathBuf) -> Self {
Self {
sys,
file_path,
count: 0,
}
}
pub fn touch(&mut self) {
self.count += 1;
_ = self.sys.fs_write(&self.file_path, self.count.to_string());
}
}
struct LaxSingleProcessFsFlagInner<TSys: LaxSingleProcessFsFlagSys> {
file_path: PathBuf,
fs_file: TSys::File,
poll_file: Arc<Mutex<Option<PollFile<TSys>>>>,
}
impl<TSys: LaxSingleProcessFsFlagSys> Drop
for LaxSingleProcessFsFlagInner<TSys>
{
fn drop(&mut self) {
// kill the poll thread and clean up the poll file
self.poll_file.lock().take();
// release the file lock
if let Err(err) = self.fs_file.fs_file_unlock() {
log::debug!(
"Failed releasing lock for {}. {:#}",
self.file_path.display(),
err
);
}
}
}
/// A file system based flag that will attempt to synchronize multiple
/// processes so they go one after the other. In scenarios where
/// synchronization cannot be achieved, it will allow the current process
/// to proceed.
///
/// This should only be used in places where it's ideal for multiple
/// processes to not update something on the file system at the same time,
/// but it's not that big of a deal.
pub struct LaxSingleProcessFsFlag<TSys: LaxSingleProcessFsFlagSys>(
#[allow(dead_code)] Option<LaxSingleProcessFsFlagInner<TSys>>,
);
impl<TSys: LaxSingleProcessFsFlagSys> LaxSingleProcessFsFlag<TSys> {
  /// Attempts to acquire an exclusive advisory lock on `file_path`,
  /// waiting for another holder to release it if necessary.
  ///
  /// Never fails: if the lock file can't be opened, or the current
  /// holder's poll file stops updating (suggesting a dead holder), a
  /// flag that holds no lock is returned so this process can proceed.
  pub async fn lock(
    sys: &TSys,
    file_path: PathBuf,
    reporter: &impl Reporter,
    long_wait_message: &str,
  ) -> Self {
    log::debug!("Acquiring file lock at {}", file_path.display());
    // sibling file the lock holder continually rewrites to prove liveness
    let last_updated_path = file_path.with_extension("lock.poll");
    let start_instant = std::time::Instant::now();
    let mut open_options = sys_traits::OpenOptions::new();
    open_options.create = true;
    open_options.read = true;
    open_options.write = true;
    let open_result = sys.fs_open(&file_path, &open_options);
    match open_result {
      Ok(mut fs_file) => {
        let mut pb_update_guard = None;
        // give up after 10 consecutive errors while polling liveness
        let mut error_count = 0;
        while error_count < 10 {
          let lock_result =
            fs_file.fs_file_try_lock(sys_traits::FsFileLockMode::Exclusive);
          // how often the holder rewrites the poll file
          let poll_file_update_ms = 100;
          match lock_result {
            Ok(_) => {
              log::debug!("Acquired file lock at {}", file_path.display());
              let mut poll_file =
                PollFile::new(sys.clone(), last_updated_path);
              poll_file.touch();
              let poll_file = Arc::new(Mutex::new(Some(poll_file)));
              // Spawn a blocking task that will continually update a file
              // signalling the lock is alive. This is a fail safe for when
              // a file lock is never released. For example, on some operating
              // systems, if a process does not release the lock (say it's
              // killed), then the OS may release it at an indeterminate time
              //
              // This uses a blocking task because we use a single threaded
              // runtime and this is time sensitive so we don't want it to update
              // at the whims of whatever is occurring on the runtime thread.
              let sys = sys.clone();
              deno_unsync::spawn_blocking({
                let poll_file = poll_file.clone();
                move || loop {
                  sys
                    .thread_sleep(Duration::from_millis(poll_file_update_ms));
                  match &mut *poll_file.lock() {
                    Some(poll_file) => poll_file.touch(),
                    // flag was dropped; exit the polling thread
                    None => return,
                  }
                }
              });
              return Self(Some(LaxSingleProcessFsFlagInner {
                file_path,
                fs_file,
                poll_file,
              }));
            }
            Err(_) => {
              // show a message if it's been a while
              if pb_update_guard.is_none()
                && start_instant.elapsed().as_millis() > 1_000
              {
                let guard = reporter.on_blocking(long_wait_message);
                pb_update_guard = Some(guard);
              }
              // sleep for a little bit
              tokio::time::sleep(Duration::from_millis(20)).await;
              // Poll the last updated path to check if it's stopped updating,
              // which is an indication that the file lock is claimed, but
              // was never properly released.
              match sys
                .fs_metadata(&last_updated_path)
                .and_then(|p| p.modified())
              {
                Ok(last_updated_time) => {
                  let current_time = sys.sys_time_now();
                  match current_time.duration_since(last_updated_time) {
                    Ok(duration) => {
                      if duration.as_millis()
                        > (poll_file_update_ms * 2) as u128
                      {
                        // the other process hasn't updated this file in a long time
                        // so maybe it was killed and the operating system hasn't
                        // released the file lock yet
                        return Self(None);
                      } else {
                        error_count = 0; // reset
                      }
                    }
                    Err(_) => {
                      error_count += 1;
                    }
                  }
                }
                Err(_) => {
                  error_count += 1;
                }
              }
            }
          }
        }
        drop(pb_update_guard); // explicit for clarity
        Self(None)
      }
      Err(err) => {
        log::debug!(
          "Failed to open file lock at {}. {:#}",
          file_path.display(),
          err
        );
        Self(None) // let the process through
      }
    }
  }
}
}
#[cfg(target_arch = "wasm32")]
mod inner {
  use std::marker::PhantomData;
  use std::path::PathBuf;
  use crate::Reporter;
  // Don't bother locking the folder when installing via Wasm for now.
  // In the future, what we'd need is a way to spawn a thread (worker)
  // and have it reliably do the update of the .poll file
  #[sys_traits::auto_impl]
  pub trait LaxSingleProcessFsFlagSys: Clone + Send + Sync + 'static {}
  /// No-op stand-in for the native lock; acquiring it always succeeds
  /// immediately and holds nothing.
  pub struct LaxSingleProcessFsFlag<TSys: LaxSingleProcessFsFlagSys> {
    _data: PhantomData<TSys>,
  }
  impl<TSys: LaxSingleProcessFsFlagSys> LaxSingleProcessFsFlag<TSys> {
    /// Pretends to acquire the lock; all parameters are ignored.
    pub async fn lock(
      _sys: &TSys,
      _file_path: PathBuf,
      _reporter: &impl Reporter,
      _long_wait_message: &str,
    ) -> Self {
      Self { _data: PhantomData }
    }
  }
}
#[allow(clippy::disallowed_methods)]
#[cfg(all(test, not(target_arch = "wasm32")))]
mod test {
  use std::sync::Arc;
  use std::time::Duration;
  use parking_lot::Mutex;
  use test_util::TempDir;
  use tokio::sync::Notify;
  use super::*;
  use crate::LogReporter;
  // Two tasks acquire the lock in sequence; the `Notify` signals enforce
  // the ordering so the writes to file.txt are observed deterministically.
  #[tokio::test]
  async fn lax_fs_lock_basic() {
    let temp_dir = TempDir::new();
    let lock_path = temp_dir.path().join("file.lock");
    let signal1 = Arc::new(Notify::new());
    let signal2 = Arc::new(Notify::new());
    let signal3 = Arc::new(Notify::new());
    let signal4 = Arc::new(Notify::new());
    tokio::spawn({
      let lock_path = lock_path.clone();
      let signal1 = signal1.clone();
      let signal2 = signal2.clone();
      let signal3 = signal3.clone();
      let signal4 = signal4.clone();
      let temp_dir = temp_dir.clone();
      async move {
        let flag = LaxSingleProcessFsFlag::lock(
          &sys_traits::impls::RealSys,
          lock_path.to_path_buf(),
          &LogReporter,
          "waiting",
        )
        .await;
        signal1.notify_one();
        signal2.notified().await;
        tokio::time::sleep(Duration::from_millis(10)).await; // give the other thread time to acquire the lock
        temp_dir.write("file.txt", "update1");
        signal3.notify_one();
        signal4.notified().await;
        drop(flag);
      }
    });
    let signal5 = Arc::new(Notify::new());
    tokio::spawn({
      let lock_path = lock_path.clone();
      let temp_dir = temp_dir.clone();
      let signal5 = signal5.clone();
      async move {
        signal1.notified().await;
        signal2.notify_one();
        // this waits until the first task drops its flag
        let flag = LaxSingleProcessFsFlag::lock(
          &sys_traits::impls::RealSys,
          lock_path.to_path_buf(),
          &LogReporter,
          "waiting",
        )
        .await;
        temp_dir.write("file.txt", "update2");
        signal5.notify_one();
        drop(flag);
      }
    });
    signal3.notified().await;
    assert_eq!(temp_dir.read_to_string("file.txt"), "update1");
    signal4.notify_one();
    signal5.notified().await;
    assert_eq!(temp_dir.read_to_string("file.txt"), "update2");
    // ensure this is cleaned up
    assert!(!lock_path.with_extension("lock.poll").exists())
  }
  // Spawns many tasks contending on the same lock and verifies the
  // deliberately racy read-modify-write of the output file never
  // interleaves (i.e. the lock serialized the tasks).
  #[tokio::test]
  async fn lax_fs_lock_ordered() {
    let temp_dir = TempDir::new();
    let lock_path = temp_dir.path().join("file.lock");
    let output_path = temp_dir.path().join("output");
    let expected_order = Arc::new(Mutex::new(Vec::new()));
    let count = 10;
    let mut tasks = Vec::with_capacity(count);
    std::fs::write(&output_path, "").unwrap();
    for i in 0..count {
      let lock_path = lock_path.clone();
      let output_path = output_path.clone();
      let expected_order = expected_order.clone();
      tasks.push(tokio::spawn(async move {
        let flag = LaxSingleProcessFsFlag::lock(
          &sys_traits::impls::RealSys,
          lock_path.to_path_buf(),
          &LogReporter,
          "waiting",
        )
        .await;
        expected_order.lock().push(i.to_string());
        // be extremely racy
        let mut output = std::fs::read_to_string(&output_path).unwrap();
        if !output.is_empty() {
          output.push('\n');
        }
        output.push_str(&i.to_string());
        std::fs::write(&output_path, output).unwrap();
        drop(flag);
      }));
    }
    futures::future::join_all(tasks).await;
    let expected_output = expected_order.lock().join("\n");
    assert_eq!(
      std::fs::read_to_string(output_path).unwrap(),
      expected_output
    );
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/npm_installer/bin_entries.rs | libs/npm_installer/bin_entries.rs | // Copyright 2018-2025 the Deno authors. MIT license.
mod windows_shim;
use std::borrow::Cow;
use std::collections::HashMap;
use std::collections::HashSet;
use std::collections::VecDeque;
use std::path::Path;
use std::path::PathBuf;
use deno_npm::NpmPackageExtraInfo;
use deno_npm::NpmPackageId;
use deno_npm::NpmResolutionPackage;
use deno_npm::resolution::NpmResolutionSnapshot;
use sys_traits::FsCreateDirAll;
use sys_traits::FsFileMetadata;
use sys_traits::FsFileSetPermissions;
use sys_traits::FsMetadata;
use sys_traits::FsMetadataValue;
use sys_traits::FsOpen;
use sys_traits::FsReadLink;
use sys_traits::FsRemoveFile;
use sys_traits::FsSymlinkFile;
use sys_traits::FsWrite;
/// Returns the name of the default binary for the given package.
/// This is the package name without the organization (`@org/`), if any.
fn default_bin_name(package: &NpmResolutionPackage) -> &str {
  let full_name = package.id.nv.name.as_str();
  match full_name.rsplit_once('/') {
    Some((_scope, name)) => name,
    None => full_name,
  }
}
pub fn warn_missing_entrypoint(
bin_name: &str,
package_path: &Path,
entrypoint: &Path,
) {
log::warn!(
"{} Trying to set up '{}' bin for \"{}\", but the entry point \"{}\" doesn't exist.",
deno_terminal::colors::yellow("Warning"),
bin_name,
package_path.display(),
entrypoint.display()
);
}
#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum BinEntriesError {
#[class(inherit)]
#[error("Creating '{path}'")]
Creating {
path: PathBuf,
#[source]
#[inherit]
source: std::io::Error,
},
#[class(inherit)]
#[error("Setting permissions on '{path}'")]
Permissions {
path: PathBuf,
#[source]
#[inherit]
source: std::io::Error,
},
#[class(inherit)]
#[error("Can't set up '{name}' bin at {path}")]
SetUpBin {
name: String,
path: PathBuf,
#[source]
#[inherit]
source: Box<Self>,
},
#[class(inherit)]
#[error("Setting permissions on '{path}'")]
RemoveBinSymlink {
path: PathBuf,
#[source]
#[inherit]
source: std::io::Error,
},
#[class(inherit)]
#[error(transparent)]
Io(#[from] std::io::Error),
}
/// Accumulates the bin entries of packages during installation so they
/// can all be written to the `node_modules/.bin` directory at the end.
pub struct BinEntries<'a, TSys: SetupBinEntrySys> {
  /// Packages that have colliding bin names
  collisions: HashSet<&'a NpmPackageId>,
  // maps a bin name to the first package that claimed it
  seen_names: HashMap<String, &'a NpmPackageId>,
  /// The bin entries
  entries: Vec<(&'a NpmResolutionPackage, PathBuf, NpmPackageExtraInfo)>,
  // whether `entries` has been sorted by dependency depth
  // (only done when there are collisions)
  sorted: bool,
  sys: &'a TSys,
}
impl<'a, TSys: SetupBinEntrySys> BinEntries<'a, TSys> {
  /// Creates an empty set of bin entries.
  pub fn new(sys: &'a TSys) -> Self {
    Self {
      collisions: Default::default(),
      seen_names: Default::default(),
      entries: Default::default(),
      sorted: false,
      sys,
    }
  }
  /// Add a new bin entry (package with a bin field)
  // NOTE(review): panics if `extra.bin` is `None`; callers appear to only
  // add packages known to have a bin entry — confirm at call sites
  pub fn add<'b>(
    &mut self,
    package: &'a NpmResolutionPackage,
    extra: &'b NpmPackageExtraInfo,
    package_path: PathBuf,
  ) {
    self.sorted = false;
    // check for a new collision, if we haven't already
    // found one
    match extra.bin.as_ref().unwrap() {
      deno_npm::registry::NpmPackageVersionBinEntry::String(_) => {
        // a string bin entry uses the package's default bin name
        let bin_name = default_bin_name(package);
        if let Some(other) =
          self.seen_names.insert(bin_name.to_string(), &package.id)
        {
          self.collisions.insert(&package.id);
          self.collisions.insert(other);
        }
      }
      deno_npm::registry::NpmPackageVersionBinEntry::Map(entries) => {
        for name in entries.keys() {
          if let Some(other) =
            self.seen_names.insert(name.to_string(), &package.id)
          {
            self.collisions.insert(&package.id);
            self.collisions.insert(other);
          }
        }
      }
    }
    self.entries.push((package, package_path, extra.clone()));
  }
  /// Iterates the entries, invoking `new` for the winner of each bin name
  /// and `already_seen` for bin names claimed by an earlier entry;
  /// `filter` can skip packages entirely.
  fn for_each_entry(
    &mut self,
    snapshot: &NpmResolutionSnapshot,
    mut already_seen: impl FnMut(
      &Path,
      &str, // bin script
    ) -> Result<(), BinEntriesError>,
    mut new: impl FnMut(
      &NpmResolutionPackage,
      &NpmPackageExtraInfo,
      &Path,
      &str, // bin name
      &str, // bin script
    ) -> Result<(), BinEntriesError>,
    mut filter: impl FnMut(&NpmResolutionPackage) -> bool,
  ) -> Result<(), BinEntriesError> {
    if !self.collisions.is_empty() && !self.sorted {
      // walking the dependency tree to find out the depth of each package
      // is sort of expensive, so we only do it if there's a collision
      sort_by_depth(snapshot, &mut self.entries, &mut self.collisions);
      self.sorted = true;
    }
    let mut seen = HashSet::new();
    for (package, package_path, extra) in &self.entries {
      if !filter(package) {
        continue;
      }
      if let Some(bin_entries) = &extra.bin {
        match bin_entries {
          deno_npm::registry::NpmPackageVersionBinEntry::String(script) => {
            let name = default_bin_name(package);
            if !seen.insert(name) {
              already_seen(package_path, script)?;
              // we already set up a bin entry with this name
              continue;
            }
            new(package, extra, package_path, name, script)?;
          }
          deno_npm::registry::NpmPackageVersionBinEntry::Map(entries) => {
            for (name, script) in entries {
              if !seen.insert(name) {
                already_seen(package_path, script)?;
                // we already set up a bin entry with this name
                continue;
              }
              new(package, extra, package_path, name, script)?;
            }
          }
        }
      }
    }
    Ok(())
  }
  /// Collect the bin entries into a vec of (name, script path)
  pub fn collect_bin_files(
    &mut self,
    snapshot: &NpmResolutionSnapshot,
  ) -> Vec<(String, PathBuf)> {
    let mut bins = Vec::new();
    self
      .for_each_entry(
        snapshot,
        |_, _| Ok(()),
        |_, _, package_path, name, script| {
          bins.push((name.to_string(), package_path.join(script)));
          Ok(())
        },
        |_| true,
      )
      .unwrap();
    bins
  }
  // Shared implementation of `finish`/`finish_only`: creates the `.bin`
  // dir if needed, then sets up each (filtered) entry, reporting every
  // outcome through `handler`.
  fn set_up_entries_filtered(
    mut self,
    snapshot: &NpmResolutionSnapshot,
    bin_node_modules_dir_path: &Path,
    filter: impl FnMut(&NpmResolutionPackage) -> bool,
    mut handler: impl FnMut(&EntrySetupOutcome<'_>),
  ) -> Result<(), BinEntriesError> {
    if !self.entries.is_empty()
      && !self.sys.fs_exists_no_err(bin_node_modules_dir_path)
    {
      self
        .sys
        .fs_create_dir_all(bin_node_modules_dir_path)
        .map_err(|source| BinEntriesError::Creating {
          path: bin_node_modules_dir_path.to_path_buf(),
          source,
        })?;
    }
    self.for_each_entry(
      snapshot,
      |_package_path, _script| {
        if !sys_traits::impls::is_windows() {
          // scripts that lost the name collision still get made executable
          let path = _package_path.join(_script);
          make_executable_if_exists(self.sys, &path)?;
        }
        Ok(())
      },
      |package, extra, package_path, name, script| {
        let outcome = set_up_bin_entry(
          self.sys,
          package,
          extra,
          name,
          script,
          package_path,
          bin_node_modules_dir_path,
        )?;
        handler(&outcome);
        Ok(())
      },
      filter,
    )?;
    Ok(())
  }
  /// Finish setting up the bin entries, writing the necessary files
  /// to disk.
  pub fn finish(
    self,
    snapshot: &NpmResolutionSnapshot,
    bin_node_modules_dir_path: &Path,
    handler: impl FnMut(&EntrySetupOutcome<'_>),
  ) -> Result<(), BinEntriesError> {
    self.set_up_entries_filtered(
      snapshot,
      bin_node_modules_dir_path,
      |_| true,
      handler,
    )
  }
  /// Finish setting up the bin entries, writing the necessary files
  /// to disk.
  /// Only entries for packages contained in `only` are set up.
  pub fn finish_only(
    self,
    snapshot: &NpmResolutionSnapshot,
    bin_node_modules_dir_path: &Path,
    handler: impl FnMut(&EntrySetupOutcome<'_>),
    only: &HashSet<&NpmPackageId>,
  ) -> Result<(), BinEntriesError> {
    self.set_up_entries_filtered(
      snapshot,
      bin_node_modules_dir_path,
      |package| only.contains(&package.id),
      handler,
    )
  }
}
// walk the dependency tree to find out the depth of each package
// that has a bin entry, then sort them by depth
fn sort_by_depth(
  snapshot: &NpmResolutionSnapshot,
  bin_entries: &mut [(&NpmResolutionPackage, PathBuf, NpmPackageExtraInfo)],
  collisions: &mut HashSet<&NpmPackageId>,
) {
  // BFS queue entries: either a package to visit or a marker that the
  // traversal has moved one level deeper in the dependency tree
  enum Entry<'a> {
    Pkg(&'a NpmPackageId),
    IncreaseDepth,
  }
  let mut seen = HashSet::new();
  let mut depths: HashMap<&NpmPackageId, u64> =
    HashMap::with_capacity(collisions.len());
  let mut queue = VecDeque::new();
  queue.extend(snapshot.top_level_packages().map(Entry::Pkg));
  seen.extend(snapshot.top_level_packages());
  queue.push_back(Entry::IncreaseDepth);
  let mut current_depth = 0u64;
  while let Some(entry) = queue.pop_front() {
    // stop early once every colliding package has a recorded depth
    if collisions.is_empty() {
      break;
    }
    let id = match entry {
      Entry::Pkg(id) => id,
      Entry::IncreaseDepth => {
        current_depth += 1;
        if queue.is_empty() {
          break;
        }
        queue.push_back(Entry::IncreaseDepth);
        continue;
      }
    };
    if let Some(package) = snapshot.package_from_id(id) {
      if collisions.remove(&package.id) {
        depths.insert(&package.id, current_depth);
      }
      for dep in package.dependencies.values() {
        if seen.insert(dep) {
          queue.push_back(Entry::Pkg(dep));
        }
      }
    }
  }
  // shallower packages sort first; ties broken by name/version descending
  bin_entries.sort_by(|(a, _, _), (b, _, _)| {
    depths
      .get(&a.id)
      .unwrap_or(&u64::MAX)
      .cmp(depths.get(&b.id).unwrap_or(&u64::MAX))
      .then_with(|| a.id.nv.cmp(&b.id.nv).reverse())
  });
}
/// Filesystem capabilities required to set up bin entries.
#[sys_traits::auto_impl]
pub trait SetupBinEntrySys:
  FsOpen
  + FsWrite
  + FsSymlinkFile
  + FsRemoveFile
  + FsCreateDirAll
  + FsMetadata
  + FsReadLink
{
}
/// Sets up a single bin entry: a shim on Windows, or an executable
/// symlink everywhere else.
pub fn set_up_bin_entry<'a>(
  sys: &impl SetupBinEntrySys,
  package: &'a NpmResolutionPackage,
  extra: &'a NpmPackageExtraInfo,
  bin_name: &'a str,
  bin_script: &str,
  package_path: &'a Path,
  bin_node_modules_dir_path: &Path,
) -> Result<EntrySetupOutcome<'a>, BinEntriesError> {
  if !sys_traits::impls::is_windows() {
    return symlink_bin_entry(
      sys,
      package,
      extra,
      bin_name,
      bin_script,
      package_path,
      bin_node_modules_dir_path,
    );
  }
  windows_shim::set_up_bin_shim(
    sys,
    package,
    extra,
    bin_name,
    bin_script,
    package_path,
    bin_node_modules_dir_path,
  )?;
  Ok(EntrySetupOutcome::Success)
}
/// Make the file at `path` executable if it exists.
/// Returns `true` if the file exists, `false` otherwise.
fn make_executable_if_exists(
  sys: &impl FsOpen,
  path: &Path,
) -> Result<bool, BinEntriesError> {
  let mut opts = sys_traits::OpenOptions::new();
  opts.read = true;
  opts.write = true;
  opts.truncate = false; // make sure we never clobber the contents
  let mut file = match sys.fs_open(path, &opts) {
    Err(err) if err.kind() == std::io::ErrorKind::NotFound => {
      return Ok(false);
    }
    Err(err) => return Err(err.into()),
    Ok(file) => file,
  };
  let mode = file.fs_file_metadata()?.mode()?;
  let already_executable = mode & 0o111 != 0;
  if !already_executable {
    // add the executable bits for user, group, and other
    file.fs_file_set_permissions(mode | 0o111).map_err(|source| {
      BinEntriesError::Permissions {
        path: path.to_path_buf(),
        source,
      }
    })?;
  }
  Ok(true)
}
/// The result of attempting to set up a single bin entry.
pub enum EntrySetupOutcome<'a> {
  /// The script the bin entry points at doesn't exist on disk.
  #[cfg_attr(windows, allow(dead_code))]
  MissingEntrypoint {
    bin_name: &'a str,
    package_path: &'a Path,
    entrypoint: PathBuf,
    package: &'a NpmResolutionPackage,
    extra: &'a NpmPackageExtraInfo,
  },
  Success,
}
impl EntrySetupOutcome<'_> {
  /// Emits a warning for a missing entrypoint outcome; does nothing on
  /// success.
  pub fn warn_if_failed(&self) {
    if let EntrySetupOutcome::MissingEntrypoint {
      bin_name,
      package_path,
      entrypoint,
      ..
    } = self
    {
      warn_missing_entrypoint(bin_name, package_path, entrypoint);
    }
  }
}
/// Computes the relative path from `from` to `to`, returning `None`
/// when one can't be derived (e.g. different Windows drive prefixes).
fn relative_path(from: &Path, to: &Path) -> Option<PathBuf> {
  pathdiff::diff_paths(to, from)
}
// Creates (or refreshes) a symlink in the `.bin` dir pointing at the
// package's bin script, making the script executable first.
fn symlink_bin_entry<'a>(
  sys: &(impl FsOpen + FsSymlinkFile + FsRemoveFile + FsReadLink),
  package: &'a NpmResolutionPackage,
  extra: &'a NpmPackageExtraInfo,
  bin_name: &'a str,
  bin_script: &str,
  package_path: &'a Path,
  bin_node_modules_dir_path: &Path,
) -> Result<EntrySetupOutcome<'a>, BinEntriesError> {
  let link = bin_node_modules_dir_path.join(bin_name);
  let original = package_path.join(bin_script);
  // prefer a relative symlink target, falling back to the absolute path
  let original_relative = relative_path(bin_node_modules_dir_path, &original)
    .map(Cow::Owned)
    .unwrap_or_else(|| Cow::Borrowed(&original));
  // nothing to do when the symlink already points at the right target
  if let Ok(original_link) = sys.fs_read_link(&link)
    && *original_link == *original_relative
  {
    return Ok(EntrySetupOutcome::Success);
  }
  let found = make_executable_if_exists(sys, &original).map_err(|source| {
    BinEntriesError::SetUpBin {
      name: bin_name.to_string(),
      path: original.to_path_buf(),
      source: Box::new(source),
    }
  })?;
  if !found {
    return Ok(EntrySetupOutcome::MissingEntrypoint {
      bin_name,
      package_path,
      entrypoint: original,
      package,
      extra,
    });
  }
  if let Err(err) = sys.fs_symlink_file(&*original_relative, &link) {
    if err.kind() == std::io::ErrorKind::AlreadyExists {
      // remove and retry
      sys.fs_remove_file(&link).map_err(|source| {
        BinEntriesError::RemoveBinSymlink {
          path: link.clone(),
          source,
        }
      })?;
      sys
        .fs_symlink_file(&*original_relative, &link)
        .map_err(|source| BinEntriesError::SetUpBin {
          name: bin_name.to_string(),
          path: original_relative.to_path_buf(),
          source: Box::new(source.into()),
        })?;
      return Ok(EntrySetupOutcome::Success);
    }
    return Err(BinEntriesError::SetUpBin {
      name: bin_name.to_string(),
      path: original_relative.to_path_buf(),
      source: Box::new(err.into()),
    });
  }
  Ok(EntrySetupOutcome::Success)
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/npm_installer/rt.rs | libs/npm_installer/rt.rs | // Copyright 2018-2025 the Deno authors. MIT license.
#[cfg(not(target_arch = "wasm32"))]
use deno_unsync::JoinHandle;
#[cfg(target_arch = "wasm32")]
// on Wasm the closure runs inline, so the handle is simply an
// already-completed future holding the result
pub type JoinHandle<T> =
  std::future::Ready<Result<T, std::convert::Infallible>>;
/// Runs `f` on a blocking thread (native) or synchronously (Wasm),
/// returning a handle that resolves to its result.
pub fn spawn_blocking<
  F: (FnOnce() -> R) + Send + 'static,
  R: Send + 'static,
>(
  f: F,
) -> JoinHandle<R> {
  #[cfg(target_arch = "wasm32")]
  {
    // execute immediately on the current thread
    let result = f();
    std::future::ready(Ok(result))
  }
  #[cfg(not(target_arch = "wasm32"))]
  {
    deno_unsync::spawn_blocking(f)
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/npm_installer/initializer.rs | libs/npm_installer/initializer.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::path::PathBuf;
use std::sync::Arc;
use deno_error::JsError;
use deno_error::JsErrorBox;
use deno_npm::resolution::NpmResolutionSnapshot;
use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
use deno_resolver::lockfile::LockfileLock;
use deno_resolver::lockfile::LockfileSys;
use deno_resolver::npm::managed::NpmResolutionCell;
use deno_resolver::workspace::WorkspaceNpmLinkPackagesRc;
use deno_unsync::sync::TaskQueue;
use parking_lot::Mutex;
use thiserror::Error;
/// Where the initial npm resolution snapshot comes from.
#[derive(Debug, Clone)]
pub enum NpmResolverManagedSnapshotOption<TSys: LockfileSys> {
  /// Derive the snapshot from the lockfile on first use.
  ResolveFromLockfile(Arc<LockfileLock<TSys>>),
  /// Use the given snapshot (or none) directly.
  Specified(Option<ValidSerializedNpmResolutionSnapshot>),
}
/// Initialization state for `NpmResolutionInitializer`.
#[derive(Debug)]
enum SyncState<TSys: LockfileSys> {
  // `Option` so the snapshot option can be `take()`n exactly once
  Pending(Option<NpmResolverManagedSnapshotOption<TSys>>),
  // initialization failed; the error is replayed to later callers
  Err(ResolveSnapshotError),
  Success,
}
/// Lazily initializes the npm resolution from a lockfile or a
/// pre-computed snapshot, exactly once, across concurrent callers.
#[derive(Debug)]
pub struct NpmResolutionInitializer<TSys: LockfileSys> {
  npm_resolution: Arc<NpmResolutionCell>,
  link_packages: WorkspaceNpmLinkPackagesRc,
  // serializes concurrent `ensure_initialized` callers
  queue: TaskQueue,
  sync_state: Mutex<SyncState<TSys>>,
}
impl<TSys: LockfileSys> NpmResolutionInitializer<TSys> {
  pub fn new(
    npm_resolution: Arc<NpmResolutionCell>,
    link_packages: WorkspaceNpmLinkPackagesRc,
    snapshot_option: NpmResolverManagedSnapshotOption<TSys>,
  ) -> Self {
    Self {
      npm_resolution,
      link_packages,
      queue: Default::default(),
      sync_state: Mutex::new(SyncState::Pending(Some(snapshot_option))),
    }
  }
  #[cfg(debug_assertions)]
  pub fn debug_assert_initialized(&self) {
    if !matches!(*self.sync_state.lock(), SyncState::Success) {
      panic!(
        "debug assert: npm resolution must be initialized before calling this code"
      );
    }
  }
  /// Initializes the npm resolution exactly once; concurrent callers
  /// wait on the task queue and observe the first caller's outcome.
  pub async fn ensure_initialized(&self) -> Result<(), JsErrorBox> {
    // fast exit if not pending
    {
      match &*self.sync_state.lock() {
        SyncState::Pending(_) => {}
        SyncState::Err(err) => return Err(JsErrorBox::from_err(err.clone())),
        SyncState::Success => return Ok(()),
      }
    }
    // only allow one task in here at a time
    let _guard = self.queue.acquire().await;
    let snapshot_option = {
      let mut sync_state = self.sync_state.lock();
      match &mut *sync_state {
        SyncState::Pending(snapshot_option) => {
          // this should never panic, but if it does it means that a
          // previous future was dropped while initialization occurred...
          // that should never happen because this is initialized during
          // startup
          snapshot_option.take().unwrap()
        }
        // another thread updated the state while we were waiting
        SyncState::Err(resolve_snapshot_error) => {
          return Err(JsErrorBox::from_err(resolve_snapshot_error.clone()));
        }
        SyncState::Success => {
          return Ok(());
        }
      }
    };
    match resolve_snapshot(snapshot_option, &self.link_packages) {
      Ok(maybe_snapshot) => {
        if let Some(snapshot) = maybe_snapshot {
          self
            .npm_resolution
            .set_snapshot(NpmResolutionSnapshot::new(snapshot.snapshot));
          if snapshot.is_pending {
            self.npm_resolution.mark_pending();
          }
        }
        let mut sync_state = self.sync_state.lock();
        *sync_state = SyncState::Success;
        Ok(())
      }
      Err(err) => {
        // record the error so later callers fail with the same error
        let mut sync_state = self.sync_state.lock();
        *sync_state = SyncState::Err(err.clone());
        Err(JsErrorBox::from_err(err))
      }
    }
  }
}
/// Error that occurred while resolving a snapshot from a lockfile.
#[derive(Debug, Error, Clone, JsError)]
#[error("failed reading lockfile '{}'", lockfile_path.display())]
#[class(inherit)]
pub struct ResolveSnapshotError {
  lockfile_path: PathBuf,
  #[inherit]
  #[source]
  source: SnapshotFromLockfileError,
}
/// Resolves the initial snapshot from the given option, consulting the
/// lockfile unless it's being overwritten.
#[allow(clippy::result_large_err)]
fn resolve_snapshot<TSys: LockfileSys>(
  snapshot: NpmResolverManagedSnapshotOption<TSys>,
  link_packages: &WorkspaceNpmLinkPackagesRc,
) -> Result<Option<SnapshotWithPending>, ResolveSnapshotError> {
  match snapshot {
    NpmResolverManagedSnapshotOption::ResolveFromLockfile(lockfile) => {
      if lockfile.overwrite() {
        // the lockfile is being regenerated, so nothing to resolve from
        return Ok(None);
      }
      let snapshot = snapshot_from_lockfile(lockfile.clone(), link_packages)
        .map_err(|source| ResolveSnapshotError {
          lockfile_path: lockfile.filename.clone(),
          source,
        })?;
      Ok(Some(snapshot))
    }
    NpmResolverManagedSnapshotOption::Specified(maybe_snapshot) => {
      let result = maybe_snapshot.map(|snapshot| SnapshotWithPending {
        snapshot,
        is_pending: false,
      });
      Ok(result)
    }
  }
}
/// Errors that can occur deriving a snapshot from a lockfile.
#[derive(Debug, Error, Clone, JsError)]
pub enum SnapshotFromLockfileError {
  #[error(transparent)]
  #[class(inherit)]
  SnapshotFromLockfile(#[from] deno_npm::resolution::SnapshotFromLockfileError),
}
/// A resolved snapshot plus whether the lockfile content had already
/// changed while loading it (meaning the resolution is pending).
struct SnapshotWithPending {
  snapshot: ValidSerializedNpmResolutionSnapshot,
  is_pending: bool,
}
/// Builds a resolution snapshot from the lockfile contents, recording
/// whether the lockfile content has already changed (pending state).
fn snapshot_from_lockfile<TSys: LockfileSys>(
  lockfile: Arc<LockfileLock<TSys>>,
  link_packages: &WorkspaceNpmLinkPackagesRc,
) -> Result<SnapshotWithPending, SnapshotFromLockfileError> {
  let guard = lockfile.lock();
  let params = deno_npm::resolution::SnapshotFromLockfileParams {
    link_packages: &link_packages.0,
    lockfile: &guard,
    default_tarball_url: Default::default(),
  };
  let snapshot = deno_npm::resolution::snapshot_from_lockfile(params)?;
  Ok(SnapshotWithPending {
    snapshot,
    is_pending: guard.has_content_changed,
  })
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/npm_installer/extra_info.rs | libs/npm_installer/extra_info.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::path::Path;
use std::sync::Arc;
use deno_error::JsErrorBox;
use deno_npm::NpmPackageExtraInfo;
use deno_npm::NpmResolutionPackage;
use deno_npm::registry::NpmRegistryApi;
use deno_resolver::workspace::WorkspaceNpmLinkPackagesRc;
use deno_semver::package::PackageNv;
use parking_lot::RwLock;
/// Wraps an `NpmPackageExtraInfoProvider` with an in-memory cache keyed
/// by package name and version.
pub struct CachedNpmPackageExtraInfoProvider {
  inner: Arc<NpmPackageExtraInfoProvider>,
  cache: RwLock<rustc_hash::FxHashMap<PackageNv, NpmPackageExtraInfo>>,
}
impl CachedNpmPackageExtraInfoProvider {
  /// Wraps `inner` with an initially-empty cache.
  pub fn new(inner: Arc<NpmPackageExtraInfoProvider>) -> Self {
    Self {
      inner,
      cache: Default::default(),
    }
  }

  /// Returns the extra info for a package, fetching it through the
  /// underlying provider on a cache miss and memoizing the result.
  pub async fn get_package_extra_info(
    &self,
    package_nv: &PackageNv,
    package_path: &Path,
    expected: ExpectedExtraInfo,
  ) -> Result<NpmPackageExtraInfo, JsErrorBox> {
    // fast path: serve from the cache under a read lock
    let maybe_cached = self.cache.read().get(package_nv).cloned();
    if let Some(extra_info) = maybe_cached {
      return Ok(extra_info);
    }
    // slow path: delegate, then memoize for later callers
    let fetched = self
      .inner
      .get_package_extra_info(package_nv, package_path, expected)
      .await?;
    self
      .cache
      .write()
      .insert(package_nv.clone(), fetched.clone());
    Ok(fetched)
  }
}
/// Which pieces of extra info the caller expects a package to have,
/// derived from flags carried on the resolution package.
#[derive(Debug, Clone, Copy, Default)]
pub struct ExpectedExtraInfo {
  pub deprecated: bool,
  pub bin: bool,
  pub scripts: bool,
}
impl ExpectedExtraInfo {
pub fn from_package(package: &NpmResolutionPackage) -> Self {
Self {
deprecated: package.is_deprecated,
bin: package.has_bin,
scripts: package.has_scripts,
}
}
}
/// Filesystem capabilities needed to read a package's `package.json`.
#[sys_traits::auto_impl]
pub trait NpmPackageExtraInfoProviderSys:
  sys_traits::BaseFsRead + Send + Sync
{
}
/// Provides the "extra info" (deprecation, bin entries, scripts) for a
/// package, preferring the local `package.json` and falling back to
/// the npm registry packument.
pub struct NpmPackageExtraInfoProvider {
  npm_registry_info_provider: Arc<dyn NpmRegistryApi + Send + Sync>,
  sys: Arc<dyn NpmPackageExtraInfoProviderSys>,
  workspace_link_packages: WorkspaceNpmLinkPackagesRc,
}
impl std::fmt::Debug for NpmPackageExtraInfoProvider {
  // manual impl because the trait-object fields don't implement `Debug`
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    f.debug_struct("NpmPackageExtraInfoProvider").finish()
  }
}
impl NpmPackageExtraInfoProvider {
  pub fn new(
    npm_registry_info_provider: Arc<dyn NpmRegistryApi + Send + Sync>,
    sys: Arc<dyn NpmPackageExtraInfoProviderSys>,
    workspace_link_packages: WorkspaceNpmLinkPackagesRc,
  ) -> Self {
    Self {
      npm_registry_info_provider,
      sys,
      workspace_link_packages,
    }
  }
  /// Gets a package's extra info, reading the local `package.json` when
  /// possible and falling back to the registry when the local data is
  /// missing, incomplete, or deprecation info is required.
  pub async fn get_package_extra_info(
    &self,
    package_nv: &PackageNv,
    package_path: &Path,
    expected: ExpectedExtraInfo,
  ) -> Result<NpmPackageExtraInfo, JsErrorBox> {
    if expected.deprecated {
      // we need the registry version info to get the deprecated string, since it's not in the
      // package's package.json
      self.fetch_from_registry(package_nv).await
    } else {
      match self.fetch_from_package_json(package_path).await {
        Ok(extra_info) => {
          // some packages that use "directories.bin" have a "bin" entry in
          // the packument, but not in package.json (e.g. esbuild-wasm)
          if (expected.bin && extra_info.bin.is_none())
            || (expected.scripts && extra_info.scripts.is_empty())
          {
            self.fetch_from_registry(package_nv).await
          } else {
            Ok(extra_info)
          }
        }
        Err(err) => {
          // reading/parsing package.json failed; fall back to the registry
          log::debug!(
            "failed to get extra info for {} from package.json at {}: {}",
            package_nv,
            package_path.join("package.json").display(),
            err
          );
          self.fetch_from_registry(package_nv).await
        }
      }
    }
  }
  // Fetches the packument from the registry and extracts the extra info
  // for the exact version, forcing a registry reload once if the version
  // isn't found in the (possibly stale) cached packument.
  async fn fetch_from_registry(
    &self,
    package_nv: &PackageNv,
  ) -> Result<NpmPackageExtraInfo, JsErrorBox> {
    let mut package_info = self
      .npm_registry_info_provider
      .package_info(&package_nv.name)
      .await
      .map_err(JsErrorBox::from_err)?;
    let version_info = match package_info
      .version_info(package_nv, &self.workspace_link_packages.0)
    {
      Ok(version_info) => version_info,
      Err(deno_npm::resolution::NpmPackageVersionNotFound { .. }) => {
        // Don't bother checking the return value of mark_force_reload to tell
        // whether to reload because we could race here with another task within
        // this method. That said, ideally this code would only reload the
        // specific packument that's out of date to be a bit more efficient.
        self.npm_registry_info_provider.mark_force_reload();
        package_info = self
          .npm_registry_info_provider
          .package_info(&package_nv.name)
          .await
          .map_err(JsErrorBox::from_err)?;
        package_info
          .version_info(package_nv, &self.workspace_link_packages.0)
          .map_err(JsErrorBox::from_err)?
      }
    };
    Ok(NpmPackageExtraInfo {
      deprecated: version_info.deprecated.clone(),
      bin: version_info.bin.clone(),
      scripts: version_info.scripts.clone(),
    })
  }
  // Reads and deserializes the package's `package.json` on a blocking
  // thread (file I/O + JSON parsing off the async runtime thread).
  async fn fetch_from_package_json(
    &self,
    package_path: &Path,
  ) -> Result<NpmPackageExtraInfo, JsErrorBox> {
    let package_json_path = package_path.join("package.json");
    let sys = self.sys.clone();
    let extra_info: NpmPackageExtraInfo =
      crate::rt::spawn_blocking(move || {
        let package_json = sys
          .base_fs_read(&package_json_path)
          .map_err(JsErrorBox::from_err)?;
        let extra_info: NpmPackageExtraInfo =
          serde_json::from_slice(&package_json)
            .map_err(JsErrorBox::from_err)?;
        Ok::<_, JsErrorBox>(extra_info)
      })
      .await
      .map_err(JsErrorBox::from_err)??;
    Ok(extra_info)
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/npm_installer/graph.rs | libs/npm_installer/graph.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::sync::Arc;
use deno_error::JsErrorBox;
use deno_graph::NpmLoadError;
use deno_graph::NpmResolvePkgReqsResult;
use deno_npm::resolution::NpmResolutionError;
use deno_npm_cache::NpmCacheHttpClient;
use deno_resolver::graph::FoundPackageJsonDepFlag;
use deno_semver::package::PackageReq;
use crate::NpmInstaller;
use crate::NpmInstallerSys;
use crate::PackageCaching;
/// When npm packages should be cached during resolution.
#[derive(Debug, Clone, Copy)]
pub enum NpmCachingStrategy {
  /// Cache all packages.
  Eager,
  /// Cache only the packages for the requested reqs.
  Lazy,
  /// Do not automatically cache packages.
  Manual,
}
impl NpmCachingStrategy {
  /// Translates the strategy into the `PackageCaching` argument used by
  /// the installer (`None` means skip automatic caching).
  pub fn as_package_caching<'a>(
    &self,
    package_reqs: &'a [PackageReq],
  ) -> Option<PackageCaching<'a>> {
    match self {
      Self::Eager => Some(PackageCaching::All),
      Self::Lazy => {
        let only = Cow::Borrowed(package_reqs);
        Some(PackageCaching::Only(only))
      }
      Self::Manual => None,
    }
  }
}
/// deno_graph npm resolver backed by an optional `NpmInstaller`.
#[derive(Debug)]
pub struct NpmDenoGraphResolver<
  TNpmCacheHttpClient: NpmCacheHttpClient,
  TSys: NpmInstallerSys,
> {
  // `None` when npm is unavailable; resolving reqs then errors
  npm_installer: Option<Arc<NpmInstaller<TNpmCacheHttpClient, TSys>>>,
  found_package_json_dep_flag: Arc<FoundPackageJsonDepFlag>,
  npm_caching: NpmCachingStrategy,
}
impl<TNpmCacheHttpClient: NpmCacheHttpClient, TSys: NpmInstallerSys>
  NpmDenoGraphResolver<TNpmCacheHttpClient, TSys>
{
  /// Creates a resolver; passing `None` for `npm_installer` means npm
  /// resolution is unavailable and package reqs will fail to resolve.
  pub fn new(
    npm_installer: Option<Arc<NpmInstaller<TNpmCacheHttpClient, TSys>>>,
    found_package_json_dep_flag: Arc<FoundPackageJsonDepFlag>,
    npm_caching: NpmCachingStrategy,
  ) -> Self {
    Self {
      npm_installer,
      found_package_json_dep_flag,
      npm_caching,
    }
  }
}
#[async_trait::async_trait(?Send)]
impl<TNpmCacheHttpClient: NpmCacheHttpClient, TSys: NpmInstallerSys>
  deno_graph::source::NpmResolver
  for NpmDenoGraphResolver<TNpmCacheHttpClient, TSys>
{
  // Kicks off a background task to warm the registry info cache for a
  // package; purely an optimization, so failures are ignored.
  fn load_and_cache_npm_package_info(&self, package_name: &str) {
    // ok not to do this in Wasm because this is just an optimization
    #[cfg(target_arch = "wasm32")]
    {
      _ = package_name;
    }
    #[cfg(not(target_arch = "wasm32"))]
    {
      if let Some(npm_installer) = &self.npm_installer {
        let npm_installer = npm_installer.clone();
        let package_name = package_name.to_string();
        deno_unsync::spawn(async move {
          let _ignore = npm_installer.cache_package_info(&package_name).await;
        });
      }
    }
  }
  // Resolves package reqs through the installer, mapping installer
  // errors into deno_graph's npm load errors; with no installer, every
  // req fails with `NoNpmError`.
  async fn resolve_pkg_reqs(
    &self,
    package_reqs: &[PackageReq],
  ) -> NpmResolvePkgReqsResult {
    match &self.npm_installer {
      Some(npm_installer) => {
        // install package.json deps first when any were discovered
        let top_level_result = if self.found_package_json_dep_flag.is_raised() {
          npm_installer
            .ensure_top_level_package_json_install()
            .await
            .map(|_| ())
        } else {
          Ok(())
        };
        let result = npm_installer
          .add_package_reqs_raw(
            package_reqs,
            self.npm_caching.as_package_caching(package_reqs),
          )
          .await;
        NpmResolvePkgReqsResult {
          results: result
            .results
            .into_iter()
            .map(|r| {
              r.map_err(|err| match err {
                NpmResolutionError::Registry(e) => {
                  NpmLoadError::RegistryInfo(Arc::new(e))
                }
                NpmResolutionError::Resolution(e) => {
                  NpmLoadError::PackageReqResolution(Arc::new(e))
                }
                NpmResolutionError::DependencyEntry(e) => {
                  NpmLoadError::PackageReqResolution(Arc::new(e))
                }
              })
            })
            .collect(),
          dep_graph_result: match top_level_result {
            Ok(()) => result
              .dependencies_result
              .map_err(|e| Arc::new(e) as Arc<dyn deno_error::JsErrorClass>),
            Err(err) => Err(Arc::new(err)),
          },
        }
      }
      None => {
        if package_reqs.is_empty() {
          NpmResolvePkgReqsResult {
            results: Default::default(),
            dep_graph_result: Ok(()),
          }
        } else {
          let err =
            Arc::new(JsErrorBox::from_err(deno_resolver::npm::NoNpmError));
          NpmResolvePkgReqsResult {
            results: package_reqs
              .iter()
              .map(|_| Err(NpmLoadError::RegistryInfo(err.clone())))
              .collect(),
            dep_graph_result: Err(err),
          }
        }
      }
    }
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/npm_installer/factory.rs | libs/npm_installer/factory.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::sync::Arc;
use deno_npm::resolution::PackageIdNotFoundError;
use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
use deno_npm_cache::NpmCache;
use deno_npm_cache::NpmCacheHttpClient;
use deno_npm_cache::NpmCacheSetting;
use deno_npm_cache::RegistryInfoProvider;
use deno_npm_cache::TarballCache;
use deno_resolver::factory::ResolverFactory;
use deno_resolver::factory::WorkspaceFactory;
use deno_resolver::factory::WorkspaceFactorySys;
use deno_resolver::lockfile::LockfileLock;
use deno_resolver::lockfile::LockfileNpmPackageInfoApiAdapter;
use deno_semver::jsr::JsrDepPackageReq;
use deno_semver::package::PackageKind;
use deno_semver::package::PackageReq;
use futures::FutureExt;
use crate::LifecycleScriptsConfig;
use crate::NpmInstaller;
use crate::NpmInstallerOptions;
use crate::Reporter;
use crate::graph::NpmCachingStrategy;
use crate::graph::NpmDenoGraphResolver;
use crate::initializer::NpmResolutionInitializer;
use crate::initializer::NpmResolverManagedSnapshotOption;
use crate::lifecycle_scripts::LifecycleScriptsExecutor;
use crate::package_json::NpmInstallDepsProvider;
use crate::resolution::HasJsExecutionStartedFlagRc;
use crate::resolution::NpmResolutionInstaller;
// todo(https://github.com/rust-lang/rust/issues/109737): remove once_cell after get_or_try_init is stabilized
/// Lazily-initialized cell for the synchronously-constructed services.
type Deferred<T> = once_cell::sync::OnceCell<T>;
/// System trait combining the capabilities needed by the npm installer
/// and the workspace factory.
#[sys_traits::auto_impl]
pub trait NpmInstallerFactorySys:
  crate::NpmInstallerSys + WorkspaceFactorySys
{
}
/// Function that resolves an optional npm resolution snapshot (e.g.
/// from the environment) used to seed resolution initialization.
type ResolveNpmResolutionSnapshotFn = Box<
  dyn Fn() -> Result<
    Option<ValidSerializedNpmResolutionSnapshot>,
    PackageIdNotFoundError,
  >,
>;
/// Options for constructing an [`NpmInstallerFactory`].
pub struct NpmInstallerFactoryOptions {
  /// The npm cache setting to use.
  pub cache_setting: NpmCacheSetting,
  /// When resolved packages should be downloaded into the cache.
  pub caching_strategy: NpmCachingStrategy,
  /// Lifecycle scripts configuration; merged with the workspace's
  /// allow-scripts config in `lifecycle_scripts_config()`.
  pub lifecycle_scripts_config: LifecycleScriptsConfig,
  /// Resolves the npm resolution snapshot from the environment.
  pub resolve_npm_resolution_snapshot: ResolveNpmResolutionSnapshotFn,
}
/// Combined reporter trait covering all reporting done during an
/// install (resolution, graph loading, tarball caching, and install
/// progress).
pub trait InstallReporter:
  deno_npm::resolution::Reporter
  + deno_graph::source::Reporter
  + deno_npm_cache::TarballCacheReporter
  + crate::InstallProgressReporter
{
}
// blanket impl so any type implementing the component traits qualifies
impl<
  T: deno_npm::resolution::Reporter
    + deno_graph::source::Reporter
    + deno_npm_cache::TarballCacheReporter
    + crate::InstallProgressReporter,
> InstallReporter for T
{
}
/// Factory that lazily constructs and caches the services needed for
/// installing npm packages.
pub struct NpmInstallerFactory<
  TNpmCacheHttpClient: NpmCacheHttpClient,
  TReporter: Reporter,
  TSys: NpmInstallerFactorySys,
> {
  resolver_factory: Arc<ResolverFactory<TSys>>,
  has_js_execution_started_flag: HasJsExecutionStartedFlagRc,
  http_client: Arc<TNpmCacheHttpClient>,
  // lazily initialized services: `Deferred` for synchronous init,
  // `async_once_cell::OnceCell` for async init
  lifecycle_scripts_config: Deferred<Arc<LifecycleScriptsConfig>>,
  lifecycle_scripts_executor: Arc<dyn LifecycleScriptsExecutor>,
  reporter: TReporter,
  lockfile_npm_package_info_provider:
    Deferred<LockfileNpmPackageInfoApiAdapter>,
  npm_cache: Deferred<Arc<NpmCache<TSys>>>,
  npm_deno_graph_resolver: async_once_cell::OnceCell<
    Arc<NpmDenoGraphResolver<TNpmCacheHttpClient, TSys>>,
  >,
  npm_installer:
    async_once_cell::OnceCell<Arc<NpmInstaller<TNpmCacheHttpClient, TSys>>>,
  npm_resolution_initializer:
    async_once_cell::OnceCell<Arc<NpmResolutionInitializer<TSys>>>,
  npm_resolution_installer: async_once_cell::OnceCell<
    Arc<NpmResolutionInstaller<TNpmCacheHttpClient, TSys>>,
  >,
  registry_info_provider:
    Deferred<Arc<RegistryInfoProvider<TNpmCacheHttpClient, TSys>>>,
  tarball_cache: Deferred<Arc<TarballCache<TNpmCacheHttpClient, TSys>>>,
  options: NpmInstallerFactoryOptions,
  install_reporter: Option<Arc<dyn InstallReporter + 'static>>,
}
impl<
  TNpmCacheHttpClient: NpmCacheHttpClient,
  TReporter: Reporter,
  TSys: NpmInstallerFactorySys,
> NpmInstallerFactory<TNpmCacheHttpClient, TReporter, TSys>
{
  /// Creates a new factory with all lazily-initialized services unset.
  pub fn new(
    resolver_factory: Arc<ResolverFactory<TSys>>,
    http_client: Arc<TNpmCacheHttpClient>,
    lifecycle_scripts_executor: Arc<dyn LifecycleScriptsExecutor>,
    reporter: TReporter,
    install_reporter: Option<Arc<dyn InstallReporter + 'static>>,
    options: NpmInstallerFactoryOptions,
  ) -> Self {
    Self {
      resolver_factory,
      has_js_execution_started_flag: Default::default(),
      http_client,
      lifecycle_scripts_config: Default::default(),
      lifecycle_scripts_executor,
      reporter,
      lockfile_npm_package_info_provider: Default::default(),
      npm_cache: Default::default(),
      npm_deno_graph_resolver: Default::default(),
      npm_installer: Default::default(),
      npm_resolution_initializer: Default::default(),
      npm_resolution_installer: Default::default(),
      registry_info_provider: Default::default(),
      tarball_cache: Default::default(),
      install_reporter,
      options,
    }
  }
  /// Returns the flag tracking whether JS execution has started.
  pub fn has_js_execution_started_flag(&self) -> &HasJsExecutionStartedFlagRc {
    &self.has_js_execution_started_flag
  }
  /// Returns the HTTP client used for npm registry/tarball requests.
  pub fn http_client(&self) -> &Arc<TNpmCacheHttpClient> {
    &self.http_client
  }
  /// Ensures npm resolution is initialized when the npm resolver is
  /// managed; no-op otherwise.
  pub async fn initialize_npm_resolution_if_managed(
    &self,
  ) -> Result<(), anyhow::Error> {
    let npm_resolver = self.resolver_factory().npm_resolver()?;
    if npm_resolver.is_managed() {
      self
        .npm_resolution_initializer()
        .await?
        .ensure_initialized()
        .await?;
    }
    Ok(())
  }
  /// Computes the effective lifecycle scripts config by merging the
  /// factory options with the workspace's allow-scripts configuration.
  pub fn lifecycle_scripts_config(
    &self,
  ) -> Result<&Arc<LifecycleScriptsConfig>, anyhow::Error> {
    use crate::PackagesAllowedScripts;
    // keeps only the npm requirements out of the jsr/npm dep list
    fn jsr_deps_to_reqs(deps: Vec<JsrDepPackageReq>) -> Vec<PackageReq> {
      deps
        .into_iter()
        .filter_map(|p| {
          if p.kind == PackageKind::Npm {
            Some(p.req)
          } else {
            None
          }
        })
        .collect::<Vec<_>>()
    }
    self.lifecycle_scripts_config.get_or_try_init(|| {
      let workspace_factory = self.workspace_factory();
      let workspace = &workspace_factory.workspace_directory()?.workspace;
      let allow_scripts = workspace.allow_scripts()?;
      let args = &self.options.lifecycle_scripts_config;
      Ok(Arc::new(LifecycleScriptsConfig {
        // explicit options take precedence over workspace config
        allowed: match &args.allowed {
          PackagesAllowedScripts::All => PackagesAllowedScripts::All,
          PackagesAllowedScripts::Some(package_reqs) => {
            PackagesAllowedScripts::Some(package_reqs.clone())
          }
          PackagesAllowedScripts::None => match allow_scripts.allow {
            deno_config::deno_json::AllowScriptsValueConfig::All => {
              PackagesAllowedScripts::All
            }
            deno_config::deno_json::AllowScriptsValueConfig::Limited(deps) => {
              let reqs = jsr_deps_to_reqs(deps);
              if reqs.is_empty() {
                PackagesAllowedScripts::None
              } else {
                PackagesAllowedScripts::Some(reqs)
              }
            }
          },
        },
        // deny list only applies when nothing was explicitly allowed
        denied: match &args.allowed {
          PackagesAllowedScripts::All | PackagesAllowedScripts::Some(_) => {
            vec![]
          }
          PackagesAllowedScripts::None => jsr_deps_to_reqs(allow_scripts.deny),
        },
        initial_cwd: args.initial_cwd.clone(),
        root_dir: args.root_dir.clone(),
        explicit_install: args.explicit_install,
      }))
    })
  }
  /// Lazily creates the adapter that provides npm package info to the
  /// lockfile.
  pub fn lockfile_npm_package_info_provider(
    &self,
  ) -> Result<&LockfileNpmPackageInfoApiAdapter, anyhow::Error> {
    self.lockfile_npm_package_info_provider.get_or_try_init(|| {
      Ok(LockfileNpmPackageInfoApiAdapter::new(
        self.registry_info_provider()?.clone(),
        self
          .workspace_factory()
          .workspace_npm_link_packages()?
          .clone(),
      ))
    })
  }
  /// Returns the workspace lockfile, if one exists.
  pub async fn maybe_lockfile(
    &self,
  ) -> Result<Option<&Arc<LockfileLock<TSys>>>, anyhow::Error> {
    let workspace_factory = self.workspace_factory();
    let package_info_provider = self.lockfile_npm_package_info_provider()?;
    workspace_factory
      .maybe_lockfile(package_info_provider)
      .await
  }
  /// Lazily creates the npm cache.
  pub fn npm_cache(&self) -> Result<&Arc<NpmCache<TSys>>, anyhow::Error> {
    self.npm_cache.get_or_try_init(|| {
      let workspace_factory = self.workspace_factory();
      Ok(Arc::new(NpmCache::new(
        workspace_factory.npm_cache_dir()?.clone(),
        workspace_factory.sys().clone(),
        self.options.cache_setting.clone(),
        workspace_factory.npmrc()?.clone(),
      )))
    })
  }
  /// Lazily creates the deno_graph npm resolver.
  pub async fn npm_deno_graph_resolver(
    &self,
  ) -> Result<
    &Arc<NpmDenoGraphResolver<TNpmCacheHttpClient, TSys>>,
    anyhow::Error,
  > {
    self
      .npm_deno_graph_resolver
      .get_or_try_init(
        async {
          Ok(Arc::new(NpmDenoGraphResolver::new(
            self.npm_installer_if_managed().await?.cloned(),
            self
              .resolver_factory()
              .found_package_json_dep_flag()
              .clone(),
            self.options.caching_strategy,
          )))
        }
        .boxed_local(),
      )
      .await
  }
  /// Lazily creates the npm resolution initializer, seeding it from
  /// either an environment snapshot or the lockfile.
  pub async fn npm_resolution_initializer(
    &self,
  ) -> Result<&Arc<NpmResolutionInitializer<TSys>>, anyhow::Error> {
    self
      .npm_resolution_initializer
      .get_or_try_init(async move {
        let workspace_factory = self.workspace_factory();
        Ok(Arc::new(NpmResolutionInitializer::new(
          self.resolver_factory.npm_resolution().clone(),
          workspace_factory.workspace_npm_link_packages()?.clone(),
          // an explicitly provided snapshot wins over the lockfile
          match (self.options.resolve_npm_resolution_snapshot)()? {
            Some(snapshot) => {
              NpmResolverManagedSnapshotOption::Specified(Some(snapshot))
            }
            None => match self.maybe_lockfile().await? {
              Some(lockfile) => {
                NpmResolverManagedSnapshotOption::ResolveFromLockfile(
                  lockfile.clone(),
                )
              }
              None => NpmResolverManagedSnapshotOption::Specified(None),
            },
          },
        )))
      })
      .await
  }
  /// Lazily creates the npm resolution installer.
  pub async fn npm_resolution_installer(
    &self,
  ) -> Result<
    &Arc<NpmResolutionInstaller<TNpmCacheHttpClient, TSys>>,
    anyhow::Error,
  > {
    self
      .npm_resolution_installer
      .get_or_try_init(async move {
        Ok(Arc::new(NpmResolutionInstaller::new(
          self.has_js_execution_started_flag.clone(),
          self.resolver_factory.npm_version_resolver()?.clone(),
          self.registry_info_provider()?.clone(),
          self
            .install_reporter
            .as_ref()
            .map(|r| r.clone() as Arc<dyn deno_npm::resolution::Reporter>),
          self.resolver_factory.npm_resolution().clone(),
          self.maybe_lockfile().await?.cloned(),
        )))
      })
      .await
  }
  /// Returns the npm installer, or `None` for byonm or `no_npm`.
  pub async fn npm_installer_if_managed(
    &self,
  ) -> Result<
    Option<&Arc<NpmInstaller<TNpmCacheHttpClient, TSys>>>,
    anyhow::Error,
  > {
    if self.resolver_factory().use_byonm()? || self.workspace_factory().no_npm()
    {
      Ok(None)
    } else {
      Ok(Some(self.npm_installer().await?))
    }
  }
  /// Lazily creates the npm installer and its dependencies.
  pub async fn npm_installer(
    &self,
  ) -> Result<&Arc<NpmInstaller<TNpmCacheHttpClient, TSys>>, anyhow::Error> {
    self
      .npm_installer
      .get_or_try_init(
        async move {
          let workspace_factory = self.workspace_factory();
          let npm_cache = self.npm_cache()?;
          let registry_info_provider = self.registry_info_provider()?;
          let workspace_npm_link_packages =
            workspace_factory.workspace_npm_link_packages()?;
          Ok(Arc::new(NpmInstaller::new(
            self.install_reporter.clone(),
            self.lifecycle_scripts_executor.clone(),
            npm_cache.clone(),
            Arc::new(NpmInstallDepsProvider::from_workspace(
              &workspace_factory.workspace_directory()?.workspace,
            )),
            registry_info_provider.clone(),
            self.resolver_factory.npm_resolution().clone(),
            self.npm_resolution_initializer().await?.clone(),
            self.npm_resolution_installer().await?.clone(),
            &self.reporter,
            workspace_factory.sys().clone(),
            self.tarball_cache()?.clone(),
            NpmInstallerOptions {
              maybe_lockfile: self.maybe_lockfile().await?.cloned(),
              maybe_node_modules_path: workspace_factory
                .node_modules_dir_path()?
                .map(|p| p.to_path_buf()),
              lifecycle_scripts: self.lifecycle_scripts_config()?.clone(),
              system_info: self.resolver_factory.npm_system_info().clone(),
              workspace_link_packages: workspace_npm_link_packages.clone(),
            },
          )))
        }
        .boxed_local(),
      )
      .await
  }
  /// Lazily creates the registry info provider.
  pub fn registry_info_provider(
    &self,
  ) -> Result<
    &Arc<RegistryInfoProvider<TNpmCacheHttpClient, TSys>>,
    anyhow::Error,
  > {
    self.registry_info_provider.get_or_try_init(|| {
      Ok(Arc::new(RegistryInfoProvider::new(
        self.npm_cache()?.clone(),
        self.http_client().clone(),
        self.workspace_factory().npmrc()?.clone(),
      )))
    })
  }
  /// Lazily creates the tarball cache.
  pub fn tarball_cache(
    &self,
  ) -> Result<&Arc<TarballCache<TNpmCacheHttpClient, TSys>>, anyhow::Error> {
    self.tarball_cache.get_or_try_init(|| {
      let workspace_factory = self.workspace_factory();
      Ok(Arc::new(TarballCache::new(
        self.npm_cache()?.clone(),
        self.http_client.clone(),
        workspace_factory.sys().clone(),
        workspace_factory.npmrc()?.clone(),
        self
          .install_reporter
          .as_ref()
          .map(|r| r.clone() as Arc<dyn deno_npm_cache::TarballCacheReporter>),
      )))
    })
  }
  /// Returns the underlying resolver factory.
  pub fn resolver_factory(&self) -> &Arc<ResolverFactory<TSys>> {
    &self.resolver_factory
  }
  /// Returns the workspace factory from the resolver factory.
  pub fn workspace_factory(&self) -> &Arc<WorkspaceFactory<TSys>> {
    self.resolver_factory.workspace_factory()
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/npm_installer/process_state.rs | libs/npm_installer/process_state.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::ffi::OsStr;
use std::ffi::OsString;
use std::io::ErrorKind;
use std::io::Read;
use std::io::Seek;
use std::path::Path;
use std::path::PathBuf;
use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
use serde::Deserialize;
use serde::Serialize;
use sys_traits::BoxableFsFile;
/// The kind of npm resolution state passed to a subprocess.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub enum NpmProcessStateKind {
  /// A serialized managed npm resolution snapshot.
  Snapshot(deno_npm::resolution::SerializedNpmResolutionSnapshot),
  /// "Bring your own node_modules" mode; carries no snapshot.
  Byonm,
}
/// System capabilities needed by [`NpmProcessState::from_env_var`].
#[sys_traits::auto_impl]
pub trait NpmProcessStateFromEnvVarSys: sys_traits::FsOpen {}
/// The serialized npm process state which can be written to a file and then
/// the FD or path can be passed to a spawned deno process via the
/// `DENO_DONT_USE_INTERNAL_NODE_COMPAT_STATE_FD` env var.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct NpmProcessState {
  /// The resolution state (managed snapshot or byonm).
  pub kind: NpmProcessStateKind,
  /// Path to the local `node_modules` directory, if there is one.
  pub local_node_modules_path: Option<String>,
}
impl NpmProcessState {
  /// Creates process state for a managed npm resolution from the given
  /// snapshot and optional `node_modules` directory path.
  pub fn new_managed(
    snapshot: ValidSerializedNpmResolutionSnapshot,
    node_modules_path: Option<&Path>,
  ) -> Self {
    NpmProcessState {
      kind: NpmProcessStateKind::Snapshot(snapshot.into_serialized()),
      local_node_modules_path: node_modules_path
        .map(|p| p.to_string_lossy().into_owned()),
    }
  }
  /// Like [`NpmProcessState::new_managed`], for when a local
  /// `node_modules` directory path is always present.
  pub fn new_local(
    snapshot: ValidSerializedNpmResolutionSnapshot,
    node_modules_path: &Path,
  ) -> Self {
    NpmProcessState::new_managed(snapshot, Some(node_modules_path))
  }
  /// Reads and deserializes the process state from the env var value,
  /// which holds either a file descriptor/handle (not supported in
  /// Wasm) or a file path.
  ///
  /// # Errors
  ///
  /// Returns an error when the fd/path cannot be opened or read, or
  /// when the contents fail to deserialize as JSON.
  pub fn from_env_var(
    sys: &impl NpmProcessStateFromEnvVarSys,
    value: OsString,
  ) -> std::io::Result<Self> {
    /// Allows for passing either a file descriptor or file path.
    enum FdOrPath {
      Fd(usize),
      Path(PathBuf),
    }
    impl FdOrPath {
      pub fn parse(value: &OsStr) -> Self {
        // anything that parses as an integer is treated as an fd
        match value.to_string_lossy().parse::<usize>() {
          Ok(value) => FdOrPath::Fd(value),
          Err(_) => FdOrPath::Path(PathBuf::from(value)),
        }
      }
      pub fn open(
        &self,
        sys: &impl NpmProcessStateFromEnvVarSys,
      ) -> std::io::Result<sys_traits::boxed::BoxedFsFile> {
        match self {
          FdOrPath::Fd(fd) => {
            #[cfg(target_arch = "wasm32")]
            {
              let _fd = fd;
              return Err(std::io::Error::new(
                ErrorKind::Unsupported,
                "Cannot pass fd for npm process state to Wasm. Use a file path instead.",
              ));
            }
            #[cfg(all(unix, not(target_arch = "wasm32")))]
            return Ok(
              // SAFETY: Assume valid file descriptor
              unsafe {
                sys_traits::impls::RealFsFile::from_raw(
                  <std::fs::File as std::os::unix::io::FromRawFd>::from_raw_fd(
                    *fd as _,
                  ),
                )
                .into_boxed()
              },
            );
            #[cfg(windows)]
            Ok(
              // SAFETY: Assume valid file descriptor
              unsafe {
                sys_traits::impls::RealFsFile::from_raw(<std::fs::File as std::os::windows::io::FromRawHandle>::from_raw_handle(*fd as _)).into_boxed()
              },
            )
          }
          FdOrPath::Path(path) => Ok(
            sys
              .fs_open(path, &sys_traits::OpenOptions::new_read())?
              .into_boxed(),
          ),
        }
      }
    }
    let fd_or_path = FdOrPath::parse(&value);
    let mut file = fd_or_path.open(sys)?;
    let mut buf = Vec::new();
    // seek to beginning. after the file is written the position will be inherited by this subprocess,
    // and also this file might have been read before
    file.seek(std::io::SeekFrom::Start(0))?;
    file.read_to_end(&mut buf).map_err(|err| {
      std::io::Error::new(
        err.kind(),
        format!(
          // fix: message previously read "failed to reading from"
          "failed to read from {}: {}",
          match fd_or_path {
            FdOrPath::Fd(fd) => format!("fd {}", fd),
            FdOrPath::Path(path) => path.display().to_string(),
          },
          err,
        ),
      )
    })?;
    let state: NpmProcessState =
      serde_json::from_slice(&buf).map_err(|err| {
        std::io::Error::new(
          ErrorKind::InvalidData,
          format!(
            "failed to deserialize npm process state: {}\n{}",
            err,
            String::from_utf8_lossy(&buf)
          ),
        )
      })?;
    Ok(state)
  }
  /// Serializes this state to a JSON string suitable for writing to the
  /// file handed to a subprocess.
  pub fn as_serialized(&self) -> String {
    serde_json::to_string(self).unwrap()
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/npm_installer/bin_entries/windows_shim.rs | libs/npm_installer/bin_entries/windows_shim.rs | // Copyright 2018-2025 the Deno authors. MIT license.
// Windows shim generation ported from https://github.com/npm/cmd-shim
// Original code licensed under the ISC License:
//
// The ISC License
//
// Copyright (c) npm, Inc. and Contributors
//
// Permission to use, copy, modify, and/or distribute this software for any
// purpose with or without fee is hereby granted, provided that the above
// copyright notice and this permission notice appear in all copies.
//
// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
// WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
// MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
// ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
// WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
// ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
// IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
use std::io::BufRead;
use std::path::Path;
use std::path::PathBuf;
use sys_traits::FsMetadata;
use sys_traits::FsOpen;
use sys_traits::FsWrite;
use crate::BinEntriesError;
use crate::bin_entries::EntrySetupOutcome;
use crate::bin_entries::relative_path;
// note: parts of logic and pretty much all of the shims ported from https://github.com/npm/cmd-shim
/// A parsed shebang (`#!`) line.
#[derive(Debug, Clone, PartialEq, Eq, Default)]
pub(crate) struct Shebang {
  /// The interpreter program (e.g. `node` or `/bin/bash`).
  pub program: String,
  /// Arguments following the program, trimmed.
  pub args: String,
  /// `VAR=value` assignments from `/usr/bin/env -S`, including the
  /// trailing whitespace captured by the regex.
  pub vars: String,
}
/// Parses a shebang line into its env vars, program, and arguments.
/// Returns `None` when `s` (after trimming) does not start with `#!`
/// or does not match the expected shape.
fn parse_shebang(s: &str) -> Option<Shebang> {
  let trimmed = s.trim();
  if !trimmed.starts_with("#!") {
    return None;
  }
  // lifted from npm/cmd-shim
  let regex = lazy_regex::regex!(
    r"^#!\s*(?:/usr/bin/env\s+(?:-S\s+)?((?:[^ \t=]+=[^ \t=]+\s+)*))?([^ \t\r\n]+)(.*)$"
  );
  let captures = regex.captures(trimmed)?;
  let capture_str = |i: usize| captures.get(i).map(|m| m.as_str());
  Some(Shebang {
    vars: capture_str(1).unwrap_or("").to_string(),
    // the program capture is required
    program: capture_str(2)?.to_string(),
    args: capture_str(3).map(str::trim).unwrap_or("").to_string(),
  })
}
/// Reads the first line of `path` and parses it as a shebang.
/// Returns `None` on any I/O failure or when no shebang is present.
fn resolve_shebang(sys: &impl FsOpen, path: &Path) -> Option<Shebang> {
  let file = sys
    .fs_open(path, sys_traits::OpenOptions::new().read())
    .ok()?;
  let mut first_line = String::new();
  std::io::BufReader::new(file)
    .read_line(&mut first_line)
    .ok()?;
  parse_shebang(&first_line)
}
/// Data needed to generate the three bin shims (`.cmd`, sh, `.ps1`)
/// for a target script.
#[derive(Debug, Clone, PartialEq, Eq)]
pub(crate) struct ShimData {
  /// Shebang parsed from the target file, if any.
  pub shebang: Option<Shebang>,
  /// Relative path to the target script, using forward slashes.
  pub target: String,
}
impl ShimData {
  /// Creates shim data for `target`, normalizing backslashes to
  /// forward slashes.
  pub fn new(target: impl Into<String>, shebang: Option<Shebang>) -> Self {
    Self {
      shebang,
      target: target.into().replace('\\', "/"),
    }
  }
  /// The target path with backslashes, for use inside `.cmd` shims.
  fn target_win(&self) -> String {
    self.target.replace('/', "\\")
  }
  // these are all kinda hard to read, look at the unit tests to see what they generate
  /// Generates the Windows `.cmd` shim contents (CRLF line endings).
  pub fn generate_cmd(&self) -> String {
    let target_win = self.target_win();
    let shebang_data = self.shebang.as_ref().map(
      |Shebang {
         program,
         args,
         vars,
       }| (program.replace('\\', "/"), args.as_str(), vars.as_str()),
    );
    capacity_builder::StringBuilder::build(|builder| {
      builder.append("@ECHO off\r\n");
      builder.append("GOTO start\r\n");
      builder.append(":find_dp0\r\n");
      builder.append("SET dp0=%~dp0\r\n");
      builder.append("EXIT /b\r\n");
      builder.append(":start\r\n");
      builder.append("SETLOCAL\r\n");
      builder.append("CALL :find_dp0\r\n");
      match &shebang_data {
        // no shebang: invoke the target directly
        None => {
          builder.append("\"%dp0%\\");
          builder.append(&target_win);
          builder.append("\" %*\r\n");
        }
        // shebang: set env vars, prefer a local program, fall back to PATH
        Some((prog, args, vars)) => {
          for var in vars.split_whitespace().filter(|v| v.contains('=')) {
            builder.append("SET ");
            builder.append(var);
            builder.append("\r\n");
          }
          builder.append("\r\n");
          builder.append("IF EXIST \"%dp0%\\");
          builder.append(prog);
          builder.append(".exe\" (\r\n");
          builder.append("  SET \"_prog=%dp0%\\");
          builder.append(prog);
          builder.append(".exe\"\r\n");
          builder.append(") ELSE (\r\n");
          builder.append("  SET \"_prog=");
          builder.append(prog);
          builder.append("\"\r\n");
          builder.append("  SET PATHEXT=%PATHEXT:;.JS;=;%\r\n");
          builder.append(")\r\n");
          builder.append("\r\n");
          builder.append("endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & \"%_prog%\" ");
          builder.append(*args);
          builder.append(" \"%dp0%\\");
          builder.append(&target_win);
          builder.append("\" %*\r\n");
        }
      }
    })
    .unwrap()
  }
  // these are all kinda hard to read, look at the unit tests to see what they generate
  /// Generates the POSIX shell shim contents (LF line endings).
  pub fn generate_sh(&self) -> String {
    let shebang_data = self.shebang.as_ref().map(
      |Shebang {
         program,
         args,
         vars,
       }| (program.replace('\\', "/"), args.as_str(), vars.as_str()),
    );
    capacity_builder::StringBuilder::build(|builder| {
      builder.append("#!/bin/sh\n");
      builder.append(
        "basedir=$(dirname \"$(echo \"$0\" | sed -e 's,\\\\,/,g')\")\n",
      );
      builder.append("\n");
      builder.append("case `uname` in\n");
      builder.append("    *CYGWIN*|*MINGW*|*MSYS*)\n");
      builder.append("        if command -v cygpath > /dev/null 2>&1; then\n");
      builder.append("            basedir=`cygpath -w \"$basedir\"`\n");
      builder.append("        fi\n");
      builder.append("    ;;\n");
      builder.append("esac\n");
      builder.append("\n");
      match &shebang_data {
        // no shebang: exec the target directly
        None => {
          builder.append("exec \"$basedir/");
          builder.append(&self.target);
          builder.append("\" \"$@\"\n");
        }
        // shebang: prefer a program next to the shim, fall back to PATH
        Some((prog, args, vars)) => {
          builder.append("if [ -x \"$basedir/");
          builder.append(prog);
          builder.append("\" ]; then\n");
          builder.append("  exec ");
          builder.append(*vars);
          builder.append("\"$basedir/");
          builder.append(prog);
          builder.append("\" ");
          builder.append(*args);
          builder.append(" \"$basedir/");
          builder.append(&self.target);
          builder.append("\" \"$@\"\n");
          builder.append("else\n");
          builder.append("  exec ");
          builder.append(*vars);
          builder.append(prog);
          builder.append(" ");
          builder.append(*args);
          builder.append(" \"$basedir/");
          builder.append(&self.target);
          builder.append("\" \"$@\"\n");
          builder.append("fi\n");
        }
      }
    })
    .unwrap()
  }
  // these are all kinda hard to read, look at the unit tests to see what they generate
  /// Generates the PowerShell `.ps1` shim contents (LF line endings).
  /// Note: env vars from the shebang are not emitted here.
  pub fn generate_pwsh(&self) -> String {
    let shebang_data = self.shebang.as_ref().map(
      |Shebang {
         program,
         args,
         vars: _,
       }| (program.replace('\\', "/"), args.as_str()),
    );
    capacity_builder::StringBuilder::build(|builder| {
      builder.append("#!/usr/bin/env pwsh\n");
      builder.append(
        "$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent\n",
      );
      builder.append("\n");
      builder.append("$exe=\"\"\n");
      builder.append(
        "if ($PSVersionTable.PSVersion -lt \"6.0\" -or $IsWindows) {\n",
      );
      builder.append("  $exe=\".exe\"\n");
      builder.append("}\n");
      match &shebang_data {
        // no shebang: invoke the target directly, piping stdin if any
        None => {
          builder.append("  if ($MyInvocation.ExpectingInput) {\n");
          builder.append("    $input | & \"$basedir/");
          builder.append(&self.target);
          builder.append("\" $args\n");
          builder.append("  } else {\n");
          builder.append("    & \"$basedir/");
          builder.append(&self.target);
          builder.append("\" $args\n");
          builder.append("  }\n");
          builder.append("exit $LASTEXITCODE\n");
        }
        // shebang: prefer a local program, fall back to PATH
        Some((prog, args)) => {
          builder.append("$ret=0\n");
          builder.append("if (Test-Path \"$basedir/");
          builder.append(prog);
          builder.append("$exe\") {\n");
          builder.append("  if ($MyInvocation.ExpectingInput) {\n");
          builder.append("    $input | & \"$basedir/");
          builder.append(prog);
          builder.append("$exe\" ");
          builder.append(*args);
          builder.append(" \"$basedir/");
          builder.append(&self.target);
          builder.append("\" $args\n");
          builder.append("  } else {\n");
          builder.append("    & \"$basedir/");
          builder.append(prog);
          builder.append("$exe\" ");
          builder.append(*args);
          builder.append(" \"$basedir/");
          builder.append(&self.target);
          builder.append("\" $args\n");
          builder.append("  }\n");
          builder.append("  $ret=$LASTEXITCODE\n");
          builder.append("} else {\n");
          builder.append("  if ($MyInvocation.ExpectingInput) {\n");
          builder.append("    $input | & \"");
          builder.append(prog);
          builder.append("$exe\" ");
          builder.append(*args);
          builder.append(" \"$basedir/");
          builder.append(&self.target);
          builder.append("\" $args\n");
          builder.append("  } else {\n");
          builder.append("    & \"");
          builder.append(prog);
          builder.append("$exe\" ");
          builder.append(*args);
          builder.append(" \"$basedir/");
          builder.append(&self.target);
          builder.append("\" $args\n");
          builder.append("  }\n");
          builder.append("  $ret=$LASTEXITCODE\n");
          builder.append("}\n");
          builder.append("exit $ret\n");
        }
      }
    })
    .unwrap()
  }
}
/// Writes the three bin shims (`.cmd`, extensionless sh, `.ps1`) for a
/// package bin entry into the `node_modules/.bin` directory.
///
/// Returns [`EntrySetupOutcome::MissingEntrypoint`] when neither the
/// bin script nor an `.exe` sibling exists.
pub fn set_up_bin_shim<'a>(
  sys: &(impl FsOpen + FsWrite + FsMetadata),
  package: &'a deno_npm::NpmResolutionPackage,
  extra: &'a deno_npm::NpmPackageExtraInfo,
  bin_name: &'a str,
  bin_script: &'a str,
  package_path: &'a Path,
  bin_node_modules_dir_path: &'a Path,
) -> Result<EntrySetupOutcome<'a>, BinEntriesError> {
  let shim_path = bin_node_modules_dir_path.join(bin_name);
  let mut target_file = package_path.join(bin_script);
  if !sys.fs_exists_no_err(&target_file) {
    // fall back to an `.exe` sibling when the script itself is missing
    let exe_variant = target_file.with_extension("exe");
    if !sys.fs_exists_no_err(&exe_variant) {
      return Ok(EntrySetupOutcome::MissingEntrypoint {
        bin_name,
        package_path,
        entrypoint: target_file,
        package,
        extra,
      });
    }
    target_file = exe_variant;
  }
  let rel_target =
    relative_path(bin_node_modules_dir_path, &target_file).unwrap();
  let shebang = resolve_shebang(sys, &target_file);
  let shim = ShimData::new(rel_target.to_string_lossy(), shebang);
  let write_shim = |shim_file: PathBuf, text: &str| {
    sys.fs_write(&shim_file, text).map_err(|err| {
      BinEntriesError::SetUpBin {
        name: bin_name.to_string(),
        path: shim_file,
        source: Box::new(err.into()),
      }
    })
  };
  write_shim(shim_path.with_extension("cmd"), &shim.generate_cmd())?;
  write_shim(shim_path.clone(), &shim.generate_sh())?;
  write_shim(shim_path.with_extension("ps1"), &shim.generate_pwsh())?;
  Ok(EntrySetupOutcome::Success)
}
#[cfg(test)]
mod tests {
  // covers shebang parsing and the exact cmd/sh/pwsh shim text
  use super::*;
  /// Trims the minimum indent from each line of a multiline string,
  /// removing leading and trailing blank lines.
  fn trim_indent(text: &str) -> String {
    trim_indent_with(text, "\n")
  }
  // same as `trim_indent`, but joins lines with CRLF (for .cmd shims)
  fn trim_indent_crlf(text: &str) -> String {
    trim_indent_with(text, "\r\n")
  }
  fn trim_indent_with(text: &str, line_ending: &str) -> String {
    let text = text.strip_prefix('\n').unwrap_or(text);
    let lines: Vec<&str> = text.lines().collect();
    let min_indent = lines
      .iter()
      .filter(|line| !line.trim().is_empty())
      .map(|line| line.len() - line.trim_start().len())
      .min()
      .unwrap_or(0);
    lines
      .iter()
      .map(|line| {
        if line.len() <= min_indent {
          line.trim_start()
        } else {
          &line[min_indent..]
        }
      })
      .collect::<Vec<_>>()
      .join(line_ending)
  }
  #[test]
  fn test_parse_shebang_node() {
    assert_eq!(
      parse_shebang("#!/usr/bin/env node\n"),
      Some(Shebang {
        program: "node".into(),
        args: String::new(),
        vars: String::new(),
      })
    );
  }
  #[test]
  fn test_parse_shebang_with_args() {
    assert_eq!(
      parse_shebang("#!/usr/bin/env node --experimental"),
      Some(Shebang {
        program: "node".into(),
        args: "--experimental".into(),
        vars: String::new(),
      })
    );
  }
  #[test]
  fn test_parse_shebang_with_env_vars() {
    assert_eq!(
      parse_shebang("#!/usr/bin/env -S NODE_ENV=production node"),
      Some(Shebang {
        program: "node".into(),
        args: String::new(),
        vars: "NODE_ENV=production ".into(), // trailing space is intentional
      })
    );
  }
  #[test]
  fn test_parse_shebang_direct_path() {
    assert_eq!(
      parse_shebang("#!/bin/bash"),
      Some(Shebang {
        program: "/bin/bash".into(),
        args: String::new(),
        vars: String::new(),
      })
    );
  }
  #[test]
  fn test_parse_shebang_invalid() {
    assert_eq!(parse_shebang("not a shebang"), None);
    assert_eq!(parse_shebang(""), None);
  }
  #[test]
  fn test_sh_shim_raw() {
    let shim = ShimData::new("../pkg/bin/cli.js", None);
    assert_eq!(
      shim.generate_sh(),
      trim_indent(
        r#"
        #!/bin/sh
        basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")

        case `uname` in
            *CYGWIN*|*MINGW*|*MSYS*)
                if command -v cygpath > /dev/null 2>&1; then
                    basedir=`cygpath -w "$basedir"`
                fi
            ;;
        esac

        exec "$basedir/../pkg/bin/cli.js" "$@"
        "#
      )
    );
  }
  #[test]
  fn test_sh_shim_with_program() {
    let shim = ShimData::new(
      "../pkg/bin/cli.js",
      Some(Shebang {
        program: "node".into(),
        args: String::new(),
        vars: String::new(),
      }),
    );
    assert_eq!(
      shim.generate_sh(),
      trim_indent(
        r#"
        #!/bin/sh
        basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")

        case `uname` in
            *CYGWIN*|*MINGW*|*MSYS*)
                if command -v cygpath > /dev/null 2>&1; then
                    basedir=`cygpath -w "$basedir"`
                fi
            ;;
        esac

        if [ -x "$basedir/node" ]; then
          exec "$basedir/node"  "$basedir/../pkg/bin/cli.js" "$@"
        else
          exec node  "$basedir/../pkg/bin/cli.js" "$@"
        fi
        "#
      )
    );
  }
  #[test]
  fn test_cmd_shim_raw() {
    let shim = ShimData::new("../pkg/bin/cli.js", None);
    assert_eq!(
      shim.generate_cmd(),
      trim_indent_crlf(
        r#"
        @ECHO off
        GOTO start
        :find_dp0
        SET dp0=%~dp0
        EXIT /b
        :start
        SETLOCAL
        CALL :find_dp0
        "%dp0%\..\pkg\bin\cli.js" %*
        "#
      )
    );
  }
  #[test]
  fn test_cmd_shim_with_program() {
    let shim = ShimData::new(
      "../pkg/bin/cli.js",
      Some(Shebang {
        program: "node".into(),
        args: String::new(),
        vars: String::new(),
      }),
    );
    assert_eq!(
      shim.generate_cmd(),
      trim_indent_crlf(
        r#"
        @ECHO off
        GOTO start
        :find_dp0
        SET dp0=%~dp0
        EXIT /b
        :start
        SETLOCAL
        CALL :find_dp0

        IF EXIST "%dp0%\node.exe" (
          SET "_prog=%dp0%\node.exe"
        ) ELSE (
          SET "_prog=node"
          SET PATHEXT=%PATHEXT:;.JS;=;%
        )

        endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%"  "%dp0%\..\pkg\bin\cli.js" %*
        "#
      )
    );
  }
  #[test]
  fn test_pwsh_shim_raw() {
    let shim = ShimData::new("../pkg/bin/cli.js", None);
    assert_eq!(
      shim.generate_pwsh(),
      trim_indent(
        r#"
        #!/usr/bin/env pwsh
        $basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent

        $exe=""
        if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
          $exe=".exe"
        }
          if ($MyInvocation.ExpectingInput) {
            $input | & "$basedir/../pkg/bin/cli.js" $args
          } else {
            & "$basedir/../pkg/bin/cli.js" $args
          }
        exit $LASTEXITCODE
        "#
      )
    );
  }
  #[test]
  fn test_pwsh_shim_with_program() {
    let shim = ShimData::new(
      "../pkg/bin/cli.js",
      Some(Shebang {
        program: "node".into(),
        args: String::new(),
        vars: String::new(),
      }),
    );
    assert_eq!(
      shim.generate_pwsh(),
      trim_indent(
        r#"
        #!/usr/bin/env pwsh
        $basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent

        $exe=""
        if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
          $exe=".exe"
        }
        $ret=0
        if (Test-Path "$basedir/node$exe") {
          if ($MyInvocation.ExpectingInput) {
            $input | & "$basedir/node$exe"  "$basedir/../pkg/bin/cli.js" $args
          } else {
            & "$basedir/node$exe"  "$basedir/../pkg/bin/cli.js" $args
          }
          $ret=$LASTEXITCODE
        } else {
          if ($MyInvocation.ExpectingInput) {
            $input | & "node$exe"  "$basedir/../pkg/bin/cli.js" $args
          } else {
            & "node$exe"  "$basedir/../pkg/bin/cli.js" $args
          }
          $ret=$LASTEXITCODE
        }
        exit $ret
        "#
      )
    );
  }
  #[test]
  fn test_shim_with_args_and_vars() {
    let shim = ShimData::new(
      "bin/cli.js",
      Some(Shebang {
        program: "node".into(),
        args: "--experimental-modules".into(),
        vars: "NODE_ENV=prod ".into(), // trailing space is intentional
      }),
    );
    let sh = shim.generate_sh();
    assert!(
      sh.contains("NODE_ENV=prod \"$basedir/node\" --experimental-modules")
    );
    assert!(sh.contains("NODE_ENV=prod node --experimental-modules"));
    let cmd = shim.generate_cmd();
    assert!(cmd.contains("SET NODE_ENV=prod\r\n"));
    assert!(cmd.contains("\"%_prog%\" --experimental-modules"));
    let pwsh = shim.generate_pwsh();
    assert!(pwsh.contains("\"$basedir/node$exe\" --experimental-modules"));
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/resolver/lockfile.rs | libs/resolver/lockfile.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::collections::HashSet;
use std::path::PathBuf;
use anyhow::Context;
use anyhow::Error as AnyError;
use capacity_builder::StringBuilder;
use deno_config::workspace::Workspace;
use deno_error::JsErrorBox;
use deno_lockfile::Lockfile;
use deno_lockfile::NpmPackageInfoProvider;
use deno_lockfile::WorkspaceMemberConfig;
use deno_maybe_sync::MaybeSend;
use deno_maybe_sync::MaybeSync;
use deno_npm::registry::NpmRegistryApi;
use deno_npm::resolution::DefaultTarballUrlProvider;
use deno_npm::resolution::NpmRegistryDefaultTarballUrlProvider;
use deno_package_json::PackageJsonDepValue;
use deno_path_util::fs::atomic_write_file_with_retries;
use deno_semver::jsr::JsrDepPackageReq;
use deno_semver::package::PackageNv;
use futures::TryStreamExt;
use futures::stream::FuturesOrdered;
use indexmap::IndexMap;
use node_resolver::PackageJson;
use parking_lot::Mutex;
use parking_lot::MutexGuard;
use crate::workspace::WorkspaceNpmLinkPackagesRc;
/// Extension trait combining [`NpmRegistryApi`] with the crate's optional
/// send/sync bounds so implementations can be stored as a trait object.
pub trait NpmRegistryApiEx: NpmRegistryApi + MaybeSend + MaybeSync {}

// Blanket implementation: any registry api satisfying the bounds qualifies.
impl<T> NpmRegistryApiEx for T where T: NpmRegistryApi + MaybeSend + MaybeSync {}

// Shared handle to a registry api (Arc or Rc depending on build features).
#[allow(clippy::disallowed_types)]
type NpmRegistryApiRc = deno_maybe_sync::MaybeArc<dyn NpmRegistryApiEx>;

/// Adapts an npm registry api to `deno_lockfile`'s package info interface,
/// resolving version info with awareness of workspace npm link packages.
pub struct LockfileNpmPackageInfoApiAdapter {
  api: NpmRegistryApiRc,
  workspace_link_packages: WorkspaceNpmLinkPackagesRc,
}
impl LockfileNpmPackageInfoApiAdapter {
  pub fn new(
    api: NpmRegistryApiRc,
    workspace_link_packages: WorkspaceNpmLinkPackagesRc,
  ) -> Self {
    Self {
      api,
      workspace_link_packages,
    }
  }

  /// Fetches registry information for each package and converts it to the
  /// lockfile v5 info shape. Results are returned in input order
  /// (`FuturesOrdered`).
  async fn get_infos(
    &self,
    values: &[PackageNv],
  ) -> Result<
    Vec<deno_lockfile::Lockfile5NpmInfo>,
    Box<dyn std::error::Error + Send + Sync>,
  > {
    let futs = values
      .iter()
      .map(|v| async move {
        let info = self.api.package_info(v.name.as_str()).await?;
        let version_info =
          info.version_info(v, &self.workspace_link_packages.0)?;
        Ok::<_, Box<dyn std::error::Error + Send + Sync>>(
          deno_lockfile::Lockfile5NpmInfo {
            // only record a tarball url when it differs from the
            // registry's default url for this name/version
            tarball_url: version_info.dist.as_ref().and_then(|d| {
              let tarball_url_provider = NpmRegistryDefaultTarballUrlProvider;
              if d.tarball == tarball_url_provider.default_tarball_url(v) {
                None
              } else {
                Some(d.tarball.clone())
              }
            }),
            optional_dependencies: version_info
              .optional_dependencies
              .iter()
              .map(|(k, v)| (k.to_string(), v.to_string()))
              .collect::<std::collections::BTreeMap<_, _>>(),
            cpu: version_info.cpu.iter().map(|s| s.to_string()).collect(),
            os: version_info.os.iter().map(|s| s.to_string()).collect(),
            deprecated: version_info.deprecated.is_some(),
            bin: version_info.bin.is_some(),
            // true when the package declares any install lifecycle script
            scripts: version_info.scripts.contains_key("preinstall")
              || version_info.scripts.contains_key("install")
              || version_info.scripts.contains_key("postinstall"),
            // peer dependencies flagged optional in peerDependenciesMeta
            optional_peers: version_info
              .peer_dependencies_meta
              .iter()
              .filter_map(|(k, v)| {
                if v.optional {
                  version_info
                    .peer_dependencies
                    .get(k)
                    .map(|v| (k.to_string(), v.to_string()))
                } else {
                  None
                }
              })
              .collect::<std::collections::BTreeMap<_, _>>(),
          },
        )
      })
      .collect::<FuturesOrdered<_>>();
    let package_infos = futs.try_collect::<Vec<_>>().await?;
    Ok(package_infos)
  }
}
#[async_trait::async_trait(?Send)]
impl deno_lockfile::NpmPackageInfoProvider
  for LockfileNpmPackageInfoApiAdapter
{
  /// Fetches lockfile npm info for the given packages, retrying once with
  /// forcibly reloaded registry data when the first attempt fails.
  async fn get_npm_package_info(
    &self,
    values: &[PackageNv],
  ) -> Result<
    Vec<deno_lockfile::Lockfile5NpmInfo>,
    Box<dyn std::error::Error + Send + Sync>,
  > {
    let package_infos = self.get_infos(values).await;
    match package_infos {
      Ok(package_infos) => Ok(package_infos),
      Err(err) => {
        // `mark_force_reload` returns whether a reload was newly requested;
        // only retry in that case, otherwise surface the original error
        if self.api.mark_force_reload() {
          self.get_infos(values).await
        } else {
          Err(err)
        }
      }
    }
  }
}
/// Options for [`LockfileLock::read_from_path`].
#[derive(Debug)]
pub struct LockfileReadFromPathOptions {
  pub file_path: PathBuf,
  pub frozen: bool,
  /// Causes the lockfile to only be read from, but not written to.
  pub skip_write: bool,
}

/// System capabilities required for reading and atomically writing
/// lockfiles.
#[sys_traits::auto_impl]
pub trait LockfileSys:
  deno_path_util::fs::AtomicWriteFileWithRetriesSys
  + sys_traits::FsRead
  + std::fmt::Debug
{
}

/// Wrapper around a [`MutexGuard`] providing deref access to the locked
/// value.
pub struct Guard<'a, T> {
  guard: MutexGuard<'a, T>,
}

impl<T> std::ops::Deref for Guard<'_, T> {
  type Target = T;

  fn deref(&self) -> &Self::Target {
    &self.guard
  }
}

impl<T> std::ops::DerefMut for Guard<'_, T> {
  fn deref_mut(&mut self) -> &mut Self::Target {
    &mut self.guard
  }
}

/// Flags controlling lockfile discovery and usage.
#[derive(Debug, Clone)]
pub struct LockfileFlags {
  pub no_lock: bool,
  pub frozen_lockfile: Option<bool>,
  pub lock: Option<PathBuf>,
  pub skip_write: bool,
  pub no_config: bool,
  pub no_npm: bool,
}

#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum LockfileWriteError {
  /// The lockfile content changed while frozen (see `error_if_changed`).
  #[class(inherit)]
  #[error(transparent)]
  Changed(JsErrorBox),
  #[class(inherit)]
  #[error("Failed writing lockfile")]
  Io(#[source] std::io::Error),
}

#[allow(clippy::disallowed_types)]
pub type LockfileLockRc<TSys> = deno_maybe_sync::MaybeArc<LockfileLock<TSys>>;

/// A lockfile behind a mutex, along with the settings it was loaded with.
#[derive(Debug)]
pub struct LockfileLock<TSys: LockfileSys> {
  sys: TSys,
  lockfile: Mutex<Lockfile>,
  pub filename: PathBuf,
  frozen: bool,
  skip_write: bool,
}
impl<TSys: LockfileSys> LockfileLock<TSys> {
  /// Get the inner deno_lockfile::Lockfile.
  pub fn lock(&self) -> Guard<'_, Lockfile> {
    Guard {
      guard: self.lockfile.lock(),
    }
  }

  /// Creates an adapter for the lockfile that can be provided to deno_graph.
  #[cfg(feature = "graph")]
  pub fn as_deno_graph_locker(&self) -> DenoGraphLocker<'_, TSys> {
    DenoGraphLocker(self)
  }

  /// Forwards the workspace configuration to the inner lockfile.
  pub fn set_workspace_config(
    &self,
    options: deno_lockfile::SetWorkspaceConfigOptions,
  ) {
    self.lockfile.lock().set_workspace_config(options);
  }

  /// Seeds a module graph with the lockfile's redirects and package
  /// specifier mappings.
  #[cfg(feature = "graph")]
  pub fn fill_graph(&self, graph: &mut deno_graph::ModuleGraph) {
    let lockfile = self.lockfile.lock();
    graph.fill_from_lockfile(deno_graph::FillFromLockfileOptions {
      redirects: lockfile
        .content
        .redirects
        .iter()
        .map(|(from, to)| (from.as_str(), to.as_str())),
      package_specifiers: lockfile
        .content
        .packages
        .specifiers
        .iter()
        .map(|(dep, id)| (dep, id.as_str())),
    });
  }

  /// Whether the inner lockfile is in overwrite mode.
  pub fn overwrite(&self) -> bool {
    self.lockfile.lock().overwrite
  }

  /// Writes the lockfile to disk if its content changed. No-op when
  /// `skip_write` is set; errors when frozen and out of date, or on I/O
  /// failure.
  pub fn write_if_changed(&self) -> Result<(), LockfileWriteError> {
    if self.skip_write {
      return Ok(());
    }
    self
      .error_if_changed()
      .map_err(LockfileWriteError::Changed)?;
    let mut lockfile = self.lockfile.lock();
    let Some(bytes) = lockfile.resolve_write_bytes() else {
      return Ok(()); // nothing to do
    };
    // do an atomic write to reduce the chance of multiple deno
    // processes corrupting the file
    const CACHE_PERM: u32 = 0o644;
    atomic_write_file_with_retries(
      &self.sys,
      &lockfile.filename,
      &bytes,
      CACHE_PERM,
    )
    .map_err(LockfileWriteError::Io)?;
    lockfile.has_content_changed = false;
    Ok(())
  }

  /// Discovers and loads the lockfile for a workspace, then applies the
  /// workspace configuration (root, members, links) to it.
  ///
  /// Returns `Ok(None)` when lockfiles are disabled (`no_lock`) or no
  /// lockfile path could be determined for the workspace.
  pub async fn discover(
    sys: TSys,
    flags: LockfileFlags,
    workspace: &Workspace,
    maybe_external_import_map: Option<&serde_json::Value>,
    api: &dyn NpmPackageInfoProvider,
  ) -> Result<Option<Self>, AnyError> {
    // Collects the npm dependency requirements declared in a package.json
    // (dependencies + devDependencies), ignoring entries that can't
    // contribute to npm resolution here.
    fn pkg_json_deps(
      maybe_pkg_json: Option<&PackageJson>,
    ) -> HashSet<JsrDepPackageReq> {
      let Some(pkg_json) = maybe_pkg_json else {
        return Default::default();
      };
      let deps = pkg_json.resolve_local_package_json_deps();
      deps
        .dependencies
        .values()
        .chain(deps.dev_dependencies.values())
        .filter_map(|dep| dep.as_ref().ok())
        .filter_map(|dep| match dep {
          PackageJsonDepValue::File(_) => {
            // ignored because this will have its own separate lockfile
            None
          }
          PackageJsonDepValue::Req(req) => {
            Some(JsrDepPackageReq::npm(req.clone()))
          }
          PackageJsonDepValue::JsrReq(req) => {
            // TODO: remove once we support JSR specifiers in package.json
            log::warn!(
              "JSR specifiers are not yet supported in package.json: {req}"
            );
            None
          }
          PackageJsonDepValue::Workspace(_) => None,
        })
        .collect()
    }

    if flags.no_lock {
      return Ok(None);
    }
    // an explicit --lock path takes precedence over workspace resolution
    let file_path = match flags.lock {
      Some(path) => path,
      None => match workspace.resolve_lockfile_path()? {
        Some(path) => path,
        None => return Ok(None),
      },
    };
    let root_folder = workspace.root_folder_configs();
    // the flag overrides the deno.json lock config's frozen setting
    let frozen = flags.frozen_lockfile.unwrap_or_else(|| {
      root_folder
        .deno_json
        .as_ref()
        .and_then(|c| c.to_lock_config().ok().flatten().map(|c| c.frozen()))
        .unwrap_or(false)
    });
    let lockfile = Self::read_from_path(
      sys,
      LockfileReadFromPathOptions {
        file_path,
        frozen,
        skip_write: flags.skip_write,
      },
      api,
    )
    .await?;
    let root_url = workspace.root_dir_url();
    let config = deno_lockfile::WorkspaceConfig {
      root: WorkspaceMemberConfig {
        package_json_deps: pkg_json_deps(root_folder.pkg_json.as_deref()),
        // an externally specified import map takes precedence over the
        // root deno.json dependencies
        dependencies: if let Some(map) = maybe_external_import_map {
          deno_config::import_map::import_map_deps(map)
        } else {
          root_folder
            .deno_json
            .as_deref()
            .map(|d| d.dependencies())
            .unwrap_or_default()
        },
      },
      // non-root members keyed by their path relative to the root
      members: workspace
        .config_folders()
        .iter()
        .filter(|(folder_url, _)| *folder_url != root_url)
        .filter_map(|(folder_url, folder)| {
          Some((
            {
              // should never be None here, but just ignore members that
              // do fail for this
              let mut relative_path = root_url.make_relative(folder_url)?;
              if relative_path.ends_with('/') {
                // make it slightly cleaner by removing the trailing slash
                relative_path.pop();
              }
              relative_path
            },
            {
              let config = WorkspaceMemberConfig {
                package_json_deps: pkg_json_deps(folder.pkg_json.as_deref()),
                dependencies: folder
                  .deno_json
                  .as_deref()
                  .map(|d| d.dependencies())
                  .unwrap_or_default(),
              };
              if config.package_json_deps.is_empty()
                && config.dependencies.is_empty()
              {
                // exclude empty workspace members
                return None;
              }
              config
            },
          ))
        })
        .collect(),
      // linked packages: package.json links keyed as "npm:name[@version]"
      // and deno.json links keyed as "jsr:name[@version]"
      links: workspace
        .link_pkg_jsons()
        .filter_map(|pkg_json| {
          fn collect_deps(
            deps: Option<&IndexMap<String, String>>,
          ) -> HashSet<JsrDepPackageReq> {
            deps
              .map(|i| {
                i.iter()
                  .filter_map(|(k, v)| PackageJsonDepValue::parse(k, v).ok())
                  .filter_map(|dep| match dep {
                    PackageJsonDepValue::Req(req) => {
                      Some(JsrDepPackageReq::npm(req.clone()))
                    }
                    // not supported
                    PackageJsonDepValue::File(_)
                    | PackageJsonDepValue::Workspace(_)
                    | PackageJsonDepValue::JsrReq(_) => None,
                  })
                  .collect()
              })
              .unwrap_or_default()
          }

          let name = pkg_json.name.as_ref()?;
          let key = StringBuilder::<String>::build(|builder| {
            builder.append("npm:");
            builder.append(name);
            if let Some(version) = &pkg_json.version {
              builder.append('@');
              builder.append(version);
            }
          })
          .unwrap();
          // anything that affects npm resolution should go here in order to bust
          // the npm resolution when it changes
          let value = deno_lockfile::LockfileLinkContent {
            dependencies: collect_deps(pkg_json.dependencies.as_ref()),
            optional_dependencies: collect_deps(
              pkg_json.optional_dependencies.as_ref(),
            ),
            peer_dependencies: collect_deps(
              pkg_json.peer_dependencies.as_ref(),
            ),
            peer_dependencies_meta: pkg_json
              .peer_dependencies_meta
              .clone()
              .and_then(|v| serde_json::from_value(v).ok())
              .unwrap_or_default(),
          };
          Some((key, value))
        })
        .chain(workspace.link_deno_jsons().filter_map(|deno_json| {
          let name = deno_json.json.name.as_ref()?;
          let key = StringBuilder::<String>::build(|builder| {
            builder.append("jsr:");
            builder.append(name);
            if let Some(version) = &deno_json.json.version {
              builder.append('@');
              builder.append(version);
            }
          })
          .unwrap();
          let value = deno_lockfile::LockfileLinkContent {
            dependencies: deno_json.dependencies(),
            optional_dependencies: Default::default(),
            peer_dependencies: Default::default(),
            peer_dependencies_meta: Default::default(),
          };
          Some((key, value))
        }))
        .collect(),
    };
    lockfile.set_workspace_config(deno_lockfile::SetWorkspaceConfigOptions {
      no_npm: flags.no_npm,
      no_config: flags.no_config,
      config,
    });
    Ok(Some(lockfile))
  }

  /// Reads a lockfile from the given path, creating an empty one when the
  /// file does not exist. Other read errors are surfaced with context.
  pub async fn read_from_path(
    sys: TSys,
    opts: LockfileReadFromPathOptions,
    api: &dyn deno_lockfile::NpmPackageInfoProvider,
  ) -> Result<LockfileLock<TSys>, AnyError> {
    let lockfile = match sys.fs_read_to_string(&opts.file_path) {
      Ok(text) => {
        Lockfile::new(
          deno_lockfile::NewLockfileOptions {
            file_path: opts.file_path,
            content: &text,
            overwrite: false,
          },
          api,
        )
        .await?
      }
      Err(err) if err.kind() == std::io::ErrorKind::NotFound => {
        Lockfile::new_empty(opts.file_path, false)
      }
      Err(err) => {
        return Err(err).with_context(|| {
          format!("Failed reading lockfile '{}'", opts.file_path.display())
        });
      }
    };
    Ok(LockfileLock {
      sys,
      filename: lockfile.filename.clone(),
      lockfile: Mutex::new(lockfile),
      frozen: opts.frozen,
      skip_write: opts.skip_write,
    })
  }

  /// When frozen, returns an error containing a diff if the lockfile
  /// content changed since it was read; otherwise a no-op.
  pub fn error_if_changed(&self) -> Result<(), JsErrorBox> {
    if !self.frozen {
      return Ok(());
    }
    let lockfile = self.lockfile.lock();
    if lockfile.has_content_changed {
      let contents = self
        .sys
        .fs_read_to_string(&lockfile.filename)
        .unwrap_or_default();
      let new_contents = lockfile.as_json_string();
      let diff = crate::display::diff(&contents, &new_contents);
      // has an extra newline at the end
      let diff = diff.trim_end();
      Err(JsErrorBox::generic(format!(
        "The lockfile is out of date. Run `deno install --frozen=false`, or rerun with `--frozen=false` to update it.\nchanges:\n{diff}"
      )))
    } else {
      Ok(())
    }
  }
}
/// An adapter to use the lockfile with `deno_graph`.
#[cfg(feature = "graph")]
pub struct DenoGraphLocker<'a, TSys: LockfileSys>(&'a LockfileLock<TSys>);

#[cfg(feature = "graph")]
impl<TSys: LockfileSys> deno_graph::source::Locker
  for DenoGraphLocker<'_, TSys>
{
  // Looks up a stored checksum for a remote specifier, if any.
  fn get_remote_checksum(
    &self,
    specifier: &url::Url,
  ) -> Option<deno_graph::source::LoaderChecksum> {
    self
      .0
      .lock()
      .remote()
      .get(specifier.as_str())
      .map(|s| deno_graph::source::LoaderChecksum::new(s.clone()))
  }

  fn has_remote_checksum(&self, specifier: &url::Url) -> bool {
    self.0.lock().remote().contains_key(specifier.as_str())
  }

  fn set_remote_checksum(
    &mut self,
    specifier: &url::Url,
    checksum: deno_graph::source::LoaderChecksum,
  ) {
    self
      .0
      .lock()
      .insert_remote(specifier.to_string(), checksum.into_string())
  }

  // Looks up the stored integrity for a JSR package manifest, if any.
  fn get_pkg_manifest_checksum(
    &self,
    package_nv: &PackageNv,
  ) -> Option<deno_graph::source::LoaderChecksum> {
    self
      .0
      .lock()
      .content
      .packages
      .jsr
      .get(package_nv)
      .map(|s| deno_graph::source::LoaderChecksum::new(s.integrity.clone()))
  }

  fn set_pkg_manifest_checksum(
    &mut self,
    package_nv: &PackageNv,
    checksum: deno_graph::source::LoaderChecksum,
  ) {
    // a value would only exist in here if two workers raced
    // to insert the same package manifest checksum
    self
      .0
      .lock()
      .insert_package(package_nv.clone(), checksum.into_string());
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/resolver/npmrc.rs | libs/resolver/npmrc.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::collections::HashMap;
use std::path::Path;
use std::path::PathBuf;
use boxed_error::Boxed;
use deno_config::workspace::Workspace;
use deno_npm::npm_rc::NpmRc;
use deno_npm::npm_rc::ResolvedNpmRc;
use sys_traits::EnvHomeDir;
use sys_traits::EnvVar;
use sys_traits::FsRead;
use thiserror::Error;
use url::Url;
// Shared handle to a resolved .npmrc (Arc or Rc depending on features).
#[allow(clippy::disallowed_types)]
pub type ResolvedNpmRcRc = deno_maybe_sync::MaybeArc<ResolvedNpmRc>;

/// Boxed error raised while loading, parsing, or resolving an `.npmrc`.
#[derive(Debug, Boxed)]
pub struct NpmRcDiscoverError(pub Box<NpmRcDiscoverErrorKind>);

#[derive(Debug, Error)]
pub enum NpmRcDiscoverErrorKind {
  #[error(transparent)]
  Load(#[from] NpmRcLoadError),
  #[error(transparent)]
  Parse(#[from] NpmRcParseError),
  #[error(transparent)]
  Resolve(#[from] NpmRcOptionsResolveError),
  #[error(transparent)]
  UrlToFilePath(#[from] deno_path_util::UrlToFilePathError),
}

/// Reading an `.npmrc` file from disk failed.
#[derive(Debug, Error)]
#[error("Error loading .npmrc at {}.", path.display())]
pub struct NpmRcLoadError {
  path: PathBuf,
  #[source]
  source: std::io::Error,
}

/// The contents of an `.npmrc` file could not be parsed.
#[derive(Debug, Error)]
#[error("Failed to parse .npmrc at {}.", path.display())]
pub struct NpmRcParseError {
  path: PathBuf,
  #[source]
  source: std::io::Error,
}

/// The parsed `.npmrc` options could not be resolved.
#[derive(Debug, Error)]
#[error("Failed to resolve .npmrc options at {}.", path.display())]
pub struct NpmRcOptionsResolveError {
  path: PathBuf,
  #[source]
  source: deno_npm::npm_rc::ResolveError,
}
/// Discover `.npmrc` file - currently we only support it next to `package.json`,
/// next to `deno.json`, or in the user's home directory.
///
/// In the future we will need to support it in the global directory
/// as per https://docs.npmjs.com/cli/v10/configuring-npm/npmrc#files.
pub fn discover_npmrc_from_workspace<TSys: EnvVar + EnvHomeDir + FsRead>(
  sys: &TSys,
  workspace: &Workspace,
) -> Result<(ResolvedNpmRc, Option<PathBuf>), NpmRcDiscoverError> {
  let root_folder = workspace.root_folder_configs();
  // only a file-based deno.json provides a directory to search in
  let maybe_deno_json_path = match &root_folder.deno_json {
    Some(cf) if cf.specifier.scheme() == "file" => {
      Some(deno_path_util::url_to_file_path(&cf.specifier)?)
    }
    _ => None,
  };
  let maybe_pkg_json_path =
    root_folder.pkg_json.as_ref().map(|p| p.path.clone());
  discover_npmrc(sys, maybe_pkg_json_path, maybe_deno_json_path)
}
/// Performs the actual `.npmrc` discovery: reads the home-directory file
/// and the project file (next to package.json, else next to deno.json),
/// merges them with project entries taking precedence, and resolves the
/// result. Returns the resolved rc along with the path it came from (the
/// project path when both exist, `None` when only defaults were used).
fn discover_npmrc<TSys: EnvVar + EnvHomeDir + FsRead>(
  sys: &TSys,
  maybe_package_json_path: Option<PathBuf>,
  maybe_deno_json_path: Option<PathBuf>,
) -> Result<(ResolvedNpmRc, Option<PathBuf>), NpmRcDiscoverError> {
  const NPMRC_NAME: &str = ".npmrc";

  // Reads `<dir>/.npmrc`, returning None when the file doesn't exist.
  fn try_to_read_npmrc(
    sys: &impl FsRead,
    dir: &Path,
  ) -> Result<Option<(Cow<'static, str>, PathBuf)>, NpmRcLoadError> {
    let path = dir.join(NPMRC_NAME);
    let maybe_source = match sys.fs_read_to_string(&path) {
      Ok(source) => Some(source),
      Err(err) if err.kind() == std::io::ErrorKind::NotFound => None,
      Err(err) => return Err(NpmRcLoadError { path, source: err }),
    };
    Ok(maybe_source.map(|source| (source, path)))
  }

  // Parses npmrc text, resolving `${VAR}` references via env vars.
  fn try_to_parse_npmrc(
    sys: &impl EnvVar,
    source: &str,
    path: &Path,
  ) -> Result<NpmRc, NpmRcDiscoverError> {
    let npmrc = NpmRc::parse(source, &|name| sys.env_var(name).ok()).map_err(
      |source| {
        NpmRcParseError {
          path: path.to_path_buf(),
          // todo(dsherret): use source directly here once it's no longer an internal type
          source: std::io::Error::new(std::io::ErrorKind::InvalidData, source),
        }
      },
    )?;
    log::debug!(".npmrc found at: '{}'", path.display());
    Ok(npmrc)
  }

  // Merges two npmrc files; entries from the project rc win on conflict.
  fn merge_npm_rc(project_rc: NpmRc, home_rc: NpmRc) -> NpmRc {
    fn merge_maps<TValue>(
      mut project: HashMap<String, TValue>,
      home: HashMap<String, TValue>,
    ) -> HashMap<String, TValue> {
      for (key, value) in home {
        project.entry(key).or_insert(value);
      }
      project
    }

    NpmRc {
      registry: project_rc.registry.or(home_rc.registry),
      scope_registries: merge_maps(
        project_rc.scope_registries,
        home_rc.scope_registries,
      ),
      registry_configs: merge_maps(
        project_rc.registry_configs,
        home_rc.registry_configs,
      ),
    }
  }

  let mut home_npmrc = None;
  let mut project_npmrc = None;

  // 1. Try `.npmrc` in the user's home directory
  if let Some(home_dir) = sys.env_home_dir() {
    match try_to_read_npmrc(sys, &home_dir) {
      Ok(Some((source, path))) => {
        let npmrc = try_to_parse_npmrc(sys, &source, &path)?;
        home_npmrc = Some((path, npmrc));
      }
      Ok(None) => {}
      // a permission-denied home file is skipped rather than fatal
      Err(err) if err.source.kind() == std::io::ErrorKind::PermissionDenied => {
        log::debug!(
          "Skipping .npmrc in home directory due to permission denied error. {:#}",
          err
        );
      }
      Err(err) => {
        return Err(err.into());
      }
    }
  }

  // 2. Try `.npmrc` next to `package.json`
  if let Some(package_json_path) = maybe_package_json_path
    && let Some(package_json_dir) = package_json_path.parent()
    && let Some((source, path)) = try_to_read_npmrc(sys, package_json_dir)?
  {
    let npmrc = try_to_parse_npmrc(sys, &source, &path)?;
    project_npmrc = Some((path, npmrc));
  }

  // 3. Try `.npmrc` next to `deno.json(c)` when not found `package.json`
  if project_npmrc.is_none()
    && let Some(deno_json_path) = maybe_deno_json_path
    && let Some(deno_json_dir) = deno_json_path.parent()
    && let Some((source, path)) = try_to_read_npmrc(sys, deno_json_dir)?
  {
    let npmrc = try_to_parse_npmrc(sys, &source, &path)?;
    project_npmrc = Some((path, npmrc));
  }

  // resolves the rc against the default registry url and pairs it with
  // the path it was loaded from
  let resolve_npmrc = |path: PathBuf, npm_rc: NpmRc| {
    Ok((
      npm_rc
        .as_resolved(&npm_registry_url(sys))
        .map_err(|source| NpmRcOptionsResolveError {
          path: path.to_path_buf(),
          source,
        })?,
      Some(path),
    ))
  };

  match (home_npmrc, project_npmrc) {
    (None, None) => {
      log::debug!("No .npmrc file found");
      Ok((create_default_npmrc(sys), None))
    }
    (None, Some((npmrc_path, project_rc))) => {
      log::debug!("Only project .npmrc file found");
      resolve_npmrc(npmrc_path, project_rc)
    }
    (Some((npmrc_path, home_rc)), None) => {
      log::debug!("Only home .npmrc file found");
      resolve_npmrc(npmrc_path, home_rc)
    }
    (Some((_, home_rc)), Some((npmrc_path, project_rc))) => {
      log::debug!("Both home and project .npmrc files found");
      let merged_npmrc = merge_npm_rc(project_rc, home_rc);
      resolve_npmrc(npmrc_path, merged_npmrc)
    }
  }
}
/// Creates a [`ResolvedNpmRc`] pointing at the default registry url
/// (honoring `NPM_CONFIG_REGISTRY`) with no scopes or per-registry
/// configuration.
pub fn create_default_npmrc(sys: &impl EnvVar) -> ResolvedNpmRc {
  ResolvedNpmRc {
    default_config: deno_npm::npm_rc::RegistryConfigWithUrl {
      // `npm_registry_url` already returns an owned `Url`, so the former
      // `.clone()` here was a redundant allocation
      registry_url: npm_registry_url(sys),
      config: Default::default(),
    },
    scopes: Default::default(),
    registry_configs: Default::default(),
  }
}
/// Returns the npm registry url to use: the `NPM_CONFIG_REGISTRY`
/// environment variable when set and valid, otherwise the public npm
/// registry.
pub fn npm_registry_url(sys: &impl EnvVar) -> Url {
  let env_var_name = "NPM_CONFIG_REGISTRY";
  let maybe_env_url = sys.env_var(env_var_name).ok().and_then(|raw_url| {
    // ensure there is a trailing slash for the directory
    let normalized = format!("{}/", raw_url.trim_end_matches('/'));
    Url::parse(&normalized)
      .inspect_err(|err| {
        log::debug!("Invalid {} environment variable: {:#}", env_var_name, err,);
      })
      .ok()
  });
  maybe_env_url
    .unwrap_or_else(|| Url::parse("https://registry.npmjs.org").unwrap())
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/resolver/workspace.rs | libs/resolver/workspace.rs | // Copyright 2018-2025 the Deno authors. MIT license.
// use super::UrlRc;
use std::borrow::Cow;
use std::collections::BTreeMap;
use std::collections::HashMap;
use std::fmt;
use std::path::Path;
use std::path::PathBuf;
use deno_config::deno_json::ConfigFileError;
use deno_config::workspace::ResolverWorkspaceJsrPackage;
use deno_config::workspace::Workspace;
use deno_error::JsError;
use deno_maybe_sync::MaybeDashMap;
use deno_maybe_sync::new_rc;
use deno_media_type::MediaType;
use deno_npm::registry::NpmPackageVersionInfo;
use deno_package_json::PackageJsonDepValue;
use deno_package_json::PackageJsonDepValueParseError;
use deno_package_json::PackageJsonDepWorkspaceReq;
use deno_package_json::PackageJsonDepsRc;
use deno_package_json::PackageJsonRc;
use deno_path_util::url_from_directory_path;
use deno_path_util::url_from_file_path;
use deno_path_util::url_to_file_path;
use deno_semver::RangeSetOrTag;
use deno_semver::SmallStackString;
use deno_semver::StackString;
use deno_semver::Version;
use deno_semver::VersionReq;
use deno_semver::jsr::JsrPackageReqReference;
use deno_semver::package::PackageName;
use deno_semver::package::PackageReq;
use deno_terminal::colors;
use import_map::ImportMap;
use import_map::ImportMapDiagnostic;
use import_map::ImportMapError;
use import_map::ImportMapErrorKind;
use import_map::ImportMapWithDiagnostics;
use import_map::specifier::SpecifierError;
use indexmap::IndexMap;
use node_resolver::NodeResolutionKind;
use parking_lot::RwLock;
use serde::Deserialize;
use serde::Serialize;
use sys_traits::FsMetadata;
use sys_traits::FsMetadataValue;
use sys_traits::FsRead;
use thiserror::Error;
use url::Url;
use crate::collections::FolderScopedMap;
use crate::deno_json::CompilerOptionsModuleResolution;
use crate::deno_json::CompilerOptionsPaths;
use crate::deno_json::CompilerOptionsResolverRc;
// Shared handle to a Url (Arc or Rc depending on the sync feature).
#[allow(clippy::disallowed_types)]
type UrlRc = deno_maybe_sync::MaybeArc<Url>;

/// Per-folder package.json data held by the resolver.
#[derive(Debug)]
struct PkgJsonResolverFolderConfig {
  deps: PackageJsonDepsRc,
  pkg_json: PackageJsonRc,
}

/// Errors that can occur while creating a workspace resolver.
#[derive(Debug, Error, JsError)]
pub enum WorkspaceResolverCreateError {
  #[class(inherit)]
  #[error("Failed loading import map specified in '{referrer}'")]
  ImportMapFetch {
    referrer: Url,
    #[source]
    #[inherit]
    source: Box<ConfigFileError>,
  },
  #[class(inherit)]
  #[error(transparent)]
  ImportMap(
    #[from]
    #[inherit]
    ImportMapError,
  ),
}

/// Whether to resolve dependencies by reading the dependencies list
/// from a package.json
#[derive(
  Debug, Default, Serialize, Deserialize, Copy, Clone, PartialEq, Eq,
)]
pub enum PackageJsonDepResolution {
  /// Resolves based on the dep entries in the package.json.
  #[default]
  Enabled,
  /// Doesn't use the package.json to resolve dependencies. Lets the caller
  /// resolve based on the file system.
  Disabled,
}

/// Whether sloppy import resolution (extension/index probing) is enabled.
#[derive(
  Debug, Default, Serialize, Deserialize, Copy, Clone, PartialEq, Eq,
)]
pub enum SloppyImportsOptions {
  Enabled,
  #[default]
  Unspecified,
}

/// Toggle FS metadata caching when probing files for sloppy imports and
/// `compilerOptions.rootDirs` resolution.
#[derive(
  Debug, Default, Serialize, Deserialize, Copy, Clone, PartialEq, Eq,
)]
pub enum FsCacheOptions {
  #[default]
  Enabled,
  Disabled,
}

/// Options used when constructing the workspace resolver.
#[derive(Debug, Default, Clone)]
pub struct CreateResolverOptions {
  pub pkg_json_dep_resolution: PackageJsonDepResolution,
  pub specified_import_map: Option<SpecifiedImportMap>,
  pub sloppy_imports_options: SloppyImportsOptions,
  pub fs_cache_options: FsCacheOptions,
}

/// An explicitly provided import map: its raw JSON value plus base url.
#[derive(Debug, Clone)]
pub struct SpecifiedImportMap {
  pub base_url: Url,
  pub value: serde_json::Value,
}

/// Non-fatal diagnostics produced during mapped resolution.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum MappedResolutionDiagnostic {
  /// A local workspace/link package existed but its version did not
  /// satisfy the requested constraint.
  ConstraintNotMatchedLocalVersion {
    /// If it was for a link (true) or workspace (false) member.
    is_link: bool,
    reference: JsrPackageReqReference,
    local_version: Version,
  },
}
impl std::fmt::Display for MappedResolutionDiagnostic {
  /// Renders the diagnostic as a user-facing message identifying the
  /// package kind, its local version, and the unmatched constraint.
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    match self {
      Self::ConstraintNotMatchedLocalVersion {
        is_link,
        reference,
        local_version,
      } => {
        // label differs by whether the local package is a link or a
        // workspace member
        let package_kind = if *is_link {
          "Linked package"
        } else {
          "Workspace member"
        };
        write!(
          f,
          "{0} '{1}@{2}' was not used because it did not match '{1}@{3}'",
          package_kind,
          reference.req().name,
          local_version,
          reference.req().version_req
        )
      }
    }
  }
}
/// The outcome of a successful mapped resolution.
#[derive(Debug, Clone)]
pub enum MappedResolution<'a> {
  /// Resolved to a specifier, possibly adjusted by the import map, sloppy
  /// imports, or `compilerOptions.rootDirs`.
  Normal {
    specifier: Url,
    sloppy_reason: Option<SloppyImportsResolutionReason>,
    used_import_map: bool,
    used_compiler_options_root_dirs: bool,
    maybe_diagnostic: Option<Box<MappedResolutionDiagnostic>>,
  },
  /// Resolved to a JSR package belonging to this workspace.
  WorkspaceJsrPackage {
    specifier: Url,
    pkg_req_ref: JsrPackageReqReference,
  },
  /// Resolved a bare specifier to a package.json that was a workspace member.
  WorkspaceNpmPackage {
    target_pkg_json: &'a PackageJsonRc,
    pkg_name: &'a str,
    sub_path: Option<String>,
  },
  /// Resolved to a dependency entry (`alias`) of a package.json; the entry
  /// itself may have failed to parse (see `dep_result`).
  PackageJson {
    pkg_json: &'a PackageJsonRc,
    alias: &'a str,
    sub_path: Option<String>,
    dep_result: &'a Result<PackageJsonDepValue, PackageJsonDepValueParseError>,
  },
  // NOTE(review): presumably resolution via the package.json "imports"
  // field — confirm against the resolver implementation
  PackageJsonImport {
    pkg_json: &'a PackageJsonRc,
  },
}

/// Errors raised while resolving through workspace packages.
#[derive(Debug, Clone, Error, JsError)]
#[class(type)]
pub enum WorkspaceResolveError {
  #[error("Failed joining '{}' to '{}'. {:#}", .sub_path, .base, .error)]
  InvalidExportPath {
    base: Url,
    sub_path: String,
    error: url::ParseError,
  },
  #[error("Unknown export '{}' for '{}'.\n Package exports:\n{}", export_name, package_name, .exports.iter().map(|e| format!(" * {}", e)).collect::<Vec<_>>().join("\n"))]
  UnknownExport {
    package_name: String,
    export_name: String,
    exports: Vec<String>,
  },
}

/// A `compilerOptions.paths` mapping matched, but the mapped path did not
/// point at an existing file.
#[derive(Debug, Error, JsError)]
#[class(type)]
#[error(
  "Import \"{}\" via 'compilerOptions.paths[\"{}\"]' did not match an existing file", prior_resolution.as_ref().map(|s| s.as_str()).unwrap_or(specifier.as_str()), matched_key
)]
pub struct NotFoundInCompilerOptionsPathsError {
  specifier: String,
  referrer: Url,
  matched_key: String,
  prior_resolution: Option<Url>,
}

/// Any error that can occur during mapped resolution.
#[derive(Debug, Error, JsError)]
pub enum MappedResolutionError {
  #[class(inherit)]
  #[error(transparent)]
  Specifier(#[from] SpecifierError),
  #[class(inherit)]
  #[error(transparent)]
  ImportMap(#[from] ImportMapError),
  #[class(inherit)]
  #[error(transparent)]
  Workspace(#[from] WorkspaceResolveError),
  #[class(inherit)]
  #[error(transparent)]
  NotFoundInCompilerOptionsPaths(
    #[from] Box<NotFoundInCompilerOptionsPathsError>,
  ),
}
impl MappedResolutionError {
  /// Whether this error means a bare specifier simply had no mapping
  /// (import prefix missing or unmapped by the import map), as opposed to
  /// a genuine resolution failure.
  pub fn is_unmapped_bare_specifier(&self) -> bool {
    match self {
      Self::Specifier(err) => {
        matches!(err, SpecifierError::ImportPrefixMissing { .. })
      }
      Self::ImportMap(err) => {
        matches!(**err, ImportMapErrorKind::UnmappedBareSpecifier(_, _))
      }
      Self::Workspace(_) | Self::NotFoundInCompilerOptionsPaths(_) => false,
    }
  }
}
/// Boxed error for resolving a package.json folder within the workspace.
#[derive(Error, Debug, JsError)]
#[class(inherit)]
#[error(transparent)]
pub struct WorkspaceResolvePkgJsonFolderError(
  Box<WorkspaceResolvePkgJsonFolderErrorKind>,
);

impl WorkspaceResolvePkgJsonFolderError {
  /// Borrows the underlying error kind.
  pub fn as_kind(&self) -> &WorkspaceResolvePkgJsonFolderErrorKind {
    &self.0
  }

  /// Consumes the wrapper, returning the underlying error kind.
  pub fn into_kind(self) -> WorkspaceResolvePkgJsonFolderErrorKind {
    *self.0
  }
}

// Allow constructing the boxed wrapper from anything convertible to the
// inner kind.
impl<E> From<E> for WorkspaceResolvePkgJsonFolderError
where
  WorkspaceResolvePkgJsonFolderErrorKind: From<E>,
{
  fn from(err: E) -> Self {
    WorkspaceResolvePkgJsonFolderError(Box::new(
      WorkspaceResolvePkgJsonFolderErrorKind::from(err),
    ))
  }
}

#[derive(Debug, Error, JsError, Clone, PartialEq, Eq)]
#[class(type)]
pub enum WorkspaceResolvePkgJsonFolderErrorKind {
  #[error("Could not find package.json with name '{0}' in workspace.")]
  NotFound(String),
  #[error(
    "Found package.json in workspace, but version '{1}' didn't satisfy constraint '{0}'."
  )]
  VersionNotSatisfied(VersionReq, Version),
}

/// What a cached metadata lookup found at a path.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum CachedMetadataFsEntry {
  File,
  Dir,
}

/// File system wrapper that optionally memoizes metadata lookups.
#[derive(Debug)]
struct CachedMetadataFs<TSys: FsMetadata> {
  sys: TSys,
  // outer `None` = caching disabled; cached inner `None` = path is
  // neither a file nor a directory (missing or other entry type)
  cache: Option<MaybeDashMap<PathBuf, Option<CachedMetadataFsEntry>>>,
}
impl<TSys: FsMetadata> CachedMetadataFs<TSys> {
  fn new(sys: TSys, options: FsCacheOptions) -> Self {
    Self {
      sys,
      cache: match options {
        FsCacheOptions::Enabled => Some(Default::default()),
        FsCacheOptions::Disabled => None,
      },
    }
  }

  /// Stats a path, classifying it as file, directory, or neither; results
  /// are memoized when caching is enabled.
  fn stat_sync(&self, path: &Path) -> Option<CachedMetadataFsEntry> {
    if let Some(cache) = &self.cache
      && let Some(entry) = cache.get(path)
    {
      return *entry;
    }
    let entry = self.sys.fs_metadata(path).ok().and_then(|stat| {
      if stat.file_type().is_file() {
        Some(CachedMetadataFsEntry::File)
      } else if stat.file_type().is_dir() {
        Some(CachedMetadataFsEntry::Dir)
      } else {
        None
      }
    });
    // negative results are cached too (entry may be None)
    if let Some(cache) = &self.cache {
      cache.insert(path.to_owned(), entry);
    }
    entry
  }

  fn is_file(&self, path: &Path) -> bool {
    self.stat_sync(path) == Some(CachedMetadataFsEntry::File)
  }
}
/// Why a sloppy-import probe rewrote a specifier.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum SloppyImportsResolutionReason {
  /// Ex. `./file.js` to `./file.ts`
  JsToTs,
  /// Ex. `./file` to `./file.ts`
  NoExtension,
  /// Ex. `./dir` to `./dir/index.ts`
  Directory,
}
impl SloppyImportsResolutionReason {
  /// Builds a "Maybe ..." suggestion message for diagnostics.
  pub fn suggestion_message_for_specifier(&self, specifier: &Url) -> String {
    format!("Maybe {}", self.base_message_for_specifier(specifier))
  }

  /// Builds the quick-fix message: the base message with its first
  /// character capitalized and a trailing period.
  pub fn quick_fix_message_for_specifier(&self, specifier: &Url) -> String {
    let message = self.base_message_for_specifier(specifier);
    let mut chars = message.chars();
    format!(
      "{}{}.",
      chars.next().unwrap().to_uppercase(),
      chars.as_str()
    )
  }

  // Lower-case message fragment describing the fix for each reason.
  fn base_message_for_specifier(&self, specifier: &Url) -> String {
    match self {
      Self::JsToTs => {
        let media_type = MediaType::from_specifier(specifier);
        format!("change the extension to '{}'", media_type.as_ts_extension())
      }
      Self::NoExtension => {
        let media_type = MediaType::from_specifier(specifier);
        format!("add a '{}' extension", media_type.as_ts_extension())
      }
      Self::Directory => {
        let file_name = specifier
          .path()
          .rsplit_once('/')
          .map(|(_, file_name)| file_name)
          .unwrap_or(specifier.path());
        format!("specify path to '{}' file in directory instead", file_name)
      }
    }
  }
}
/// Resolves sloppy imports (extension and index-file probing) against the
/// file system.
#[derive(Debug)]
struct SloppyImportsResolver<TSys: FsMetadata> {
  compiler_options_resolver: CompilerOptionsResolverCellRc,
  fs: CachedMetadataFs<TSys>,
  // true when sloppy imports were explicitly enabled via options
  enabled_by_options: bool,
}
impl<TSys: FsMetadata> SloppyImportsResolver<TSys> {
fn new(
fs: CachedMetadataFs<TSys>,
compiler_options_resolver: CompilerOptionsResolverCellRc,
options: SloppyImportsOptions,
) -> Self {
Self {
fs,
compiler_options_resolver,
enabled_by_options: match options {
SloppyImportsOptions::Enabled => true,
SloppyImportsOptions::Unspecified => false,
},
}
}
/// Attempts to "sloppily" resolve `specifier` to an existing file by probing
/// alternative extensions (e.g. `.js` -> `.ts`), adding a missing extension,
/// or falling back to a directory's `index.*` file.
///
/// Returns the corrected file URL plus the reason for the correction, or
/// `None` when sloppy resolution doesn't apply for this referrer or no
/// probe target exists on disk.
fn resolve(
  &self,
  specifier: &Url,
  referrer: &Url,
  resolution_kind: ResolutionKind,
) -> Option<(Url, SloppyImportsResolutionReason)> {
  // Sloppy imports apply when explicitly enabled, or when the referrer's
  // compiler options use "bundler" module resolution.
  if !self.enabled_by_options
    && self
      .compiler_options_resolver
      .read()
      .for_specifier(referrer)
      .module_resolution()
      != CompilerOptionsModuleResolution::Bundler
  {
    return None;
  }
  // Strips the media type's extension from the path; for Unknown media
  // types the path is returned unchanged.
  fn path_without_ext(
    path: &Path,
    media_type: MediaType,
  ) -> Option<Cow<'_, str>> {
    let old_path_str = path.to_string_lossy();
    match media_type {
      MediaType::Unknown => Some(old_path_str),
      _ => old_path_str
        .strip_suffix(media_type.as_ts_extension())
        .map(|s| Cow::Owned(s.to_string())),
    }
  }
  // Expands a path stem into one candidate path per probe media type,
  // skipping the original media type (that file is what we started from).
  fn media_types_to_paths(
    path_no_ext: &str,
    original_media_type: MediaType,
    probe_media_type_types: Vec<MediaType>,
    reason: SloppyImportsResolutionReason,
  ) -> Vec<(PathBuf, SloppyImportsResolutionReason)> {
    probe_media_type_types
      .into_iter()
      .filter(|media_type| *media_type != original_media_type)
      .map(|media_type| {
        (
          PathBuf::from(format!(
            "{}{}",
            path_no_ext,
            media_type.as_ts_extension()
          )),
          reason,
        )
      })
      .collect::<Vec<_>>()
  }
  if specifier.scheme() != "file" {
    return None;
  }
  let path = url_to_file_path(specifier).ok()?;
  // Build the ordered list of candidate paths to probe. Order matters:
  // earlier entries win when multiple candidates exist on disk.
  let probe_paths: Vec<(PathBuf, SloppyImportsResolutionReason)> =
    match self.fs.stat_sync(&path) {
      Some(CachedMetadataFsEntry::File) => {
        if resolution_kind.is_types() {
          let media_type = MediaType::from_specifier(specifier);
          // attempt to resolve the .d.ts file before the .js file
          let probe_media_type_types = match media_type {
            MediaType::JavaScript => {
              vec![(MediaType::Dts), MediaType::JavaScript]
            }
            MediaType::Mjs => {
              vec![MediaType::Dmts, MediaType::Dts, MediaType::Mjs]
            }
            MediaType::Cjs => {
              vec![MediaType::Dcts, MediaType::Dts, MediaType::Cjs]
            }
            _ => return None,
          };
          let path_no_ext = path_without_ext(&path, media_type)?;
          media_types_to_paths(
            &path_no_ext,
            media_type,
            probe_media_type_types,
            SloppyImportsResolutionReason::JsToTs,
          )
        } else {
          // file exists and we're resolving for execution — nothing to fix
          return None;
        }
      }
      entry @ None | entry @ Some(CachedMetadataFsEntry::Dir) => {
        // The path doesn't exist (or is a directory): pick replacement
        // extensions based on the requested media type.
        let media_type = MediaType::from_specifier(specifier);
        let probe_media_type_types = match media_type {
          MediaType::JavaScript => (
            if resolution_kind.is_types() {
              vec![MediaType::TypeScript, MediaType::Tsx, MediaType::Dts]
            } else {
              vec![MediaType::TypeScript, MediaType::Tsx]
            },
            SloppyImportsResolutionReason::JsToTs,
          ),
          MediaType::Jsx => {
            (vec![MediaType::Tsx], SloppyImportsResolutionReason::JsToTs)
          }
          MediaType::Mjs => (
            if resolution_kind.is_types() {
              vec![MediaType::Mts, MediaType::Dmts, MediaType::Dts]
            } else {
              vec![MediaType::Mts]
            },
            SloppyImportsResolutionReason::JsToTs,
          ),
          MediaType::Cjs => (
            if resolution_kind.is_types() {
              vec![MediaType::Cts, MediaType::Dcts, MediaType::Dts]
            } else {
              vec![MediaType::Cts]
            },
            SloppyImportsResolutionReason::JsToTs,
          ),
          // extensions we never rewrite
          MediaType::TypeScript
          | MediaType::Mts
          | MediaType::Cts
          | MediaType::Dts
          | MediaType::Dmts
          | MediaType::Dcts
          | MediaType::Tsx
          | MediaType::Json
          | MediaType::Jsonc
          | MediaType::Json5
          | MediaType::Wasm
          | MediaType::Css
          | MediaType::Html
          | MediaType::Sql
          | MediaType::SourceMap => {
            return None;
          }
          // no extension at all — probe every plausible extension
          MediaType::Unknown => (
            if resolution_kind.is_types() {
              vec![
                MediaType::TypeScript,
                MediaType::Tsx,
                MediaType::Mts,
                MediaType::Dts,
                MediaType::Dmts,
                MediaType::Dcts,
                MediaType::JavaScript,
                MediaType::Jsx,
                MediaType::Mjs,
              ]
            } else {
              vec![
                MediaType::TypeScript,
                MediaType::JavaScript,
                MediaType::Tsx,
                MediaType::Jsx,
                MediaType::Mts,
                MediaType::Mjs,
              ]
            },
            SloppyImportsResolutionReason::NoExtension,
          ),
        };
        let mut probe_paths = match path_without_ext(&path, media_type) {
          Some(path_no_ext) => media_types_to_paths(
            &path_no_ext,
            media_type,
            probe_media_type_types.0,
            probe_media_type_types.1,
          ),
          None => vec![],
        };
        if matches!(entry, Some(CachedMetadataFsEntry::Dir)) {
          // try to resolve at the index file
          if resolution_kind.is_types() {
            probe_paths.push((
              path.join("index.ts"),
              SloppyImportsResolutionReason::Directory,
            ));
            probe_paths.push((
              path.join("index.mts"),
              SloppyImportsResolutionReason::Directory,
            ));
            probe_paths.push((
              path.join("index.d.ts"),
              SloppyImportsResolutionReason::Directory,
            ));
            probe_paths.push((
              path.join("index.d.mts"),
              SloppyImportsResolutionReason::Directory,
            ));
            probe_paths.push((
              path.join("index.js"),
              SloppyImportsResolutionReason::Directory,
            ));
            probe_paths.push((
              path.join("index.mjs"),
              SloppyImportsResolutionReason::Directory,
            ));
            probe_paths.push((
              path.join("index.tsx"),
              SloppyImportsResolutionReason::Directory,
            ));
            probe_paths.push((
              path.join("index.jsx"),
              SloppyImportsResolutionReason::Directory,
            ));
          } else {
            probe_paths.push((
              path.join("index.ts"),
              SloppyImportsResolutionReason::Directory,
            ));
            probe_paths.push((
              path.join("index.mts"),
              SloppyImportsResolutionReason::Directory,
            ));
            probe_paths.push((
              path.join("index.tsx"),
              SloppyImportsResolutionReason::Directory,
            ));
            probe_paths.push((
              path.join("index.js"),
              SloppyImportsResolutionReason::Directory,
            ));
            probe_paths.push((
              path.join("index.mjs"),
              SloppyImportsResolutionReason::Directory,
            ));
            probe_paths.push((
              path.join("index.jsx"),
              SloppyImportsResolutionReason::Directory,
            ));
          }
        }
        if probe_paths.is_empty() {
          return None;
        }
        probe_paths
      }
    };
  // Return the first candidate that actually exists as a file.
  for (probe_path, reason) in probe_paths {
    if self.fs.is_file(&probe_path)
      && let Ok(specifier) = url_from_file_path(&probe_path)
    {
      return Some((specifier, reason));
    }
  }
  None
}
}
/// One-shot sloppy-import resolution over `sys` with sloppy imports and fs
/// stat caching both force-enabled.
pub fn sloppy_imports_resolve<TSys: FsMetadata>(
  specifier: &Url,
  resolution_kind: ResolutionKind,
  sys: TSys,
) -> Option<(Url, SloppyImportsResolutionReason)> {
  let fs = CachedMetadataFs::new(sys, FsCacheOptions::Enabled);
  let resolver = SloppyImportsResolver::new(
    fs,
    Default::default(),
    SloppyImportsOptions::Enabled,
  );
  // The referrer is used to determine the applicable compiler options, which
  // can force-override `SloppyImportOptions::Disabled` depending on
  // `moduleResolution`. But `SloppyImportOptions::Enabled` is set.
  let referrer = Url::parse("unknown:").unwrap();
  resolver.resolve(specifier, &referrer, resolution_kind)
}
// Shared handle to a `SloppyImportsResolver` (Arc or Rc depending on the
// `deno_maybe_sync` configuration).
#[allow(clippy::disallowed_types)]
type SloppyImportsResolverRc<T> =
  deno_maybe_sync::MaybeArc<SloppyImportsResolver<T>>;
/// Problems encountered while reading `compilerOptions.rootDirs` from a
/// config file. The `Url` identifies where the value came from; see the
/// `Display` impl for how each variant is reported.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum CompilerOptionsRootDirsDiagnostic {
  /// The `rootDirs` value itself had an unexpected JSON type.
  InvalidType(Url),
  /// The entry at the given index had an unexpected JSON type.
  InvalidEntryType(Url, usize),
  /// An unexpected error occurred while parsing the `rootDirs` value.
  UnexpectedError(Url, String),
  /// An unexpected error occurred while parsing the entry at the given index.
  UnexpectedEntryError(Url, usize, String),
}
impl fmt::Display for CompilerOptionsRootDirsDiagnostic {
  /// Renders a user-facing description of a `compilerOptions.rootDirs`
  /// parsing problem, including the specifier it came from.
  fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
    match self {
      // `rootDirs` as a whole must be an array (its *entries* are strings),
      // so report that; the previous message said "Expected a string."
      Self::InvalidType(s) => write!(
        f,
        "Invalid value for \"compilerOptions.rootDirs\" (\"{s}\"). Expected an array of strings."
      ),
      Self::InvalidEntryType(s, i) => write!(
        f,
        "Invalid value for \"compilerOptions.rootDirs[{i}]\" (\"{s}\"). Expected a string."
      ),
      Self::UnexpectedError(s, message) => write!(
        f,
        "Unexpected error while parsing \"compilerOptions.rootDirs\" (\"{s}\"): {message}"
      ),
      Self::UnexpectedEntryError(s, i, message) => write!(
        f,
        "Unexpected error while parsing \"compilerOptions.rootDirs[{i}]\" (\"{s}\"): {message}"
      ),
    }
  }
}
/// Resolves a types specifier through `compilerOptions.paths`.
///
/// Returns `None` when `paths` doesn't apply, `Some(Ok(..))` with the first
/// candidate that exists (directly or via sloppy-import probing), or
/// `Some(Err(matched_key))` when a pattern matched but nothing existed.
fn resolve_types_with_compiler_options_paths(
  specifier: &str,
  referrer: &Url,
  paths: &CompilerOptionsPaths,
  sloppy_imports_resolver: &SloppyImportsResolver<impl FsMetadata>,
) -> Option<Result<(Url, Option<SloppyImportsResolutionReason>), String>> {
  // `paths` mapping only makes sense for file-system referrers.
  if referrer.scheme() != "file" {
    return None;
  }
  let (candidates, matched_key) = paths.resolve_candidates(specifier)?;
  for candidate in candidates {
    let Ok(candidate_path) = url_to_file_path(&candidate) else {
      continue;
    };
    if sloppy_imports_resolver.fs.is_file(&candidate_path) {
      return Some(Ok((candidate, None)));
    }
    if let Some((resolved, sloppy_reason)) =
      sloppy_imports_resolver.resolve(&candidate, referrer, ResolutionKind::Types)
    {
      return Some(Ok((resolved, Some(sloppy_reason))));
    }
  }
  Some(Err(matched_key))
}
/// Resolves a types specifier through `compilerOptions.rootDirs`: finds the
/// rootDir that prefixes `specifier` and re-roots the remaining path suffix
/// onto each of the other rootDirs, returning the first candidate that
/// exists (directly or via sloppy-import probing).
fn resolve_types_with_compiler_options_root_dirs(
  specifier: &Url,
  referrer: &Url,
  root_dirs: &[Url],
  sloppy_imports_resolver: &SloppyImportsResolver<impl FsMetadata>,
) -> Option<(Url, Option<SloppyImportsResolutionReason>)> {
  if specifier.scheme() != "file" || referrer.scheme() != "file" {
    return None;
  }
  // Longest-prefix match; on equal lengths `max_by_key` keeps the later
  // entry of the list.
  let (matched_root_dir, suffix) = root_dirs
    .iter()
    .filter_map(|dir| {
      Some((dir, specifier.as_str().strip_prefix(dir.as_str())?))
    })
    .max_by_key(|(dir, _)| dir.as_str().len())?;
  for dir in root_dirs {
    if dir == matched_root_dir {
      continue;
    }
    let Ok(candidate) = dir.join(suffix) else {
      continue;
    };
    let Ok(candidate_path) = url_to_file_path(&candidate) else {
      continue;
    };
    if sloppy_imports_resolver.fs.is_file(&candidate_path) {
      return Some((candidate, None));
    }
    if let Some((resolved, sloppy_reason)) =
      sloppy_imports_resolver.resolve(&candidate, referrer, ResolutionKind::Types)
    {
      return Some((resolved, Some(sloppy_reason)));
    }
  }
  None
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum ResolutionKind {
  /// Resolving for code that will be executed.
  Execution,
  /// Resolving for code that will be used for type information.
  Types,
}

impl ResolutionKind {
  /// Returns `true` when resolving for type information.
  pub fn is_types(&self) -> bool {
    matches!(self, ResolutionKind::Types)
  }
}
// One-to-one mapping from node resolution kinds to the workspace kind.
impl From<NodeResolutionKind> for ResolutionKind {
  fn from(value: NodeResolutionKind) -> Self {
    match value {
      NodeResolutionKind::Execution => Self::Execution,
      NodeResolutionKind::Types => Self::Types,
    }
  }
}
/// Non-fatal problems surfaced by the workspace resolver: import-map
/// diagnostics and `compilerOptions.rootDirs` diagnostics.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum WorkspaceResolverDiagnostic<'a> {
  ImportMap(&'a ImportMapDiagnostic),
  CompilerOptionsRootDirs(&'a CompilerOptionsRootDirsDiagnostic),
}
impl fmt::Display for WorkspaceResolverDiagnostic<'_> {
  /// Prefixes import-map diagnostics; rootDirs diagnostics already carry
  /// their own context.
  fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
    match self {
      Self::ImportMap(diagnostic) => write!(f, "Import map: {diagnostic}"),
      Self::CompilerOptionsRootDirs(diagnostic) => write!(f, "{diagnostic}"),
    }
  }
}
// Shared, lockable cell holding the current compiler-options resolver so it
// can be swapped out after construction (see usages via `.read()`).
#[allow(clippy::disallowed_types)]
type CompilerOptionsResolverCellRc =
  deno_maybe_sync::MaybeArc<RwLock<CompilerOptionsResolverRc>>;
/// Resolves specifiers according to the workspace configuration: the
/// (possibly synthesized) import map, workspace JSR packages, and
/// package.json dependencies.
#[derive(Debug)]
pub struct WorkspaceResolver<TSys: FsMetadata + FsRead> {
  // Root directory URL of the workspace.
  workspace_root: UrlRc,
  // JSR packages declared by workspace members.
  jsr_pkgs: Vec<ResolverWorkspaceJsrPackage>,
  // Import map merged/synthesized from the workspace deno.json files, if any.
  maybe_import_map: Option<ImportMapWithDiagnostics>,
  // package.json configs keyed by their containing folder URL.
  pkg_jsons: FolderScopedMap<PkgJsonResolverFolderConfig>,
  pkg_json_dep_resolution: PackageJsonDepResolution,
  sloppy_imports_options: SloppyImportsOptions,
  fs_cache_options: FsCacheOptions,
  compiler_options_resolver: CompilerOptionsResolverCellRc,
  sloppy_imports_resolver: SloppyImportsResolverRc<TSys>,
}
impl<TSys: FsMetadata + FsRead> WorkspaceResolver<TSys> {
/// Builds a `WorkspaceResolver` from a `Workspace`, synthesizing a combined
/// import map from the workspace's deno.json files (or using the explicitly
/// specified one) and collecting JSR packages and package.json deps.
pub fn from_workspace(
  workspace: &Workspace,
  sys: TSys,
  options: CreateResolverOptions,
) -> Result<Self, WorkspaceResolverCreateError> {
  // Resolves the import map to use: the explicitly specified one, or a
  // synthetic map merged from the workspace's deno.json configs. Returns
  // `Ok(None)` when no import map is needed at all.
  fn resolve_import_map(
    sys: &impl FsRead,
    workspace: &Workspace,
    specified_import_map: Option<SpecifiedImportMap>,
  ) -> Result<Option<ImportMapWithDiagnostics>, WorkspaceResolverCreateError>
  {
    let root_deno_json = workspace.root_deno_json();
    let deno_jsons = workspace.resolver_deno_jsons().collect::<Vec<_>>();
    let (import_map_url, import_map) = match specified_import_map {
      Some(SpecifiedImportMap {
        base_url,
        value: import_map,
      }) => (base_url, import_map),
      None => {
        // Only build a synthetic map when some config is a package, has
        // import map data, or uses multi-entry `rootDirs`.
        if !deno_jsons.iter().any(|p| p.is_package())
          && !deno_jsons.iter().any(|c| {
            c.json.import_map.is_some()
              || c.json.scopes.is_some()
              || c.json.imports.is_some()
              || c
                .json
                .compiler_options
                .as_ref()
                .and_then(|v| v.as_object()?.get("rootDirs")?.as_array())
                .is_some_and(|a| a.len() > 1)
          })
        {
          // no configs have an import map and none are a package, so exit
          return Ok(None);
        }
        // Base map: the root deno.json's import map, or an empty object
        // anchored at the root config (or a hypothetical root deno.json).
        let config_specified_import_map = match root_deno_json.as_ref() {
          Some(deno_json) => deno_json
            .to_import_map_value(sys)
            .map_err(|source| WorkspaceResolverCreateError::ImportMapFetch {
              referrer: deno_json.specifier.clone(),
              source: Box::new(source),
            })?
            .unwrap_or_else(|| {
              (
                Cow::Borrowed(&deno_json.specifier),
                serde_json::Value::Object(Default::default()),
              )
            }),
          None => (
            Cow::Owned(workspace.root_dir_url().join("deno.json").unwrap()),
            serde_json::Value::Object(Default::default()),
          ),
        };
        let base_import_map_config = import_map::ext::ImportMapConfig {
          base_url: config_specified_import_map.0.into_owned(),
          import_map_value: config_specified_import_map.1,
        };
        // Each non-root member contributes only its "imports" entries.
        let child_import_map_configs = deno_jsons
          .iter()
          .filter(|f| {
            Some(&f.specifier)
              != root_deno_json.as_ref().map(|c| &c.specifier)
          })
          .map(|config| import_map::ext::ImportMapConfig {
            base_url: config.specifier.clone(),
            import_map_value: {
              // don't include scopes here
              let mut value = serde_json::Map::with_capacity(1);
              if let Some(imports) = &config.json.imports {
                value.insert("imports".to_string(), imports.clone());
              }
              value.into()
            },
          })
          .collect::<Vec<_>>();
        let (import_map_url, import_map) =
          ::import_map::ext::create_synthetic_import_map(
            base_import_map_config,
            child_import_map_configs,
          );
        let import_map = import_map::ext::expand_import_map_value(import_map);
        log::debug!(
          "Workspace config generated this import map {}",
          serde_json::to_string_pretty(&import_map).unwrap()
        );
        (import_map_url, import_map)
      }
    };
    Ok(Some(import_map::parse_from_value(
      import_map_url,
      import_map,
    )?))
  }
  let maybe_import_map =
    resolve_import_map(&sys, workspace, options.specified_import_map)?;
  let jsr_pkgs = workspace.resolver_jsr_pkgs().collect::<Vec<_>>();
  // Pre-resolve each package.json's local deps, keyed by folder URL.
  let pkg_jsons = workspace
    .resolver_pkg_jsons()
    .map(|(dir_url, pkg_json)| {
      let deps = pkg_json.resolve_local_package_json_deps();
      (
        dir_url.clone(),
        PkgJsonResolverFolderConfig {
          deps: deps.clone(),
          pkg_json: pkg_json.clone(),
        },
      )
    })
    .collect::<BTreeMap<_, _>>();
  let fs = CachedMetadataFs::new(sys, options.fs_cache_options);
  let compiler_options_resolver = CompilerOptionsResolverCellRc::default();
  let sloppy_imports_resolver = new_rc(SloppyImportsResolver::new(
    fs,
    compiler_options_resolver.clone(),
    options.sloppy_imports_options,
  ));
  Ok(Self {
    workspace_root: workspace.root_dir_url().clone(),
    pkg_json_dep_resolution: options.pkg_json_dep_resolution,
    jsr_pkgs,
    maybe_import_map,
    pkg_jsons: FolderScopedMap::from_map(pkg_jsons),
    sloppy_imports_options: options.sloppy_imports_options,
    fs_cache_options: options.fs_cache_options,
    compiler_options_resolver,
    sloppy_imports_resolver,
  })
}
/// Creates a new WorkspaceResolver from the specified import map and package.jsons.
///
/// Generally, create this from a Workspace instead.
#[allow(clippy::too_many_arguments)]
pub fn new_raw(
  workspace_root: UrlRc,
  maybe_import_map: Option<ImportMap>,
  jsr_pkgs: Vec<ResolverWorkspaceJsrPackage>,
  pkg_jsons: Vec<PackageJsonRc>,
  pkg_json_dep_resolution: PackageJsonDepResolution,
  sloppy_imports_options: SloppyImportsOptions,
  fs_cache_options: FsCacheOptions,
  sys: TSys,
) -> Self {
  // A directly-provided import map carries no parse diagnostics.
  let maybe_import_map =
    maybe_import_map.map(|import_map| ImportMapWithDiagnostics {
      import_map,
      diagnostics: Default::default(),
    });
  // Key each package.json by the URL of its containing directory.
  let pkg_jsons = pkg_jsons
    .into_iter()
    .map(|pkg_json| {
      let deps = pkg_json.resolve_local_package_json_deps();
      (
        new_rc(
          url_from_directory_path(pkg_json.path.parent().unwrap()).unwrap(),
        ),
        PkgJsonResolverFolderConfig {
          deps: deps.clone(),
          pkg_json,
        },
      )
    })
    .collect::<BTreeMap<_, _>>();
  let fs = CachedMetadataFs::new(sys, fs_cache_options);
  let compiler_options_resolver = CompilerOptionsResolverCellRc::default();
  let sloppy_imports_resolver = new_rc(SloppyImportsResolver::new(
    fs,
    compiler_options_resolver.clone(),
    sloppy_imports_options,
  ));
  Self {
    workspace_root,
    jsr_pkgs,
    maybe_import_map,
    pkg_jsons: FolderScopedMap::from_map(pkg_jsons),
    pkg_json_dep_resolution,
    sloppy_imports_options,
    fs_cache_options,
    compiler_options_resolver,
    sloppy_imports_resolver,
  }
}
/// Prepare the workspace resolver for serialization
///
/// The most significant preparation involves converting
/// absolute paths into relative (based on `root_dir_url`).
/// It also takes care of pre-serializing non-serde internal data.
pub fn to_serializable(
&self,
root_dir_url: &Url,
) -> SerializableWorkspaceResolver<'_> {
let root_dir_url = BaseUrl(root_dir_url);
SerializableWorkspaceResolver {
import_map: self.maybe_import_map().map(|i| {
SerializedWorkspaceResolverImportMap {
specifier: root_dir_url.make_relative_if_descendant(i.base_url()),
json: Cow::Owned(i.to_json()),
}
}),
jsr_pkgs: self
.jsr_packages()
.iter()
.map(|pkg| SerializedResolverWorkspaceJsrPackage {
relative_base: root_dir_url.make_relative_if_descendant(&pkg.base),
name: Cow::Borrowed(&pkg.name),
version: Cow::Borrowed(&pkg.version),
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | true |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/resolver/lib.rs | libs/resolver/lib.rs | // Copyright 2018-2025 the Deno authors. MIT license.
#![deny(clippy::print_stderr)]
#![deny(clippy::print_stdout)]
use std::borrow::Cow;
use std::path::PathBuf;
use boxed_error::Boxed;
use deno_cache_dir::npm::NpmCacheDir;
use deno_error::JsError;
use deno_package_json::PackageJsonDepValue;
use deno_package_json::PackageJsonDepValueParseError;
use deno_semver::npm::NpmPackageReqReference;
pub use node_resolver::DenoIsBuiltInNodeModuleChecker;
use node_resolver::InNpmPackageChecker;
use node_resolver::IsBuiltInNodeModuleChecker;
use node_resolver::NodeResolution;
use node_resolver::NodeResolutionKind;
pub use node_resolver::NodeResolverOptions;
use node_resolver::NodeResolverRc;
use node_resolver::NpmPackageFolderResolver;
use node_resolver::ResolutionMode;
use node_resolver::UrlOrPath;
use node_resolver::UrlOrPathRef;
use node_resolver::errors::NodeJsErrorCode;
use node_resolver::errors::NodeResolveError;
use node_resolver::errors::NodeResolveErrorKind;
use node_resolver::errors::UnknownBuiltInNodeModuleError;
use npm::NodeModulesOutOfDateError;
use npm::NpmReqResolverRc;
use npm::ResolveIfForNpmPackageErrorKind;
use npm::ResolvePkgFolderFromDenoReqError;
use thiserror::Error;
use url::Url;
use self::npm::NpmResolver;
use self::npm::NpmResolverSys;
use self::npm::ResolveNpmReqRefError;
use crate::workspace::MappedResolution;
use crate::workspace::MappedResolutionDiagnostic;
use crate::workspace::MappedResolutionError;
use crate::workspace::WorkspaceResolvePkgJsonFolderError;
use crate::workspace::WorkspaceResolver;
pub mod cache;
pub mod cjs;
pub mod collections;
pub mod deno_json;
pub mod display;
#[cfg(feature = "deno_ast")]
pub mod emit;
pub mod factory;
#[cfg(feature = "graph")]
pub mod file_fetcher;
#[cfg(feature = "graph")]
pub mod graph;
pub mod import_map;
pub mod loader;
pub mod lockfile;
pub mod npm;
pub mod npmrc;
#[cfg(feature = "sync")]
mod rt;
pub mod workspace;
// Shared handle to a `WorkspaceResolver` (Arc or Rc depending on the
// `deno_maybe_sync` configuration).
#[allow(clippy::disallowed_types)]
pub type WorkspaceResolverRc<TSys> =
  deno_maybe_sync::MaybeArc<WorkspaceResolver<TSys>>;
// Shared handle to the npm cache directory layout.
#[allow(clippy::disallowed_types)]
pub(crate) type NpmCacheDirRc = deno_maybe_sync::MaybeArc<NpmCacheDir>;
/// Successful outcome of `RawDenoResolver::resolve`.
#[derive(Debug, Clone)]
pub struct DenoResolution {
  /// The fully resolved specifier.
  pub url: Url,
  /// Diagnostic surfaced by the workspace resolver's mapping step, if any.
  pub maybe_diagnostic: Option<Box<MappedResolutionDiagnostic>>,
  /// Set when the specifier matched a package.json dependency, signaling
  /// that an "npm install" may be needed later.
  pub found_package_json_dep: bool,
}
/// Boxed error returned by resolution; see [`DenoResolveErrorKind`] for the
/// possible failure variants.
#[derive(Debug, Boxed, JsError)]
pub struct DenoResolveError(pub Box<DenoResolveErrorKind>);
impl DenoResolveError {
  /// Converts this error into the `deno_graph` resolve error shape,
  /// preserving the specifier/import-map variants deno_graph inspects and
  /// boxing everything else as `Other`.
  #[cfg(feature = "graph")]
  pub fn into_deno_graph_error(self) -> deno_graph::source::ResolveError {
    use deno_error::JsErrorBox;
    use deno_graph::source::ResolveError;
    match self.into_kind() {
      DenoResolveErrorKind::MappedResolution(mapped_resolution_error) => {
        match mapped_resolution_error {
          MappedResolutionError::Specifier(e) => ResolveError::Specifier(e),
          // deno_graph checks specifically for an ImportMapError
          MappedResolutionError::ImportMap(e) => ResolveError::ImportMap(e),
          MappedResolutionError::Workspace(e) => {
            ResolveError::Other(JsErrorBox::from_err(e))
          }
          MappedResolutionError::NotFoundInCompilerOptionsPaths(e) => {
            ResolveError::Other(JsErrorBox::from_err(e))
          }
        }
      }
      err => ResolveError::Other(JsErrorBox::from_err(err)),
    }
  }
  /// Returns the specifier this error refers to, when the underlying error
  /// variant carries one.
  pub fn maybe_specifier(&self) -> Option<Cow<'_, UrlOrPath>> {
    match self.as_kind() {
      DenoResolveErrorKind::Node(err) => err.maybe_specifier(),
      DenoResolveErrorKind::PathToUrl(err) => {
        Some(Cow::Owned(UrlOrPath::Path(err.0.clone())))
      }
      DenoResolveErrorKind::ResolveNpmReqRef(err) => err.err.maybe_specifier(),
      // remaining variants carry no specifier
      DenoResolveErrorKind::MappedResolution(_)
      | DenoResolveErrorKind::WorkspaceResolvePkgJsonFolder(_)
      | DenoResolveErrorKind::ResolvePkgFolderFromDenoReq(_)
      | DenoResolveErrorKind::InvalidVendorFolderImport
      | DenoResolveErrorKind::UnsupportedPackageJsonFileSpecifier
      | DenoResolveErrorKind::UnsupportedPackageJsonJsrReq
      | DenoResolveErrorKind::NodeModulesOutOfDate(_)
      | DenoResolveErrorKind::PackageJsonDepValueParse(_)
      | DenoResolveErrorKind::PackageJsonDepValueUrlParse(_) => None,
    }
  }
}
/// The set of failures that can occur while resolving a specifier; user
/// facing messages come from the `#[error]` attributes.
#[derive(Debug, Error, JsError)]
pub enum DenoResolveErrorKind {
  #[class(type)]
  #[error(
    "Importing from the vendor directory is not permitted. Use a remote specifier instead or disable vendoring."
  )]
  InvalidVendorFolderImport,
  #[class(type)]
  #[error(
    "Importing npm packages via a file: specifier is only supported with --node-modules-dir=manual"
  )]
  UnsupportedPackageJsonFileSpecifier,
  #[class(type)]
  #[error("JSR specifiers are not yet supported in package.json")]
  UnsupportedPackageJsonJsrReq,
  #[class(inherit)]
  #[error(transparent)]
  MappedResolution(#[from] MappedResolutionError),
  #[class(inherit)]
  #[error(transparent)]
  Node(#[from] NodeResolveError),
  #[class(inherit)]
  #[error(transparent)]
  NodeModulesOutOfDate(#[from] NodeModulesOutOfDateError),
  #[class(inherit)]
  #[error(transparent)]
  PackageJsonDepValueParse(#[from] PackageJsonDepValueParseError),
  #[class(inherit)]
  #[error(transparent)]
  PackageJsonDepValueUrlParse(url::ParseError),
  #[class(inherit)]
  #[error(transparent)]
  PathToUrl(#[from] deno_path_util::PathToUrlError),
  #[class(inherit)]
  #[error(transparent)]
  ResolveNpmReqRef(#[from] ResolveNpmReqRefError),
  #[class(inherit)]
  #[error(transparent)]
  ResolvePkgFolderFromDenoReq(#[from] ResolvePkgFolderFromDenoReqError),
  #[class(inherit)]
  #[error(transparent)]
  WorkspaceResolvePkgJsonFolder(#[from] WorkspaceResolvePkgJsonFolderError),
}
impl DenoResolveErrorKind {
  /// Returns the Node.js error code associated with this error, when the
  /// underlying node/npm error variant provides one.
  pub fn maybe_node_code(&self) -> Option<NodeJsErrorCode> {
    match self {
      DenoResolveErrorKind::InvalidVendorFolderImport
      | DenoResolveErrorKind::UnsupportedPackageJsonFileSpecifier
      | DenoResolveErrorKind::UnsupportedPackageJsonJsrReq
      | DenoResolveErrorKind::MappedResolution { .. }
      | DenoResolveErrorKind::NodeModulesOutOfDate { .. }
      | DenoResolveErrorKind::PackageJsonDepValueParse { .. }
      | DenoResolveErrorKind::PackageJsonDepValueUrlParse { .. }
      | DenoResolveErrorKind::PathToUrl { .. }
      | DenoResolveErrorKind::ResolvePkgFolderFromDenoReq { .. }
      | DenoResolveErrorKind::WorkspaceResolvePkgJsonFolder { .. } => None,
      DenoResolveErrorKind::ResolveNpmReqRef(err) => {
        err.err.as_kind().maybe_code()
      }
      DenoResolveErrorKind::Node(err) => err.as_kind().maybe_code(),
    }
  }
}
/// Bundles the node resolver, npm resolver, and npm requirement resolver
/// that operate together when npm support is enabled.
#[derive(Debug)]
pub struct NodeAndNpmResolvers<
  TInNpmPackageChecker: InNpmPackageChecker,
  TIsBuiltInNodeModuleChecker: IsBuiltInNodeModuleChecker,
  TNpmPackageFolderResolver: NpmPackageFolderResolver,
  TSys: NpmResolverSys,
> {
  pub node_resolver: NodeResolverRc<
    TInNpmPackageChecker,
    TIsBuiltInNodeModuleChecker,
    TNpmPackageFolderResolver,
    TSys,
  >,
  pub npm_resolver: NpmResolver<TSys>,
  pub npm_req_resolver: NpmReqResolverRc<
    TInNpmPackageChecker,
    TIsBuiltInNodeModuleChecker,
    TNpmPackageFolderResolver,
    TSys,
  >,
}
/// Marker trait bundling the system capabilities the Deno resolver needs;
/// auto-implemented for anything satisfying `NpmResolverSys`.
#[sys_traits::auto_impl]
pub trait DenoResolverSys: NpmResolverSys {}
/// Construction options for [`RawDenoResolver`].
pub struct DenoResolverOptions<
  'a,
  TInNpmPackageChecker: InNpmPackageChecker,
  TIsBuiltInNodeModuleChecker: IsBuiltInNodeModuleChecker,
  TNpmPackageFolderResolver: NpmPackageFolderResolver,
  TSys: DenoResolverSys,
> {
  /// Determines whether a specifier lives inside an npm package.
  pub in_npm_pkg_checker: TInNpmPackageChecker,
  /// Node/npm resolution machinery; `None` when npm support is disabled.
  pub node_and_req_resolver: Option<
    NodeAndNpmResolvers<
      TInNpmPackageChecker,
      TIsBuiltInNodeModuleChecker,
      TNpmPackageFolderResolver,
      TSys,
    >,
  >,
  /// Workspace-level resolver (import map, JSR packages, package.json deps).
  pub workspace_resolver: WorkspaceResolverRc<TSys>,
  /// Whether bare node built-ins are enabled (ex. resolve "path" as "node:path").
  pub bare_node_builtins: bool,
  /// Whether "bring your own node_modules" is enabled where Deno does not
  /// setup the node_modules directories automatically, but instead uses
  /// what already exists on the file system.
  pub is_byonm: bool,
  /// Vendor directory, when vendoring is enabled; direct imports from it
  /// are rejected.
  pub maybe_vendor_dir: Option<&'a PathBuf>,
}
// Shared handle to a `RawDenoResolver` (Arc or Rc depending on the
// `deno_maybe_sync` configuration).
#[allow(clippy::disallowed_types)]
pub type RawDenoResolverRc<
  TInNpmPackageChecker,
  TIsBuiltInNodeModuleChecker,
  TNpmPackageFolderResolver,
  TSys,
> = deno_maybe_sync::MaybeArc<
  RawDenoResolver<
    TInNpmPackageChecker,
    TIsBuiltInNodeModuleChecker,
    TNpmPackageFolderResolver,
    TSys,
  >,
>;
/// Helper type for a RawDenoResolverRc that has the implementations
/// used by the Deno CLI.
pub type DefaultRawDenoResolverRc<TSys> = RawDenoResolverRc<
  npm::DenoInNpmPackageChecker,
  DenoIsBuiltInNodeModuleChecker,
  npm::NpmResolver<TSys>,
  TSys,
>;
/// A resolver that takes care of resolution, taking into account loaded
/// import map, JSX settings.
#[derive(Debug)]
pub struct RawDenoResolver<
  TInNpmPackageChecker: InNpmPackageChecker,
  TIsBuiltInNodeModuleChecker: IsBuiltInNodeModuleChecker,
  TNpmPackageFolderResolver: NpmPackageFolderResolver,
  TSys: DenoResolverSys,
> {
  // Determines whether a specifier lives inside an npm package.
  in_npm_pkg_checker: TInNpmPackageChecker,
  // Node/npm resolution machinery; `None` when npm support is disabled.
  node_and_npm_resolver: Option<
    NodeAndNpmResolvers<
      TInNpmPackageChecker,
      TIsBuiltInNodeModuleChecker,
      TNpmPackageFolderResolver,
      TSys,
    >,
  >,
  workspace_resolver: WorkspaceResolverRc<TSys>,
  // See `DenoResolverOptions::bare_node_builtins`.
  bare_node_builtins: bool,
  // See `DenoResolverOptions::is_byonm`.
  is_byonm: bool,
  // The vendor directory as a URL, used to reject direct vendor/ imports.
  maybe_vendor_specifier: Option<Url>,
}
impl<
TInNpmPackageChecker: InNpmPackageChecker,
TIsBuiltInNodeModuleChecker: IsBuiltInNodeModuleChecker,
TNpmPackageFolderResolver: NpmPackageFolderResolver,
TSys: DenoResolverSys,
>
RawDenoResolver<
TInNpmPackageChecker,
TIsBuiltInNodeModuleChecker,
TNpmPackageFolderResolver,
TSys,
>
{
/// Builds a resolver from the provided options, pre-converting the vendor
/// directory (when given) into a URL for fast prefix checks.
pub fn new(
  options: DenoResolverOptions<
    TInNpmPackageChecker,
    TIsBuiltInNodeModuleChecker,
    TNpmPackageFolderResolver,
    TSys,
  >,
) -> Self {
  let maybe_vendor_specifier = options
    .maybe_vendor_dir
    .and_then(|dir| deno_path_util::url_from_directory_path(dir).ok());
  Self {
    in_npm_pkg_checker: options.in_npm_pkg_checker,
    node_and_npm_resolver: options.node_and_req_resolver,
    workspace_resolver: options.workspace_resolver,
    bare_node_builtins: options.bare_node_builtins,
    is_byonm: options.is_byonm,
    maybe_vendor_specifier,
  }
}
/// Resolves `raw_specifier` relative to `referrer`, layering (in order):
/// node resolution when the referrer is inside an npm package, the
/// workspace resolver (import map / JSR / package.json), a vendor-dir
/// guard, `node:` builtin validation, npm specifier handling, and
/// byonm/bare-builtin fallbacks.
pub fn resolve(
  &self,
  raw_specifier: &str,
  referrer: &Url,
  resolution_mode: ResolutionMode,
  resolution_kind: NodeResolutionKind,
) -> Result<DenoResolution, DenoResolveError> {
  let mut found_package_json_dep = false;
  let mut maybe_diagnostic = None;
  // Use node resolution if we're in an npm package
  if let Some(node_and_npm_resolver) = self.node_and_npm_resolver.as_ref() {
    let node_resolver = &node_and_npm_resolver.node_resolver;
    if referrer.scheme() == "file"
      && self.in_npm_pkg_checker.in_npm_package(referrer)
    {
      log::debug!(
        "{}: specifier={} referrer={} mode={:?} kind={:?}",
        deno_terminal::colors::magenta("resolving in npm package"),
        raw_specifier,
        referrer,
        resolution_mode,
        resolution_kind
      );
      return node_resolver
        .resolve(raw_specifier, referrer, resolution_mode, resolution_kind)
        .and_then(|res| {
          Ok(DenoResolution {
            url: res.into_url()?,
            found_package_json_dep,
            maybe_diagnostic,
          })
        })
        .map_err(|e| e.into());
    }
  }
  // Attempt to resolve with the workspace resolver
  let result = self.workspace_resolver.resolve(
    raw_specifier,
    referrer,
    resolution_kind.into(),
  );
  // Map each workspace-resolution outcome to a URL (or error).
  let result = match result {
    Ok(resolution) => match resolution {
      MappedResolution::Normal {
        specifier,
        maybe_diagnostic: current_diagnostic,
        ..
      } => {
        maybe_diagnostic = current_diagnostic;
        Ok(specifier)
      }
      MappedResolution::WorkspaceJsrPackage { specifier, .. } => {
        Ok(specifier)
      }
      MappedResolution::WorkspaceNpmPackage {
        target_pkg_json: pkg_json,
        sub_path,
        ..
      } => self
        .node_and_npm_resolver
        .as_ref()
        .unwrap()
        .node_resolver
        .resolve_package_subpath_from_deno_module(
          pkg_json.dir_path(),
          sub_path.as_deref(),
          Some(referrer),
          resolution_mode,
          resolution_kind,
        )
        .map_err(|e| {
          DenoResolveErrorKind::Node(e.into_node_resolve_error()).into_box()
        })
        .and_then(|r| Ok(r.into_url()?)),
      MappedResolution::PackageJson {
        dep_result,
        alias,
        sub_path,
        ..
      } => {
        // found a specifier in the package.json, so mark that
        // we need to do an "npm install" later
        found_package_json_dep = true;
        dep_result
          .as_ref()
          .map_err(|e| {
            DenoResolveErrorKind::PackageJsonDepValueParse(e.clone())
              .into_box()
          })
          .and_then(|dep| match dep {
            PackageJsonDepValue::File(_) => {
              // We don't support --node-modules-dir=auto/none because it's too
              // much work to get this to work with a lockfile properly and for
              // multiple managed node_modules directories to work. If someone wants
              // to do this, then they need to use the default (manual)
              Err(
                DenoResolveErrorKind::UnsupportedPackageJsonFileSpecifier
                  .into_box(),
              )
            }
            PackageJsonDepValue::JsrReq(_) => Err(
              DenoResolveErrorKind::UnsupportedPackageJsonJsrReq.into_box(),
            ),
            // todo(dsherret): it seems bad that we're converting this
            // to a url because the req might not be a valid url.
            PackageJsonDepValue::Req(req) => Url::parse(&format!(
              "npm:{}{}",
              req,
              sub_path.map(|s| format!("/{}", s)).unwrap_or_default()
            ))
            .map_err(|e| {
              DenoResolveErrorKind::PackageJsonDepValueUrlParse(e).into_box()
            }),
            PackageJsonDepValue::Workspace(version_req) => self
              .workspace_resolver
              .resolve_workspace_pkg_json_folder_for_pkg_json_dep(
                alias,
                version_req,
              )
              .map_err(|e| {
                DenoResolveErrorKind::WorkspaceResolvePkgJsonFolder(e)
                  .into_box()
              })
              .and_then(|pkg_folder| {
                self
                  .node_and_npm_resolver
                  .as_ref()
                  .unwrap()
                  .node_resolver
                  .resolve_package_subpath_from_deno_module(
                    pkg_folder,
                    sub_path.as_deref(),
                    Some(referrer),
                    resolution_mode,
                    resolution_kind,
                  )
                  .map_err(|e| {
                    DenoResolveErrorKind::Node(e.into_node_resolve_error())
                      .into_box()
                  })
              })
              .and_then(|r| Ok(r.into_url()?)),
          })
      }
      MappedResolution::PackageJsonImport { pkg_json } => self
        .node_and_npm_resolver
        .as_ref()
        .unwrap()
        .node_resolver
        .resolve_package_import(
          raw_specifier,
          Some(&UrlOrPathRef::from_url(referrer)),
          Some(pkg_json),
          resolution_mode,
          resolution_kind,
        )
        .map_err(|e| {
          DenoResolveErrorKind::Node(
            NodeResolveErrorKind::PackageImportsResolve(e).into_box(),
          )
          .into_box()
        })
        .and_then(|r| Ok(r.into_url()?)),
    },
    Err(err) => Err(err.into()),
  };
  // When the user is vendoring, don't allow them to import directly from the vendor/ directory
  // as it might cause them confusion or duplicate dependencies. Additionally, this folder has
  // special treatment in the language server so it will definitely cause issues/confusion there
  // if they do this.
  if let Some(vendor_specifier) = &self.maybe_vendor_specifier
    && let Ok(specifier) = &result
    && specifier.as_str().starts_with(vendor_specifier.as_str())
  {
    return Err(DenoResolveErrorKind::InvalidVendorFolderImport.into_box());
  }
  // Without node/npm resolvers there's no further processing to do.
  let Some(NodeAndNpmResolvers {
    node_resolver,
    npm_req_resolver,
    ..
  }) = &self.node_and_npm_resolver
  else {
    return Ok(DenoResolution {
      url: result?,
      maybe_diagnostic,
      found_package_json_dep,
    });
  };
  match result {
    Ok(specifier) => {
      // Validate node: specifiers against the known builtin module list.
      if specifier.scheme() == "node" {
        let module_name = specifier.path();
        return if node_resolver.is_builtin_node_module(module_name) {
          Ok(DenoResolution {
            url: specifier,
            maybe_diagnostic,
            found_package_json_dep,
          })
        } else {
          Err(
            NodeResolveErrorKind::UnknownBuiltInNodeModule(
              UnknownBuiltInNodeModuleError {
                module_name: module_name.to_string(),
              },
            )
            .into_box()
            .into(),
          )
        };
      }
      if let Ok(npm_req_ref) =
        NpmPackageReqReference::from_specifier(&specifier)
      {
        // check if the npm specifier resolves to a workspace member
        if let Some(pkg_folder) = self
          .workspace_resolver
          .resolve_workspace_pkg_json_folder_for_npm_specifier(
            npm_req_ref.req(),
          )
        {
          return node_resolver
            .resolve_package_subpath_from_deno_module(
              pkg_folder,
              npm_req_ref.sub_path(),
              Some(referrer),
              resolution_mode,
              resolution_kind,
            )
            .map_err(|err| {
              DenoResolveErrorKind::ResolveNpmReqRef(ResolveNpmReqRefError {
                npm_req_ref: npm_req_ref.clone(),
                err: err.into(),
              })
              .into_box()
            })
            .and_then(|url_or_path| {
              Ok(DenoResolution {
                url: url_or_path.into_url()?,
                maybe_diagnostic,
                found_package_json_dep,
              })
            });
        }
        // byonm resolves npm requirements against the existing node_modules
        if self.is_byonm {
          return npm_req_resolver
            .resolve_req_reference(
              &npm_req_ref,
              referrer,
              resolution_mode,
              resolution_kind,
            )
            .map_err(|err| {
              DenoResolveErrorKind::ResolveNpmReqRef(err).into_box()
            })
            .and_then(|url_or_path| {
              Ok(DenoResolution {
                url: url_or_path.into_url()?,
                maybe_diagnostic,
                found_package_json_dep,
              })
            });
        }
      }
      Ok(DenoResolution {
        url: node_resolver
          .handle_if_in_node_modules(&specifier)
          .unwrap_or(specifier),
        maybe_diagnostic,
        found_package_json_dep,
      })
    }
    Err(err) => {
      // If byonm, check if the bare specifier resolves to an npm package
      if self.is_byonm && referrer.scheme() == "file" {
        let maybe_resolution = npm_req_resolver
          .resolve_if_for_npm_pkg(
            raw_specifier,
            referrer,
            resolution_mode,
            resolution_kind,
          )
          .map_err(|e| match e.into_kind() {
            ResolveIfForNpmPackageErrorKind::NodeResolve(e) => {
              DenoResolveErrorKind::Node(e).into_box()
            }
            ResolveIfForNpmPackageErrorKind::NodeModulesOutOfDate(e) => {
              e.into()
            }
          })?;
        if let Some(res) = maybe_resolution {
          match res {
            NodeResolution::Module(ref _url) => {
              return Ok(DenoResolution {
                url: res.into_url()?,
                maybe_diagnostic,
                found_package_json_dep,
              });
            }
            NodeResolution::BuiltIn(ref _module) => {
              // builtins only win here when bare builtins are enabled
              if self.bare_node_builtins {
                return Ok(DenoResolution {
                  url: res.into_url()?,
                  maybe_diagnostic,
                  found_package_json_dep,
                });
              }
            }
          }
        }
      } else if self.bare_node_builtins
        && matches!(err.as_kind(), DenoResolveErrorKind::MappedResolution(err) if err.is_unmapped_bare_specifier())
        && node_resolver.is_builtin_node_module(raw_specifier)
      {
        // unmapped bare specifier that names a builtin -> "node:" specifier
        return Ok(DenoResolution {
          url: Url::parse(&format!("node:{}", raw_specifier)).unwrap(),
          maybe_diagnostic,
          found_package_json_dep,
        });
      }
      Err(err)
    }
  }
}
#[cfg(feature = "graph")]
pub(crate) fn resolve_non_workspace_npm_req_ref_to_file(
&self,
npm_req_ref: &NpmPackageReqReference,
referrer: &Url,
resolution_mode: ResolutionMode,
resolution_kind: NodeResolutionKind,
) -> Result<node_resolver::UrlOrPath, npm::ResolveNpmReqRefError> {
let Some(NodeAndNpmResolvers {
npm_req_resolver, ..
}) = &self.node_and_npm_resolver
else {
return Err(npm::ResolveNpmReqRefError {
npm_req_ref: npm_req_ref.clone(),
err: npm::ResolveReqWithSubPathErrorKind::NoNpm(npm::NoNpmError)
.into_box(),
});
};
npm_req_resolver.resolve_req_reference(
npm_req_ref,
referrer,
resolution_mode,
resolution_kind,
)
}
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/resolver/emit.rs | libs/resolver/emit.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::hash::Hash;
use std::hash::Hasher;
use anyhow::Error as AnyError;
use deno_ast::EmittedSourceText;
use deno_ast::ModuleKind;
use deno_ast::ParsedSource;
use deno_ast::SourceMapOption;
use deno_ast::SourceRange;
use deno_ast::SourceRanged;
use deno_ast::SourceRangedForSpanned;
use deno_ast::TranspileModuleOptions;
use deno_ast::TranspileResult;
use deno_error::JsErrorBox;
use deno_graph::MediaType;
use deno_graph::Module;
use deno_graph::ModuleGraph;
use deno_maybe_sync::MaybeSend;
use deno_maybe_sync::MaybeSync;
use futures::FutureExt;
use futures::StreamExt;
use futures::stream::FuturesUnordered;
use node_resolver::InNpmPackageChecker;
use url::Url;
use crate::cache::EmitCacheRc;
use crate::cache::EmitCacheSys;
use crate::cache::ParsedSourceCacheRc;
use crate::cjs::CjsTrackerRc;
use crate::deno_json::CompilerOptionsParseError;
use crate::deno_json::CompilerOptionsResolverRc;
use crate::deno_json::TranspileAndEmitOptions;
#[allow(clippy::disallowed_types)] // ok because we always store source text as Arc<str>
type ArcStr = std::sync::Arc<str>;
/// Shared handle to an [`Emitter`].
#[allow(clippy::disallowed_types)]
pub type EmitterRc<TInNpmPackageChecker, TSys> =
  deno_maybe_sync::MaybeArc<Emitter<TInNpmPackageChecker, TSys>>;
/// System capabilities required by [`Emitter`] (currently just what the
/// emit cache needs).
#[sys_traits::auto_impl]
pub trait EmitterSys: EmitCacheSys {}
/// Transpiles (emits) TypeScript/JSX sources to JavaScript, caching the
/// results keyed by a hash of the source and emit options.
#[derive(Debug)]
pub struct Emitter<TInNpmPackageChecker: InNpmPackageChecker, TSys: EmitterSys>
{
  /// Used to decide whether a module is emitted as CJS or ESM.
  cjs_tracker: CjsTrackerRc<TInNpmPackageChecker, TSys>,
  /// Cache of previous emits, keyed by specifier and source hash.
  emit_cache: EmitCacheRc<TSys>,
  /// Cache of parsed sources to avoid re-parsing before transpile.
  parsed_source_cache: ParsedSourceCacheRc,
  /// Resolves the transpile/emit options applicable to a given specifier.
  compiler_options_resolver: CompilerOptionsResolverRc,
}
impl<TInNpmPackageChecker: InNpmPackageChecker, TSys: EmitterSys>
Emitter<TInNpmPackageChecker, TSys>
{
  /// Creates a new emitter from its component services.
  pub fn new(
    cjs_tracker: CjsTrackerRc<TInNpmPackageChecker, TSys>,
    emit_cache: EmitCacheRc<TSys>,
    parsed_source_cache: ParsedSourceCacheRc,
    compiler_options_resolver: CompilerOptionsResolverRc,
  ) -> Self {
    Self {
      cjs_tracker,
      emit_cache,
      parsed_source_cache,
      compiler_options_resolver,
    }
  }
  /// Emits every emittable JS module in the graph (populating the emit
  /// cache), surfacing the first error encountered.
  pub async fn cache_module_emits(
    &self,
    graph: &ModuleGraph,
  ) -> Result<(), AnyError> {
    let mut futures = FuturesUnordered::new();
    for module in graph.modules() {
      // only JS modules can be emitted
      let Module::Js(module) = module else {
        continue;
      };
      if module.media_type.is_emittable() {
        futures.push(
          self
            .maybe_emit_source(
              &module.specifier,
              module.media_type,
              // the module kind (ESM vs CJS) is part of the emit cache key
              ModuleKind::from_is_cjs(
                self.cjs_tracker.is_cjs_with_known_is_script(
                  &module.specifier,
                  module.media_type,
                  module.is_script,
                )?,
              ),
              &module.source.text,
            )
            .boxed_local(),
        );
      }
    }
    while let Some(result) = futures.next().await {
      result?; // surface errors
    }
    Ok(())
  }
  /// Gets a cached emit if the source matches the hash found in the cache.
  ///
  /// Returns `Ok(None)` when there is no cached emit for the current
  /// combination of source text, module kind, and transpile options.
  pub fn maybe_cached_emit(
    &self,
    specifier: &Url,
    module_kind: deno_ast::ModuleKind,
    source: &str,
  ) -> Result<Option<String>, AnyError> {
    let transpile_and_emit_options = self
      .compiler_options_resolver
      .for_specifier(specifier)
      .transpile_options()?;
    // hash includes source, options, and module kind so any change to
    // them invalidates the cached emit
    let source_hash =
      self.get_source_hash(module_kind, transpile_and_emit_options, source);
    Ok(self.emit_cache.get_emit_code(specifier, source_hash))
  }
pub async fn maybe_emit_source(
&self,
specifier: &Url,
media_type: MediaType,
module_kind: ModuleKind,
source: &ArcStr,
) -> Result<ArcStr, EmitParsedSourceHelperError> {
self
.maybe_emit_parsed_source_provider(
ParsedSourceCacheParsedSourceProvider {
parsed_source_cache: self.parsed_source_cache.clone(),
specifier: specifier.clone(),
media_type,
source: source.clone(),
},
module_kind,
)
.await
}
  /// Transpiles an already parsed source when its media type requires it
  /// (see [`Emitter::maybe_emit_source`] for the text-based variant).
  pub async fn maybe_emit_parsed_source(
    &self,
    parsed_source: deno_ast::ParsedSource,
    module_kind: ModuleKind,
  ) -> Result<ArcStr, EmitParsedSourceHelperError> {
    // note: this method is used in deno-js-loader
    self
      .maybe_emit_parsed_source_provider(parsed_source, module_kind)
      .await
  }
  /// Shared implementation for the async emit entry points: returns the
  /// source unchanged when no transpile is needed, otherwise checks the
  /// emit cache and only transpiles on a cache miss.
  async fn maybe_emit_parsed_source_provider<
    TProvider: ParsedSourceProvider,
  >(
    &self,
    provider: TProvider,
    module_kind: ModuleKind,
  ) -> Result<ArcStr, EmitParsedSourceHelperError> {
    // Note: keep this in sync with the sync version below
    if !provider.media_type().is_emittable() {
      return Ok(provider.into_source());
    }
    let transpile_and_emit_options = self
      .compiler_options_resolver
      .for_specifier(provider.specifier())
      .transpile_options()?;
    if transpile_and_emit_options.no_transpile {
      return Ok(provider.into_source());
    }
    let transpile_options = &transpile_and_emit_options.transpile;
    if matches!(provider.media_type(), MediaType::Jsx)
      && transpile_options.jsx.is_none()
    {
      // jsx disabled, so skip
      return Ok(provider.into_source());
    }
    let helper = EmitParsedSourceHelper(self);
    match helper.pre_emit_parsed_source(
      provider.specifier(),
      module_kind,
      transpile_and_emit_options,
      provider.source(),
    ) {
      PreEmitResult::Cached(emitted_text) => Ok(emitted_text.into()),
      PreEmitResult::NotCached { source_hash } => {
        let specifier = provider.specifier().clone();
        // closure performing parse + transpile; with the "sync" feature
        // enabled it's run on a blocking thread
        let emit = {
          let transpile_and_emit_options = transpile_and_emit_options.clone();
          move || {
            let parsed_source = provider.parsed_source()?;
            transpile(
              parsed_source,
              module_kind,
              &transpile_and_emit_options.transpile,
              &transpile_and_emit_options.emit,
            )
            .map(|r| r.text)
          }
        };
        #[cfg(feature = "sync")]
        let transpiled_source =
          crate::rt::spawn_blocking(emit).await.unwrap()?;
        #[cfg(not(feature = "sync"))]
        let transpiled_source = emit()?;
        // store in the emit cache for next time
        helper.post_emit_parsed_source(
          &specifier,
          &transpiled_source,
          source_hash,
        );
        Ok(transpiled_source.into())
      }
    }
  }
  /// Synchronous counterpart of [`Emitter::maybe_emit_source`]: transpiles
  /// the source on the current thread when its media type requires it.
  #[allow(clippy::result_large_err)]
  pub fn maybe_emit_source_sync(
    &self,
    specifier: &Url,
    media_type: MediaType,
    module_kind: deno_ast::ModuleKind,
    source: &ArcStr,
  ) -> Result<ArcStr, EmitParsedSourceHelperError> {
    // Note: keep this in sync with the async version above
    if !media_type.is_emittable() {
      return Ok(source.clone());
    }
    let transpile_and_emit_options = self
      .compiler_options_resolver
      .for_specifier(specifier)
      .transpile_options()?;
    if transpile_and_emit_options.no_transpile {
      return Ok(source.clone());
    }
    let transpile_options = &transpile_and_emit_options.transpile;
    if matches!(media_type, MediaType::Jsx) && transpile_options.jsx.is_none() {
      // jsx disabled, so skip
      return Ok(source.clone());
    }
    let helper = EmitParsedSourceHelper(self);
    match helper.pre_emit_parsed_source(
      specifier,
      module_kind,
      transpile_and_emit_options,
      source,
    ) {
      PreEmitResult::Cached(emitted_text) => Ok(emitted_text.into()),
      PreEmitResult::NotCached { source_hash } => {
        let parsed_source = self.parsed_source_cache.remove_or_parse_module(
          specifier,
          media_type,
          source.clone(),
        )?;
        let transpiled_source = transpile(
          parsed_source,
          module_kind,
          &transpile_and_emit_options.transpile,
          &transpile_and_emit_options.emit,
        )?
        .text;
        // store in the emit cache for next time
        helper.post_emit_parsed_source(
          specifier,
          &transpiled_source,
          source_hash,
        );
        Ok(transpiled_source.into())
      }
    }
  }
  /// Emits a source for inclusion in a `deno compile` binary, returning
  /// the transpiled text and a separate (non-inlined) source map.
  pub fn emit_source_for_deno_compile(
    &self,
    specifier: &Url,
    media_type: MediaType,
    module_kind: deno_ast::ModuleKind,
    source: &ArcStr,
  ) -> Result<(String, String), AnyError> {
    let transpile_and_emit_options = self
      .compiler_options_resolver
      .for_specifier(specifier)
      .transpile_options()?;
    let mut emit_options = transpile_and_emit_options.emit.clone();
    emit_options.inline_sources = false;
    emit_options.source_map = SourceMapOption::Separate;
    // strip off the path to have more deterministic builds as we don't care
    // about the source name because we manually provide the source map to v8
    emit_options.source_map_base = Some(deno_path_util::url_parent(specifier));
    let parsed_source = self.parsed_source_cache.remove_or_parse_module(
      specifier,
      media_type,
      source.clone(),
    )?;
    let source = transpile(
      parsed_source,
      module_kind,
      &transpile_and_emit_options.transpile,
      &emit_options,
    )?;
    // a source map is expected because `Separate` was requested above
    Ok((source.text, source.source_map.unwrap()))
  }
  /// Emits a freshly edited module for hot module replacement, returning
  /// the transpiled text (without a source map). JavaScript-like and
  /// non-emittable media types are passed through unchanged.
  ///
  /// Expects a file URL, panics otherwise.
  pub fn emit_for_hmr(
    &self,
    specifier: &Url,
    source_code: String,
  ) -> Result<String, JsErrorBox> {
    let media_type = MediaType::from_specifier(specifier);
    match media_type {
      MediaType::TypeScript
      | MediaType::Mts
      | MediaType::Cts
      | MediaType::Jsx
      | MediaType::Tsx => {
        let source_arc: ArcStr = source_code.into();
        let parsed_source = self
          .parsed_source_cache
          .remove_or_parse_module(specifier, media_type, source_arc)
          .map_err(JsErrorBox::from_err)?;
        // HMR doesn't work with embedded source maps for some reason, so set
        // the option to not use them (though you should test this out because
        // this statement is probably wrong)
        let transpile_and_emit_options = self
          .compiler_options_resolver
          .for_specifier(specifier)
          .transpile_options()
          .map_err(JsErrorBox::from_err)?;
        let mut options = transpile_and_emit_options.emit.clone();
        options.source_map = SourceMapOption::None;
        let is_cjs = self
          .cjs_tracker
          .is_cjs_with_known_is_script(
            specifier,
            media_type,
            parsed_source.compute_is_script(),
          )
          .map_err(JsErrorBox::from_err)?;
        let transpiled_source = parsed_source
          .transpile(
            &transpile_and_emit_options.transpile,
            &deno_ast::TranspileModuleOptions {
              module_kind: Some(ModuleKind::from_is_cjs(is_cjs)),
            },
            &options,
          )
          .map_err(JsErrorBox::from_err)?
          .into_source();
        Ok(transpiled_source.text)
      }
      MediaType::JavaScript
      | MediaType::Mjs
      | MediaType::Cjs
      | MediaType::Dts
      | MediaType::Dmts
      | MediaType::Dcts
      | MediaType::Json
      | MediaType::Jsonc
      | MediaType::Json5
      | MediaType::Wasm
      | MediaType::Css
      | MediaType::Html
      | MediaType::SourceMap
      | MediaType::Sql
      | MediaType::Unknown => {
        // clear this specifier from the parsed source cache as it's now out of date
        self.parsed_source_cache.free(specifier);
        Ok(source_code)
      }
    }
  }
  /// A hashing function that takes the source code and uses the global emit
  /// options then generates a string hash which can be stored to
  /// determine if the cached emit is valid or not.
  ///
  /// The module kind participates in the hash because it affects the emit
  /// output.
  fn get_source_hash(
    &self,
    module_kind: ModuleKind,
    transpile_and_emit: &TranspileAndEmitOptions,
    source_text: &str,
  ) -> u64 {
    let mut hasher = twox_hash::XxHash64::default();
    source_text.hash(&mut hasher);
    // the options expose a pre-computed hash so the full option structs
    // don't need to be re-hashed on every call
    transpile_and_emit.pre_computed_hash.hash(&mut hasher);
    module_kind.hash(&mut hasher);
    hasher.finish()
  }
}
/// Abstraction over "a source that can be parsed on demand", letting the
/// emit path work from either raw text or an already parsed source.
#[allow(clippy::result_large_err)]
trait ParsedSourceProvider: MaybeSend + MaybeSync + Clone + 'static {
  fn specifier(&self) -> &Url;
  fn media_type(&self) -> MediaType;
  /// Borrows the source text.
  fn source(&self) -> &ArcStr;
  /// Consumes the provider, yielding the source text.
  fn into_source(self) -> ArcStr;
  /// Consumes the provider, yielding a parsed source (parsing if needed).
  fn parsed_source(self) -> Result<ParsedSource, deno_ast::ParseDiagnostic>;
}
/// Provider that parses lazily through the shared parsed source cache.
#[derive(Clone)]
struct ParsedSourceCacheParsedSourceProvider {
  parsed_source_cache: ParsedSourceCacheRc,
  specifier: Url,
  media_type: MediaType,
  source: ArcStr,
}
impl ParsedSourceProvider for ParsedSourceCacheParsedSourceProvider {
  fn specifier(&self) -> &Url {
    &self.specifier
  }
  fn media_type(&self) -> MediaType {
    self.media_type
  }
  fn source(&self) -> &ArcStr {
    &self.source
  }
  fn into_source(self) -> ArcStr {
    self.source
  }
  fn parsed_source(self) -> Result<ParsedSource, deno_ast::ParseDiagnostic> {
    // takes a previously cached parse when available, otherwise parses
    self.parsed_source_cache.remove_or_parse_module(
      &self.specifier,
      self.media_type,
      self.source.clone(),
    )
  }
}
/// An already parsed source acts as its own provider (no parse needed).
impl ParsedSourceProvider for ParsedSource {
  fn specifier(&self) -> &Url {
    ParsedSource::specifier(self)
  }
  fn media_type(&self) -> MediaType {
    ParsedSource::media_type(self)
  }
  fn source(&self) -> &ArcStr {
    self.text()
  }
  fn into_source(self) -> ArcStr {
    self.text().clone()
  }
  fn parsed_source(self) -> Result<ParsedSource, deno_ast::ParseDiagnostic> {
    Ok(self)
  }
}
/// Outcome of the emit-cache check done before transpiling.
enum PreEmitResult {
  /// A matching emit was found in the cache.
  Cached(String),
  /// No cached emit; `source_hash` is the key to store the new emit under.
  NotCached { source_hash: u64 },
}
/// Error that can occur while emitting a parsed source.
#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum EmitParsedSourceHelperError {
  /// Failed resolving/parsing the compiler options for the specifier.
  #[class(inherit)]
  #[error(transparent)]
  CompilerOptionsParse(#[from] CompilerOptionsParseError),
  /// The source failed to parse.
  #[class(inherit)]
  #[error(transparent)]
  ParseDiagnostic(#[from] deno_ast::ParseDiagnostic),
  /// Transpilation itself failed.
  #[class(inherit)]
  #[error(transparent)]
  Transpile(#[from] deno_ast::TranspileError),
  #[class(inherit)]
  #[error(transparent)]
  Other(#[from] JsErrorBox),
}
/// Helper to share code between async and sync emit_parsed_source methods.
///
/// Wraps a borrowed [`Emitter`] to provide the pre/post emit cache steps.
struct EmitParsedSourceHelper<
  'a,
  TInNpmPackageChecker: InNpmPackageChecker,
  TSys: EmitterSys,
>(&'a Emitter<TInNpmPackageChecker, TSys>);
impl<TInNpmPackageChecker: InNpmPackageChecker, TSys: EmitterSys>
  EmitParsedSourceHelper<'_, TInNpmPackageChecker, TSys>
{
  /// Computes the source hash and checks the emit cache, returning either
  /// the cached emit or the hash to store a new emit under.
  pub fn pre_emit_parsed_source(
    &self,
    specifier: &Url,
    module_kind: deno_ast::ModuleKind,
    transpile_and_emit_options: &TranspileAndEmitOptions,
    source: &ArcStr,
  ) -> PreEmitResult {
    let source_hash =
      self
        .0
        .get_source_hash(module_kind, transpile_and_emit_options, source);
    if let Some(emit_code) =
      self.0.emit_cache.get_emit_code(specifier, source_hash)
    {
      PreEmitResult::Cached(emit_code)
    } else {
      PreEmitResult::NotCached { source_hash }
    }
  }
  /// Stores a freshly transpiled source in the emit cache under the hash
  /// previously returned by `pre_emit_parsed_source`.
  pub fn post_emit_parsed_source(
    &self,
    specifier: &Url,
    transpiled_source: &str,
    source_hash: u64,
  ) {
    self.0.emit_cache.set_emit_code(
      specifier,
      source_hash,
      transpiled_source.as_bytes(),
    );
  }
}
/// Transpiles a parsed source with the given options, after validating
/// that it does not use deprecated import assertions.
#[allow(clippy::result_large_err)]
fn transpile(
  parsed_source: ParsedSource,
  module_kind: deno_ast::ModuleKind,
  transpile_options: &deno_ast::TranspileOptions,
  emit_options: &deno_ast::EmitOptions,
) -> Result<EmittedSourceText, EmitParsedSourceHelperError> {
  ensure_no_import_assertion(&parsed_source)?;
  let transpile_result = parsed_source.transpile(
    transpile_options,
    &TranspileModuleOptions {
      module_kind: Some(module_kind),
    },
    emit_options,
  )?;
  let transpiled_source = match transpile_result {
    TranspileResult::Owned(source) => source,
    TranspileResult::Cloned(source) => {
      // presumably the parsed source was expected to be uniquely owned
      // here; a cloned result is tolerated but flagged in debug builds
      debug_assert!(false, "Transpile owned failed.");
      source
    }
  };
  Ok(transpiled_source)
}
// todo(dsherret): this is a temporary measure until we have swc erroring for this
/// Errors when a module declaration uses the deprecated `assert` keyword
/// for import attributes instead of `with`.
fn ensure_no_import_assertion(
  parsed_source: &deno_ast::ParsedSource,
) -> Result<(), JsErrorBox> {
  fn has_import_assertion(text: &str) -> bool {
    // good enough
    text.contains(" assert ") && !text.contains(" with ")
  }
  fn create_err(
    parsed_source: &deno_ast::ParsedSource,
    range: SourceRange,
  ) -> JsErrorBox {
    let text_info = parsed_source.text_info_lazy();
    let loc = text_info.line_and_column_display(range.start);
    let mut msg = "Import assertions are deprecated. Use `with` keyword, instead of 'assert' keyword.".to_string();
    msg.push_str("\n\n");
    msg.push_str(range.text_fast(text_info));
    msg.push_str("\n\n");
    msg.push_str(&format!(
      " at {}:{}:{}\n",
      parsed_source.specifier(),
      loc.line_number,
      loc.column_number,
    ));
    JsErrorBox::generic(msg)
  }
  let deno_ast::ProgramRef::Module(module) = parsed_source.program_ref() else {
    return Ok(());
  };
  for item in &module.body {
    // extract whether the declaration carries an attributes clause and its
    // range for the three declaration kinds that can have one, so the
    // actual check below is written once instead of triplicated
    let maybe_with_and_range = match item {
      deno_ast::swc::ast::ModuleItem::ModuleDecl(decl) => match decl {
        deno_ast::swc::ast::ModuleDecl::Import(n) => {
          Some((n.with.is_some(), n.range()))
        }
        deno_ast::swc::ast::ModuleDecl::ExportAll(n) => {
          Some((n.with.is_some(), n.range()))
        }
        deno_ast::swc::ast::ModuleDecl::ExportNamed(n) => {
          Some((n.with.is_some(), n.range()))
        }
        deno_ast::swc::ast::ModuleDecl::ExportDecl(_)
        | deno_ast::swc::ast::ModuleDecl::ExportDefaultDecl(_)
        | deno_ast::swc::ast::ModuleDecl::ExportDefaultExpr(_)
        | deno_ast::swc::ast::ModuleDecl::TsImportEquals(_)
        | deno_ast::swc::ast::ModuleDecl::TsExportAssignment(_)
        | deno_ast::swc::ast::ModuleDecl::TsNamespaceExport(_) => None,
      },
      deno_ast::swc::ast::ModuleItem::Stmt(_) => None,
    };
    if let Some((true, range)) = maybe_with_and_range {
      // the AST stores the clause in `with` regardless of which keyword was
      // written, so inspect the original text to tell them apart (same
      // "good enough" heuristic as before)
      if has_import_assertion(range.text_fast(parsed_source.text_info_lazy()))
      {
        return Err(create_err(parsed_source, range));
      }
    }
  }
  Ok(())
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/resolver/import_map.rs | libs/resolver/import_map.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::path::PathBuf;
use anyhow::Context;
use deno_config::workspace::WorkspaceRc;
/// An import map loaded from a separate file referenced by a deno.json
/// (rather than embedded in the deno.json itself).
#[derive(Debug, Clone)]
pub struct ExternalImportMap {
  /// Path of the import map file on disk.
  pub path: PathBuf,
  /// Parsed JSON contents of the file.
  pub value: serde_json::Value,
}
/// Shared handle to a [`WorkspaceExternalImportMapLoader`].
#[allow(clippy::disallowed_types)]
pub type WorkspaceExternalImportMapLoaderRc<TSys> =
  deno_maybe_sync::MaybeArc<WorkspaceExternalImportMapLoader<TSys>>;
/// Lazily loads and caches the external import map file referenced by a
/// workspace's root deno.json, if any.
#[derive(Debug)]
pub struct WorkspaceExternalImportMapLoader<TSys: sys_traits::FsRead> {
  sys: TSys,
  workspace: WorkspaceRc,
  // lazily initialized; an inner `None` means the workspace has no
  // external import map
  maybe_external_import_map:
    once_cell::sync::OnceCell<Option<ExternalImportMap>>,
}
impl<TSys: sys_traits::FsRead> WorkspaceExternalImportMapLoader<TSys> {
  /// Creates a loader that will lazily read the workspace's external
  /// import map on first access.
  pub fn new(sys: TSys, workspace: WorkspaceRc) -> Self {
    Self {
      sys,
      workspace,
      maybe_external_import_map: Default::default(),
    }
  }
  /// Loads (or returns the cached) external import map for the workspace.
  /// A successful `None` result (no external import map) is also cached.
  pub fn get_or_load(
    &self,
  ) -> Result<Option<&ExternalImportMap>, anyhow::Error> {
    self
      .maybe_external_import_map
      .get_or_try_init(|| {
        // no root deno.json means nothing to load
        let Some(deno_json) = self.workspace.root_deno_json() else {
          return Ok(None);
        };
        // when the deno.json itself acts as the import map there is no
        // separate external file to read
        if deno_json.is_an_import_map() {
          return Ok(None);
        }
        let Some(path) = deno_json.to_import_map_path()? else {
          return Ok(None);
        };
        let contents =
          self.sys.fs_read_to_string(&path).with_context(|| {
            format!("Unable to read import map at '{}'", path.display())
          })?;
        let value = serde_json::from_str(&contents)?;
        Ok(Some(ExternalImportMap { path, value }))
      })
      .map(|v| v.as_ref())
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/resolver/rt.rs | libs/resolver/rt.rs | // Copyright 2018-2025 the Deno authors. MIT license.
#[cfg(not(target_arch = "wasm32"))]
use deno_unsync::JoinHandle;
/// On wasm there is no blocking thread pool, so a "join handle" is just an
/// already-completed future.
#[cfg(target_arch = "wasm32")]
pub type JoinHandle<T> =
  std::future::Ready<Result<T, std::convert::Infallible>>;
/// Runs the provided closure on a blocking thread (via `deno_unsync`) and
/// returns a handle to await its result. On wasm the closure is executed
/// immediately on the current thread instead.
pub fn spawn_blocking<
  F: (FnOnce() -> R) + Send + 'static,
  R: Send + 'static,
>(
  f: F,
) -> JoinHandle<R> {
  #[cfg(target_arch = "wasm32")]
  {
    let result = f();
    std::future::ready(Ok(result))
  }
  #[cfg(not(target_arch = "wasm32"))]
  {
    deno_unsync::spawn_blocking(f)
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/resolver/collections.rs | libs/resolver/collections.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::collections::BTreeMap;
use url::Url;
#[allow(clippy::disallowed_types)]
type UrlRc = deno_maybe_sync::MaybeArc<Url>;
/// A map that stores values scoped to a specific directory
/// on the file system.
pub struct FolderScopedMap<TValue> {
  /// Values keyed by directory URL; the BTreeMap's sorted order means a
  /// directory sorts after its ancestor directories.
  scoped: BTreeMap<UrlRc, TValue>,
}
/// Debug formatting showing the scoped entries.
impl<TValue> std::fmt::Debug for FolderScopedMap<TValue>
where
  TValue: std::fmt::Debug,
{
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    f.debug_struct("FolderScopedMap")
      .field("scoped", &self.scoped)
      .finish()
  }
}
impl<TValue> Default for FolderScopedMap<TValue> {
  /// Creates an empty map with no scoped entries.
  fn default() -> Self {
    Self {
      scoped: Default::default(),
    }
  }
}
impl<TValue> FolderScopedMap<TValue> {
  /// Creates a map from pre-computed scoped entries.
  pub fn from_map(map: BTreeMap<UrlRc, TValue>) -> Self {
    Self { scoped: map }
  }
  /// Number of scoped entries.
  pub fn count(&self) -> usize {
    self.scoped.len()
  }
pub fn get_for_specifier(&self, specifier: &Url) -> Option<&TValue> {
let specifier_str = specifier.as_str();
self
.scoped
.iter()
.rfind(|(s, _)| specifier_str.starts_with(s.as_str()))
.map(|(_, v)| v)
}
  /// Like `get_for_specifier`, but also returns the matching directory URL.
  pub fn entry_for_specifier(
    &self,
    specifier: &Url,
  ) -> Option<(&UrlRc, &TValue)> {
    // rfind over the sorted map yields the most specific (longest)
    // directory prefix of the specifier
    self
      .scoped
      .iter()
      .rfind(|(s, _)| specifier.as_str().starts_with(s.as_str()))
  }
  /// Iterates the entries whose directories contain the specifier, from
  /// most specific to least specific.
  ///
  /// NOTE(review): the early `break` assumes all containing directories
  /// appear contiguously when iterating in reverse sorted order. A
  /// non-containing directory that sorts between two ancestors (e.g.
  /// `/a/`, `/a/aa/`, `/a/b/` with specifier `/a/b/x`) would stop the
  /// iteration before yielding `/a/` — confirm this case cannot occur or
  /// is acceptable for callers.
  pub fn entries_for_specifier<'a>(
    &'a self,
    specifier: &Url,
  ) -> impl Iterator<Item = (&'a UrlRc, &'a TValue)> {
    struct ValueIter<
      'a,
      'b,
      TValue: 'a,
      Iter: Iterator<Item = (&'a UrlRc, &'a TValue)>,
    > {
      // whether a containing directory has already been yielded
      previously_found_dir: bool,
      iter: Iter,
      specifier: &'b Url,
    }
    impl<'a, TValue, Iter: Iterator<Item = (&'a UrlRc, &'a TValue)>> Iterator
      for ValueIter<'a, '_, TValue, Iter>
    {
      type Item = (&'a UrlRc, &'a TValue);
      fn next(&mut self) -> Option<Self::Item> {
        for (dir_url, value) in self.iter.by_ref() {
          if !self.specifier.as_str().starts_with(dir_url.as_str()) {
            if self.previously_found_dir {
              break;
            } else {
              continue;
            }
          }
          self.previously_found_dir = true;
          return Some((dir_url, value));
        }
        None
      }
    }
    ValueIter {
      previously_found_dir: false,
      // reverse order starts at the most specific directory
      iter: self.scoped.iter().rev(),
      specifier,
    }
  }
  /// Exact lookup by scope directory URL (no prefix matching).
  pub fn get_for_scope(&self, scope: &Url) -> Option<&TValue> {
    self.scoped.get(scope)
  }
  /// Iterates all entries in sorted directory order.
  pub fn entries(&self) -> impl Iterator<Item = (&UrlRc, &TValue)> {
    self.scoped.iter()
  }
  /// Iterates all values in sorted directory order.
  pub fn values(&self) -> impl Iterator<Item = &TValue> {
    self.scoped.values()
  }
  /// Inserts a value for a directory URL (must be a trailing-slash
  /// `file:` URL — enforced in debug builds only).
  pub fn insert(&mut self, dir_url: UrlRc, value: TValue) {
    debug_assert!(dir_url.path().ends_with("/")); // must be a dir url
    debug_assert_eq!(dir_url.scheme(), "file");
    self.scoped.insert(dir_url, value);
  }
  /// Maps every value through a fallible function, short-circuiting on
  /// the first error.
  pub fn try_map<B, E>(
    &self,
    mut f: impl FnMut(&TValue) -> Result<B, E>,
  ) -> Result<FolderScopedMap<B>, E> {
    Ok(FolderScopedMap {
      scoped: self
        .scoped
        .iter()
        .map(|(s, v)| Ok((s.clone(), f(v)?)))
        .collect::<Result<_, _>>()?,
    })
  }
}
/// A map that stores values scoped to a specific directory
/// on the file system, but also having the concept of "unscoped"
/// for any folders that land outside.
///
/// The root directory is considered "unscoped" so values that
/// fall outside the other directories land here (ex. remote modules).
pub struct FolderScopedWithUnscopedMap<TValue> {
  /// Fallback value used when a specifier matches no scoped directory.
  pub unscoped: TValue,
  scoped: FolderScopedMap<TValue>,
}
/// Debug formatting showing both the unscoped value and scoped entries.
impl<TValue> std::fmt::Debug for FolderScopedWithUnscopedMap<TValue>
where
  TValue: std::fmt::Debug,
{
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    f.debug_struct("FolderScopedWithUnscopedMap")
      .field("unscoped", &self.unscoped)
      .field("scoped", &self.scoped)
      .finish()
  }
}
impl<TValue> Default for FolderScopedWithUnscopedMap<TValue>
where
  TValue: Default,
{
  /// Creates a map whose unscoped value is the default and with no
  /// scoped entries.
  fn default() -> Self {
    Self::new(Default::default())
  }
}
impl<TValue> FolderScopedWithUnscopedMap<TValue> {
  /// Creates a map with the given unscoped fallback and no scoped entries.
  pub fn new(unscoped: TValue) -> Self {
    Self {
      unscoped,
      scoped: Default::default(),
    }
  }
  /// Number of entries, counting the unscoped value.
  pub fn count(&self) -> usize {
    // +1 for unscoped
    self.scoped.count() + 1
  }
  /// Gets the value for the most specific containing directory, falling
  /// back to the unscoped value.
  pub fn get_for_specifier(&self, specifier: &Url) -> &TValue {
    self
      .scoped
      .get_for_specifier(specifier)
      .unwrap_or(&self.unscoped)
  }
  /// Like `get_for_specifier`, but also returns the matching directory
  /// (`None` for the unscoped value).
  pub fn entry_for_specifier(
    &self,
    specifier: &Url,
  ) -> (Option<&UrlRc>, &TValue) {
    self
      .scoped
      .entry_for_specifier(specifier)
      .map(|(s, v)| (Some(s), v))
      .unwrap_or((None, &self.unscoped))
  }
  /// Exact lookup by scope; a `None` scope addresses the unscoped value.
  pub fn get_for_scope(&self, scope: Option<&Url>) -> Option<&TValue> {
    let Some(scope) = scope else {
      return Some(&self.unscoped);
    };
    self.scoped.get_for_scope(scope)
  }
  /// Iterates the unscoped entry first, then the scoped entries.
  pub fn entries(&self) -> impl Iterator<Item = (Option<&UrlRc>, &TValue)> {
    [(None, &self.unscoped)]
      .into_iter()
      .chain(self.scoped.entries().map(|(s, v)| (Some(s), v)))
  }
  /// Inserts a value for a directory URL (must be a trailing-slash
  /// `file:` URL — enforced in debug builds only).
  pub fn insert(&mut self, dir_url: UrlRc, value: TValue) {
    debug_assert!(dir_url.path().ends_with("/")); // must be a dir url
    debug_assert_eq!(dir_url.scheme(), "file");
    self.scoped.insert(dir_url, value);
  }
  /// Maps the unscoped value and every scoped value through a fallible
  /// function, short-circuiting on the first error.
  pub fn try_map<B, E>(
    &self,
    mut f: impl FnMut(&TValue) -> Result<B, E>,
  ) -> Result<FolderScopedWithUnscopedMap<B>, E> {
    Ok(FolderScopedWithUnscopedMap {
      unscoped: f(&self.unscoped)?,
      scoped: self.scoped.try_map(f)?,
    })
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/resolver/display.rs | libs/resolver/display.rs | // Copyright 2018-2025 the Deno authors. MIT license.
//! It would be best to move these utilities out of this
//! crate as this is not specific to resolution, but for
//! the time being it's fine for this to live here.
use std::fmt::Write as _;
use deno_terminal::colors;
use dissimilar::Chunk;
use dissimilar::diff as difference;
/// Print diff of the same file_path, before and after formatting.
///
/// Diff format is loosely based on GitHub diff formatting.
///
/// Returns an empty string when the texts are identical, and a short
/// notice when they only differ by line endings.
pub fn diff(orig_text: &str, edit_text: &str) -> String {
  if orig_text == edit_text {
    return String::new();
  }
  // normalize newlines as it adds too much noise if they differ
  let normalized_orig = orig_text.replace("\r\n", "\n");
  let normalized_edit = edit_text.replace("\r\n", "\n");
  if normalized_orig == normalized_edit {
    " | Text differed by line endings.\n".to_string()
  } else {
    DiffBuilder::build(&normalized_orig, &normalized_edit)
  }
}
/// Incrementally builds the textual diff output while walking the
/// chunks produced by `dissimilar`.
struct DiffBuilder {
  output: String,
  /// Character width of the largest line number, for alignment.
  line_number_width: usize,
  /// Next line number in the original text.
  orig_line: usize,
  /// Next line number in the edited text.
  edit_line: usize,
  /// Pending (unflushed) original-side text for the current run of lines.
  orig: String,
  /// Pending edited-side text for the current run of lines.
  edit: String,
  /// Whether the pending buffers contain an actual change.
  has_changes: bool,
}
impl DiffBuilder {
  /// Computes the chunk-level difference of the two texts and renders it
  /// to the final diff string.
  pub fn build(orig_text: &str, edit_text: &str) -> String {
    let mut diff_builder = DiffBuilder {
      output: String::new(),
      orig_line: 1,
      edit_line: 1,
      orig: String::new(),
      edit: String::new(),
      has_changes: false,
      line_number_width: {
        // width needed to align the largest line number of either text
        let line_count = std::cmp::max(
          orig_text.split('\n').count(),
          edit_text.split('\n').count(),
        );
        line_count.to_string().chars().count()
      },
    };
    let chunks = difference(orig_text, edit_text);
    diff_builder.handle_chunks(chunks);
    diff_builder.output
  }
  /// Accumulates deleted/inserted/equal chunks into the pending line
  /// buffers, flushing whenever an equal chunk crosses a line boundary.
  fn handle_chunks<'a>(&'a mut self, chunks: Vec<Chunk<'a>>) {
    for chunk in chunks {
      match chunk {
        Chunk::Delete(s) => {
          let split = s.split('\n').enumerate();
          for (i, s) in split {
            if i > 0 {
              self.orig.push('\n');
            }
            self.orig.push_str(&fmt_rem_text_highlight(s));
          }
          self.has_changes = true
        }
        Chunk::Insert(s) => {
          let split = s.split('\n').enumerate();
          for (i, s) in split {
            if i > 0 {
              self.edit.push('\n');
            }
            self.edit.push_str(&fmt_add_text_highlight(s));
          }
          self.has_changes = true
        }
        Chunk::Equal(s) => {
          let split = s.split('\n').enumerate();
          for (i, s) in split {
            if i > 0 {
              // a newline in an equal chunk ends the current line run
              self.flush_changes();
            }
            self.orig.push_str(&fmt_rem_text(s));
            self.edit.push_str(&fmt_add_text(s));
          }
        }
      }
    }
    self.flush_changes();
  }
  /// Writes out the pending line run (when it holds a change) and
  /// advances the line counters for both texts.
  fn flush_changes(&mut self) {
    if self.has_changes {
      self.write_line_diff();
      self.orig_line += self.orig.split('\n').count();
      self.edit_line += self.edit.split('\n').count();
      self.has_changes = false;
    } else {
      // unchanged line: both sides advance by one
      self.orig_line += 1;
      self.edit_line += 1;
    }
    self.orig.clear();
    self.edit.clear();
  }
  /// Renders the pending buffers: all original lines prefixed with `-`,
  /// then all edited lines prefixed with `+`, each with a line number.
  fn write_line_diff(&mut self) {
    let split = self.orig.split('\n').enumerate();
    for (i, s) in split {
      write!(
        self.output,
        "{:width$}{} ",
        self.orig_line + i,
        colors::gray(" |"),
        width = self.line_number_width
      )
      .unwrap();
      self.output.push_str(&fmt_rem());
      self.output.push_str(s);
      self.output.push('\n');
    }
    let split = self.edit.split('\n').enumerate();
    for (i, s) in split {
      write!(
        self.output,
        "{:width$}{} ",
        self.edit_line + i,
        colors::gray(" |"),
        width = self.line_number_width
      )
      .unwrap();
      self.output.push_str(&fmt_add());
      self.output.push_str(s);
      self.output.push('\n');
    }
  }
}
// Helpers producing the colored diff markers and text fragments.
fn fmt_add() -> String {
  colors::green_bold("+").to_string()
}
fn fmt_add_text(x: &str) -> String {
  colors::green(x).to_string()
}
fn fmt_add_text_highlight(x: &str) -> String {
  colors::black_on_green(x).to_string()
}
fn fmt_rem() -> String {
  colors::red_bold("-").to_string()
}
fn fmt_rem_text(x: &str) -> String {
  colors::red(x).to_string()
}
fn fmt_rem_text_highlight(x: &str) -> String {
  colors::white_on_red(x).to_string()
}
/// A node in a tree that can be pretty-printed with box-drawing connectors.
pub struct DisplayTreeNode {
  /// Text displayed for this node.
  pub text: String,
  /// Child nodes rendered beneath this one.
  pub children: Vec<DisplayTreeNode>,
}
impl DisplayTreeNode {
  /// Creates a leaf node with the given text and no children.
  pub fn from_text(text: String) -> Self {
    Self {
      text,
      children: Default::default(),
    }
  }
  /// Writes this node's text followed by its children, using box-drawing
  /// characters to show the tree structure.
  pub fn print<TWrite: std::fmt::Write>(
    &self,
    writer: &mut TWrite,
  ) -> std::fmt::Result {
    // recursively prints children with an accumulated indentation prefix
    fn print_children<TWrite: std::fmt::Write>(
      writer: &mut TWrite,
      prefix: &str,
      children: &[DisplayTreeNode],
    ) -> std::fmt::Result {
      const SIBLING_CONNECTOR: char = '├';
      const LAST_SIBLING_CONNECTOR: char = '└';
      const CHILD_DEPS_CONNECTOR: char = '┬';
      const CHILD_NO_DEPS_CONNECTOR: char = '─';
      const VERTICAL_CONNECTOR: char = '│';
      const EMPTY_CONNECTOR: char = ' ';
      let child_len = children.len();
      for (index, child) in children.iter().enumerate() {
        let is_last = index + 1 == child_len;
        let sibling_connector = if is_last {
          LAST_SIBLING_CONNECTOR
        } else {
          SIBLING_CONNECTOR
        };
        let child_connector = if child.children.is_empty() {
          CHILD_NO_DEPS_CONNECTOR
        } else {
          CHILD_DEPS_CONNECTOR
        };
        writeln!(
          writer,
          "{} {}",
          colors::gray(format!(
            "{prefix}{sibling_connector}─{child_connector}"
          )),
          child.text
        )?;
        // last children get blank continuation, others a vertical bar
        let child_prefix = format!(
          "{}{}{}",
          prefix,
          if is_last {
            EMPTY_CONNECTOR
          } else {
            VERTICAL_CONNECTOR
          },
          EMPTY_CONNECTOR
        );
        print_children(writer, &child_prefix, &child.children)?;
      }
      Ok(())
    }
    writeln!(writer, "{}", self.text)?;
    print_children(writer, "", &self.children)?;
    Ok(())
  }
}
#[cfg(test)]
mod tests {
  use super::*;
  #[test]
  fn test_diff() {
    // single-line change
    run_test(
      "console.log('Hello World')",
      "console.log(\"Hello World\");",
      concat!(
        "1 | -console.log('Hello World')\n",
        "1 | +console.log(\"Hello World\");\n",
      ),
    );
    // multi-line change where the texts have different line counts
    run_test(
      "\n\n\n\nconsole.log(\n'Hello World'\n)",
      "console.log(\n\"Hello World\"\n);",
      concat!(
        "1 | -\n",
        "2 | -\n",
        "3 | -\n",
        "4 | -\n",
        "5 | -console.log(\n",
        "1 | +console.log(\n",
        "6 | -'Hello World'\n",
        "2 | +\"Hello World\"\n",
        "7 | -)\n3 | +);\n",
      ),
    );
  }
  #[test]
  fn test_eof_newline_missing() {
    run_test(
      "test\nsome line text test",
      "test\nsome line text test\n",
      concat!(
        "2 | -some line text test\n",
        "2 | +some line text test\n",
        "3 | +\n",
      ),
    );
  }
  #[test]
  fn test_newlines_differing() {
    run_test("test\n", "test\r\n", " | Text differed by line endings.\n");
  }
  // strips ANSI color codes before comparing so the expectations don't
  // need to encode terminal colors
  fn run_test(diff_text1: &str, diff_text2: &str, expected_output: &str) {
    assert_eq!(
      test_util::strip_ansi_codes(&diff(diff_text1, diff_text2,)),
      expected_output,
    );
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/resolver/file_fetcher.rs | libs/resolver/file_fetcher.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::collections::HashMap;
use std::path::PathBuf;
use boxed_error::Boxed;
use deno_cache_dir::GlobalHttpCacheRc;
use deno_cache_dir::GlobalHttpCacheSys;
use deno_cache_dir::HttpCacheRc;
use deno_cache_dir::file_fetcher::AuthTokens;
use deno_cache_dir::file_fetcher::BlobStore;
use deno_cache_dir::file_fetcher::CacheSetting;
use deno_cache_dir::file_fetcher::CachedOrRedirect;
use deno_cache_dir::file_fetcher::FetchCachedError;
use deno_cache_dir::file_fetcher::File;
use deno_cache_dir::file_fetcher::FileFetcherSys;
use deno_cache_dir::file_fetcher::FileOrRedirect;
use deno_cache_dir::file_fetcher::HttpClient;
use deno_cache_dir::file_fetcher::TooManyRedirectsError;
use deno_cache_dir::file_fetcher::UnsupportedSchemeError;
use deno_error::JsError;
use deno_error::JsErrorBox;
use deno_graph::source::CacheInfo;
use deno_graph::source::CacheSetting as LoaderCacheSetting;
use deno_graph::source::LoadFuture;
use deno_graph::source::LoadResponse;
use deno_graph::source::Loader;
use deno_graph::source::LoaderChecksum;
use deno_permissions::CheckSpecifierKind;
use deno_permissions::PermissionCheckError;
use deno_permissions::PermissionsContainer;
use futures::FutureExt;
use futures::future::LocalBoxFuture;
use http::header;
use node_resolver::InNpmPackageChecker;
use thiserror::Error;
use url::Url;
use crate::loader::MemoryFilesRc;
use crate::npm::DenoInNpmPackageChecker;
/// Boxed error returned by fetch operations that follow redirects.
#[derive(Debug, Boxed, JsError)]
pub struct FetchError(pub Box<FetchErrorKind>);

/// The ways a redirect-following fetch can fail.
#[derive(Debug, Error, JsError)]
pub enum FetchErrorKind {
  /// A single (no-follow) fetch step failed.
  #[error(transparent)]
  #[class(inherit)]
  FetchNoFollow(#[from] FetchNoFollowError),
  /// The redirect limit was exceeded.
  #[error(transparent)]
  #[class(generic)]
  TooManyRedirects(#[from] TooManyRedirectsError),
}

/// Boxed error returned by single-step (no-follow) fetch operations.
#[derive(Debug, Boxed, JsError)]
pub struct FetchNoFollowError(pub Box<FetchNoFollowErrorKind>);

/// The ways a single fetch step can fail.
#[derive(Debug, Error, JsError)]
pub enum FetchNoFollowErrorKind {
  /// The underlying cache-dir file fetcher failed.
  #[error(transparent)]
  #[class(inherit)]
  FetchNoFollow(#[from] deno_cache_dir::file_fetcher::FetchNoFollowError),
  /// The permission check for the specifier failed.
  #[error(transparent)]
  #[class(generic)]
  PermissionCheck(#[from] PermissionCheckError),
}

/// Boxed error returned by `get_cached_source_or_local`.
#[derive(Debug, Boxed, JsError)]
pub struct GetCachedSourceOrLocalError(
  pub Box<GetCachedSourceOrLocalErrorKind>,
);

/// The ways a synchronous cached-or-local lookup can fail.
#[derive(Debug, Error, JsError)]
pub enum GetCachedSourceOrLocalErrorKind {
  /// Reading a local (`file://`) module failed.
  #[error(transparent)]
  #[class(inherit)]
  FetchLocal(#[from] deno_cache_dir::file_fetcher::FetchLocalError),
  /// Reading a remote module from the cache failed.
  #[error(transparent)]
  #[class(inherit)]
  FetchCached(#[from] deno_cache_dir::file_fetcher::FetchCachedError),
}
/// How permissions should be applied when fetching a specifier.
#[derive(Debug, Copy, Clone)]
pub enum FetchPermissionsOptionRef<'a> {
  /// Skip permission checks entirely.
  AllowAll,
  /// Check the specifier against the given permissions container.
  Restricted(&'a PermissionsContainer, CheckSpecifierKind),
}

/// Options for a fetch that follows redirects.
#[derive(Debug, Default)]
pub struct FetchOptions<'a> {
  pub local: FetchLocalOptions,
  // auth header sent with the initial request; dropped on a
  // cross-origin redirect (see fetch_with_options_and_max_redirect)
  pub maybe_auth: Option<(header::HeaderName, header::HeaderValue)>,
  pub maybe_accept: Option<&'a str>,
  // overrides the fetcher's default cache setting when set
  pub maybe_cache_setting: Option<&'a CacheSetting>,
}

pub type FetchLocalOptions = deno_cache_dir::file_fetcher::FetchLocalOptions;

/// Options for a single fetch step that does not follow redirects.
pub struct FetchNoFollowOptions<'a> {
  pub local: FetchLocalOptions,
  pub maybe_auth: Option<(header::HeaderName, header::HeaderValue)>,
  pub maybe_accept: Option<&'a str>,
  pub maybe_cache_setting: Option<&'a CacheSetting>,
  // checksum the fetched content must match, when provided
  pub maybe_checksum: Option<&'a LoaderChecksum>,
}
impl<'a> FetchNoFollowOptions<'a> {
fn into_deno_cache_dir_options(
self,
) -> deno_cache_dir::file_fetcher::FetchNoFollowOptions<'a> {
deno_cache_dir::file_fetcher::FetchNoFollowOptions {
local: self.local,
maybe_auth: self.maybe_auth,
maybe_checksum: self
.maybe_checksum
.map(|c| deno_cache_dir::Checksum::new(c.as_str())),
maybe_accept: self.maybe_accept,
maybe_cache_setting: self.maybe_cache_setting,
}
}
}
/// System traits required by `PermissionedFileFetcher`: file fetching plus
/// env-var access (used to read auth tokens from the environment).
#[sys_traits::auto_impl]
pub trait PermissionedFileFetcherSys:
  FileFetcherSys + sys_traits::EnvVar
{
}

#[allow(clippy::disallowed_types)]
type PermissionedFileFetcherRc<TBlobStore, TSys, THttpClient> =
  deno_maybe_sync::MaybeArc<
    PermissionedFileFetcher<TBlobStore, TSys, THttpClient>,
  >;

/// Construction options for `PermissionedFileFetcher`.
pub struct PermissionedFileFetcherOptions {
  // whether fetching remote modules is allowed at all
  pub allow_remote: bool,
  pub cache_setting: CacheSetting,
}

/// A structure for resolving, fetching and caching source files.
#[derive(Debug)]
pub struct PermissionedFileFetcher<
  TBlobStore: BlobStore,
  TSys: PermissionedFileFetcherSys,
  THttpClient: HttpClient,
> {
  file_fetcher:
    deno_cache_dir::file_fetcher::FileFetcher<TBlobStore, TSys, THttpClient>,
  // in-memory file overrides consulted by the inner fetcher
  memory_files: MemoryFilesRc,
}
impl<
  TBlobStore: BlobStore,
  TSys: PermissionedFileFetcherSys,
  THttpClient: HttpClient,
> PermissionedFileFetcher<TBlobStore, TSys, THttpClient>
{
  /// Creates a new fetcher, reading auth tokens from the environment via
  /// `sys` and wrapping the `deno_cache_dir` file fetcher.
  pub fn new(
    blob_store: TBlobStore,
    http_cache: HttpCacheRc,
    http_client: THttpClient,
    memory_files: MemoryFilesRc,
    sys: TSys,
    options: PermissionedFileFetcherOptions,
  ) -> Self {
    let auth_tokens = AuthTokens::new_from_sys(&sys);
    let file_fetcher = deno_cache_dir::file_fetcher::FileFetcher::new(
      blob_store,
      sys,
      http_cache,
      http_client,
      memory_files.clone(),
      deno_cache_dir::file_fetcher::FileFetcherOptions {
        allow_remote: options.allow_remote,
        cache_setting: options.cache_setting,
        auth_tokens,
      },
    );
    Self {
      file_fetcher,
      memory_files,
    }
  }

  /// The default cache setting this fetcher was configured with.
  pub fn cache_setting(&self) -> &CacheSetting {
    self.file_fetcher.cache_setting()
  }

  /// Fetches a file, skipping all permission checks.
  #[inline(always)]
  pub async fn fetch_bypass_permissions(
    &self,
    specifier: &Url,
  ) -> Result<File, FetchError> {
    self
      .fetch_inner(specifier, None, FetchPermissionsOptionRef::AllowAll)
      .await
  }

  /// Fetches a file without permission checks, sending the provided auth
  /// header with the initial request.
  #[inline(always)]
  pub async fn fetch_bypass_permissions_with_maybe_auth(
    &self,
    specifier: &Url,
    maybe_auth: Option<(header::HeaderName, header::HeaderValue)>,
  ) -> Result<File, FetchError> {
    self
      .fetch_inner(specifier, maybe_auth, FetchPermissionsOptionRef::AllowAll)
      .await
  }

  /// Fetch a source file and asynchronously return it.
  #[inline(always)]
  pub async fn fetch(
    &self,
    specifier: &Url,
    permissions: &PermissionsContainer,
  ) -> Result<File, FetchError> {
    self
      .fetch_inner(
        specifier,
        None,
        FetchPermissionsOptionRef::Restricted(
          permissions,
          CheckSpecifierKind::Static,
        ),
      )
      .await
  }

  // Shared implementation for the public fetch entry points; uses
  // default fetch options.
  async fn fetch_inner(
    &self,
    specifier: &Url,
    maybe_auth: Option<(header::HeaderName, header::HeaderValue)>,
    permissions: FetchPermissionsOptionRef<'_>,
  ) -> Result<File, FetchError> {
    self
      .fetch_with_options(
        specifier,
        permissions,
        FetchOptions {
          local: Default::default(),
          maybe_auth,
          maybe_accept: None,
          maybe_cache_setting: None,
        },
      )
      .await
  }

  /// Fetches with the given options, following up to 10 redirects.
  pub async fn fetch_with_options(
    &self,
    specifier: &Url,
    permissions: FetchPermissionsOptionRef<'_>,
    options: FetchOptions<'_>,
  ) -> Result<File, FetchError> {
    self
      .fetch_with_options_and_max_redirect(specifier, permissions, options, 10)
      .await
  }

  /// Fetches with the given options, following at most `max_redirect`
  /// redirects before erroring with `TooManyRedirects`.
  pub async fn fetch_with_options_and_max_redirect(
    &self,
    specifier: &Url,
    permissions: FetchPermissionsOptionRef<'_>,
    options: FetchOptions<'_>,
    max_redirect: usize,
  ) -> Result<File, FetchError> {
    let mut specifier = Cow::Borrowed(specifier);
    let mut maybe_auth = options.maybe_auth;
    for _ in 0..=max_redirect {
      match self
        .fetch_no_follow(
          &specifier,
          permissions,
          FetchNoFollowOptions {
            local: options.local.clone(),
            maybe_auth: maybe_auth.clone(),
            maybe_accept: options.maybe_accept,
            maybe_cache_setting: options.maybe_cache_setting,
            maybe_checksum: None,
          },
        )
        .await?
      {
        FileOrRedirect::File(file) => {
          return Ok(file);
        }
        FileOrRedirect::Redirect(redirect_specifier) => {
          // If we were redirected to another origin, don't send the auth header anymore.
          if redirect_specifier.origin() != specifier.origin() {
            maybe_auth = None;
          }
          specifier = Cow::Owned(redirect_specifier);
        }
      }
    }
    Err(TooManyRedirectsError(specifier.into_owned()).into())
  }

  /// Ensures the module is cached without following redirects.
  pub async fn ensure_cached_no_follow(
    &self,
    specifier: &Url,
    permissions: FetchPermissionsOptionRef<'_>,
    options: FetchNoFollowOptions<'_>,
  ) -> Result<CachedOrRedirect, FetchNoFollowError> {
    self.validate_fetch(specifier, permissions)?;
    self
      .file_fetcher
      .ensure_cached_no_follow(specifier, options.into_deno_cache_dir_options())
      .await
      .map_err(|err| FetchNoFollowErrorKind::FetchNoFollow(err).into_box())
  }

  /// Fetches without following redirects.
  pub async fn fetch_no_follow(
    &self,
    specifier: &Url,
    permissions: FetchPermissionsOptionRef<'_>,
    options: FetchNoFollowOptions<'_>,
  ) -> Result<FileOrRedirect, FetchNoFollowError> {
    self.validate_fetch(specifier, permissions)?;
    self
      .file_fetcher
      .fetch_no_follow(specifier, options.into_deno_cache_dir_options())
      .await
      .map_err(|err| FetchNoFollowErrorKind::FetchNoFollow(err).into_box())
  }

  // Validates the specifier's scheme and, when restricted, runs the
  // permission check before any fetch is attempted.
  fn validate_fetch(
    &self,
    specifier: &Url,
    permissions: FetchPermissionsOptionRef<'_>,
  ) -> Result<(), FetchNoFollowError> {
    validate_scheme(specifier).map_err(|err| {
      FetchNoFollowErrorKind::FetchNoFollow(err.into()).into_box()
    })?;
    match permissions {
      FetchPermissionsOptionRef::AllowAll => {
        // allow
      }
      FetchPermissionsOptionRef::Restricted(permissions, kind) => {
        permissions.check_specifier(specifier, kind)?;
      }
    }
    Ok(())
  }

  /// A synchronous way to retrieve a source file, where if the file has already
  /// been cached in memory it will be returned, otherwise for local files will
  /// be read from disk.
  pub fn get_cached_source_or_local(
    &self,
    specifier: &Url,
  ) -> Result<Option<File>, GetCachedSourceOrLocalError> {
    if specifier.scheme() == "file" {
      Ok(
        self
          .file_fetcher
          .fetch_local(specifier, &Default::default())?,
      )
    } else {
      // remote: only consult the cache (up to 10 cached redirects)
      Ok(self.file_fetcher.fetch_cached(specifier, 10)?)
    }
  }

  /// Retrieves a remote file from the cache only, following up to
  /// `redirect_limit` cached redirects.
  pub fn fetch_cached_remote(
    &self,
    url: &Url,
    redirect_limit: i64,
  ) -> Result<Option<File>, FetchCachedError> {
    self.file_fetcher.fetch_cached(url, redirect_limit)
  }

  /// Insert a temporary module for the file fetcher.
  pub fn insert_memory_files(&self, file: File) -> Option<File> {
    self.memory_files.insert(file.url.clone(), file)
  }

  /// Removes all temporary in-memory modules.
  pub fn clear_memory_files(&self) {
    self.memory_files.clear();
  }
}
/// Receives a notification for each module loaded by the graph loader.
/// The default implementation does nothing.
pub trait GraphLoaderReporter: Send + Sync {
  #[allow(unused_variables)]
  fn on_load(
    &self,
    specifier: &Url,
    loaded_from: deno_cache_dir::file_fetcher::LoadedFrom,
  ) {
  }
}

#[allow(clippy::disallowed_types)]
pub type GraphLoaderReporterRc =
  deno_maybe_sync::MaybeArc<dyn GraphLoaderReporter>;

/// Construction options for `DenoGraphLoader`.
pub struct DenoGraphLoaderOptions {
  // extra headers applied on top of fetched files, keyed by specifier
  pub file_header_overrides: HashMap<Url, HashMap<String, String>>,
  // when `None`, loads bypass permission checks
  pub permissions: Option<PermissionsContainer>,
  pub reporter: Option<GraphLoaderReporterRc>,
}

/// System traits required by `DenoGraphLoader`.
#[sys_traits::auto_impl]
pub trait DenoGraphLoaderSys:
  GlobalHttpCacheSys + PermissionedFileFetcherSys + sys_traits::FsCanonicalize
{
}
/// A "wrapper" for the FileFetcher and DiskCache for the Deno CLI that provides
/// an implementation of `deno_graph::source::Loader`.
pub struct DenoGraphLoader<
  TBlobStore: BlobStore,
  TSys: DenoGraphLoaderSys,
  THttpClient: HttpClient,
> {
  file_header_overrides: HashMap<Url, HashMap<String, String>>,
  file_fetcher: PermissionedFileFetcherRc<TBlobStore, TSys, THttpClient>,
  // used to compute local cache paths for `deno info`
  global_http_cache: GlobalHttpCacheRc<TSys>,
  in_npm_pkg_checker: DenoInNpmPackageChecker,
  permissions: Option<PermissionsContainer>,
  sys: TSys,
  // off by default; enabled only for `deno info` (see
  // enable_loading_cache_info)
  cache_info_enabled: bool,
  reporter: Option<GraphLoaderReporterRc>,
}
impl<
  TBlobStore: BlobStore + 'static,
  TSys: DenoGraphLoaderSys + 'static,
  THttpClient: HttpClient + 'static,
> DenoGraphLoader<TBlobStore, TSys, THttpClient>
{
  /// Creates a loader; cache info loading starts disabled.
  pub fn new(
    file_fetcher: PermissionedFileFetcherRc<TBlobStore, TSys, THttpClient>,
    global_http_cache: GlobalHttpCacheRc<TSys>,
    in_npm_pkg_checker: DenoInNpmPackageChecker,
    sys: TSys,
    options: DenoGraphLoaderOptions,
  ) -> Self {
    Self {
      file_fetcher,
      global_http_cache,
      in_npm_pkg_checker,
      sys,
      file_header_overrides: options.file_header_overrides,
      permissions: options.permissions,
      cache_info_enabled: false,
      reporter: options.reporter,
    }
  }

  /// Adds (or replaces) a header override for one specifier.
  pub fn insert_file_header_override(
    &mut self,
    specifier: Url,
    headers: HashMap<String, String>,
  ) {
    self.file_header_overrides.insert(specifier, headers);
  }

  /// The cache information takes a bit of time to fetch and it's
  /// not always necessary. It should only be enabled for deno info.
  pub fn enable_loading_cache_info(&mut self) {
    self.cache_info_enabled = true;
  }

  /// Only use this for `deno info`.
  fn get_local_path(&self, specifier: &Url) -> Option<PathBuf> {
    // TODO(@kitsonk) fix when deno_graph does not query cache for synthetic
    // modules
    if specifier.scheme() == "flags" {
      None
    } else if specifier.scheme() == "file" {
      deno_path_util::url_to_file_path(specifier).ok()
    } else {
      self.global_http_cache.local_path_for_url(specifier).ok()
    }
  }

  // Shared driver for `load` and `ensure_cached`: resolves the effective
  // cache setting, runs the strategy's fetch/cache step, then maps the
  // fetch errors into `deno_graph` load errors.
  fn load_or_cache<TStrategy: LoadOrCacheStrategy + 'static>(
    &self,
    strategy: TStrategy,
    specifier: &Url,
    options: deno_graph::source::LoadOptions,
  ) -> LocalBoxFuture<
    'static,
    Result<Option<TStrategy::Response>, deno_graph::source::LoadError>,
  > {
    let file_fetcher = self.file_fetcher.clone();
    let permissions = self.permissions.clone();
    let specifier = specifier.clone();
    let is_statically_analyzable = !options.was_dynamic_root;
    async move {
      let maybe_cache_setting = match options.cache_setting {
        LoaderCacheSetting::Use => None,
        LoaderCacheSetting::Reload => {
          // reloading is incompatible with --cached-only
          if matches!(file_fetcher.cache_setting(), CacheSetting::Only) {
            return Err(load_error(JsErrorBox::generic(
              "Could not resolve version constraint using only cached data. Try running again without --cached-only"
            )));
          } else {
            Some(CacheSetting::ReloadAll)
          }
        }
        LoaderCacheSetting::Only => Some(CacheSetting::Only),
      };
      let result = strategy
        .handle_fetch_or_cache_no_follow(
          &specifier,
          match &permissions {
            Some(permissions) => FetchPermissionsOptionRef::Restricted(
              permissions,
              if is_statically_analyzable {
                CheckSpecifierKind::Static
              } else {
                CheckSpecifierKind::Dynamic
              },
            ),
            None => FetchPermissionsOptionRef::AllowAll,
          },
          FetchNoFollowOptions {
            local: FetchLocalOptions {
              // only include the mtime in dynamic branches because we only
              // need to know about it then in order to tell whether to reload
              // or not
              include_mtime: options.in_dynamic_branch,
            },
            maybe_auth: None,
            maybe_accept: None,
            maybe_cache_setting: maybe_cache_setting.as_ref(),
            maybe_checksum: options.maybe_checksum.as_ref(),
          },
        )
        .await;
      match result {
        Ok(response) => Ok(Some(response)),
        Err(err) => {
          let err = err.into_kind();
          match err {
            FetchNoFollowErrorKind::FetchNoFollow(err) => {
              use deno_cache_dir::file_fetcher::FetchNoFollowErrorKind::*;
              let err = err.into_kind();
              match err {
                // a missing module is not a load error; deno_graph
                // handles `None` as "not found"
                NotFound(_) => Ok(None),
                UrlToFilePath { .. }
                | ReadingBlobUrl { .. }
                | ReadingFile { .. }
                | FetchingRemote { .. }
                | ClientError { .. }
                | NoRemote { .. }
                | DataUrlDecode { .. }
                | RedirectResolution { .. }
                | CacheRead { .. }
                | CacheSave { .. }
                | UnsupportedScheme { .. }
                | RedirectHeaderParse { .. }
                | InvalidHeader { .. } => Err(load_error(JsErrorBox::from_err(err))),
                NotCached { .. } => {
                  if options.cache_setting == LoaderCacheSetting::Only {
                    Ok(None)
                  } else {
                    Err(load_error(JsErrorBox::from_err(err)))
                  }
                }
                ChecksumIntegrity(err) => {
                  // convert to the equivalent deno_graph error so that it
                  // enhances it if this is passed to deno_graph
                  Err(deno_graph::source::LoadError::ChecksumIntegrity(
                    deno_graph::source::ChecksumIntegrityError {
                      actual: err.actual,
                      expected: err.expected,
                    },
                  ))
                }
              }
            }
            FetchNoFollowErrorKind::PermissionCheck(permission_check_error) => {
              Err(load_error(JsErrorBox::from_err(permission_check_error)))
            }
          }
        }
      }
    }
    .boxed_local()
  }
}
impl<
  TBlobStore: BlobStore + 'static,
  TSys: DenoGraphLoaderSys + 'static,
  THttpClient: HttpClient + 'static,
> Loader for DenoGraphLoader<TBlobStore, TSys, THttpClient>
{
  fn cache_info_enabled(&self) -> bool {
    self.cache_info_enabled
  }

  /// Returns the local cache path for a specifier, if the file exists.
  fn get_cache_info(&self, specifier: &Url) -> Option<CacheInfo> {
    let local = self.get_local_path(specifier)?;
    if self.sys.fs_is_file_no_err(&local) {
      Some(CacheInfo { local: Some(local) })
    } else {
      None
    }
  }

  /// Loads a module: npm-package files and non-fetchable schemes are
  /// reported as `External`; everything else goes through `load_or_cache`.
  fn load(
    &self,
    specifier: &Url,
    options: deno_graph::source::LoadOptions,
  ) -> LoadFuture {
    if specifier.scheme() == "file"
      && specifier.path().contains("/node_modules/")
    {
      // The specifier might be in a completely different symlinked tree than
      // what the node_modules url is in (ex. `/my-project-1/node_modules`
      // symlinked to `/my-project-2/node_modules`), so first we checked if the path
      // is in a node_modules dir to avoid needlessly canonicalizing, then now compare
      // against the canonicalized specifier.
      let specifier = node_resolver::resolve_specifier_into_node_modules(
        &self.sys, specifier,
      );
      if self.in_npm_pkg_checker.in_npm_package(&specifier) {
        return Box::pin(std::future::ready(Ok(Some(
          LoadResponse::External { specifier },
        ))));
      }
    }
    if !matches!(
      specifier.scheme(),
      "file" | "http" | "https" | "blob" | "data"
    ) {
      return Box::pin(std::future::ready(Ok(Some(
        deno_graph::source::LoadResponse::External {
          specifier: specifier.clone(),
        },
      ))));
    }
    self.load_or_cache(
      LoadStrategy {
        file_fetcher: self.file_fetcher.clone(),
        file_header_overrides: self.file_header_overrides.clone(),
        reporter: self.reporter.clone(),
      },
      specifier,
      options,
    )
  }

  /// Ensures a module is cached without returning its contents.
  fn ensure_cached(
    &self,
    specifier: &Url,
    options: deno_graph::source::LoadOptions,
  ) -> deno_graph::source::EnsureCachedFuture {
    self.load_or_cache(
      CacheStrategy {
        file_fetcher: self.file_fetcher.clone(),
      },
      specifier,
      options,
    )
  }
}
/// Strategy used by `load_or_cache`: fetch file contents (`LoadStrategy`)
/// or only ensure they're cached (`CacheStrategy`).
#[async_trait::async_trait(?Send)]
trait LoadOrCacheStrategy {
  type Response;
  async fn handle_fetch_or_cache_no_follow(
    &self,
    specifier: &Url,
    permissions: FetchPermissionsOptionRef<'_>,
    options: FetchNoFollowOptions<'_>,
  ) -> Result<Self::Response, FetchNoFollowError>;
}

/// Strategy that fetches file contents and produces a `LoadResponse`.
struct LoadStrategy<
  TBlobStore: BlobStore,
  TSys: DenoGraphLoaderSys,
  THttpClient: HttpClient,
> {
  file_fetcher: PermissionedFileFetcherRc<TBlobStore, TSys, THttpClient>,
  file_header_overrides: HashMap<Url, HashMap<String, String>>,
  reporter: Option<GraphLoaderReporterRc>,
}
#[async_trait::async_trait(?Send)]
impl<TBlobStore: BlobStore, TSys: DenoGraphLoaderSys, THttpClient: HttpClient>
  LoadOrCacheStrategy for LoadStrategy<TBlobStore, TSys, THttpClient>
{
  type Response = deno_graph::source::LoadResponse;

  /// Fetches one step (no redirect following), merges header overrides on
  /// top of any fetched headers, and notifies the reporter on success.
  async fn handle_fetch_or_cache_no_follow(
    &self,
    specifier: &Url,
    permissions: FetchPermissionsOptionRef<'_>,
    options: FetchNoFollowOptions<'_>,
  ) -> Result<deno_graph::source::LoadResponse, FetchNoFollowError> {
    self
      .file_fetcher
      .fetch_no_follow(specifier, permissions, options)
      .await
      .map(|file_or_redirect| match file_or_redirect {
        FileOrRedirect::File(file) => {
          // overrides win over fetched headers (chained last into the map)
          let maybe_headers = match (
            file.maybe_headers,
            self.file_header_overrides.get(specifier),
          ) {
            (Some(headers), Some(overrides)) => {
              Some(headers.into_iter().chain(overrides.clone()).collect())
            }
            (Some(headers), None) => Some(headers),
            (None, Some(overrides)) => Some(overrides.clone()),
            (None, None) => None,
          };
          if let Some(reporter) = &self.reporter {
            reporter.on_load(specifier, file.loaded_from);
          }
          LoadResponse::Module {
            specifier: file.url,
            maybe_headers,
            mtime: file.mtime,
            content: file.source,
          }
        }
        FileOrRedirect::Redirect(redirect_specifier) => {
          LoadResponse::Redirect {
            specifier: redirect_specifier,
          }
        }
      })
  }
}
/// Strategy that only ensures a module is cached, without reading it.
struct CacheStrategy<
  TBlobStore: BlobStore,
  TSys: DenoGraphLoaderSys,
  THttpClient: HttpClient,
> {
  file_fetcher: PermissionedFileFetcherRc<TBlobStore, TSys, THttpClient>,
}
#[async_trait::async_trait(?Send)]
impl<TBlobStore: BlobStore, TSys: DenoGraphLoaderSys, THttpClient: HttpClient>
  LoadOrCacheStrategy for CacheStrategy<TBlobStore, TSys, THttpClient>
{
  type Response = deno_graph::source::CacheResponse;

  /// Caches one step (no redirect following) and translates the result
  /// into the corresponding `deno_graph` cache response.
  async fn handle_fetch_or_cache_no_follow(
    &self,
    specifier: &Url,
    permissions: FetchPermissionsOptionRef<'_>,
    options: FetchNoFollowOptions<'_>,
  ) -> Result<deno_graph::source::CacheResponse, FetchNoFollowError> {
    self
      .file_fetcher
      .ensure_cached_no_follow(specifier, permissions, options)
      .await
      .map(|cached_or_redirect| match cached_or_redirect {
        CachedOrRedirect::Cached => deno_graph::source::CacheResponse::Cached,
        CachedOrRedirect::Redirect(url) => {
          deno_graph::source::CacheResponse::Redirect { specifier: url }
        }
      })
  }
}
/// Wraps a boxed JS error into a generic `deno_graph` load error.
#[allow(clippy::disallowed_types)] // ok, deno_graph requires an Arc
fn load_error(err: JsErrorBox) -> deno_graph::source::LoadError {
  deno_graph::source::LoadError::Other(std::sync::Arc::new(err))
}
/// Errors with `UnsupportedSchemeError` when the URL's scheme is not one
/// the file fetcher can handle.
fn validate_scheme(specifier: &Url) -> Result<(), UnsupportedSchemeError> {
  if deno_cache_dir::file_fetcher::is_valid_scheme(specifier.scheme()) {
    Ok(())
  } else {
    Err(UnsupportedSchemeError {
      scheme: specifier.scheme().to_string(),
      url: specifier.clone(),
    })
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/resolver/deno_json.rs | libs/resolver/deno_json.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::collections::BTreeMap;
use std::collections::BTreeSet;
use std::collections::HashMap;
use std::io::ErrorKind;
use std::path::Path;
use std::path::PathBuf;
use std::rc::Rc;
use deno_config::deno_json::CompilerOptions;
use deno_config::glob::PathOrPatternSet;
use deno_config::workspace::CompilerOptionsSource;
use deno_config::workspace::TsTypeLib;
use deno_config::workspace::WorkspaceDirectory;
use deno_config::workspace::WorkspaceRc;
use deno_error::JsError;
use deno_maybe_sync::new_rc;
use deno_path_util::normalize_path;
use deno_path_util::url_from_directory_path;
use deno_path_util::url_from_file_path;
use deno_path_util::url_to_file_path;
use deno_terminal::colors;
use deno_unsync::sync::AtomicFlag;
use indexmap::IndexMap;
use indexmap::IndexSet;
use node_resolver::DenoIsBuiltInNodeModuleChecker;
use node_resolver::NodeResolutionKind;
use node_resolver::NodeResolver;
use node_resolver::NpmPackageFolderResolver;
use node_resolver::ResolutionMode;
#[cfg(feature = "sync")]
use once_cell::sync::OnceCell;
#[cfg(not(feature = "sync"))]
use once_cell::unsync::OnceCell;
use serde::Serialize;
use serde::Serializer;
use serde_json::json;
use sys_traits::FsRead;
use thiserror::Error;
use url::Url;
use crate::collections::FolderScopedWithUnscopedMap;
use crate::factory::ConfigDiscoveryOption;
use crate::npm::DenoInNpmPackageChecker;
use crate::npm::NpmResolverSys;
#[allow(clippy::disallowed_types)]
type UrlRc = deno_maybe_sync::MaybeArc<Url>;
#[allow(clippy::disallowed_types)]
type CompilerOptionsRc = deno_maybe_sync::MaybeArc<CompilerOptions>;
#[allow(clippy::disallowed_types)]
pub type CompilerOptionsTypesRc =
  deno_maybe_sync::MaybeArc<Vec<(Url, Vec<String>)>>;

/// A structure that represents a set of options that were ignored and the
/// path those options came from.
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct IgnoredCompilerOptions {
  // names of the ignored compiler options
  pub items: Vec<String>,
  // the config file the options came from, if known
  pub maybe_specifier: Option<Url>,
}
impl std::fmt::Display for IgnoredCompilerOptions {
  /// Lists the ignored option names (sorted), mentioning the config file
  /// they came from when it is known.
  fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
    let mut sorted_items = self.items.clone();
    sorted_items.sort_unstable();
    let joined = sorted_items.join(", ");
    match &self.maybe_specifier {
      Some(specifier) => write!(
        f,
        "Unsupported compiler options in \"{}\".\n The following options were ignored:\n {}",
        specifier, joined
      ),
      None => write!(
        f,
        "Unsupported compiler options provided.\n The following options were ignored:\n {}",
        joined
      ),
    }
  }
}
impl Serialize for IgnoredCompilerOptions {
  // Serializes only the option names; the source specifier is dropped.
  fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
  where
    S: Serializer,
  {
    Serialize::serialize(&self.items, serializer)
  }
}

/// Parsed compiler options together with any options that had to be
/// ignored while parsing.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct CompilerOptionsWithIgnoredOptions {
  pub compiler_options: CompilerOptions,
  pub ignored_options: Vec<IgnoredCompilerOptions>,
}

/// Result of `parse_compiler_options`: the allowed options plus the
/// ignored ones, if any.
#[derive(Debug, Default, Clone)]
pub struct ParsedCompilerOptions {
  pub options: serde_json::Map<String, serde_json::Value>,
  pub maybe_ignored: Option<IgnoredCompilerOptions>,
}
/// A set of all the compiler options that should be allowed.
/// (Note: "module" and "moduleResolution" are additionally allowed for a
/// subset of values — see `parse_compiler_options`.)
static ALLOWED_COMPILER_OPTIONS: phf::Set<&'static str> = phf::phf_set! {
  "allowUnreachableCode",
  "allowUnusedLabels",
  "baseUrl",
  "checkJs",
  "erasableSyntaxOnly",
  "emitDecoratorMetadata",
  "exactOptionalPropertyTypes",
  "experimentalDecorators",
  "isolatedDeclarations",
  "jsx",
  "jsxFactory",
  "jsxFragmentFactory",
  "jsxImportSource",
  "jsxPrecompileSkipElements",
  "lib",
  "noErrorTruncation",
  "noFallthroughCasesInSwitch",
  "noImplicitAny",
  "noImplicitOverride",
  "noImplicitReturns",
  "noImplicitThis",
  "noPropertyAccessFromIndexSignature",
  "noUncheckedIndexedAccess",
  "noUnusedLocals",
  "noUnusedParameters",
  "paths",
  "rootDirs",
  "skipLibCheck",
  "strict",
  "strictBindCallApply",
  "strictBuiltinIteratorReturn",
  "strictFunctionTypes",
  "strictNullChecks",
  "strictPropertyInitialization",
  "types",
  "useUnknownInCatchVariables",
  "verbatimModuleSyntax",
};
/// Partitions the provided compiler options into those Deno supports
/// (returned in `options`) and those it does not (reported via
/// `maybe_ignored`, tagged with `maybe_specifier` — the config file the
/// options came from).
pub fn parse_compiler_options(
  compiler_options: serde_json::Map<String, serde_json::Value>,
  maybe_specifier: Option<&Url>,
) -> ParsedCompilerOptions {
  let mut allowed: serde_json::Map<String, serde_json::Value> =
    serde_json::Map::with_capacity(compiler_options.len());
  let mut ignored: Vec<String> = Vec::new(); // don't pre-allocate because it's rare
  for (key, value) in compiler_options {
    // We don't pass "types" entries to typescript via the compiler
    // options and instead provide those to tsc as "roots". This is
    // because our "types" behavior is at odds with how TypeScript's
    // "types" works.
    // We also don't pass "jsxImportSourceTypes" to TypeScript as it doesn't
    // know about this option. It will still take this option into account
    // because the graph resolves the JSX import source to the types for TSC.
    if key == "types" || key == "jsxImportSourceTypes" {
      continue;
    }
    // "module" and "moduleResolution" are only allowed for a subset of
    // values; everything else goes through the allow-list.
    let is_allowed = (key == "module"
      && value
        .as_str()
        .map(|s| {
          matches!(
            s.to_ascii_lowercase().as_str(),
            "nodenext" | "esnext" | "preserve"
          )
        })
        .unwrap_or(false))
      || (key == "moduleResolution"
        && value
          .as_str()
          .map(|s| {
            matches!(s.to_ascii_lowercase().as_str(), "nodenext" | "bundler")
          })
          .unwrap_or(false))
      || ALLOWED_COMPILER_OPTIONS.contains(key.as_str());
    if is_allowed {
      // `value` is owned here; the previous `value.to_owned()` was a
      // redundant clone.
      allowed.insert(key, value);
    } else {
      ignored.push(key);
    }
  }
  let maybe_ignored = if !ignored.is_empty() {
    Some(IgnoredCompilerOptions {
      items: ignored,
      maybe_specifier: maybe_specifier.cloned(),
    })
  } else {
    None
  };
  ParsedCompilerOptions {
    options: allowed,
    maybe_ignored,
  }
}
#[allow(clippy::disallowed_types)]
pub type SerdeJsonErrorArc = std::sync::Arc<serde_json::Error>;

/// Error raised when a config file's `compilerOptions` value could not be
/// deserialized into an object.
#[derive(Debug, Clone, Error, JsError)]
#[class(type)]
#[error("compilerOptions should be an object at '{specifier}'")]
pub struct CompilerOptionsParseError {
  // config file the bad value came from
  pub specifier: Url,
  #[source]
  pub source: SerdeJsonErrorArc,
}

/// A JSX import source specifier plus the base URL to resolve it against.
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct JsxImportSourceSpecifierConfig {
  pub specifier: String,
  pub base: Url,
}

/// JSX configuration derived from compiler options: the JSX transform
/// module plus optional import sources for code and for types.
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct JsxImportSourceConfig {
  pub module: String,
  pub import_source: Option<JsxImportSourceSpecifierConfig>,
  pub import_source_types: Option<JsxImportSourceSpecifierConfig>,
}
impl JsxImportSourceConfig {
  /// The configured JSX import source specifier text, if any.
  pub fn specifier(&self) -> Option<&str> {
    let import_source = self.import_source.as_ref()?;
    Some(import_source.specifier.as_str())
  }
}
#[allow(clippy::disallowed_types)]
pub type JsxImportSourceConfigRc =
  deno_maybe_sync::MaybeArc<JsxImportSourceConfig>;

/// The recognized subset of values for the "module" compiler option.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
enum CompilerOptionsModule {
  NodeNext,
  EsNext,
  Preserve,
}

/// The recognized subset of values for the "moduleResolution" compiler
/// option.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum CompilerOptionsModuleResolution {
  NodeNext,
  Bundler,
}

/// Errors producing a `JsxImportSourceConfig` from compiler options.
#[derive(Debug, Clone, Error, JsError)]
#[class(type)]
pub enum ToMaybeJsxImportSourceConfigError {
  #[error(
    "'jsxImportSource' is only supported when 'jsx' is set to 'react-jsx' or 'react-jsxdev'.\n at {0}"
  )]
  InvalidJsxImportSourceValue(Url),
  #[error(
    "'jsxImportSourceTypes' is only supported when 'jsx' is set to 'react-jsx' or 'react-jsxdev'.\n at {0}"
  )]
  InvalidJsxImportSourceTypesValue(Url),
  #[error(
    "Unsupported 'jsx' compiler option value '{value}'. Supported: 'react-jsx', 'react-jsxdev', 'react', 'precompile'\n at {specifier}"
  )]
  InvalidJsxCompilerOption { value: String, specifier: Url },
}

/// An enum that represents the base tsc configuration to return.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum CompilerOptionsType {
  /// Return a configuration for bundling, using swc to emit the bundle. This is
  /// independent of type checking.
  Bundle,
  /// Return a configuration to use tsc to type check. This
  /// is independent of either bundling or emitting via swc.
  Check { lib: TsTypeLib },
  /// Return a configuration to use swc to emit single module files.
  Emit,
}

/// Which kind of config file the compiler options came from; a few
/// defaults differ between deno.json and tsconfig.json sources.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum CompilerOptionsSourceKind {
  DenoJson,
  TsConfig,
}
/// For a given configuration type get the starting point CompilerOptions
/// used that can then be merged with user specified options.
pub fn get_base_compiler_options_for_emit(
  config_type: CompilerOptionsType,
  source_kind: CompilerOptionsSourceKind,
) -> CompilerOptions {
  match config_type {
    // bundling via swc; type checking is not involved here
    CompilerOptionsType::Bundle => CompilerOptions::new(json!({
      "allowImportingTsExtensions": true,
      "checkJs": false,
      "emitDecoratorMetadata": false,
      "experimentalDecorators": true,
      "importsNotUsedAsValues": "remove",
      "inlineSourceMap": false,
      "inlineSources": false,
      "sourceMap": false,
      "jsx": "react",
      "jsxFactory": "React.createElement",
      "jsxFragmentFactory": "React.Fragment",
      "module": "NodeNext",
    })),
    // type checking via tsc; the lib list and a few strictness defaults
    // vary by target lib and by whether the source is deno.json or
    // tsconfig.json
    CompilerOptionsType::Check { lib } => CompilerOptions::new(json!({
      "allowJs": true,
      "allowImportingTsExtensions": true,
      "allowSyntheticDefaultImports": true,
      "checkJs": false,
      "emitDecoratorMetadata": false,
      "experimentalDecorators": false,
      "incremental": true,
      "jsx": "react",
      "importsNotUsedAsValues": "remove",
      "inlineSourceMap": true,
      "inlineSources": true,
      "isolatedModules": true,
      "lib": match (lib, source_kind) {
        (TsTypeLib::DenoWindow, CompilerOptionsSourceKind::DenoJson) => vec!["deno.window", "deno.unstable", "node"],
        (TsTypeLib::DenoWindow, CompilerOptionsSourceKind::TsConfig) => vec!["deno.window", "deno.unstable", "dom", "node"],
        (TsTypeLib::DenoWorker, CompilerOptionsSourceKind::DenoJson) => vec!["deno.worker", "deno.unstable", "node"],
        (TsTypeLib::DenoWorker, CompilerOptionsSourceKind::TsConfig) => vec!["deno.worker", "deno.unstable", "dom", "node"],
      },
      "module": "NodeNext",
      "moduleDetection": "force",
      "noEmit": true,
      "noImplicitOverride": match source_kind {
        CompilerOptionsSourceKind::DenoJson => true,
        CompilerOptionsSourceKind::TsConfig => false,
      },
      "resolveJsonModule": true,
      "sourceMap": false,
      "skipLibCheck": false,
      "strict": match source_kind {
        CompilerOptionsSourceKind::DenoJson => true,
        CompilerOptionsSourceKind::TsConfig => false,
      },
      "target": "esnext",
      "tsBuildInfoFile": "internal:///.tsbuildinfo",
      "useDefineForClassFields": true,
    })),
    // single-module emit via swc
    CompilerOptionsType::Emit => CompilerOptions::new(json!({
      "allowImportingTsExtensions": true,
      "checkJs": false,
      "emitDecoratorMetadata": false,
      "experimentalDecorators": false,
      "importsNotUsedAsValues": "remove",
      "inlineSourceMap": true,
      "inlineSources": true,
      "sourceMap": false,
      "jsx": "react",
      "jsxFactory": "React.createElement",
      "jsxFragmentFactory": "React.Fragment",
      "module": "NodeNext",
      "moduleResolution": "NodeNext",
      "resolveJsonModule": true,
    })),
  }
}
/// Options for transpiling and emitting a module via `deno_ast`.
#[cfg(feature = "deno_ast")]
#[derive(Debug)]
pub struct TranspileAndEmitOptions {
  pub no_transpile: bool,
  pub transpile: deno_ast::TranspileOptions,
  pub emit: deno_ast::EmitOptions,
  // stored ahead of time so we don't have to recompute this a lot
  pub pre_computed_hash: u64,
}

#[cfg(feature = "deno_ast")]
#[allow(clippy::disallowed_types)]
pub type TranspileAndEmitOptionsRc =
  deno_maybe_sync::MaybeArc<TranspileAndEmitOptions>;

/// A string pattern with at most one `*` wildcard, stored as the prefix
/// before the `*` plus the suffix after it (`None` when there was no `*`).
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
struct WildcardString {
  prefix: String,
  suffix_if_wildcard: Option<String>,
}

/// Pre-resolved "paths" compiler option: for each raw key, its wildcard
/// pattern and the wildcard substitution targets.
#[derive(Debug, Default, Clone, Eq, PartialEq, Hash)]
pub struct CompilerOptionsPaths {
  resolved: Vec<(String, WildcardString, Vec<WildcardString>)>,
}
impl CompilerOptionsPaths {
  /// Builds the resolved path mappings from the raw "paths" JSON object.
  /// Keys and entries with more than one `*` are skipped; relative keys
  /// (`./`, `../`, `/`) and all entry prefixes are resolved against `base`.
  fn new(
    object: &serde_json::Map<String, serde_json::Value>,
    base: &Url,
  ) -> Self {
    let resolved = object
      .iter()
      .filter_map(|(raw_key, value)| {
        let value = value.as_array()?;
        let mut key_prefix = raw_key.as_str();
        let mut key_suffix_if_wildcard = None;
        if let Some((prefix, suffix)) = raw_key.split_once('*') {
          // more than one wildcard is not supported
          if suffix.contains('*') {
            return None;
          }
          key_prefix = prefix;
          key_suffix_if_wildcard = Some(suffix);
        }
        // relative-looking keys are resolved against the base url;
        // bare-specifier-looking keys are kept as-is
        let key_prefix = if key_prefix.starts_with("./")
          || key_prefix.starts_with("../")
          || key_prefix.starts_with("/")
        {
          base.join(key_prefix).ok()?.to_string()
        } else {
          key_prefix.to_string()
        };
        let key = WildcardString {
          prefix: key_prefix,
          suffix_if_wildcard: key_suffix_if_wildcard.map(|s| s.to_string()),
        };
        let paths = value
          .iter()
          .filter_map(|entry| {
            let mut entry_prefix = entry.as_str()?;
            let mut entry_suffix_if_wildcard = None;
            if let Some((prefix, suffix)) = entry_prefix.split_once('*') {
              if suffix.contains('*') {
                return None;
              }
              entry_prefix = prefix;
              entry_suffix_if_wildcard = Some(suffix);
            }
            // entry prefixes are always resolved against the base url
            let entry_prefix = base.join(entry_prefix).ok()?.to_string();
            Some(WildcardString {
              prefix: entry_prefix,
              suffix_if_wildcard: entry_suffix_if_wildcard
                .map(|s| s.to_string()),
            })
          })
          .collect();
        Some((raw_key.clone(), key, paths))
      })
      .collect();
    Self { resolved }
  }

  /// Matches `specifier` against the resolved keys (first match wins, in
  /// config order) and returns candidate URLs with the wildcard portion
  /// substituted, plus the raw key that matched.
  pub fn resolve_candidates(
    &self,
    specifier: &str,
  ) -> Option<(impl Iterator<Item = Url>, String)> {
    self.resolved.iter().find_map(|(raw_key, key, paths)| {
      let mut matched = specifier.strip_prefix(&key.prefix)?;
      if let Some(key_suffix) = &key.suffix_if_wildcard {
        matched = matched.strip_suffix(key_suffix)?;
      } else if !matched.is_empty() {
        // non-wildcard keys must match the specifier exactly
        return None;
      }
      Some((
        paths.iter().filter_map(move |path| {
          if let Some(path_suffix) = &path.suffix_if_wildcard {
            Url::parse(&format!("{}{matched}{}", &path.prefix, path_suffix))
              .ok()
          } else {
            Url::parse(&path.prefix).ok()
          }
        }),
        raw_key.clone(),
      ))
    })
  }
}
/// Tracks which one-time warnings have already been emitted so they are
/// not logged repeatedly across resolutions.
#[derive(Debug, Default)]
struct LoggedWarnings {
  experimental_decorators: AtomicFlag,
  folders: deno_maybe_sync::MaybeDashSet<Url>,
}
/// Shared (maybe-atomic) reference to [`LoggedWarnings`].
#[allow(clippy::disallowed_types)]
type LoggedWarningsRc = deno_maybe_sync::MaybeArc<LoggedWarnings>;
/// Lazily-computed values derived from the compiler options sources.
/// Each cell is populated at most once per [`CompilerOptionsData`].
#[derive(Default, Debug)]
struct MemoizedValues {
  deno_window_check_compiler_options:
    OnceCell<Result<CompilerOptionsRc, CompilerOptionsParseError>>,
  deno_worker_check_compiler_options:
    OnceCell<Result<CompilerOptionsRc, CompilerOptionsParseError>>,
  emit_compiler_options:
    OnceCell<Result<CompilerOptionsRc, CompilerOptionsParseError>>,
  #[cfg(feature = "deno_ast")]
  transpile_options:
    OnceCell<Result<TranspileAndEmitOptionsRc, CompilerOptionsParseError>>,
  compiler_options_types: OnceCell<CompilerOptionsTypesRc>,
  jsx_import_source_config: OnceCell<
    Result<Option<JsxImportSourceConfigRc>, ToMaybeJsxImportSourceConfigError>,
  >,
  module: OnceCell<CompilerOptionsModule>,
  module_resolution: OnceCell<CompilerOptionsModuleResolution>,
  check_js: OnceCell<bool>,
  skip_lib_check: OnceCell<bool>,
  base_url: OnceCell<Option<Url>>,
  paths: OnceCell<CompilerOptionsPaths>,
  root_dirs: OnceCell<Vec<Url>>,
}
/// Caller-supplied overrides that adjust how compiler options are
/// turned into transpile/emit settings.
#[derive(Debug, Clone, Default)]
pub struct CompilerOptionsOverrides {
  /// Skip transpiling in the loaders.
  pub no_transpile: bool,
  /// Base to use for the source map. This is useful when bundling
  /// and you want to make file urls relative.
  pub source_map_base: Option<Url>,
  /// Preserve JSX instead of transforming it.
  ///
  /// This may be useful when bundling.
  pub preserve_jsx: bool,
}
/// Compiler options collected from one or more config sources, with
/// memoized derived values (check/emit options, JSX config, paths, etc.).
#[derive(Debug)]
pub struct CompilerOptionsData {
  /// Config sources in application order; later sources override earlier.
  pub sources: Vec<CompilerOptionsSource>,
  pub source_kind: CompilerOptionsSourceKind,
  workspace_dir_url: Option<UrlRc>,
  memoized: MemoizedValues,
  logged_warnings: LoggedWarningsRc,
  #[cfg_attr(not(feature = "deno_ast"), allow(unused))]
  overrides: CompilerOptionsOverrides,
}
impl CompilerOptionsData {
  fn new(
    sources: Vec<CompilerOptionsSource>,
    source_kind: CompilerOptionsSourceKind,
    workspace_dir_url: Option<UrlRc>,
    logged_warnings: LoggedWarningsRc,
    overrides: CompilerOptionsOverrides,
  ) -> Self {
    Self {
      sources,
      source_kind,
      workspace_dir_url,
      memoized: Default::default(),
      logged_warnings,
      overrides,
    }
  }
  /// Resolved compiler options for type-checking against the given lib.
  pub fn compiler_options_for_lib(
    &self,
    lib: TsTypeLib,
  ) -> Result<&CompilerOptionsRc, CompilerOptionsParseError> {
    self.compiler_options_inner(CompilerOptionsType::Check { lib })
  }
  /// Resolved compiler options for emitting (transpiling) code.
  pub fn compiler_options_for_emit(
    &self,
  ) -> Result<&CompilerOptionsRc, CompilerOptionsParseError> {
    self.compiler_options_inner(CompilerOptionsType::Emit)
  }
  /// Merges all sources on top of the base options for `typ`, memoizing
  /// the result per options type.
  fn compiler_options_inner(
    &self,
    typ: CompilerOptionsType,
  ) -> Result<&CompilerOptionsRc, CompilerOptionsParseError> {
    let cell = match typ {
      CompilerOptionsType::Bundle => unreachable!(),
      CompilerOptionsType::Check {
        lib: TsTypeLib::DenoWindow,
      } => &self.memoized.deno_window_check_compiler_options,
      CompilerOptionsType::Check {
        lib: TsTypeLib::DenoWorker,
      } => &self.memoized.deno_worker_check_compiler_options,
      CompilerOptionsType::Emit => &self.memoized.emit_compiler_options,
    };
    let result = cell.get_or_init(|| {
      let mut result = CompilerOptionsWithIgnoredOptions {
        compiler_options: get_base_compiler_options_for_emit(
          typ,
          self.source_kind,
        ),
        ignored_options: Vec::new(),
      };
      // Later sources override earlier ones via object merge.
      for source in &self.sources {
        let Some(compiler_options) = source.compiler_options.as_ref() else {
          continue;
        };
        let object = serde_json::from_value(compiler_options.0.clone())
          .map_err(|err| CompilerOptionsParseError {
            specifier: source.specifier.as_ref().clone(),
            source: SerdeJsonErrorArc::new(err),
          })?;
        let parsed =
          parse_compiler_options(object, Some(source.specifier.as_ref()));
        result.compiler_options.merge_object_mut(parsed.options);
        // `isolatedDeclarations` implies declaration emit and no JS checking
        // when type-checking.
        if matches!(typ, CompilerOptionsType::Check { .. })
          && let Some(compiler_options) =
            result.compiler_options.0.as_object_mut()
          && compiler_options.get("isolatedDeclarations")
            == Some(&serde_json::Value::Bool(true))
        {
          compiler_options.insert("declaration".into(), true.into());
          compiler_options.insert("allowJs".into(), false.into());
        }
        if let Some(ignored) = parsed.maybe_ignored {
          result.ignored_options.push(ignored);
        }
      }
      // Tsconfig sources expect ignored options; only warn for deno config.
      if self.source_kind != CompilerOptionsSourceKind::TsConfig {
        check_warn_compiler_options(&result, &self.logged_warnings);
      }
      Ok(new_rc(result.compiler_options))
    });
    result.as_ref().map_err(Clone::clone)
  }
  /// Transpile and emit options derived from the emit compiler options,
  /// memoized along with a pre-computed hash.
  #[cfg(feature = "deno_ast")]
  pub fn transpile_options(
    &self,
  ) -> Result<&TranspileAndEmitOptionsRc, CompilerOptionsParseError> {
    let result = self.memoized.transpile_options.get_or_init(|| {
      let compiler_options = self.compiler_options_for_emit()?;
      compiler_options_to_transpile_and_emit_options(
        compiler_options.as_ref().clone(),
        &self.overrides,
      )
      .map(new_rc)
      .map_err(|source| CompilerOptionsParseError {
        specifier: self
          .sources
          .last()
          .map(|s| s.specifier.as_ref().clone())
          .expect(
            "Compiler options parse errors must come from a user source.",
          ),
        source: SerdeJsonErrorArc::new(source),
      })
    });
    result.as_ref().map_err(Clone::clone)
  }
  /// The `types` arrays from each source, keyed by source specifier.
  pub fn compiler_options_types(&self) -> &CompilerOptionsTypesRc {
    self.memoized.compiler_options_types.get_or_init(|| {
      let types = self
        .sources
        .iter()
        .filter_map(|s| {
          let types = s
            .compiler_options
            .as_ref()?
            .0
            .as_object()?
            .get("types")?
            .as_array()?
            .iter()
            .filter_map(|v| Some(v.as_str()?.to_string()))
            .collect();
          Some((s.specifier.as_ref().clone(), types))
        })
        .collect();
      new_rc(types)
    })
  }
  /// Resolves the JSX import source configuration from the `jsx`,
  /// `jsxImportSource` and `jsxImportSourceTypes` options, validating that
  /// the `jsx` mode permits an import source. Returns `Ok(None)` when JSX
  /// is classic (`react`) or unset without import sources configured.
  pub fn jsx_import_source_config(
    &self,
  ) -> Result<Option<&JsxImportSourceConfigRc>, ToMaybeJsxImportSourceConfigError>
  {
    let result = self.memoized.jsx_import_source_config.get_or_init(|| {
      // Last source wins for each individual option.
      let jsx = self.sources.iter().rev().find_map(|s| Some((s.compiler_options.as_ref()?.0.as_object()?.get("jsx")?.as_str()?, &s.specifier)));
      let is_jsx_automatic = matches!(
        jsx,
        Some(("react-jsx" | "preserve" | "react-jsxdev" | "precompile", _)),
      );
      let import_source = self.sources.iter().rev().find_map(|s| {
        Some(JsxImportSourceSpecifierConfig {
          specifier: s.compiler_options.as_ref()?.0.as_object()?.get("jsxImportSource")?.as_str()?.to_string(),
          base: s.specifier.as_ref().clone()
        })
      }).or_else(|| {
        if !is_jsx_automatic {
          return None;
        }
        // Automatic JSX defaults to "react" when no import source is set.
        Some(JsxImportSourceSpecifierConfig {
          base: self.sources.last()?.specifier.as_ref().clone(),
          specifier: "react".to_string()
        })
      });
      let import_source_types = self.sources.iter().rev().find_map(|s| {
        Some(JsxImportSourceSpecifierConfig {
          specifier: s.compiler_options.as_ref()?.0.as_object()?.get("jsxImportSourceTypes")?.as_str()?.to_string(),
          base: s.specifier.as_ref().clone()
        })
      }).or_else(|| import_source.clone());
      let module = match jsx {
        Some(("react-jsx" | "preserve", _)) => "jsx-runtime".to_string(),
        Some(("react-jsxdev", _)) => "jsx-dev-runtime".to_string(),
        Some(("react", _)) | None => {
          // Classic JSX (or unset) cannot use an import source.
          if let Some(import_source) = &import_source {
            return Err(
              ToMaybeJsxImportSourceConfigError::InvalidJsxImportSourceValue(
                import_source.base.clone(),
              ),
            );
          }
          if let Some(import_source_types) = &import_source_types {
            return Err(
              ToMaybeJsxImportSourceConfigError::InvalidJsxImportSourceTypesValue(
                import_source_types.base.clone(),
              ),
            );
          }
          return Ok(None);
        }
        Some(("precompile", _)) => "jsx-runtime".to_string(),
        Some((setting, setting_source)) => {
          return Err(
            ToMaybeJsxImportSourceConfigError::InvalidJsxCompilerOption {
              value: setting.to_string(),
              specifier: setting_source.as_ref().clone(),
            },
          )
        }
      };
      Ok(Some(new_rc(JsxImportSourceConfig {
        module,
        import_source,
        import_source_types,
      })))
    });
    result.as_ref().map(|c| c.as_ref()).map_err(Clone::clone)
  }
  /// The effective `module` option; unrecognized values fall back to
  /// `NodeNext`.
  fn module(&self) -> CompilerOptionsModule {
    *self.memoized.module.get_or_init(|| {
      let value = self.sources.iter().rev().find_map(|s| {
        s.compiler_options
          .as_ref()?
          .0
          .as_object()?
          .get("module")?
          .as_str()
      });
      match value.map(|s| s.to_ascii_lowercase()).as_deref() {
        Some("esnext") => CompilerOptionsModule::EsNext,
        Some("preserve") => CompilerOptionsModule::Preserve,
        _ => CompilerOptionsModule::NodeNext,
      }
    })
  }
  /// The effective `moduleResolution` option, defaulting based on the
  /// `module` option when unset or unrecognized.
  pub fn module_resolution(&self) -> CompilerOptionsModuleResolution {
    *self.memoized.module_resolution.get_or_init(|| {
      let value = self.sources.iter().rev().find_map(|s| {
        s.compiler_options
          .as_ref()?
          .0
          .as_object()?
          .get("moduleResolution")?
          .as_str()
      });
      match value.map(|s| s.to_ascii_lowercase()).as_deref() {
        Some("nodenext") => CompilerOptionsModuleResolution::NodeNext,
        Some("bundler") => CompilerOptionsModuleResolution::Bundler,
        _ => match self.module() {
          CompilerOptionsModule::NodeNext => {
            CompilerOptionsModuleResolution::NodeNext
          }
          CompilerOptionsModule::EsNext | CompilerOptionsModule::Preserve => {
            CompilerOptionsModuleResolution::Bundler
          }
        },
      }
    })
  }
  /// The effective `checkJs` option (defaults to false).
  pub fn check_js(&self) -> bool {
    *self.memoized.check_js.get_or_init(|| {
      self
        .sources
        .iter()
        .rev()
        .find_map(|s| {
          s.compiler_options
            .as_ref()?
            .0
            .as_object()?
            .get("checkJs")?
            .as_bool()
        })
        .unwrap_or(false)
    })
  }
  /// The effective `skipLibCheck` option (defaults to false).
  pub fn skip_lib_check(&self) -> bool {
    *self.memoized.skip_lib_check.get_or_init(|| {
      self
        .sources
        .iter()
        .rev()
        .find_map(|s| {
          s.compiler_options
            .as_ref()?
            .0
            .as_object()?
            .get("skipLibCheck")?
            .as_bool()
        })
        .unwrap_or(false)
    })
  }
  /// The `baseUrl` option resolved to a directory URL relative to the
  /// last source's directory, if set and resolvable.
  fn base_url(&self) -> Option<&Url> {
    let base_url = self.memoized.base_url.get_or_init(|| {
      let base_url = self.sources.iter().rev().find_map(|s| {
        s.compiler_options
          .as_ref()?
          .0
          .as_object()?
          .get("baseUrl")?
          .as_str()
      })?;
      url_from_directory_path(
        &url_to_file_path(&self.sources.last()?.specifier)
          .ok()?
          .parent()?
          .join(base_url),
      )
      .ok()
    });
    base_url.as_ref()
  }
  /// The parsed `paths` mappings, resolved against `baseUrl` when present
  /// or otherwise against the defining source's specifier.
  pub fn paths(&self) -> &CompilerOptionsPaths {
    self.memoized.paths.get_or_init(|| {
      let Some((source_specifier, paths)) =
        self.sources.iter().rev().find_map(|s| {
          Some((
            &s.specifier,
            s.compiler_options
              .as_ref()?
              .0
              .as_object()?
              .get("paths")?
              .as_object()?,
          ))
        })
      else {
        return Default::default();
      };
      CompilerOptionsPaths::new(
        paths,
        self.base_url().unwrap_or(source_specifier),
      )
    })
  }
  /// The `rootDirs` entries resolved to directory URLs; entries that fail
  /// to resolve are dropped.
  pub fn root_dirs(&self) -> &Vec<Url> {
    self.memoized.root_dirs.get_or_init(|| {
      let Some((source_specifier, root_dirs)) =
        self.sources.iter().rev().find_map(|s| {
          Some((
            &s.specifier,
            s.compiler_options
              .as_ref()?
              .0
              .as_object()?
              .get("rootDirs")?
              .as_array()?,
          ))
        })
      else {
        return Vec::new();
      };
      root_dirs
        .iter()
        .filter_map(|s| {
          url_from_directory_path(
            &url_to_file_path(self.base_url().unwrap_or(source_specifier))
              .ok()?
              .parent()?
              .join(s.as_str()?),
          )
          .ok()
        })
        .collect()
    })
  }
  /// The workspace directory URL, or the last source's specifier when no
  /// workspace directory is associated with this data.
  pub fn workspace_dir_or_source_url(&self) -> Option<&UrlRc> {
    self
      .workspace_dir_url
      .as_ref()
      .or_else(|| self.sources.last().map(|s| &s.specifier))
  }
}
// A resolved element of the `files` array in a tsconfig.
#[derive(Debug, Clone)]
pub struct TsConfigFile {
  /// The entry as a relative specifier (always prefixed with "./", "../"
  /// or "/").
  pub relative_specifier: String,
  /// The entry normalized to an absolute path.
  pub absolute_path: PathBuf,
}
impl TsConfigFile {
  /// Builds a `TsConfigFile` from a raw `files` array entry, resolving it
  /// against the tsconfig's directory.
  fn from_raw(raw: &str, dir_path: impl AsRef<Path>) -> Self {
    // Entries without an explicit relative/absolute prefix are treated as
    // relative to the tsconfig directory, so prepend "./" for the specifier.
    let has_prefix =
      ["./", "../", "/"].iter().any(|p| raw.starts_with(p));
    let relative_specifier = if has_prefix {
      raw.to_string()
    } else {
      format!("./{raw}")
    };
    let raw_path = Path::new(raw);
    let absolute_path = if raw_path.is_absolute() {
      normalize_path(Cow::Borrowed(raw_path))
    } else {
      let joined = dir_path.as_ref().join(raw_path);
      normalize_path(Cow::Owned(joined))
    };
    Self {
      relative_specifier,
      absolute_path: absolute_path.into_owned(),
    }
  }
}
/// Determines which paths belong to a tsconfig, based on its `files`,
/// `include` and `exclude` settings.
#[derive(Debug)]
struct TsConfigFileFilter {
  // Note that `files`, `include` and `exclude` are overwritten, not merged,
  // when using `extends`. So we only need to store one referrer for `files`.
  // See: https://www.typescriptlang.org/tsconfig/#extends.
  files: Option<(UrlRc, Vec<TsConfigFile>)>,
  include: Option<PathOrPatternSet>,
  exclude: Option<PathOrPatternSet>,
  // Directory containing the tsconfig; the default scope when `include`
  // is absent.
  dir_path: PathBuf,
}
impl TsConfigFileFilter {
fn includes_path(&self, path: impl AsRef<Path>) -> bool {
let path = path.as_ref();
if let Some((_, files)) = &self.files
&& files.iter().any(|f| f.absolute_path == path)
{
return true;
}
if let Some(exclude) = &self.exclude
&& exclude.matches_path(path)
{
return false;
}
if let Some(include) = &self.include {
if include.matches_path(path) {
return true;
}
} else if path.starts_with(&self.dir_path) {
return true;
}
false
}
}
/// Shared (maybe-atomic) reference to [`TsConfigFileFilter`].
#[allow(clippy::disallowed_types)]
type TsConfigFileFilterRc = deno_maybe_sync::MaybeArc<TsConfigFileFilter>;
/// A fully-read tsconfig: its compiler options, file filter, and raw
/// project reference paths.
#[derive(Debug)]
pub struct TsConfigData {
  compiler_options: CompilerOptionsData,
  filter: TsConfigFileFilterRc,
  // Raw `references[].path` values, not yet resolved.
  references: Vec<String>,
}
impl TsConfigData {
  /// The resolved `files` entries with the tsconfig URL that declared
  /// them, if a `files` array was present.
  pub fn files(&self) -> Option<(&UrlRc, &Vec<TsConfigFile>)> {
    let (referrer, files) = self.filter.files.as_ref()?;
    Some((referrer, files))
  }
  /// The specifier of the tsconfig itself (the last compiler options
  /// source, i.e. the most-derived config in an `extends` chain).
  fn specifier(&self) -> &UrlRc {
    &self
      .compiler_options
      .sources
      .last()
      .expect("Tsconfigs should always have at least one source.")
      .specifier
  }
}
/// Returns true when the I/O error likely indicates that the path was a
/// directory rather than a regular file.
fn is_maybe_directory_error(err: &std::io::Error) -> bool {
  match err.kind() {
    ErrorKind::IsADirectory => true,
    // This happens on Windows for some reason.
    ErrorKind::PermissionDenied => cfg!(windows),
    _ => false,
  }
}
/// Node resolver specialization used when resolving `extends` targets and
/// similar specifiers from tsconfigs.
pub(crate) type TsConfigNodeResolver<TSys, TNpfr> = NodeResolver<
  DenoInNpmPackageChecker,
  DenoIsBuiltInNodeModuleChecker,
  TNpfr,
  TSys,
>;
/// Callback returning the node resolver applicable to a given referrer URL,
/// if any.
type GetNodeResolverFn<'a, NSys, TNpfr> =
  Box<dyn Fn(&Url) -> Option<&'a TsConfigNodeResolver<NSys, TNpfr>> + 'a>;
/// Walks tsconfig files starting from a set of roots, following project
/// references and caching reads to avoid re-parsing and reference cycles.
struct TsConfigCollector<
  'a,
  'b,
  TSys: FsRead,
  NSys: NpmResolverSys,
  TNpfr: NpmPackageFolderResolver,
> {
  // Root tsconfig paths to start collection from.
  roots: BTreeSet<PathBuf>,
  // Collected configs in discovery order, keyed by specifier.
  collected: IndexMap<UrlRc, Rc<TsConfigData>>,
  // Read results cached by path so each file is parsed at most once.
  read_cache: HashMap<PathBuf, Result<Rc<TsConfigData>, Rc<std::io::Error>>>,
  // Paths currently on the read stack; used to detect reference cycles.
  currently_reading: IndexSet<PathBuf>,
  sys: &'a TSys,
  get_node_resolver: GetNodeResolverFn<'b, NSys, TNpfr>,
  logged_warnings: &'a LoggedWarningsRc,
  overrides: CompilerOptionsOverrides,
}
// Manual Debug impl because `get_node_resolver` (a boxed closure) and
// `overrides` cannot or need not be debug-printed.
impl<
  TSys: FsRead + std::fmt::Debug,
  NSys: NpmResolverSys,
  TNpfr: NpmPackageFolderResolver,
> std::fmt::Debug for TsConfigCollector<'_, '_, TSys, NSys, TNpfr>
{
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    f.debug_struct("TsConfigCollector")
      .field("roots", &self.roots)
      .field("collected", &self.collected)
      .field("read_cache", &self.read_cache)
      .field("currently_reading", &self.currently_reading)
      .field("sys", &self.sys)
      .field("logged_warnings", &self.logged_warnings)
      .finish()
  }
}
impl<
'a,
'b,
TSys: FsRead,
NSys: NpmResolverSys,
TNpfr: NpmPackageFolderResolver,
> TsConfigCollector<'a, 'b, TSys, NSys, TNpfr>
{
fn new(
sys: &'a TSys,
get_node_resolver: GetNodeResolverFn<'b, NSys, TNpfr>,
logged_warnings: &'a LoggedWarningsRc,
overrides: CompilerOptionsOverrides,
) -> Self {
Self {
roots: Default::default(),
collected: Default::default(),
read_cache: Default::default(),
currently_reading: Default::default(),
sys,
get_node_resolver,
logged_warnings,
overrides,
}
}
fn add_root(&mut self, path: PathBuf) {
self.roots.insert(path);
}
fn collect(mut self) -> Vec<TsConfigData> {
for root in std::mem::take(&mut self.roots) {
let Ok(ts_config) = self.read_ts_config_with_cache(Cow::Owned(root))
else {
continue;
};
self.visit_reference(ts_config);
}
let Self { collected, .. } = { self };
collected
.into_values()
.map(|t| {
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | true |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/resolver/graph.rs | libs/resolver/graph.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use boxed_error::Boxed;
use deno_error::JsErrorClass;
use deno_graph::JsrLoadError;
use deno_graph::Module;
use deno_graph::ModuleError;
use deno_graph::ModuleErrorKind;
use deno_graph::ModuleGraphError;
use deno_graph::ModuleLoadError;
use deno_graph::Resolution;
use deno_graph::ResolutionError;
use deno_graph::SpecifierError;
use deno_graph::source::ResolveError;
use deno_media_type::MediaType;
use deno_semver::npm::NpmPackageReqReference;
use deno_semver::package::PackageReq;
use deno_unsync::sync::AtomicFlag;
use import_map::ImportMapErrorKind;
use node_resolver::DenoIsBuiltInNodeModuleChecker;
use node_resolver::InNpmPackageChecker;
use node_resolver::IsBuiltInNodeModuleChecker;
use node_resolver::NpmPackageFolderResolver;
use node_resolver::UrlOrPath;
use node_resolver::errors::NodeJsErrorCoded;
use url::Url;
use crate::DenoResolveError;
use crate::DenoResolverSys;
use crate::RawDenoResolverRc;
use crate::cjs::CjsTracker;
use crate::deno_json::JsxImportSourceConfigResolver;
use crate::npm;
use crate::npm::managed::ManagedResolvePkgFolderFromDenoReqError;
use crate::workspace::MappedResolutionDiagnostic;
use crate::workspace::sloppy_imports_resolve;
/// Shared (maybe-atomic) reference to [`FoundPackageJsonDepFlag`].
#[allow(clippy::disallowed_types)]
pub type FoundPackageJsonDepFlagRc =
  deno_maybe_sync::MaybeArc<FoundPackageJsonDepFlag>;
/// A flag that indicates if a package.json dependency was
/// found during resolution.
#[derive(Debug, Default)]
pub struct FoundPackageJsonDepFlag(AtomicFlag);
impl FoundPackageJsonDepFlag {
  /// Sets the flag, returning whether it was previously unset.
  #[inline(always)]
  pub fn raise(&self) -> bool {
    self.0.raise()
  }
  /// Whether the flag has been set.
  #[inline(always)]
  pub fn is_raised(&self) -> bool {
    self.0.is_raised()
  }
}
/// Boxed wrapper around [`ResolveWithGraphErrorKind`] to keep
/// `Result<_, ResolveWithGraphError>` small.
#[derive(Debug, deno_error::JsError, Boxed)]
pub struct ResolveWithGraphError(pub Box<ResolveWithGraphErrorKind>);
impl ResolveWithGraphError {
  /// The specifier associated with this error, when the underlying error
  /// kind carries one.
  pub fn maybe_specifier(&self) -> Option<Cow<'_, UrlOrPath>> {
    match self.as_kind() {
      ResolveWithGraphErrorKind::ManagedResolvePkgFolderFromDenoReq(_) => None,
      ResolveWithGraphErrorKind::CouldNotResolveNpmReqRef(err) => {
        err.source.maybe_specifier()
      }
      ResolveWithGraphErrorKind::ResolveNpmReqRef(err) => {
        err.err.maybe_specifier()
      }
      ResolveWithGraphErrorKind::Resolution(err) => match err {
        deno_graph::ResolutionError::InvalidDowngrade { specifier, .. } => {
          Some(specifier)
        }
        deno_graph::ResolutionError::InvalidJsrHttpsTypesImport {
          specifier,
          ..
        } => Some(specifier),
        deno_graph::ResolutionError::InvalidLocalImport {
          specifier, ..
        } => Some(specifier),
        deno_graph::ResolutionError::ResolverError { .. }
        | deno_graph::ResolutionError::InvalidSpecifier { .. } => None,
      }
      .map(|s| Cow::Owned(UrlOrPath::Url(s.clone()))),
      ResolveWithGraphErrorKind::Resolve(err) => err.maybe_specifier(),
      ResolveWithGraphErrorKind::PathToUrl(err) => {
        Some(Cow::Owned(UrlOrPath::Path(err.0.clone())))
      }
      ResolveWithGraphErrorKind::ResolvePkgFolderFromDenoModule(_) => None,
    }
  }
}
/// All the ways resolving a specifier through a module graph can fail.
#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum ResolveWithGraphErrorKind {
  #[error(transparent)]
  #[class(inherit)]
  ManagedResolvePkgFolderFromDenoReq(
    #[from] ManagedResolvePkgFolderFromDenoReqError,
  ),
  #[error(transparent)]
  #[class(inherit)]
  CouldNotResolveNpmReqRef(#[from] CouldNotResolveNpmReqRefError),
  #[error(transparent)]
  #[class(inherit)]
  ResolvePkgFolderFromDenoModule(
    #[from] npm::managed::ResolvePkgFolderFromDenoModuleError,
  ),
  #[error(transparent)]
  #[class(inherit)]
  ResolveNpmReqRef(#[from] npm::ResolveNpmReqRefError),
  #[error(transparent)]
  #[class(inherit)]
  Resolution(#[from] deno_graph::ResolutionError),
  #[error(transparent)]
  #[class(inherit)]
  Resolve(#[from] DenoResolveError),
  #[error(transparent)]
  #[class(inherit)]
  PathToUrl(#[from] deno_path_util::PathToUrlError),
}
/// Error resolving an `npm:` package requirement reference to a file.
#[derive(Debug, thiserror::Error, deno_error::JsError)]
#[class(inherit)]
#[error("Could not resolve '{reference}'")]
pub struct CouldNotResolveNpmReqRefError {
  pub reference: deno_semver::npm::NpmPackageReqReference,
  #[source]
  #[inherit]
  pub source: node_resolver::errors::PackageSubpathFromDenoModuleResolveError,
}
impl NodeJsErrorCoded for CouldNotResolveNpmReqRefError {
  // Delegates to the underlying subpath resolution error's code.
  fn code(&self) -> node_resolver::errors::NodeJsErrorCode {
    self.source.code()
  }
}
/// A mapped-resolution diagnostic together with where in the referrer it
/// was triggered.
pub struct MappedResolutionDiagnosticWithPosition<'a> {
  pub diagnostic: &'a MappedResolutionDiagnostic,
  pub referrer: &'a Url,
  pub start: deno_graph::Position,
}
/// Callback invoked when a mapped-resolution diagnostic should be surfaced
/// as a warning.
#[allow(clippy::disallowed_types)]
pub type OnMappedResolutionDiagnosticFn = deno_maybe_sync::MaybeArc<
  dyn Fn(MappedResolutionDiagnosticWithPosition) + Send + Sync,
>;
/// Options for [`DenoResolver::resolve_with_graph`].
pub struct ResolveWithGraphOptions {
  pub mode: node_resolver::ResolutionMode,
  pub kind: node_resolver::NodeResolutionKind,
  /// Whether to maintain npm specifiers as-is. It's necessary for the
  /// deno_core module loader to resolve npm specifiers as-is so that
  /// the loader can properly dynamic import and install npm packages
  /// when managed.
  pub maintain_npm_specifiers: bool,
}
/// [`DenoResolverRc`] specialized with the default npm checker/resolver
/// implementations.
pub type DefaultDenoResolverRc<TSys> = DenoResolverRc<
  npm::DenoInNpmPackageChecker,
  DenoIsBuiltInNodeModuleChecker,
  npm::NpmResolver<TSys>,
  TSys,
>;
/// Shared (maybe-atomic) reference to [`DenoResolver`].
#[allow(clippy::disallowed_types)]
pub type DenoResolverRc<
  TInNpmPackageChecker,
  TIsBuiltInNodeModuleChecker,
  TNpmPackageFolderResolver,
  TSys,
> = deno_maybe_sync::MaybeArc<
  DenoResolver<
    TInNpmPackageChecker,
    TIsBuiltInNodeModuleChecker,
    TNpmPackageFolderResolver,
    TSys,
  >,
>;
/// The resolver used in the CLI for resolving and interfacing
/// with deno_graph.
pub struct DenoResolver<
  TInNpmPackageChecker: InNpmPackageChecker,
  TIsBuiltInNodeModuleChecker: IsBuiltInNodeModuleChecker,
  TNpmPackageFolderResolver: NpmPackageFolderResolver,
  TSys: DenoResolverSys,
> {
  resolver: RawDenoResolverRc<
    TInNpmPackageChecker,
    TIsBuiltInNodeModuleChecker,
    TNpmPackageFolderResolver,
    TSys,
  >,
  sys: TSys,
  // Raised when a package.json dependency is hit so an install can be
  // scheduled later.
  found_package_json_dep_flag: FoundPackageJsonDepFlagRc,
  // Packages already warned about, to avoid duplicate diagnostics.
  warned_pkgs: deno_maybe_sync::MaybeDashSet<PackageReq>,
  on_warning: Option<OnMappedResolutionDiagnosticFn>,
}
// Manual Debug impl: the inner resolver and callback are not Debug.
impl<
  TInNpmPackageChecker: InNpmPackageChecker,
  TIsBuiltInNodeModuleChecker: IsBuiltInNodeModuleChecker,
  TNpmPackageFolderResolver: NpmPackageFolderResolver,
  TSys: DenoResolverSys,
> std::fmt::Debug
  for DenoResolver<
    TInNpmPackageChecker,
    TIsBuiltInNodeModuleChecker,
    TNpmPackageFolderResolver,
    TSys,
  >
{
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    f.debug_struct("DenoResolver").finish()
  }
}
impl<
  TInNpmPackageChecker: InNpmPackageChecker,
  TIsBuiltInNodeModuleChecker: IsBuiltInNodeModuleChecker,
  TNpmPackageFolderResolver: NpmPackageFolderResolver,
  TSys: DenoResolverSys,
>
  DenoResolver<
    TInNpmPackageChecker,
    TIsBuiltInNodeModuleChecker,
    TNpmPackageFolderResolver,
    TSys,
  >
{
  pub fn new(
    resolver: RawDenoResolverRc<
      TInNpmPackageChecker,
      TIsBuiltInNodeModuleChecker,
      TNpmPackageFolderResolver,
      TSys,
    >,
    sys: TSys,
    found_package_json_dep_flag: FoundPackageJsonDepFlagRc,
    on_warning: Option<OnMappedResolutionDiagnosticFn>,
  ) -> Self {
    Self {
      resolver,
      sys,
      found_package_json_dep_flag,
      warned_pkgs: Default::default(),
      on_warning,
    }
  }
  /// Resolves `raw_specifier` from `referrer`, preferring the resolution
  /// already recorded in the module graph and falling back to a fresh
  /// resolve. The result is then mapped through the graph's module kinds
  /// (npm packages to their file on disk unless `maintain_npm_specifiers`
  /// is set, external modules into node_modules, etc.).
  pub fn resolve_with_graph(
    &self,
    graph: &deno_graph::ModuleGraph,
    raw_specifier: &str,
    referrer: &Url,
    referrer_range_start: deno_graph::Position,
    options: ResolveWithGraphOptions,
  ) -> Result<Url, ResolveWithGraphError> {
    // Use the dependency resolution stored in the graph if present.
    let resolution = match graph.get(referrer) {
      Some(Module::Js(module)) => module
        .dependencies
        .get(raw_specifier)
        .map(|d| &d.maybe_code)
        .unwrap_or(&Resolution::None),
      _ => &Resolution::None,
    };
    let specifier = match resolution {
      Resolution::Ok(resolved) => Cow::Borrowed(&resolved.specifier),
      Resolution::Err(err) => {
        return Err(
          ResolveWithGraphErrorKind::Resolution((**err).clone()).into(),
        );
      }
      // Not in the graph: resolve it now.
      Resolution::None => Cow::Owned(self.resolve(
        raw_specifier,
        referrer,
        referrer_range_start,
        options.mode,
        options.kind,
      )?),
    };
    let specifier = match graph.get(&specifier) {
      Some(Module::Npm(_)) => {
        if options.maintain_npm_specifiers {
          specifier.into_owned()
        } else {
          let req_ref =
            NpmPackageReqReference::from_specifier(&specifier).unwrap();
          self.resolve_managed_npm_req_ref(
            &req_ref,
            Some(referrer),
            options.mode,
            options.kind,
          )?
        }
      }
      Some(Module::Node(module)) => module.specifier.clone(),
      Some(Module::Js(module)) => module.specifier.clone(),
      Some(Module::Json(module)) => module.specifier.clone(),
      Some(Module::Wasm(module)) => module.specifier.clone(),
      Some(Module::External(module)) => {
        node_resolver::resolve_specifier_into_node_modules(
          &self.sys,
          &module.specifier,
        )
      }
      None => {
        if options.maintain_npm_specifiers {
          specifier.into_owned()
        } else {
          // Not in the graph; if it's an npm req ref, resolve it to a file.
          match NpmPackageReqReference::from_specifier(&specifier) {
            Ok(reference) => {
              let url =
                self.resolver.resolve_non_workspace_npm_req_ref_to_file(
                  &reference,
                  referrer,
                  options.mode,
                  options.kind,
                )?;
              url.into_url()?
            }
            _ => specifier.into_owned(),
          }
        }
      }
    };
    Ok(specifier)
  }
  /// Resolves a non-workspace `npm:` requirement reference to the file it
  /// points at, delegating to the raw resolver.
  pub fn resolve_non_workspace_npm_req_ref_to_file(
    &self,
    npm_req_ref: &NpmPackageReqReference,
    referrer: &Url,
    resolution_mode: node_resolver::ResolutionMode,
    resolution_kind: node_resolver::NodeResolutionKind,
  ) -> Result<node_resolver::UrlOrPath, npm::ResolveNpmReqRefError> {
    self.resolver.resolve_non_workspace_npm_req_ref_to_file(
      npm_req_ref,
      referrer,
      resolution_mode,
      resolution_kind,
    )
  }
  /// Resolves an `npm:` requirement reference through the managed npm
  /// resolver. Panics if the resolver is not in managed mode — only call
  /// this when managed resolution is known to be active.
  pub fn resolve_managed_npm_req_ref(
    &self,
    req_ref: &NpmPackageReqReference,
    maybe_referrer: Option<&Url>,
    resolution_mode: node_resolver::ResolutionMode,
    resolution_kind: node_resolver::NodeResolutionKind,
  ) -> Result<Url, ResolveWithGraphError> {
    let node_and_npm_resolver =
      self.resolver.node_and_npm_resolver.as_ref().unwrap();
    let managed_resolver = node_and_npm_resolver
      .npm_resolver
      .as_managed()
      .expect("do not call this unless managed");
    let package_folder = managed_resolver
      .resolve_pkg_folder_from_deno_module_req(req_ref.req())?;
    Ok(
      node_and_npm_resolver
        .node_resolver
        .resolve_package_subpath_from_deno_module(
          &package_folder,
          req_ref.sub_path(),
          maybe_referrer,
          resolution_mode,
          resolution_kind,
        )
        .map_err(|source| CouldNotResolveNpmReqRefError {
          reference: req_ref.clone(),
          source,
        })?
        .into_url()?,
    )
  }
  /// Resolves a specifier without consulting a graph, raising the
  /// package.json dependency flag and surfacing diagnostics via the
  /// `on_warning` callback (at most once per package).
  pub fn resolve(
    &self,
    raw_specifier: &str,
    referrer: &Url,
    referrer_range_start: deno_graph::Position,
    resolution_mode: node_resolver::ResolutionMode,
    resolution_kind: node_resolver::NodeResolutionKind,
  ) -> Result<Url, DenoResolveError> {
    let resolution = self.resolver.resolve(
      raw_specifier,
      referrer,
      resolution_mode,
      resolution_kind,
    )?;
    if resolution.found_package_json_dep {
      // mark that we need to do an "npm install" later
      self.found_package_json_dep_flag.raise();
    }
    if let Some(diagnostic) = resolution.maybe_diagnostic {
      let diagnostic = &*diagnostic;
      match diagnostic {
        MappedResolutionDiagnostic::ConstraintNotMatchedLocalVersion {
          reference,
          ..
        } => {
          // Only warn once per package requirement.
          if let Some(on_warning) = &self.on_warning
            && self.warned_pkgs.insert(reference.req().clone())
          {
            on_warning(MappedResolutionDiagnosticWithPosition {
              diagnostic,
              referrer,
              start: referrer_range_start,
            });
          }
        }
      }
    }
    Ok(resolution.url)
  }
  /// Wraps this resolver in an adapter implementing deno_graph's
  /// `Resolver` trait.
  pub fn as_graph_resolver<'a>(
    &'a self,
    cjs_tracker: &'a CjsTracker<TInNpmPackageChecker, TSys>,
    jsx_import_source_config_resolver: &'a JsxImportSourceConfigResolver,
  ) -> DenoGraphResolverAdapter<
    'a,
    TInNpmPackageChecker,
    TIsBuiltInNodeModuleChecker,
    TNpmPackageFolderResolver,
    TSys,
  > {
    DenoGraphResolverAdapter {
      cjs_tracker,
      resolver: self,
      jsx_import_source_config_resolver,
    }
  }
}
/// Adapts a [`DenoResolver`] to deno_graph's `Resolver` trait, adding CJS
/// referrer-kind detection and JSX import source lookup.
pub struct DenoGraphResolverAdapter<
  'a,
  TInNpmPackageChecker: InNpmPackageChecker,
  TIsBuiltInNodeModuleChecker: IsBuiltInNodeModuleChecker,
  TNpmPackageFolderResolver: NpmPackageFolderResolver,
  TSys: DenoResolverSys,
> {
  cjs_tracker: &'a CjsTracker<TInNpmPackageChecker, TSys>,
  resolver: &'a DenoResolver<
    TInNpmPackageChecker,
    TIsBuiltInNodeModuleChecker,
    TNpmPackageFolderResolver,
    TSys,
  >,
  jsx_import_source_config_resolver: &'a JsxImportSourceConfigResolver,
}
// Manual Debug impl: the borrowed resolver internals are not Debug.
impl<
  TInNpmPackageChecker: InNpmPackageChecker,
  TIsBuiltInNodeModuleChecker: IsBuiltInNodeModuleChecker,
  TNpmPackageFolderResolver: NpmPackageFolderResolver,
  TSys: DenoResolverSys,
> std::fmt::Debug
  for DenoGraphResolverAdapter<
    '_,
    TInNpmPackageChecker,
    TIsBuiltInNodeModuleChecker,
    TNpmPackageFolderResolver,
    TSys,
  >
{
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    f.debug_struct("DenoGraphResolverAdapter").finish()
  }
}
impl<
  TInNpmPackageChecker: InNpmPackageChecker,
  TIsBuiltInNodeModuleChecker: IsBuiltInNodeModuleChecker,
  TNpmPackageFolderResolver: NpmPackageFolderResolver,
  TSys: DenoResolverSys,
> deno_graph::source::Resolver
  for DenoGraphResolverAdapter<
    '_,
    TInNpmPackageChecker,
    TIsBuiltInNodeModuleChecker,
    TNpmPackageFolderResolver,
    TSys,
  >
{
  /// JSX import source configured for the referrer's scope, if any.
  fn default_jsx_import_source(&self, referrer: &Url) -> Option<String> {
    self
      .jsx_import_source_config_resolver
      .for_specifier(referrer)
      .and_then(|c| c.import_source.as_ref().map(|s| s.specifier.clone()))
  }
  /// JSX types import source configured for the referrer's scope, if any.
  fn default_jsx_import_source_types(&self, referrer: &Url) -> Option<String> {
    self
      .jsx_import_source_config_resolver
      .for_specifier(referrer)
      .and_then(|c| c.import_source_types.as_ref().map(|s| s.specifier.clone()))
  }
  /// JSX runtime module name for the referrer's scope, falling back to
  /// deno_graph's default.
  fn jsx_import_source_module(&self, referrer: &Url) -> &str {
    self
      .jsx_import_source_config_resolver
      .for_specifier(referrer)
      .map(|c| c.module.as_str())
      .unwrap_or(deno_graph::source::DEFAULT_JSX_IMPORT_SOURCE_MODULE)
  }
  /// Delegates to the inner resolver; when the range carries no resolution
  /// mode, derives it from whether the referrer is CJS.
  fn resolve(
    &self,
    raw_specifier: &str,
    referrer_range: &deno_graph::Range,
    resolution_kind: deno_graph::source::ResolutionKind,
  ) -> Result<Url, ResolveError> {
    self
      .resolver
      .resolve(
        raw_specifier,
        &referrer_range.specifier,
        referrer_range.range.start,
        referrer_range
          .resolution_mode
          .map(node_resolver::ResolutionMode::from_deno_graph)
          .unwrap_or_else(|| {
            self
              .cjs_tracker
              .get_referrer_kind(&referrer_range.specifier)
          }),
        node_resolver::NodeResolutionKind::from_deno_graph(resolution_kind),
      )
      .map_err(|err| err.into_deno_graph_error())
  }
}
/// Whether [`enhance_graph_error`] appends the error's source range.
#[derive(Debug, PartialEq, Eq)]
pub enum EnhanceGraphErrorMode {
  ShowRange,
  HideRange,
}
/// Formats a module graph error with extra context (resolution hints,
/// integrity details, sloppy-import suggestions) and, in `ShowRange` mode,
/// the location that triggered it.
pub fn enhance_graph_error(
  sys: &(impl sys_traits::FsMetadata + Clone),
  error: &ModuleGraphError,
  mode: EnhanceGraphErrorMode,
) -> String {
  let mut message = match &error {
    ModuleGraphError::ResolutionError(resolution_error) => {
      enhanced_resolution_error_message(resolution_error)
    }
    ModuleGraphError::TypesResolutionError(resolution_error) => {
      format!(
        "Failed resolving types. {}",
        enhanced_resolution_error_message(resolution_error)
      )
    }
    ModuleGraphError::ModuleError(error) => {
      // Try the most specific enhancement first, then fall back.
      enhanced_integrity_error_message(error)
        .or_else(|| enhanced_sloppy_imports_error_message(sys, error))
        .or_else(|| enhanced_unsupported_import_attribute(error))
        .unwrap_or_else(|| format_deno_graph_error(error))
    }
  };
  // Don't show ranges for synthetic eval specifiers.
  if let Some(range) = error.maybe_range()
    && mode == EnhanceGraphErrorMode::ShowRange
    && !range.specifier.as_str().contains("/$deno$eval")
  {
    message.push_str("\n    at ");
    message.push_str(&format_range_with_colors(range));
  }
  message
}
/// Adds more explanatory information to a resolution error.
pub fn enhanced_resolution_error_message(error: &ResolutionError) -> String {
  let mut message = format_deno_graph_error(error);
  // Suggest `node:` prefixes for bare built-in module names, otherwise
  // suggest `deno add` for likely JSR/npm packages.
  let maybe_hint = if let Some(specifier) =
    get_resolution_error_bare_node_specifier(error)
  {
    Some(format!(
      "If you want to use a built-in Node module, add a \"node:\" prefix (ex. \"node:{specifier}\")."
    ))
  } else {
    get_import_prefix_missing_error(error).map(|specifier| {
      if specifier.starts_with("@std/") {
        format!(
          "If you want to use the JSR package, try running `deno add jsr:{}`",
          specifier
        )
      } else if specifier.starts_with('@') {
        format!(
          "If you want to use a JSR or npm package, try running `deno add jsr:{0}` or `deno add npm:{0}`",
          specifier
        )
      } else {
        format!(
          "If you want to use the npm package, try running `deno add npm:{0}`",
          specifier
        )
      }
    })
  };
  if let Some(hint) = maybe_hint {
    message.push_str(&format!(
      "\n  {} {}",
      deno_terminal::colors::cyan("hint:"),
      hint
    ));
  }
  message
}
// Suffix appended to sloppy-import suggestions.
static RUN_WITH_SLOPPY_IMPORTS_MSG: &str = "or run with --sloppy-imports";
/// Appends a sloppy-imports suggestion to load/missing-module errors when
/// the specifier would have resolved under sloppy-import rules.
fn enhanced_sloppy_imports_error_message(
  sys: &(impl sys_traits::FsMetadata + Clone),
  error: &ModuleError,
) -> Option<String> {
  match error.as_kind() {
    ModuleErrorKind::Load { specifier, err: ModuleLoadError::Loader(_), .. } // ex. "Is a directory" error
    | ModuleErrorKind::Missing { specifier, .. } => {
      let additional_message = maybe_additional_sloppy_imports_message(sys, specifier)?;
      Some(format!(
        "{} {}",
        error,
        additional_message,
      ))
    }
    _ => None,
  }
}
/// Returns a suggestion message when sloppy-imports resolution (in execution
/// mode) maps the given specifier to a resolved target, combining the
/// reason-specific suggestion with the `--sloppy-imports` flag hint.
/// Returns `None` when sloppy-imports resolution finds nothing.
pub fn maybe_additional_sloppy_imports_message(
  sys: &(impl sys_traits::FsMetadata + Clone),
  specifier: &Url,
) -> Option<String> {
  let (resolved, sloppy_reason) = sloppy_imports_resolve(
    specifier,
    crate::workspace::ResolutionKind::Execution,
    sys.clone(),
  )?;
  Some(format!(
    "{} {}",
    sloppy_reason.suggestion_message_for_specifier(&resolved),
    RUN_WITH_SLOPPY_IMPORTS_MSG
  ))
}
/// Produces a detailed, user-facing message for checksum/integrity failures
/// (JSR package content, JSR version manifest, or a remote https specifier),
/// including remediation steps. Returns `None` for any other module error.
pub fn enhanced_integrity_error_message(err: &ModuleError) -> Option<String> {
  match err.as_kind() {
    // package content did not match the checksum from the registry
    ModuleErrorKind::Load {
      specifier,
      err:
        ModuleLoadError::Jsr(JsrLoadError::ContentChecksumIntegrity(checksum_err)),
      ..
    } => Some(format!(
      concat!(
        "Integrity check failed in package. The package may have been tampered with.\n\n",
        " Specifier: {}\n",
        " Actual: {}\n",
        " Expected: {}\n\n",
        "If you modified your global cache, run again with the --reload flag to restore ",
        "its state. If you want to modify dependencies locally run again with the ",
        "--vendor flag or specify `\"vendor\": true` in a deno.json then modify the contents ",
        "of the vendor/ folder."
      ),
      specifier, checksum_err.actual, checksum_err.expected,
    )),
    // the package version manifest did not match the hash in the lockfile
    ModuleErrorKind::Load {
      err:
        ModuleLoadError::Jsr(
          JsrLoadError::PackageVersionManifestChecksumIntegrity(
            package_nv,
            checksum_err,
          ),
        ),
      ..
    } => Some(format!(
      concat!(
        "Integrity check failed for package. The source code is invalid, as it does not match the expected hash in the lock file.\n\n",
        " Package: {}\n",
        " Actual: {}\n",
        " Expected: {}\n\n",
        "This could be caused by:\n",
        " * the lock file may be corrupt\n",
        " * the source itself may be corrupt\n\n",
        "Investigate the lockfile; delete it to regenerate the lockfile or --reload to reload the source code from the server."
      ),
      package_nv, checksum_err.actual, checksum_err.expected,
    )),
    // a plain remote specifier did not match the hash in the lockfile
    ModuleErrorKind::Load {
      specifier,
      err: ModuleLoadError::HttpsChecksumIntegrity(checksum_err),
      ..
    } => Some(format!(
      concat!(
        "Integrity check failed for remote specifier. The source code is invalid, as it does not match the expected hash in the lock file.\n\n",
        " Specifier: {}\n",
        " Actual: {}\n",
        " Expected: {}\n\n",
        "This could be caused by:\n",
        " * the lock file may be corrupt\n",
        " * the source itself may be corrupt\n\n",
        "Investigate the lockfile; delete it to regenerate the lockfile or --reload to reload the source code from the server."
      ),
      specifier, checksum_err.actual, checksum_err.expected,
    )),
    _ => None,
  }
}
/// Appends a hint about `--unstable-raw-imports` when a `bytes` or `text`
/// import attribute was used without the flag; returns `None` for every
/// other module error.
fn enhanced_unsupported_import_attribute(err: &ModuleError) -> Option<String> {
  let ModuleErrorKind::UnsupportedImportAttributeType { kind, .. } =
    err.as_kind()
  else {
    return None;
  };
  if kind.as_str() != "bytes" && kind.as_str() != "text" {
    return None;
  }
  let mut message = format_deno_graph_error(err);
  message.push_str(&format!(
    "\n {} run with --unstable-raw-imports",
    deno_terminal::colors::cyan("hint:")
  ));
  Some(message)
}
pub fn get_resolution_error_bare_node_specifier(
error: &ResolutionError,
) -> Option<&str> {
get_resolution_error_bare_specifier(error).filter(|specifier| {
DenoIsBuiltInNodeModuleChecker.is_builtin_node_module(specifier)
})
}
fn get_resolution_error_bare_specifier(
error: &ResolutionError,
) -> Option<&str> {
if let ResolutionError::InvalidSpecifier {
error: SpecifierError::ImportPrefixMissing { specifier, .. },
..
} = error
{
Some(specifier.as_str())
} else if let ResolutionError::ResolverError { error, .. } = error {
if let ResolveError::ImportMap(error) = (*error).as_ref() {
if let import_map::ImportMapErrorKind::UnmappedBareSpecifier(
specifier,
_,
) = error.as_kind()
{
Some(specifier.as_str())
} else {
None
}
} else {
None
}
} else {
None
}
}
/// Returns the bare specifier for which a `deno add ...` hint should be
/// shown, or `None` when no hint is appropriate (non-file referrers, Wasm
/// referrers, specifiers containing `.`/space, or `@scope` without a name).
fn get_import_prefix_missing_error(error: &ResolutionError) -> Option<&str> {
  // not exact, but ok because this is just a hint
  let media_type =
    MediaType::from_specifier_and_headers(&error.range().specifier, None);
  if media_type == MediaType::Wasm {
    return None;
  }
  let mut maybe_specifier = None;
  if let ResolutionError::InvalidSpecifier {
    error: SpecifierError::ImportPrefixMissing { specifier, .. },
    range,
  } = error
  {
    // only suggest for local (file:) referrers
    if range.specifier.scheme() == "file" {
      maybe_specifier = Some(specifier);
    }
  } else if let ResolutionError::ResolverError { error, range, .. } = error
    && range.specifier.scheme() == "file"
  {
    // the prefix-missing error may be wrapped in several resolver error shapes
    match error.as_ref() {
      ResolveError::Specifier(specifier_error) => {
        if let SpecifierError::ImportPrefixMissing { specifier, .. } =
          specifier_error
        {
          maybe_specifier = Some(specifier);
        }
      }
      ResolveError::Other(other_error) => {
        if let Some(SpecifierError::ImportPrefixMissing { specifier, .. }) =
          other_error.get_ref().downcast_ref::<SpecifierError>()
        {
          maybe_specifier = Some(specifier);
        }
      }
      ResolveError::ImportMap(import_map_err) => {
        if let ImportMapErrorKind::UnmappedBareSpecifier(specifier, _referrer) =
          import_map_err.as_kind()
        {
          maybe_specifier = Some(specifier);
        }
      }
    }
  }
  if let Some(specifier) = maybe_specifier {
    // NOTE(bartlomieju): For now, return None if a specifier contains a dot or a space. This is because
    // suggesting to `deno add bad-module.ts` makes no sense and is worse than not providing
    // a suggestion at all. This should be improved further in the future
    if specifier.contains('.') || specifier.contains(' ') {
      return None;
    }
    // Do not return a hint for specifiers starting with `@`, but not containing a `/`
    if specifier.starts_with('@') && !specifier.contains('/') {
      return None;
    }
  }
  maybe_specifier.map(|s| s.as_str())
}
/// Formats a referrer range as `specifier:line:column` (both 1-based),
/// using the same coloring as the rest of the error output.
pub fn format_range_with_colors(referrer: &deno_graph::Range) -> String {
  use deno_terminal::colors;
  let line = (referrer.range.start.line + 1).to_string();
  let character = (referrer.range.start.character + 1).to_string();
  format!(
    "{}:{}:{}",
    colors::cyan(referrer.specifier.as_str()),
    colors::yellow(&line),
    colors::yellow(&character)
  )
}
/// Formats an error together with its `source()` chain, numbering each
/// distinct cause on its own line and truncating very deep chains.
pub fn format_deno_graph_error(err: &dyn std::error::Error) -> String {
  use std::fmt::Write;
  let mut output = format!("{}", err);
  let mut next_source = err.source();
  if next_source.is_none() {
    return output;
  }
  // sometimes an error might be repeated due to being boxed multiple
  // times in another AnyError, so skip causes that repeat the previous one
  let mut previous = output.clone();
  let mut seen = 0; // total sources walked
  let mut printed = 0; // sources actually printed (after de-duplication)
  while let Some(source) = next_source {
    let current = format!("{}", source);
    next_source = source.source();
    if current != previous {
      write!(output, "\n {}: ", printed).unwrap();
      for (i, line) in current.lines().enumerate() {
        if i == 0 {
          write!(output, "{}", line).unwrap();
        } else {
          // continuation lines of a multi-line cause are indented
          write!(output, "\n {}", line).unwrap();
        }
      }
      printed += 1;
    }
    if seen > 8 {
      // cap extremely deep chains
      write!(output, "\n {}: ...", seen).unwrap();
      break;
    }
    previous = current;
    seen += 1;
  }
  output
}
#[cfg(test)]
mod test {
  use deno_graph::PositionRange;
  use deno_graph::Range;
  use deno_graph::ResolutionError;
  use deno_graph::SpecifierError;
  use deno_graph::source::ResolveError;
  use super::*;
  /// An unmapped import-map bare specifier that names a Node built-in
  /// (ex. "fs") should be surfaced; other bare specifiers should not.
  #[test]
  fn import_map_node_resolution_error() {
    let cases = vec![("fs", Some("fs")), ("other", None)];
    for (input, output) in cases {
      let import_map =
        import_map::ImportMap::new(Url::parse("file:///deno.json").unwrap());
      let specifier = Url::parse("file:///file.ts").unwrap();
      // resolving against an empty import map produces the unmapped bare
      // specifier error this test needs
      let err = import_map.resolve(input, &specifier).err().unwrap();
      let err = ResolutionError::ResolverError {
        #[allow(clippy::disallowed_types)]
        error: std::sync::Arc::new(ResolveError::ImportMap(err)),
        specifier: input.to_string(),
        range: Range {
          specifier,
          resolution_mode: None,
          range: PositionRange::zeroed(),
        },
      };
      assert_eq!(get_resolution_error_bare_node_specifier(&err), output);
    }
  }
  /// A bare specifier with a missing import prefix should only be surfaced
  /// when it names a Node built-in (ex. "process").
  #[test]
  fn bare_specifier_node_resolution_error() {
    let cases = vec![("process", Some("process")), ("other", None)];
    for (input, output) in cases {
      let specifier = Url::parse("file:///file.ts").unwrap();
      let err = ResolutionError::InvalidSpecifier {
        range: Range {
          specifier,
          resolution_mode: None,
          range: PositionRange::zeroed(),
        },
        error: SpecifierError::ImportPrefixMissing {
          specifier: input.to_string(),
          referrer: None,
        },
      };
      assert_eq!(get_resolution_error_bare_node_specifier(&err), output,);
    }
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/resolver/factory.rs | libs/resolver/factory.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::path::Path;
use std::path::PathBuf;
use anyhow::bail;
use boxed_error::Boxed;
use deno_cache_dir::DenoDirResolutionError;
use deno_cache_dir::GlobalHttpCacheRc;
use deno_cache_dir::GlobalOrLocalHttpCache;
use deno_cache_dir::LocalHttpCache;
use deno_cache_dir::npm::NpmCacheDir;
use deno_config::deno_json::MinimumDependencyAgeConfig;
use deno_config::deno_json::NewestDependencyDate;
use deno_config::deno_json::NodeModulesDirMode;
use deno_config::workspace::FolderConfigs;
use deno_config::workspace::VendorEnablement;
use deno_config::workspace::WorkspaceDirectory;
use deno_config::workspace::WorkspaceDirectoryEmptyOptions;
use deno_config::workspace::WorkspaceDirectoryRc;
use deno_config::workspace::WorkspaceDiscoverError;
use deno_config::workspace::WorkspaceDiscoverOptions;
use deno_config::workspace::WorkspaceDiscoverStart;
use deno_maybe_sync::MaybeSend;
use deno_maybe_sync::MaybeSync;
use deno_maybe_sync::new_rc;
pub use deno_npm::NpmSystemInfo;
use deno_npm::resolution::NpmVersionResolver;
use deno_path_util::fs::canonicalize_path_maybe_not_exists;
use futures::future::FutureExt;
use node_resolver::DenoIsBuiltInNodeModuleChecker;
use node_resolver::NodeResolver;
use node_resolver::NodeResolverOptions;
use node_resolver::NodeResolverRc;
use node_resolver::PackageJsonResolver;
use node_resolver::PackageJsonResolverRc;
use node_resolver::analyze::CjsModuleExportAnalyzerRc;
use node_resolver::analyze::NodeCodeTranslator;
use node_resolver::analyze::NodeCodeTranslatorRc;
use node_resolver::cache::NodeResolutionSys;
use thiserror::Error;
use url::Url;
use crate::DefaultRawDenoResolverRc;
use crate::DenoResolverOptions;
use crate::NodeAndNpmResolvers;
use crate::NpmCacheDirRc;
use crate::RawDenoResolver;
use crate::WorkspaceResolverRc;
use crate::cache::DenoDir;
use crate::cache::DenoDirOptions;
use crate::cache::DenoDirProvider;
use crate::cache::DenoDirProviderRc;
use crate::cache::DenoDirSys;
use crate::cache::EmitCache;
use crate::cache::EmitCacheRc;
use crate::cjs::CjsTracker;
use crate::cjs::CjsTrackerRc;
use crate::cjs::IsCjsResolutionMode;
use crate::cjs::analyzer::DenoCjsCodeAnalyzer;
use crate::cjs::analyzer::NodeAnalysisCacheRc;
use crate::cjs::analyzer::NullNodeAnalysisCache;
use crate::deno_json::CompilerOptionsOverrides;
use crate::deno_json::CompilerOptionsResolver;
use crate::deno_json::CompilerOptionsResolverRc;
use crate::import_map::WorkspaceExternalImportMapLoader;
use crate::import_map::WorkspaceExternalImportMapLoaderRc;
use crate::loader::AllowJsonImports;
use crate::loader::DenoNpmModuleLoaderRc;
use crate::loader::NpmModuleLoader;
use crate::lockfile::LockfileLock;
use crate::lockfile::LockfileLockRc;
use crate::npm::ByonmNpmResolverCreateOptions;
use crate::npm::CreateInNpmPkgCheckerOptions;
use crate::npm::DenoInNpmPackageChecker;
use crate::npm::NpmReqResolver;
use crate::npm::NpmReqResolverOptions;
use crate::npm::NpmReqResolverRc;
use crate::npm::NpmResolver;
use crate::npm::NpmResolverCreateOptions;
use crate::npm::managed::ManagedInNpmPkgCheckerCreateOptions;
use crate::npm::managed::ManagedNpmResolverCreateOptions;
use crate::npm::managed::NpmResolutionCellRc;
use crate::npmrc::NpmRcDiscoverError;
use crate::npmrc::ResolvedNpmRcRc;
use crate::npmrc::discover_npmrc_from_workspace;
use crate::workspace::FsCacheOptions;
use crate::workspace::PackageJsonDepResolution;
use crate::workspace::SloppyImportsOptions;
use crate::workspace::WorkspaceNpmLinkPackagesRc;
use crate::workspace::WorkspaceResolver;
// todo(https://github.com/rust-lang/rust/issues/109737): remove once_cell after get_or_try_init is stabilized
// Lazily-initialized cell; the "sync" feature selects the thread-safe variant.
#[cfg(feature = "sync")]
type Deferred<T> = once_cell::sync::OnceCell<T>;
#[cfg(not(feature = "sync"))]
type Deferred<T> = once_cell::unsync::OnceCell<T>;
/// CJS module export analyzer specialized to Deno's resolver types.
pub type DenoCjsModuleExportAnalyzerRc<TSys> = CjsModuleExportAnalyzerRc<
  DenoCjsCodeAnalyzer<TSys>,
  DenoInNpmPackageChecker,
  DenoIsBuiltInNodeModuleChecker,
  NpmResolver<TSys>,
  TSys,
>;
/// Node code translator specialized to Deno's resolver types.
pub type DenoNodeCodeTranslatorRc<TSys> = NodeCodeTranslatorRc<
  DenoCjsCodeAnalyzer<TSys>,
  DenoInNpmPackageChecker,
  DenoIsBuiltInNodeModuleChecker,
  NpmResolver<TSys>,
  TSys,
>;
#[allow(clippy::disallowed_types)]
pub type NpmVersionResolverRc = deno_maybe_sync::MaybeArc<NpmVersionResolver>;
#[cfg(feature = "graph")]
#[allow(clippy::disallowed_types)]
pub type JsrVersionResolverRc =
  deno_maybe_sync::MaybeArc<deno_graph::packages::JsrVersionResolver>;
/// Error creating the HTTP cache (kind is boxed via the `Boxed` derive).
#[derive(Debug, Boxed)]
pub struct HttpCacheCreateError(pub Box<HttpCacheCreateErrorKind>);
#[derive(Debug, Error)]
pub enum HttpCacheCreateErrorKind {
  #[error(transparent)]
  DenoDirResolution(#[from] DenoDirResolutionError),
  #[error(transparent)]
  WorkspaceDiscover(#[from] WorkspaceDiscoverError),
}
/// Error creating the npm cache directory (kind is boxed via the `Boxed` derive).
#[derive(Debug, Boxed)]
pub struct NpmCacheDirCreateError(pub Box<NpmCacheDirCreateErrorKind>);
#[derive(Debug, Error)]
pub enum NpmCacheDirCreateErrorKind {
  #[error(transparent)]
  DenoDirResolution(#[from] DenoDirResolutionError),
  #[error(transparent)]
  NpmRcCreate(#[from] NpmRcCreateError),
}
/// Error resolving the `.npmrc` configuration for the workspace.
#[derive(Debug, Boxed)]
pub struct NpmRcCreateError(pub Box<NpmRcCreateErrorKind>);
#[derive(Debug, Error)]
pub enum NpmRcCreateErrorKind {
  #[error(transparent)]
  WorkspaceDiscover(#[from] WorkspaceDiscoverError),
  #[error(transparent)]
  NpmRcDiscover(#[from] NpmRcDiscoverError),
}
/// How (or whether) configuration files should be discovered for the
/// workspace.
#[derive(Debug, Default)]
pub enum ConfigDiscoveryOption {
  /// Discover starting from the current working directory (the default).
  #[default]
  DiscoverCwd,
  /// Discover starting from each of the given paths.
  Discover {
    start_paths: Vec<PathBuf>,
  },
  /// Use the config file at this exact path.
  Path(PathBuf),
  /// Do not load any configuration files.
  Disabled,
}
/// Resolves the JSR registry URL to use for the given system.
///
/// Honors the `JSR_URL` environment variable (normalized to end with a
/// trailing slash); falls back to the default registry when the variable is
/// unset or does not parse as a URL.
pub fn resolve_jsr_url(sys: &impl sys_traits::EnvVar) -> Url {
  let env_var_name = "JSR_URL";
  match sys.env_var(env_var_name) {
    Ok(registry_url) => {
      // ensure there is a trailing slash for the directory
      let normalized = format!("{}/", registry_url.trim_end_matches('/'));
      match Url::parse(&normalized) {
        Ok(url) => url,
        Err(err) => {
          log::debug!("Invalid {} environment variable: {:#}", env_var_name, err,);
          Url::parse("https://jsr.io/").unwrap()
        }
      }
    }
    Err(_) => Url::parse("https://jsr.io/").unwrap(),
  }
}
/// Supplies an optional, externally specified import map
/// (see `ResolverFactoryOptions::specified_import_map`).
#[async_trait::async_trait(?Send)]
pub trait SpecifiedImportMapProvider:
  std::fmt::Debug + MaybeSend + MaybeSync
{
  /// Returns the import map, `Ok(None)` when none was specified, or an
  /// error when loading it failed.
  async fn get(
    &self,
  ) -> Result<Option<crate::workspace::SpecifiedImportMap>, anyhow::Error>;
}
/// npm-related state inherited from a parent process
/// (see `WorkspaceFactoryOptions::npm_process_state`).
#[derive(Debug)]
pub struct NpmProcessStateOptions {
  /// node_modules directory path used by the parent process, if any.
  pub node_modules_dir: Option<Cow<'static, str>>,
  /// Whether the parent process used byonm ("bring your own node_modules").
  pub is_byonm: bool,
}
/// Options controlling workspace discovery, caching, and lockfile behavior
/// for a `WorkspaceFactory`.
#[derive(Debug, Default)]
pub struct WorkspaceFactoryOptions {
  /// Extra config file names to recognize during discovery.
  pub additional_config_file_names: &'static [&'static str],
  /// How (or whether) configuration files are discovered.
  pub config_discovery: ConfigDiscoveryOption,
  /// When true, a "manual" node_modules mode is upgraded to "auto" so the
  /// package manager subcommand sets up the node_modules directory.
  pub is_package_manager_subcommand: bool,
  /// Whether the lockfile should be treated as frozen (error on change).
  pub frozen_lockfile: Option<bool>,
  /// Explicit lockfile path (expected to be absolute).
  pub lock_arg: Option<PathBuf>,
  /// Whether to skip writing to the lockfile.
  pub lockfile_skip_write: bool,
  /// Custom DENO_DIR root override.
  pub maybe_custom_deno_dir_root: Option<PathBuf>,
  /// Explicit node_modules directory mode (takes precedence over workspace
  /// configuration).
  pub node_modules_dir: Option<NodeModulesDirMode>,
  /// Disable lockfile usage entirely.
  pub no_lock: bool,
  /// Disable npm support (also disables package.json auto-discovery).
  pub no_npm: bool,
  /// The process state if using ext/node and the current process was "forked".
  /// This value is found at `deno_lib::args::NPM_PROCESS_STATE`
  /// but in most scenarios this can probably just be `None`.
  pub npm_process_state: Option<NpmProcessStateOptions>,
  /// Override the path to the root node_modules directory.
  pub root_node_modules_dir_override: Option<PathBuf>,
  /// Explicit vendor directory enable/disable override.
  pub vendor: Option<bool>,
}
// Reference-counted workspace factory (Arc or Rc depending on feature flags).
#[allow(clippy::disallowed_types)]
pub type WorkspaceFactoryRc<TSys> =
  deno_maybe_sync::MaybeArc<WorkspaceFactory<TSys>>;
/// Combined system-trait bound required by `WorkspaceFactory` and
/// `ResolverFactory`.
#[sys_traits::auto_impl]
pub trait WorkspaceFactorySys:
  DenoDirSys
  + crate::lockfile::LockfileSys
  + crate::npm::NpmResolverSys
  + deno_cache_dir::GlobalHttpCacheSys
  + deno_cache_dir::LocalHttpCacheSys
  + crate::loader::NpmModuleLoaderSys
  + sys_traits::SystemTimeNow
{
}
/// Lazily constructs and caches workspace-level state (deno dir, HTTP
/// caches, lockfile, npmrc, workspace directory, etc.). Each `Deferred`
/// field is computed at most once on first access.
pub struct WorkspaceFactory<TSys: WorkspaceFactorySys> {
  sys: TSys,
  deno_dir_provider: Deferred<DenoDirProviderRc<TSys>>,
  emit_cache: Deferred<EmitCacheRc<TSys>>,
  global_http_cache: Deferred<GlobalHttpCacheRc<TSys>>,
  http_cache: Deferred<GlobalOrLocalHttpCache<TSys>>,
  jsr_url: Deferred<Url>,
  // async because lockfile discovery requires an async npm info provider
  lockfile: async_once_cell::OnceCell<Option<LockfileLockRc<TSys>>>,
  node_modules_dir_path: Deferred<Option<PathBuf>>,
  npm_cache_dir: Deferred<NpmCacheDirRc>,
  // resolved npmrc plus the path it was loaded from (if any)
  npmrc: Deferred<(ResolvedNpmRcRc, Option<PathBuf>)>,
  node_modules_dir_mode: Deferred<NodeModulesDirMode>,
  workspace_directory: Deferred<WorkspaceDirectoryRc>,
  workspace_external_import_map_loader:
    Deferred<WorkspaceExternalImportMapLoaderRc<TSys>>,
  workspace_npm_link_packages: Deferred<WorkspaceNpmLinkPackagesRc>,
  initial_cwd: PathBuf,
  options: WorkspaceFactoryOptions,
}
impl<TSys: WorkspaceFactorySys> WorkspaceFactory<TSys> {
  /// Creates a factory with all lazily-computed state unset.
  pub fn new(
    sys: TSys,
    initial_cwd: PathBuf,
    options: WorkspaceFactoryOptions,
  ) -> Self {
    Self {
      sys,
      deno_dir_provider: Default::default(),
      emit_cache: Default::default(),
      global_http_cache: Default::default(),
      http_cache: Default::default(),
      jsr_url: Default::default(),
      lockfile: Default::default(),
      node_modules_dir_path: Default::default(),
      npm_cache_dir: Default::default(),
      npmrc: Default::default(),
      node_modules_dir_mode: Default::default(),
      workspace_directory: Default::default(),
      workspace_external_import_map_loader: Default::default(),
      workspace_npm_link_packages: Default::default(),
      initial_cwd,
      options,
    }
  }
  /// Pre-seeds the workspace directory so it will not be discovered lazily.
  pub fn set_workspace_directory(
    &mut self,
    workspace_directory: WorkspaceDirectoryRc,
  ) {
    self.workspace_directory = Deferred::from(workspace_directory);
  }
  /// Provider resolving the DENO_DIR root (lazily created).
  pub fn deno_dir_provider(&self) -> &DenoDirProviderRc<TSys> {
    self.deno_dir_provider.get_or_init(|| {
      new_rc(DenoDirProvider::new(
        self.sys.clone(),
        DenoDirOptions {
          maybe_custom_root: self.options.maybe_custom_deno_dir_root.clone(),
        },
      ))
    })
  }
  /// Cache for emitted modules, versioned by deno_ast (or the crate version
  /// when the `deno_ast` feature is disabled).
  pub fn emit_cache(&self) -> Result<&EmitCacheRc<TSys>, anyhow::Error> {
    self.emit_cache.get_or_try_init(|| {
      Ok(new_rc(EmitCache::new(
        &self.sys,
        self.deno_dir()?.gen_cache.clone(),
        #[cfg(feature = "deno_ast")]
        Cow::Borrowed(deno_ast::VERSION),
        #[cfg(not(feature = "deno_ast"))]
        Cow::Borrowed(env!("CARGO_PKG_VERSION")),
      )))
    })
  }
  /// The JSR registry URL (respects the `JSR_URL` env var).
  pub fn jsr_url(&self) -> &Url {
    self.jsr_url.get_or_init(|| resolve_jsr_url(&self.sys))
  }
  pub fn initial_cwd(&self) -> &PathBuf {
    &self.initial_cwd
  }
  pub fn no_npm(&self) -> bool {
    self.options.no_npm
  }
  /// Resolves how the node_modules directory is managed, in priority order:
  /// forked process state, explicit flag, workspace config, then
  /// package.json / vendor-dir heuristics. "Manual" is upgraded to "auto"
  /// for package manager subcommands.
  pub fn node_modules_dir_mode(
    &self,
  ) -> Result<NodeModulesDirMode, anyhow::Error> {
    self
      .node_modules_dir_mode
      .get_or_try_init(|| {
        let raw_resolve = || -> Result<_, anyhow::Error> {
          if let Some(process_state) = &self.options.npm_process_state {
            if process_state.is_byonm {
              return Ok(NodeModulesDirMode::Manual);
            }
            if process_state.node_modules_dir.is_some() {
              return Ok(NodeModulesDirMode::Auto);
            } else {
              return Ok(NodeModulesDirMode::None);
            }
          }
          if let Some(flag) = self.options.node_modules_dir {
            return Ok(flag);
          }
          let workspace = &self.workspace_directory()?.workspace;
          if let Some(mode) = workspace.node_modules_dir()? {
            return Ok(mode);
          }
          let workspace = &self.workspace_directory()?.workspace;
          if let Some(pkg_json) = workspace.root_pkg_json() {
            if let Ok(deno_dir) = self.deno_dir() {
              let deno_dir = &deno_dir.root;
              // `deno_dir` can be symlink in macOS or on the CI
              if let Ok(deno_dir) =
                canonicalize_path_maybe_not_exists(&self.sys, deno_dir)
                && pkg_json.path.starts_with(deno_dir)
              {
                // if the package.json is in deno_dir, then do not use node_modules
                // next to it as local node_modules dir
                return Ok(NodeModulesDirMode::None);
              }
            }
            Ok(NodeModulesDirMode::Manual)
          } else if workspace.vendor_dir_path().is_some() {
            Ok(NodeModulesDirMode::Auto)
          } else {
            // use the global cache
            Ok(NodeModulesDirMode::None)
          }
        };
        let mode = raw_resolve()?;
        if mode == NodeModulesDirMode::Manual
          && self.options.is_package_manager_subcommand
        {
          // force using the managed resolver for package management
          // sub commands so that it sets up the node_modules directory
          Ok(NodeModulesDirMode::Auto)
        } else {
          Ok(mode)
        }
      })
      .copied()
  }
  /// Resolves the path to use for a local node_modules folder.
  pub fn node_modules_dir_path(&self) -> Result<Option<&Path>, anyhow::Error> {
    // prefers the root deno.json's dir, then the root package.json's dir,
    // then the cwd
    fn resolve_from_root(root_folder: &FolderConfigs, cwd: &Path) -> PathBuf {
      root_folder
        .deno_json
        .as_ref()
        .map(|c| Cow::Owned(c.dir_path()))
        .or_else(|| {
          root_folder
            .pkg_json
            .as_ref()
            .map(|c| Cow::Borrowed(c.dir_path()))
        })
        .unwrap_or(Cow::Borrowed(cwd))
        .join("node_modules")
    }
    self
      .node_modules_dir_path
      .get_or_try_init(|| {
        if let Some(path) = &self.options.root_node_modules_dir_override {
          return Ok(Some(path.clone()));
        }
        if let Some(process_state) = &self.options.npm_process_state {
          return Ok(
            process_state
              .node_modules_dir
              .as_ref()
              .map(|p| PathBuf::from(p.as_ref())),
          );
        }
        let mode = self.node_modules_dir_mode()?;
        let workspace = &self.workspace_directory()?.workspace;
        let root_folder = workspace.root_folder_configs();
        if !mode.uses_node_modules_dir() {
          return Ok(None);
        }
        let node_modules_dir =
          resolve_from_root(root_folder, &self.initial_cwd);
        Ok(Some(canonicalize_path_maybe_not_exists(
          &self.sys,
          &node_modules_dir,
        )?))
      })
      .map(|p| p.as_deref())
  }
  pub fn deno_dir(&self) -> Result<&DenoDir<TSys>, DenoDirResolutionError> {
    self.deno_dir_provider().get_or_create()
  }
  /// HTTP cache stored in the global DENO_DIR remote folder.
  pub fn global_http_cache(
    &self,
  ) -> Result<&GlobalHttpCacheRc<TSys>, DenoDirResolutionError> {
    self.global_http_cache.get_or_try_init(|| {
      let global_cache_dir = self.deno_dir()?.remote_folder_path();
      let global_http_cache = new_rc(deno_cache_dir::GlobalHttpCache::new(
        self.sys.clone(),
        global_cache_dir,
      ));
      Ok(global_http_cache)
    })
  }
  /// The HTTP cache to use: a local (vendor dir) cache when the workspace
  /// has one, otherwise the global cache.
  pub fn http_cache(
    &self,
  ) -> Result<&deno_cache_dir::GlobalOrLocalHttpCache<TSys>, HttpCacheCreateError>
  {
    self.http_cache.get_or_try_init(|| {
      let global_cache = self.global_http_cache()?.clone();
      match self.workspace_directory()?.workspace.vendor_dir_path() {
        Some(local_path) => {
          let local_cache = LocalHttpCache::new(
            local_path.clone(),
            global_cache,
            deno_cache_dir::GlobalToLocalCopy::Allow,
            self.jsr_url().clone(),
          );
          Ok(new_rc(local_cache).into())
        }
        None => Ok(global_cache.into()),
      }
    })
  }
  /// Discovers and loads the workspace lockfile, if any (respecting the
  /// `no_lock`/`frozen_lockfile`/`lock_arg` options).
  pub async fn maybe_lockfile(
    &self,
    npm_package_info_provider: &dyn deno_lockfile::NpmPackageInfoProvider,
  ) -> Result<Option<&LockfileLockRc<TSys>>, anyhow::Error> {
    self
      .lockfile
      .get_or_try_init(async move {
        let workspace_directory = self.workspace_directory()?;
        let maybe_external_import_map =
          self.workspace_external_import_map_loader()?.get_or_load()?;
        let maybe_lock_file = LockfileLock::discover(
          self.sys().clone(),
          crate::lockfile::LockfileFlags {
            no_lock: self.options.no_lock,
            frozen_lockfile: self.options.frozen_lockfile,
            lock: self.options.lock_arg.as_ref().map(|path| {
              #[cfg(not(target_arch = "wasm32"))]
              debug_assert!(path.is_absolute());
              path.clone()
            }),
            skip_write: self.options.lockfile_skip_write,
            no_config: matches!(
              self.options.config_discovery,
              ConfigDiscoveryOption::Disabled
            ),
            no_npm: self.options.no_npm,
          },
          &workspace_directory.workspace,
          maybe_external_import_map.as_ref().map(|v| &v.value),
          npm_package_info_provider,
        )
        .await?
        .map(deno_maybe_sync::new_rc);
        Ok(maybe_lock_file)
      })
      .await
      .map(|c| c.as_ref())
  }
  /// The npm cache directory inside the DENO_DIR, aware of all registries
  /// configured in the npmrc.
  pub fn npm_cache_dir(
    &self,
  ) -> Result<&NpmCacheDirRc, NpmCacheDirCreateError> {
    self.npm_cache_dir.get_or_try_init(|| {
      let npm_cache_dir = self.deno_dir()?.npm_folder_path();
      Ok(new_rc(NpmCacheDir::new(
        &self.sys,
        npm_cache_dir,
        self.npmrc()?.get_all_known_registries_urls(),
      )))
    })
  }
  pub fn npmrc(&self) -> Result<&ResolvedNpmRcRc, NpmRcCreateError> {
    self.npmrc_with_path().map(|(npmrc, _)| npmrc)
  }
  /// The resolved npmrc along with the path it was loaded from (if any).
  pub fn npmrc_with_path(
    &self,
  ) -> Result<&(ResolvedNpmRcRc, Option<PathBuf>), NpmRcCreateError> {
    self.npmrc.get_or_try_init(|| {
      let (npmrc, path) = discover_npmrc_from_workspace(
        &self.sys,
        &self.workspace_directory()?.workspace,
      )?;
      Ok((new_rc(npmrc), path))
    })
  }
  pub fn sys(&self) -> &TSys {
    &self.sys
  }
  /// Returns the workspace directory, discovering it according to the
  /// configured `ConfigDiscoveryOption` on first access (unless pre-seeded
  /// via `set_workspace_directory`).
  pub fn workspace_directory(
    &self,
  ) -> Result<&WorkspaceDirectoryRc, WorkspaceDiscoverError> {
    self.workspace_directory.get_or_try_init(|| {
      let maybe_vendor_override = self.options.vendor.map(|v| match v {
        true => VendorEnablement::Enable {
          cwd: &self.initial_cwd,
        },
        false => VendorEnablement::Disable,
      });
      let resolve_workspace_discover_options = || {
        let discover_pkg_json = !self.options.no_npm
          && !self.has_flag_env_var("DENO_NO_PACKAGE_JSON");
        if !discover_pkg_json {
          log::debug!("package.json auto-discovery is disabled");
        }
        WorkspaceDiscoverOptions {
          deno_json_cache: None,
          pkg_json_cache: Some(&node_resolver::PackageJsonThreadLocalCache),
          workspace_cache: None,
          additional_config_file_names: self
            .options
            .additional_config_file_names,
          discover_pkg_json,
          maybe_vendor_override,
        }
      };
      let resolve_empty_options = || WorkspaceDirectoryEmptyOptions {
        root_dir: new_rc(
          deno_path_util::url_from_directory_path(&self.initial_cwd).unwrap(),
        ),
        use_vendor_dir: maybe_vendor_override
          .unwrap_or(VendorEnablement::Disable),
      };
      let dir = match &self.options.config_discovery {
        ConfigDiscoveryOption::DiscoverCwd => WorkspaceDirectory::discover(
          &self.sys,
          WorkspaceDiscoverStart::Paths(std::slice::from_ref(
            &self.initial_cwd,
          )),
          &resolve_workspace_discover_options(),
        )?,
        ConfigDiscoveryOption::Discover { start_paths } => {
          WorkspaceDirectory::discover(
            &self.sys,
            WorkspaceDiscoverStart::Paths(start_paths),
            &resolve_workspace_discover_options(),
          )?
        }
        ConfigDiscoveryOption::Path(path) => {
          #[cfg(not(target_arch = "wasm32"))]
          debug_assert!(path.is_absolute());
          WorkspaceDirectory::discover(
            &self.sys,
            WorkspaceDiscoverStart::ConfigFile(path),
            &resolve_workspace_discover_options(),
          )?
        }
        ConfigDiscoveryOption::Disabled => {
          WorkspaceDirectory::empty(resolve_empty_options())
        }
      };
      Ok(dir)
    })
  }
  /// Loader for the workspace's external import map, if one is configured.
  pub fn workspace_external_import_map_loader(
    &self,
  ) -> Result<&WorkspaceExternalImportMapLoaderRc<TSys>, WorkspaceDiscoverError>
  {
    self
      .workspace_external_import_map_loader
      .get_or_try_init(|| {
        Ok(new_rc(WorkspaceExternalImportMapLoader::new(
          self.sys().clone(),
          self.workspace_directory()?.workspace.clone(),
        )))
      })
  }
  /// Linked npm packages declared in the workspace; errors when link
  /// packages exist but no node_modules directory will be used.
  pub fn workspace_npm_link_packages(
    &self,
  ) -> Result<&WorkspaceNpmLinkPackagesRc, anyhow::Error> {
    self
      .workspace_npm_link_packages
      .get_or_try_init(|| {
        let workspace_dir = self.workspace_directory()?;
        let npm_packages = WorkspaceNpmLinkPackagesRc::from_workspace(
          workspace_dir.workspace.as_ref(),
        );
        if !npm_packages.0.is_empty() && !matches!(self.node_modules_dir_mode()?, NodeModulesDirMode::Auto | NodeModulesDirMode::Manual) {
          bail!("Linking npm packages requires using a node_modules directory. Ensure you have a package.json or set the \"nodeModulesDir\" option to \"auto\" or \"manual\" in your workspace root deno.json.")
        } else {
          Ok(npm_packages)
        }
      })
  }
  /// True when the env var `name` is set to exactly "1".
  fn has_flag_env_var(&self, name: &str) -> bool {
    let value = self.sys.env_var_os(name);
    match value {
      Some(value) => value == "1",
      None => false,
    }
  }
}
/// Options controlling construction of the resolvers created by
/// `ResolverFactory`.
#[derive(Default)]
pub struct ResolverFactoryOptions {
  pub compiler_options_overrides: CompilerOptionsOverrides,
  /// How modules are determined to be CJS vs ESM.
  pub is_cjs_resolution_mode: IsCjsResolutionMode,
  /// Prevents installing packages newer than the specified date.
  pub newest_dependency_date: Option<NewestDependencyDate>,
  /// Cache for CJS export analysis; a null cache is used when `None`.
  pub node_analysis_cache: Option<NodeAnalysisCacheRc>,
  pub node_code_translator_mode: node_resolver::analyze::NodeCodeTranslatorMode,
  pub node_resolver_options: NodeResolverOptions,
  pub node_resolution_cache: Option<node_resolver::NodeResolutionCacheRc>,
  pub npm_system_info: NpmSystemInfo,
  pub package_json_cache: Option<node_resolver::PackageJsonCacheRc>,
  pub package_json_dep_resolution: Option<PackageJsonDepResolution>,
  /// Externally specified import map that overrides workspace discovery.
  pub specified_import_map: Option<Box<dyn SpecifiedImportMapProvider>>,
  /// Whether to resolve bare node builtins (ex. "path" as "node:path").
  pub bare_node_builtins: bool,
  pub unstable_sloppy_imports: bool,
  #[cfg(feature = "graph")]
  pub on_mapped_resolution_diagnostic:
    Option<crate::graph::OnMappedResolutionDiagnosticFn>,
  pub allow_json_imports: AllowJsonImports,
  /// Modules loaded via --require flag that should always be treated as CommonJS
  pub require_modules: Vec<Url>,
}
/// Lazily constructs and caches the resolver graph (node resolver, npm
/// resolver, cjs tracker, code translator, etc.) on top of a shared
/// `WorkspaceFactory`. `Deferred` fields are computed at most once;
/// `async_once_cell` fields require async initialization.
pub struct ResolverFactory<TSys: WorkspaceFactorySys> {
  options: ResolverFactoryOptions,
  sys: NodeResolutionSys<TSys>,
  cjs_module_export_analyzer: Deferred<DenoCjsModuleExportAnalyzerRc<TSys>>,
  cjs_tracker: Deferred<CjsTrackerRc<DenoInNpmPackageChecker, TSys>>,
  compiler_options_resolver: Deferred<CompilerOptionsResolverRc>,
  #[cfg(feature = "graph")]
  deno_resolver:
    async_once_cell::OnceCell<crate::graph::DefaultDenoResolverRc<TSys>>,
  #[cfg(feature = "deno_ast")]
  emitter: Deferred<crate::emit::EmitterRc<DenoInNpmPackageChecker, TSys>>,
  #[cfg(feature = "graph")]
  found_package_json_dep_flag: crate::graph::FoundPackageJsonDepFlagRc,
  in_npm_package_checker: Deferred<DenoInNpmPackageChecker>,
  #[cfg(feature = "graph")]
  jsr_version_resolver: Deferred<JsrVersionResolverRc>,
  minimum_dependency_age: Deferred<MinimumDependencyAgeConfig>,
  node_code_translator: Deferred<DenoNodeCodeTranslatorRc<TSys>>,
  node_resolver: Deferred<
    NodeResolverRc<
      DenoInNpmPackageChecker,
      DenoIsBuiltInNodeModuleChecker,
      NpmResolver<TSys>,
      TSys,
    >,
  >,
  npm_module_loader: Deferred<DenoNpmModuleLoaderRc<TSys>>,
  npm_req_resolver: Deferred<
    NpmReqResolverRc<
      DenoInNpmPackageChecker,
      DenoIsBuiltInNodeModuleChecker,
      NpmResolver<TSys>,
      TSys,
    >,
  >,
  npm_resolver: Deferred<NpmResolver<TSys>>,
  npm_resolution: NpmResolutionCellRc,
  npm_version_resolver: Deferred<NpmVersionResolverRc>,
  #[cfg(feature = "deno_ast")]
  parsed_source_cache: crate::cache::ParsedSourceCacheRc,
  pkg_json_resolver: Deferred<PackageJsonResolverRc<TSys>>,
  #[cfg(all(feature = "graph", feature = "deno_ast"))]
  module_loader: Deferred<crate::loader::ModuleLoaderRc<TSys>>,
  raw_deno_resolver: async_once_cell::OnceCell<DefaultRawDenoResolverRc<TSys>>,
  workspace_factory: WorkspaceFactoryRc<TSys>,
  workspace_resolver: async_once_cell::OnceCell<WorkspaceResolverRc<TSys>>,
}
impl<TSys: WorkspaceFactorySys> ResolverFactory<TSys> {
pub fn new(
workspace_factory: WorkspaceFactoryRc<TSys>,
options: ResolverFactoryOptions,
) -> Self {
Self {
sys: NodeResolutionSys::new(
workspace_factory.sys.clone(),
options.node_resolution_cache.clone(),
),
cjs_module_export_analyzer: Default::default(),
cjs_tracker: Default::default(),
compiler_options_resolver: Default::default(),
raw_deno_resolver: Default::default(),
#[cfg(feature = "graph")]
deno_resolver: Default::default(),
#[cfg(feature = "deno_ast")]
emitter: Default::default(),
#[cfg(feature = "graph")]
found_package_json_dep_flag: Default::default(),
in_npm_package_checker: Default::default(),
#[cfg(feature = "graph")]
jsr_version_resolver: Default::default(),
minimum_dependency_age: Default::default(),
node_code_translator: Default::default(),
node_resolver: Default::default(),
npm_module_loader: Default::default(),
npm_req_resolver: Default::default(),
npm_resolution: Default::default(),
npm_resolver: Default::default(),
npm_version_resolver: Default::default(),
#[cfg(feature = "deno_ast")]
parsed_source_cache: Default::default(),
pkg_json_resolver: Default::default(),
#[cfg(all(feature = "graph", feature = "deno_ast"))]
module_loader: Default::default(),
workspace_factory,
workspace_resolver: Default::default(),
options,
}
}
pub async fn raw_deno_resolver(
&self,
) -> Result<&DefaultRawDenoResolverRc<TSys>, anyhow::Error> {
self
.raw_deno_resolver
.get_or_try_init(
async {
Ok(new_rc(RawDenoResolver::new(DenoResolverOptions {
in_npm_pkg_checker: self.in_npm_package_checker()?.clone(),
node_and_req_resolver: if self.workspace_factory.no_npm() {
None
} else {
Some(NodeAndNpmResolvers {
node_resolver: self.node_resolver()?.clone(),
npm_resolver: self.npm_resolver()?.clone(),
npm_req_resolver: self.npm_req_resolver()?.clone(),
})
},
bare_node_builtins: self.bare_node_builtins()?,
is_byonm: self.use_byonm()?,
maybe_vendor_dir: self
.workspace_factory
.workspace_directory()?
.workspace
.vendor_dir_path(),
workspace_resolver: self.workspace_resolver().await?.clone(),
})))
}
// boxed to prevent the futures getting big and exploding the stack
.boxed_local(),
)
.await
}
/// Lazily builds the analyzer used to discover the exports of CommonJS
/// modules, memoizing the result.
///
/// The code analyzer is backed by the configured node analysis cache
/// (falling back to a no-op cache) and, depending on the `deno_ast`
/// feature, either a real ast-based export analyzer or a stub.
pub fn cjs_module_export_analyzer(
  &self,
) -> Result<&DenoCjsModuleExportAnalyzerRc<TSys>, anyhow::Error> {
  self.cjs_module_export_analyzer.get_or_try_init(|| {
    let code_analyzer = DenoCjsCodeAnalyzer::new(
      self
        .options
        .node_analysis_cache
        .clone()
        .unwrap_or_else(|| new_rc(NullNodeAnalysisCache)),
      self.cjs_tracker()?.clone(),
      #[cfg(feature = "deno_ast")]
      new_rc(crate::cjs::analyzer::DenoAstModuleExportAnalyzer::new(
        self.parsed_source_cache().clone(),
      )),
      #[cfg(not(feature = "deno_ast"))]
      new_rc(crate::cjs::analyzer::NotImplementedModuleExportAnalyzer),
      self.workspace_factory.sys().clone(),
    );
    Ok(new_rc(
      node_resolver::analyze::CjsModuleExportAnalyzer::new(
        code_analyzer,
        self.in_npm_package_checker()?.clone(),
        self.node_resolver()?.clone(),
        self.npm_resolver()?.clone(),
        self.pkg_json_resolver().clone(),
        self.workspace_factory.sys().clone(),
      ),
    ))
  })
}
/// Lazily builds the tracker that decides whether modules are treated as
/// CJS or ESM, memoizing the result.
pub fn cjs_tracker(
  &self,
) -> Result<&CjsTrackerRc<DenoInNpmPackageChecker, TSys>, anyhow::Error> {
  self.cjs_tracker.get_or_try_init(|| {
    Ok(new_rc(CjsTracker::new(
      self.in_npm_package_checker()?.clone(),
      self.pkg_json_resolver().clone(),
      self.options.is_cjs_resolution_mode,
      self.options.require_modules.clone(),
    )))
  })
}
/// Lazily builds the resolver for TypeScript compiler options, combining
/// workspace configuration with any overrides, memoizing the result.
pub fn compiler_options_resolver(
  &self,
) -> Result<&CompilerOptionsResolverRc, anyhow::Error> {
  self.compiler_options_resolver.get_or_try_init(|| {
    Ok(new_rc(CompilerOptionsResolver::new(
      &self.sys,
      &self.workspace_factory.workspace_directory()?.workspace,
      self.node_resolver()?,
      &self.workspace_factory.options.config_discovery,
      &self.options.compiler_options_overrides,
    )))
  })
}
/// Lazily builds the emitter that transpiles TypeScript/JSX sources,
/// memoizing the result. Only available with the `deno_ast` feature.
#[cfg(feature = "deno_ast")]
pub fn emitter(
  &self,
) -> Result<
  &crate::emit::EmitterRc<DenoInNpmPackageChecker, TSys>,
  anyhow::Error,
> {
  self.emitter.get_or_try_init(|| {
    Ok(new_rc(crate::emit::Emitter::new(
      self.cjs_tracker()?.clone(),
      self.workspace_factory.emit_cache()?.clone(),
      self.parsed_source_cache().clone(),
      self.compiler_options_resolver()?.clone(),
    )))
  })
}
/// Flag recording whether a package.json dependency was encountered
/// during graph building. Infallible: the flag is created eagerly with
/// the factory.
#[cfg(feature = "graph")]
pub fn found_package_json_dep_flag(
  &self,
) -> &crate::graph::FoundPackageJsonDepFlagRc {
  &self.found_package_json_dep_flag
}
/// Lazily builds the graph-aware deno resolver on top of the raw
/// resolver, memoizing the result.
#[cfg(feature = "graph")]
pub async fn deno_resolver(
  &self,
) -> Result<&crate::graph::DefaultDenoResolverRc<TSys>, anyhow::Error> {
  self
    .deno_resolver
    .get_or_try_init(async {
      Ok(new_rc(crate::graph::DenoResolver::new(
        self.raw_deno_resolver().await?.clone(),
        self.workspace_factory.sys.clone(),
        self.found_package_json_dep_flag.clone(),
        self.options.on_mapped_resolution_diagnostic.clone(),
      )))
    })
    .await
}
/// Lazily builds the checker that determines whether a specifier lives
/// inside an npm package, memoizing the result.
///
/// In byonm mode ("bring your own node_modules") no cache information is
/// needed; in managed mode the checker is configured with the npm cache
/// root url and the optional node_modules directory path.
pub fn in_npm_package_checker(
  &self,
) -> Result<&DenoInNpmPackageChecker, anyhow::Error> {
  self.in_npm_package_checker.get_or_try_init(|| {
    // `if`/`else` rather than `match` on a bool (clippy::match_bool)
    let options = if self.use_byonm()? {
      CreateInNpmPkgCheckerOptions::Byonm
    } else {
      CreateInNpmPkgCheckerOptions::Managed(
        ManagedInNpmPkgCheckerCreateOptions {
          root_cache_dir_url: self
            .workspace_factory
            .npm_cache_dir()?
            .root_dir_url(),
          maybe_node_modules_path: self
            .workspace_factory
            .node_modules_dir_path()?,
        },
      )
    };
    Ok(DenoInNpmPackageChecker::new(options))
  })
}
/// Lazily builds the resolver that constrains JSR package versions by the
/// configured minimum dependency age, memoizing the result.
#[cfg(feature = "graph")]
pub fn jsr_version_resolver(
  &self,
) -> Result<&JsrVersionResolverRc, anyhow::Error> {
  self.jsr_version_resolver.get_or_try_init(|| {
    let minimum_dependency_age_config =
      self.minimum_dependency_age_config()?;
    Ok(new_rc(deno_graph::packages::JsrVersionResolver {
      newest_dependency_date_options:
        deno_graph::packages::NewestDependencyDateOptions {
          // the newest publish date a dependency may have, if configured
          date: minimum_dependency_age_config
            .age
            .as_ref()
            .and_then(|d| d.into_option())
            .map(deno_graph::packages::NewestDependencyDate),
          // only the "jsr:"-prefixed entries of the exclude list apply here
          exclude_jsr_pkgs: minimum_dependency_age_config
            .exclude
            .iter()
            .filter_map(|v| v.strip_prefix("jsr:"))
            .map(|v| v.into())
            .collect(),
        },
    }))
  })
}
/// The newest allowed dependency date.
pub fn minimum_dependency_age_config(
&self,
) -> Result<&MinimumDependencyAgeConfig, anyhow::Error> {
self.minimum_dependency_age.get_or_try_init(|| {
let config = if let Some(date) = self.options.newest_dependency_date {
MinimumDependencyAgeConfig {
age: Some(date),
exclude: Vec::new(),
}
} else {
let workspace_factory = self.workspace_factory();
let workspace = &workspace_factory.workspace_directory()?.workspace;
workspace.minimum_dependency_age(workspace_factory.sys())?
};
if let Some(newest_dependency_date) =
config.age.and_then(|d| d.into_option())
{
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | true |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/resolver/loader/module_loader.rs | libs/resolver/loader/module_loader.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use boxed_error::Boxed;
use deno_ast::ModuleKind;
use deno_graph::JsModule;
use deno_graph::JsonModule;
use deno_graph::ModuleGraph;
use deno_graph::WasmModule;
use deno_media_type::MediaType;
use node_resolver::InNpmPackageChecker;
use node_resolver::errors::PackageJsonLoadError;
use url::Url;
use super::AllowJsonImports;
use super::DenoNpmModuleLoaderRc;
use super::LoadedModule;
use super::LoadedModuleOrAsset;
use super::LoadedModuleSource;
use super::NpmModuleLoadError;
use super::RequestedModuleType;
use crate::cache::ParsedSourceCacheRc;
use crate::cjs::CjsTrackerRc;
use crate::emit::EmitParsedSourceHelperError;
use crate::emit::EmitterRc;
use crate::factory::DenoNodeCodeTranslatorRc;
use crate::graph::EnhanceGraphErrorMode;
use crate::graph::enhance_graph_error;
use crate::npm::DenoInNpmPackageChecker;
#[allow(clippy::disallowed_types)]
type ArcStr = std::sync::Arc<str>;
/// A graph module error paired with a pre-rendered, human-friendly message
/// (e.g. including source ranges).
#[derive(Debug, thiserror::Error, deno_error::JsError)]
#[error("{message}")]
#[class(inherit)]
pub struct EnhancedGraphError {
  /// The underlying graph error; the JS error class is inherited from it.
  #[inherit]
  pub error: deno_graph::ModuleError,
  /// The enhanced display message shown to the user.
  pub message: String,
}
/// Boxed error returned when loading a module that is already in the
/// module graph ("prepared") fails.
#[derive(Debug, deno_error::JsError, Boxed)]
#[class(inherit)]
pub struct LoadPreparedModuleError(pub Box<LoadPreparedModuleErrorKind>);
/// The ways loading a prepared module can fail.
#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum LoadPreparedModuleErrorKind {
  #[class(inherit)]
  #[error(transparent)]
  Graph(#[from] EnhancedGraphError),
  #[class(inherit)]
  #[error(transparent)]
  ClosestPkgJson(#[from] PackageJsonLoadError),
  #[class(inherit)]
  #[error(transparent)]
  LoadMaybeCjs(#[from] LoadMaybeCjsError),
  #[class(inherit)]
  #[error(transparent)]
  Emit(#[from] EmitParsedSourceHelperError),
}
/// Errors from emitting and translating a possibly-CommonJS module to ESM.
#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum LoadMaybeCjsError {
  #[class(inherit)]
  #[error(transparent)]
  NpmModuleLoad(#[from] crate::emit::EmitParsedSourceHelperError),
  #[class(inherit)]
  #[error(transparent)]
  TranslateCjsToEsm(#[from] node_resolver::analyze::TranslateCjsToEsmError),
}
/// Boxed top-level error for [`ModuleLoader::load`].
#[derive(Debug, deno_error::JsError, Boxed)]
#[class(inherit)]
pub struct LoadCodeSourceError(pub Box<LoadCodeSourceErrorKind>);
/// The ways loading a module's code can fail.
#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum LoadCodeSourceErrorKind {
  #[class(inherit)]
  #[error(transparent)]
  LoadPreparedModule(#[from] LoadPreparedModuleError),
  #[class(inherit)]
  #[error(transparent)]
  LoadUnpreparedModule(#[from] LoadUnpreparedModuleError),
  /// A JSON module was imported without the required
  /// `with { type: "json" }` import attribute.
  #[class(generic)]
  #[error(
    "Attempted to load JSON module without specifying \"type\": \"json\" attribute in the import statement."
  )]
  MissingJsonAttribute,
  #[class(inherit)]
  #[error(transparent)]
  NpmModuleLoad(#[from] NpmModuleLoadError),
  #[class(inherit)]
  #[error(transparent)]
  PathToUrl(#[from] deno_path_util::PathToUrlError),
  #[class(inherit)]
  #[error(transparent)]
  UnsupportedScheme(#[from] UnsupportedSchemeError),
}
/// A specifier used a URL scheme the loader cannot handle.
// this message lists additional `npm` and `jsr` schemes, but they should actually be handled
// before these APIs are even hit.
#[derive(Debug, thiserror::Error, deno_error::JsError)]
#[class(type)]
#[error(
  "Unsupported scheme \"{}\" for module \"{}\". Supported schemes:\n - \"blob\"\n - \"data\"\n - \"file\"\n - \"http\"\n - \"https\"\n - \"jsr\"\n - \"npm\"", url.scheme(), url
)]
pub struct UnsupportedSchemeError {
  pub url: Url,
}
/// A module was requested that was never added to the module graph
/// (i.e. not "prepared"), so there is nothing to load.
#[derive(Debug, thiserror::Error, deno_error::JsError)]
#[class(generic)]
#[error("Loading unprepared module: {}{}", .specifier, .maybe_referrer.as_ref().map(|r| format!(", imported from: {}", r)).unwrap_or_default())]
pub struct LoadUnpreparedModuleError {
  specifier: Url,
  maybe_referrer: Option<Url>,
}
#[allow(clippy::disallowed_types)]
pub type ModuleLoaderRc<TSys> = deno_maybe_sync::MaybeArc<ModuleLoader<TSys>>;
/// System-capability bound required by [`ModuleLoader`]: the union of the
/// capabilities all of its collaborators need.
#[sys_traits::auto_impl]
pub trait ModuleLoaderSys:
  super::NpmModuleLoaderSys
  + crate::emit::EmitterSys
  + node_resolver::analyze::NodeCodeTranslatorSys
  + crate::cjs::analyzer::DenoCjsCodeAnalyzerSys
  + crate::npm::NpmResolverSys
{
}
/// Intermediate result of looking a module up in the graph: either source
/// that is ready to use, or work (emit / CJS translation) still to do.
enum CodeOrDeferredEmit<'a> {
  /// Source that can be returned as-is.
  Source(LoadedModule<'a>),
  /// Needs transpiling (e.g. TypeScript/JSX) before it can be returned.
  DeferredEmit {
    specifier: &'a Url,
    media_type: MediaType,
    source: &'a ArcStr,
  },
  /// Possibly CommonJS; needs emit + CJS→ESM translation.
  Cjs {
    specifier: &'a Url,
    media_type: MediaType,
    source: &'a ArcStr,
  },
  /// An asset whose bytes the caller must fetch itself.
  ExternalAsset {
    specifier: &'a Url,
  },
}
/// Loads modules for execution, preferring the module graph and falling
/// back to npm packages or external assets.
pub struct ModuleLoader<TSys: ModuleLoaderSys> {
  in_npm_pkg_checker: DenoInNpmPackageChecker,
  npm_module_loader: DenoNpmModuleLoaderRc<TSys>,
  // handles modules that are present in the module graph
  prepared_module_loader: PreparedModuleLoader<TSys>,
  allow_json_imports: AllowJsonImports,
}
impl<TSys: ModuleLoaderSys> ModuleLoader<TSys> {
  /// Creates a module loader from its collaborators. The emit/translate
  /// related services are bundled into the inner [`PreparedModuleLoader`].
  #[allow(clippy::too_many_arguments)]
  pub fn new(
    cjs_tracker: CjsTrackerRc<DenoInNpmPackageChecker, TSys>,
    emitter: EmitterRc<DenoInNpmPackageChecker, TSys>,
    in_npm_pkg_checker: DenoInNpmPackageChecker,
    node_code_translator: DenoNodeCodeTranslatorRc<TSys>,
    npm_module_loader: DenoNpmModuleLoaderRc<TSys>,
    parsed_source_cache: ParsedSourceCacheRc,
    sys: TSys,
    allow_json_imports: AllowJsonImports,
  ) -> Self {
    Self {
      in_npm_pkg_checker,
      npm_module_loader,
      prepared_module_loader: PreparedModuleLoader {
        cjs_tracker,
        emitter,
        node_code_translator,
        parsed_source_cache,
        sys,
      },
      allow_json_imports,
    }
  }
  /// Loads a module using the graph or file system.
  ///
  /// Note that the referrer is only used to enhance error messages and
  /// doesn't need to be provided.
  pub async fn load<'a>(
    &self,
    graph: &'a ModuleGraph,
    specifier: &'a Url,
    // todo(#30074): we should remove passing the referrer in here and remove the
    // referrer from all error messages. This should be up to deno_core to display.
    maybe_referrer: Option<&Url>,
    requested_module_type: &RequestedModuleType<'_>,
  ) -> Result<LoadedModuleOrAsset<'a>, LoadCodeSourceError> {
    // First try the prepared (graph-known) path; `None` means the graph
    // does not know this specifier.
    let source = match self
      .prepared_module_loader
      .load_prepared_module(graph, specifier, requested_module_type)
      .await
      .map_err(LoadCodeSourceError::from)?
    {
      Some(module_or_asset) => module_or_asset,
      None => {
        if !matches!(
          specifier.scheme(),
          "https" | "http" | "file" | "blob" | "data"
        ) {
          return Err(
            UnsupportedSchemeError {
              url: specifier.clone(),
            }
            .into(),
          );
        } else if self.in_npm_pkg_checker.in_npm_package(specifier) {
          // npm packages are loaded directly from the file system
          let loaded_module = self
            .npm_module_loader
            .load(
              Cow::Borrowed(specifier),
              maybe_referrer,
              requested_module_type,
            )
            .await
            .map_err(LoadCodeSourceError::from)?;
          LoadedModuleOrAsset::Module(loaded_module)
        } else {
          match requested_module_type {
            // text/bytes imports may reference files the graph never saw;
            // let the caller fetch them
            RequestedModuleType::Text | RequestedModuleType::Bytes => {
              LoadedModuleOrAsset::ExternalAsset {
                specifier: Cow::Borrowed(specifier),
                statically_analyzable: false,
              }
            }
            _ => {
              return Err(LoadCodeSourceError::from(
                LoadUnpreparedModuleError {
                  specifier: specifier.clone(),
                  maybe_referrer: maybe_referrer.cloned(),
                },
              ));
            }
          }
        }
      }
    };
    match &source {
      LoadedModuleOrAsset::Module(loaded_module) => {
        // If we loaded a JSON file, but the "requested_module_type" (that is computed from
        // import attributes) is not JSON we need to fail.
        if loaded_module.media_type == MediaType::Json
          && !matches!(requested_module_type, RequestedModuleType::Json)
          && matches!(self.allow_json_imports, AllowJsonImports::WithAttribute)
        {
          Err(LoadCodeSourceErrorKind::MissingJsonAttribute.into_box())
        } else {
          Ok(source)
        }
      }
      LoadedModuleOrAsset::ExternalAsset { .. } => {
        // these are never type: "json"
        Ok(source)
      }
    }
  }
  /// Synchronously loads a graph-known module, e.g. for source map lookup.
  pub fn load_prepared_module_for_source_map_sync<'graph>(
    &self,
    graph: &'graph ModuleGraph,
    specifier: &Url,
  ) -> Result<Option<LoadedModule<'graph>>, anyhow::Error> {
    self
      .prepared_module_loader
      .load_prepared_module_for_source_map_sync(graph, specifier)
  }
}
/// Loads modules that are present in the module graph, handling emit
/// (transpile) and CJS→ESM translation.
struct PreparedModuleLoader<TSys: ModuleLoaderSys> {
  cjs_tracker: CjsTrackerRc<DenoInNpmPackageChecker, TSys>,
  emitter: EmitterRc<DenoInNpmPackageChecker, TSys>,
  node_code_translator: DenoNodeCodeTranslatorRc<TSys>,
  parsed_source_cache: ParsedSourceCacheRc,
  sys: TSys,
}
impl<TSys: ModuleLoaderSys> PreparedModuleLoader<TSys> {
  /// Loads a graph-known module, transpiling or translating CJS→ESM as
  /// needed. Returns `None` when the graph does not know the specifier.
  pub async fn load_prepared_module<'graph>(
    &self,
    graph: &'graph ModuleGraph,
    specifier: &Url,
    requested_module_type: &RequestedModuleType<'_>,
  ) -> Result<Option<LoadedModuleOrAsset<'graph>>, LoadPreparedModuleError> {
    // Note: keep this in sync with the sync version below
    match self.load_prepared_module_or_defer_emit(
      graph,
      specifier,
      requested_module_type,
    )? {
      Some(CodeOrDeferredEmit::Source(source)) => {
        Ok(Some(LoadedModuleOrAsset::Module(source)))
      }
      Some(CodeOrDeferredEmit::DeferredEmit {
        specifier,
        media_type,
        source,
      }) => {
        let transpile_result = self
          .emitter
          .maybe_emit_source(specifier, media_type, ModuleKind::Esm, source)
          .await?;
        // at this point, we no longer need the parsed source in memory, so free it
        self.parsed_source_cache.free(specifier);
        Ok(Some(LoadedModuleOrAsset::Module(LoadedModule {
          // note: it's faster to provide a string to v8 if we know it's a string
          source: LoadedModuleSource::ArcStr(transpile_result),
          specifier: Cow::Borrowed(specifier),
          media_type,
        })))
      }
      Some(CodeOrDeferredEmit::Cjs {
        specifier,
        media_type,
        source,
      }) => self
        .load_maybe_cjs(specifier, media_type, source)
        .await
        .map(|text| {
          Some(LoadedModuleOrAsset::Module(LoadedModule {
            specifier: Cow::Borrowed(specifier),
            media_type,
            source: LoadedModuleSource::ArcStr(text),
          }))
        })
        .map_err(|e| LoadPreparedModuleErrorKind::LoadMaybeCjs(e).into_box()),
      Some(CodeOrDeferredEmit::ExternalAsset { specifier }) => {
        Ok(Some(LoadedModuleOrAsset::ExternalAsset {
          specifier: Cow::Borrowed(specifier),
          // came from graph, so yes
          statically_analyzable: true,
        }))
      }
      None => Ok(None),
    }
  }
  /// Synchronous variant used for source-map lookup; CJS modules are
  /// skipped because their translation is async-only.
  pub fn load_prepared_module_for_source_map_sync<'graph>(
    &self,
    graph: &'graph ModuleGraph,
    specifier: &Url,
  ) -> Result<Option<LoadedModule<'graph>>, anyhow::Error> {
    // Note: keep this in sync with the async version above
    match self.load_prepared_module_or_defer_emit(
      graph,
      specifier,
      &RequestedModuleType::None,
    )? {
      Some(CodeOrDeferredEmit::Source(code_source)) => Ok(Some(code_source)),
      Some(CodeOrDeferredEmit::DeferredEmit {
        specifier,
        media_type,
        source,
      }) => {
        let transpile_result = self.emitter.maybe_emit_source_sync(
          specifier,
          media_type,
          ModuleKind::Esm,
          source,
        )?;
        // at this point, we no longer need the parsed source in memory, so free it
        self.parsed_source_cache.free(specifier);
        Ok(Some(LoadedModule {
          // note: it's faster to provide a string if we know it's a string
          source: LoadedModuleSource::ArcStr(transpile_result),
          specifier: Cow::Borrowed(specifier),
          media_type,
        }))
      }
      Some(CodeOrDeferredEmit::Cjs { .. }) => {
        self.parsed_source_cache.free(specifier);
        // todo(dsherret): to make this work, we should probably just
        // rely on the CJS export cache. At the moment this is hard because
        // cjs export analysis is only async
        Ok(None)
      }
      Some(CodeOrDeferredEmit::ExternalAsset { .. }) | None => Ok(None),
    }
  }
  /// Looks the specifier up in the graph and classifies the result into
  /// source that is ready versus work still to do (see
  /// [`CodeOrDeferredEmit`]).
  fn load_prepared_module_or_defer_emit<'graph>(
    &self,
    graph: &'graph ModuleGraph,
    specifier: &Url,
    requested_module_type: &RequestedModuleType,
  ) -> Result<Option<CodeOrDeferredEmit<'graph>>, LoadPreparedModuleError> {
    let maybe_module =
      graph.try_get(specifier).map_err(|err| EnhancedGraphError {
        message: enhance_graph_error(
          &self.sys,
          &deno_graph::ModuleGraphError::ModuleError(err.clone()),
          EnhanceGraphErrorMode::ShowRange,
        ),
        error: err.clone(),
      })?;
    match maybe_module {
      Some(deno_graph::Module::Json(JsonModule {
        source,
        media_type,
        specifier,
        ..
      })) => match requested_module_type {
        // bytes are only available if the original bytes were kept around
        RequestedModuleType::Bytes => match source.try_get_original_bytes() {
          Some(bytes) => Ok(Some(CodeOrDeferredEmit::Source(LoadedModule {
            source: LoadedModuleSource::ArcBytes(bytes),
            specifier: Cow::Borrowed(specifier),
            media_type: *media_type,
          }))),
          None => Ok(Some(CodeOrDeferredEmit::ExternalAsset { specifier })),
        },
        RequestedModuleType::Text => {
          Ok(Some(CodeOrDeferredEmit::Source(LoadedModule {
            source: LoadedModuleSource::ArcStr(source.text.clone()),
            specifier: Cow::Borrowed(specifier),
            media_type: *media_type,
          })))
        }
        _ => Ok(Some(CodeOrDeferredEmit::Source(LoadedModule {
          source: LoadedModuleSource::ArcStr(source.text.clone()),
          specifier: Cow::Borrowed(specifier),
          media_type: *media_type,
        }))),
      },
      Some(deno_graph::Module::Js(JsModule {
        source,
        media_type,
        specifier,
        is_script,
        ..
      })) => match requested_module_type {
        RequestedModuleType::Bytes => match source.try_get_original_bytes() {
          Some(bytes) => Ok(Some(CodeOrDeferredEmit::Source(LoadedModule {
            source: LoadedModuleSource::ArcBytes(bytes),
            specifier: Cow::Borrowed(specifier),
            media_type: *media_type,
          }))),
          None => Ok(Some(CodeOrDeferredEmit::ExternalAsset { specifier })),
        },
        RequestedModuleType::Text => {
          Ok(Some(CodeOrDeferredEmit::Source(LoadedModule {
            source: LoadedModuleSource::ArcStr(source.text.clone()),
            specifier: Cow::Borrowed(specifier),
            media_type: *media_type,
          })))
        }
        _ => {
          // scripts known to be CJS go through the CJS translation path
          if self.cjs_tracker.is_cjs_with_known_is_script(
            specifier,
            *media_type,
            *is_script,
          )? {
            return Ok(Some(CodeOrDeferredEmit::Cjs {
              specifier,
              media_type: *media_type,
              source: &source.text,
            }));
          }
          let code = match media_type {
            MediaType::JavaScript
            | MediaType::Unknown
            | MediaType::Mjs
            | MediaType::Json => source.text.clone(),
            // type declarations have no runtime code
            MediaType::Dts | MediaType::Dcts | MediaType::Dmts => {
              Default::default()
            }
            MediaType::Cjs | MediaType::Cts => {
              return Ok(Some(CodeOrDeferredEmit::Cjs {
                specifier,
                media_type: *media_type,
                source: &source.text,
              }));
            }
            // these need transpiling before execution
            MediaType::TypeScript
            | MediaType::Mts
            | MediaType::Jsx
            | MediaType::Tsx => {
              return Ok(Some(CodeOrDeferredEmit::DeferredEmit {
                specifier,
                media_type: *media_type,
                source: &source.text,
              }));
            }
            MediaType::Css
            | MediaType::Html
            | MediaType::Jsonc
            | MediaType::Json5
            | MediaType::Sql
            | MediaType::Wasm
            | MediaType::SourceMap => {
              panic!("Unexpected media type {media_type} for {specifier}")
            }
          };
          // at this point, we no longer need the parsed source in memory, so free it
          self.parsed_source_cache.free(specifier);
          Ok(Some(CodeOrDeferredEmit::Source(LoadedModule {
            source: LoadedModuleSource::ArcStr(code),
            specifier: Cow::Borrowed(specifier),
            media_type: *media_type,
          })))
        }
      },
      Some(deno_graph::Module::Wasm(WasmModule {
        source, specifier, ..
      })) => Ok(Some(CodeOrDeferredEmit::Source(LoadedModule {
        source: LoadedModuleSource::ArcBytes(source.clone()),
        specifier: Cow::Borrowed(specifier),
        media_type: MediaType::Wasm,
      }))),
      Some(deno_graph::Module::External(module)) => {
        if module.specifier.as_str().contains("/node_modules/") {
          return Ok(None);
        }
        Ok(Some(CodeOrDeferredEmit::ExternalAsset {
          specifier: &module.specifier,
        }))
      }
      Some(deno_graph::Module::Node(_) | deno_graph::Module::Npm(_)) | None => {
        Ok(None)
      }
    }
  }
  /// Emits (if needed) and translates a possibly-CJS module to ESM.
  async fn load_maybe_cjs(
    &self,
    specifier: &Url,
    media_type: MediaType,
    original_source: &ArcStr,
  ) -> Result<ArcStr, LoadMaybeCjsError> {
    let js_source = self
      .emitter
      .maybe_emit_source(
        specifier,
        media_type,
        ModuleKind::Cjs,
        original_source,
      )
      .await?;
    let text = self
      .node_code_translator
      .translate_cjs_to_esm(specifier, Some(Cow::Borrowed(js_source.as_ref())))
      .await?;
    // at this point, we no longer need the parsed source in memory, so free it
    self.parsed_source_cache.free(specifier);
    Ok(match text {
      // perf: if the text is borrowed, that means it didn't make any changes
      // to the original source, so we can just provide that instead of cloning
      // the borrowed text
      Cow::Borrowed(_) => js_source.clone(),
      Cow::Owned(text) => text.into(),
    })
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/resolver/loader/mod.rs | libs/resolver/loader/mod.rs | // Copyright 2018-2025 the Deno authors. MIT license.
mod npm;
#[cfg(all(feature = "graph", feature = "deno_ast"))]
mod module_loader;
use std::borrow::Cow;
use std::collections::HashMap;
use deno_cache_dir::file_fetcher::File;
use deno_media_type::MediaType;
#[cfg(all(feature = "graph", feature = "deno_ast"))]
pub use module_loader::*;
pub use npm::*;
use parking_lot::RwLock;
use url::Url;
/// Controls whether importing a JSON module requires the
/// `with { type: "json" }` import attribute.
#[derive(Debug, Clone, Copy, Default)]
pub enum AllowJsonImports {
  /// JSON may be imported without an attribute.
  Always,
  /// JSON imports must carry the `type: "json"` attribute (the default).
  #[default]
  WithAttribute,
}
/// The module type requested via import attributes, if any.
#[derive(Debug)]
pub enum RequestedModuleType<'a> {
  /// No `type` attribute was provided.
  None,
  Json,
  Text,
  Bytes,
  /// Any other `type` attribute value.
  Other(&'a str),
}
#[allow(clippy::disallowed_types)]
type ArcStr = std::sync::Arc<str>;
#[allow(clippy::disallowed_types)]
type ArcBytes = std::sync::Arc<[u8]>;
/// Result of a load: either a module with its source, or an asset whose
/// bytes the caller must fetch itself.
pub enum LoadedModuleOrAsset<'a> {
  Module(LoadedModule<'a>),
  /// An external asset that the caller must fetch.
  ExternalAsset {
    specifier: Cow<'a, Url>,
    /// Whether this was a module the graph knows about.
    statically_analyzable: bool,
  },
}
/// A successfully loaded module: its resolved specifier, detected media
/// type, and source payload.
pub struct LoadedModule<'a> {
  pub specifier: Cow<'a, Url>,
  pub media_type: MediaType,
  pub source: LoadedModuleSource,
}
/// The source payload of a loaded module, either textual or binary and
/// either shared (`Arc`) or owned/borrowed (`Cow`).
pub enum LoadedModuleSource {
  ArcStr(ArcStr),
  ArcBytes(ArcBytes),
  String(Cow<'static, str>),
  Bytes(Cow<'static, [u8]>),
}
impl LoadedModuleSource {
pub fn as_bytes(&self) -> &[u8] {
match self {
LoadedModuleSource::ArcStr(text) => text.as_bytes(),
LoadedModuleSource::ArcBytes(bytes) => bytes,
LoadedModuleSource::String(text) => text.as_bytes(),
LoadedModuleSource::Bytes(bytes) => bytes,
}
}
}
#[allow(clippy::disallowed_types)]
pub type MemoryFilesRc = deno_maybe_sync::MaybeArc<MemoryFiles>;
/// Thread-safe, in-memory mapping from specifier to file contents, used to
/// serve synthetic files to the file fetcher.
#[derive(Debug, Default)]
pub struct MemoryFiles(RwLock<HashMap<Url, File>>);
impl MemoryFiles {
  /// Stores `file` under `specifier`, returning any previously stored file.
  pub fn insert(&self, specifier: Url, file: File) -> Option<File> {
    self.0.write().insert(specifier, file)
  }
  /// Removes all stored files.
  pub fn clear(&self) {
    self.0.write().clear();
  }
}
impl deno_cache_dir::file_fetcher::MemoryFiles for MemoryFiles {
  fn get(&self, specifier: &Url) -> Option<File> {
    // clone so the read lock is not held beyond the lookup
    self.0.read().get(specifier).cloned()
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/resolver/loader/npm.rs | libs/resolver/loader/npm.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::path::PathBuf;
use deno_media_type::MediaType;
use node_resolver::InNpmPackageChecker;
use node_resolver::IsBuiltInNodeModuleChecker;
use node_resolver::NpmPackageFolderResolver;
use node_resolver::analyze::CjsCodeAnalyzer;
use node_resolver::analyze::NodeCodeTranslatorRc;
use node_resolver::analyze::NodeCodeTranslatorSys;
use thiserror::Error;
use url::Url;
use super::LoadedModule;
use super::LoadedModuleSource;
use super::RequestedModuleType;
use crate::cjs::CjsTrackerRc;
/// A TypeScript file inside `node_modules` was loaded for execution, which
/// would require type stripping — unsupported there, matching Node.js.
#[derive(Debug, Error, deno_error::JsError)]
#[class(type)]
#[error("[{}]: Stripping types is currently unsupported for files under node_modules, for \"{}\"", self.code(), specifier)]
pub struct StrippingTypesNodeModulesError {
  pub specifier: Url,
}
impl StrippingTypesNodeModulesError {
  /// Node.js-compatible error code for this condition.
  pub fn code(&self) -> &'static str {
    "ERR_UNSUPPORTED_NODE_MODULES_TYPE_STRIPPING"
  }
}
/// Errors that can occur while loading a module from an npm package on the
/// file system.
#[derive(Debug, Error, deno_error::JsError)]
pub enum NpmModuleLoadError {
  #[class(inherit)]
  #[error(transparent)]
  UrlToFilePath(#[from] deno_path_util::UrlToFilePathError),
  #[class(inherit)]
  #[error(transparent)]
  StrippingTypesNodeModules(#[from] StrippingTypesNodeModulesError),
  #[class(inherit)]
  #[error(transparent)]
  ClosestPkgJson(#[from] node_resolver::errors::PackageJsonLoadError),
  #[class(inherit)]
  #[error(transparent)]
  TranslateCjsToEsm(#[from] node_resolver::analyze::TranslateCjsToEsmError),
  /// Reading the file failed for a reason other than it being a directory.
  #[class(inherit)]
  #[error("Unable to load {}{}", file_path.display(), maybe_referrer.as_ref().map(|r| format!(" imported from {}", r)).unwrap_or_default())]
  UnableToLoad {
    file_path: PathBuf,
    maybe_referrer: Option<Url>,
    #[source]
    #[inherit]
    source: std::io::Error,
  },
  /// The specifier pointed at a directory; ES modules disallow directory
  /// imports, so suggest a likely index file when one exists.
  #[class(inherit)]
  #[error(
    "{}",
    format_dir_import_message(file_path, maybe_referrer, suggestion)
  )]
  DirImport {
    file_path: PathBuf,
    maybe_referrer: Option<Url>,
    suggestion: Option<&'static str>,
    #[source]
    #[inherit]
    source: std::io::Error,
  },
}
/// Renders the user-facing message for a rejected directory import,
/// optionally naming the referrer and a likely index-file suggestion.
fn format_dir_import_message(
  file_path: &std::path::Path,
  maybe_referrer: &Option<Url>,
  suggestion: &Option<&'static str>,
) -> String {
  // directory imports are not allowed when importing from an
  // ES module, so provide the user with a helpful error message
  let mut message =
    format!("Directory import {}", file_path.to_string_lossy());
  if let Some(referrer) = maybe_referrer {
    message.push_str(" is not supported resolving import from ");
    message.push_str(referrer.as_str());
  }
  if let Some(entrypoint) = suggestion {
    message.push_str("\nDid you mean to import ");
    message.push_str(entrypoint);
    message.push_str(" within the directory?");
  }
  message
}
/// System-capability bound required by [`NpmModuleLoader`].
#[sys_traits::auto_impl]
pub trait NpmModuleLoaderSys: NodeCodeTranslatorSys {}
#[allow(clippy::disallowed_types)]
pub type DenoNpmModuleLoaderRc<TSys> =
  deno_maybe_sync::MaybeArc<DenoNpmModuleLoader<TSys>>;
/// [`NpmModuleLoader`] instantiated with the default deno implementations.
pub type DenoNpmModuleLoader<TSys> = NpmModuleLoader<
  crate::cjs::analyzer::DenoCjsCodeAnalyzer<TSys>,
  crate::npm::DenoInNpmPackageChecker,
  node_resolver::DenoIsBuiltInNodeModuleChecker,
  crate::npm::NpmResolver<TSys>,
  TSys,
>;
/// Loads modules belonging to npm packages directly from the file system,
/// translating CommonJS to ESM when necessary.
#[derive(Clone)]
pub struct NpmModuleLoader<
  TCjsCodeAnalyzer: CjsCodeAnalyzer,
  TInNpmPackageChecker: InNpmPackageChecker,
  TIsBuiltInNodeModuleChecker: IsBuiltInNodeModuleChecker,
  TNpmPackageFolderResolver: NpmPackageFolderResolver,
  TSys: NpmModuleLoaderSys,
> {
  cjs_tracker: CjsTrackerRc<TInNpmPackageChecker, TSys>,
  node_code_translator: NodeCodeTranslatorRc<
    TCjsCodeAnalyzer,
    TInNpmPackageChecker,
    TIsBuiltInNodeModuleChecker,
    TNpmPackageFolderResolver,
    TSys,
  >,
  sys: TSys,
}
impl<
  TCjsCodeAnalyzer: CjsCodeAnalyzer,
  TInNpmPackageChecker: InNpmPackageChecker,
  TIsBuiltInNodeModuleChecker: IsBuiltInNodeModuleChecker,
  TNpmPackageFolderResolver: NpmPackageFolderResolver,
  TSys: NpmModuleLoaderSys,
>
  NpmModuleLoader<
    TCjsCodeAnalyzer,
    TInNpmPackageChecker,
    TIsBuiltInNodeModuleChecker,
    TNpmPackageFolderResolver,
    TSys,
  >
{
  /// Creates a loader from its collaborators.
  pub fn new(
    cjs_tracker: CjsTrackerRc<TInNpmPackageChecker, TSys>,
    node_code_translator: NodeCodeTranslatorRc<
      TCjsCodeAnalyzer,
      TInNpmPackageChecker,
      TIsBuiltInNodeModuleChecker,
      TNpmPackageFolderResolver,
      TSys,
    >,
    sys: TSys,
  ) -> Self {
    Self {
      cjs_tracker,
      node_code_translator,
      sys,
    }
  }
  /// Reads the module at `specifier` from disk and prepares it for the
  /// requested module type (raw bytes/text, or executable code with
  /// CJS→ESM translation applied when needed).
  pub async fn load<'a>(
    &self,
    specifier: Cow<'a, Url>,
    maybe_referrer: Option<&Url>,
    requested_module_type: &RequestedModuleType<'_>,
  ) -> Result<LoadedModule<'a>, NpmModuleLoadError> {
    let file_path = deno_path_util::url_to_file_path(&specifier)?;
    let code = self.sys.fs_read(&file_path).map_err(|source| {
      if self.sys.fs_is_dir_no_err(&file_path) {
        // a directory was imported; look for a plausible index file to
        // suggest in the error message
        let suggestion = ["index.mjs", "index.js", "index.cjs"]
          .into_iter()
          .find(|e| self.sys.fs_is_file_no_err(file_path.join(e)));
        NpmModuleLoadError::DirImport {
          file_path,
          maybe_referrer: maybe_referrer.cloned(),
          suggestion,
          source,
        }
      } else {
        NpmModuleLoadError::UnableToLoad {
          file_path,
          maybe_referrer: maybe_referrer.cloned(),
          source,
        }
      }
    })?;
    let media_type = MediaType::from_specifier(&specifier);
    match requested_module_type {
      // raw imports: return the bytes untouched
      RequestedModuleType::Text | RequestedModuleType::Bytes => {
        Ok(LoadedModule {
          specifier,
          media_type,
          source: LoadedModuleSource::Bytes(code),
        })
      }
      RequestedModuleType::None
      | RequestedModuleType::Json
      | RequestedModuleType::Other(_) => {
        // TypeScript under node_modules would need type stripping, which
        // is unsupported (matching Node.js behavior)
        if media_type.is_emittable() {
          return Err(NpmModuleLoadError::StrippingTypesNodeModules(
            StrippingTypesNodeModulesError {
              specifier: specifier.into_owned(),
            },
          ));
        }
        let source = if self.cjs_tracker.is_maybe_cjs(&specifier, media_type)? {
          // translate cjs to esm if it's cjs and inject node globals
          let code = from_utf8_lossy_cow(code);
          LoadedModuleSource::String(
            self
              .node_code_translator
              .translate_cjs_to_esm(&specifier, Some(code))
              .await?
              .into_owned()
              .into(),
          )
        } else {
          // esm and json code is untouched
          LoadedModuleSource::Bytes(code)
        };
        Ok(LoadedModule {
          source,
          specifier,
          media_type,
        })
      }
    }
  }
}
/// Lossily converts bytes to a string, preserving borrowed-ness: a
/// borrowed input only allocates when invalid UTF-8 forces a re-copy.
#[inline(always)]
fn from_utf8_lossy_cow(bytes: Cow<'_, [u8]>) -> Cow<'_, str> {
  match bytes {
    Cow::Owned(owned_bytes) => Cow::Owned(from_utf8_lossy_owned(owned_bytes)),
    Cow::Borrowed(borrowed_bytes) => String::from_utf8_lossy(borrowed_bytes),
  }
}
// todo(https://github.com/rust-lang/rust/issues/129436): remove once stabilized
/// Lossily converts an owned byte vector to a `String`, reusing the
/// allocation when the bytes are already valid UTF-8.
#[inline(always)]
fn from_utf8_lossy_owned(bytes: Vec<u8>) -> String {
  // `String::from_utf8` validates once and reuses the allocation on
  // success, so the happy path is as cheap as the previous
  // `from_utf8_unchecked` version without needing `unsafe`.
  match String::from_utf8(bytes) {
    Ok(text) => text,
    // invalid UTF-8: fall back to a lossy re-copy with replacement chars
    Err(err) => String::from_utf8_lossy(err.as_bytes()).into_owned(),
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/resolver/cache/deno_dir.rs | libs/resolver/cache/deno_dir.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::env;
use std::path::PathBuf;
use deno_cache_dir::DenoDirResolutionError;
use deno_cache_dir::ResolveDenoDirSys;
use super::DiskCache;
use super::DiskCacheSys;
/// Options for resolving the DENO_DIR location.
#[derive(Debug, Clone)]
pub struct DenoDirOptions {
  /// Explicit root to use instead of the default resolution.
  pub maybe_custom_root: Option<PathBuf>,
}
/// System-capability bound required to resolve and use the deno dir.
#[sys_traits::auto_impl]
pub trait DenoDirSys: DiskCacheSys + ResolveDenoDirSys + Clone {}
#[allow(clippy::disallowed_types)]
pub type DenoDirProviderRc<TSys> =
  deno_maybe_sync::MaybeArc<DenoDirProvider<TSys>>;
/// Lazily creates the deno dir which might be useful in scenarios
/// where functionality wants to continue if the DENO_DIR can't be created.
pub struct DenoDirProvider<TSys: DenoDirSys> {
  options: DenoDirOptions,
  sys: TSys,
  // memoizes the resolved dir so resolution happens at most once
  deno_dir_cell: once_cell::sync::OnceCell<DenoDir<TSys>>,
}
impl<TSys: DenoDirSys> DenoDirProvider<TSys> {
  /// Creates a provider; no filesystem work happens until
  /// [`Self::get_or_create`] is called.
  pub fn new(sys: TSys, options: DenoDirOptions) -> Self {
    Self {
      options,
      sys,
      deno_dir_cell: Default::default(),
    }
  }
  /// Resolves (once) and returns the deno dir, erroring if resolution
  /// fails.
  pub fn get_or_create(
    &self,
  ) -> Result<&DenoDir<TSys>, DenoDirResolutionError> {
    self.deno_dir_cell.get_or_try_init(|| {
      let path = deno_cache_dir::resolve_deno_dir(
        &self.sys,
        self.options.maybe_custom_root.clone(),
      )?;
      Ok(DenoDir::new(self.sys.clone(), path))
    })
  }
}
/// `DenoDir` serves as coordinator for multiple `DiskCache`s containing them
/// in single directory that can be controlled with `$DENO_DIR` env variable.
#[derive(Debug, Clone)]
pub struct DenoDir<TSys: DiskCacheSys> {
  /// Example: /Users/rld/.deno/
  pub root: PathBuf,
  /// Used by TsCompiler to cache compiler output.
  pub gen_cache: DiskCache<TSys>,
}
impl<TSys: DiskCacheSys> DenoDir<TSys> {
  /// Creates a deno dir rooted at `root`, which must be absolute
  /// (asserted on non-wasm targets).
  pub fn new(sys: TSys, root: PathBuf) -> Self {
    #[cfg(not(target_arch = "wasm32"))]
    assert!(root.is_absolute());
    let gen_path = root.join("gen");
    Self {
      root,
      gen_cache: DiskCache::new(sys, gen_path),
    }
  }
  /// The root directory of the DENO_DIR for display purposes only.
  pub fn root_path_for_display(&self) -> std::path::Display<'_> {
    self.root.display()
  }
  /// Path for the V8 code cache.
  pub fn code_cache_db_file_path(&self) -> PathBuf {
    // bump this version name to invalidate the entire cache
    self.root.join("v8_code_cache_v2")
  }
  /// Path for the incremental cache used for formatting.
  pub fn fmt_incremental_cache_db_file_path(&self) -> PathBuf {
    // bump this version name to invalidate the entire cache
    self.root.join("fmt_incremental_cache_v2")
  }
  /// Path for the incremental cache used for linting.
  pub fn lint_incremental_cache_db_file_path(&self) -> PathBuf {
    // bump this version name to invalidate the entire cache
    self.root.join("lint_incremental_cache_v2")
  }
  /// Path for caching swc dependency analysis.
  pub fn dep_analysis_db_file_path(&self) -> PathBuf {
    // bump this version name to invalidate the entire cache
    self.root.join("dep_analysis_cache_v2")
  }
  /// Path for the cache used for fast check.
  pub fn fast_check_cache_db_file_path(&self) -> PathBuf {
    // bump this version name to invalidate the entire cache
    self.root.join("fast_check_cache_v2")
  }
  /// Path for caching node analysis.
  pub fn node_analysis_db_file_path(&self) -> PathBuf {
    // bump this version name to invalidate the entire cache
    self.root.join("node_analysis_cache_v2")
  }
  /// Path for the cache used for type checking.
  pub fn type_checking_cache_db_file_path(&self) -> PathBuf {
    // bump this version name to invalidate the entire cache
    self.root.join("check_cache_v2")
  }
  /// Path to the registries cache, used for the lps.
  pub fn registries_folder_path(&self) -> PathBuf {
    self.root.join("registries")
  }
  /// Path to the remote cache folder.
  pub fn remote_folder_path(&self) -> PathBuf {
    self.root.join("remote")
  }
  /// Path to the origin data cache folder.
  pub fn origin_data_folder_path(&self) -> PathBuf {
    // TODO(@crowlKats): change to origin_data for 2.0
    self.root.join("location_data")
  }
  /// File used for the upgrade checker.
  pub fn upgrade_check_file_path(&self) -> PathBuf {
    self.root.join("latest.txt")
  }
  /// Folder used for the npm cache.
  pub fn npm_folder_path(&self) -> PathBuf {
    self.root.join("npm")
  }
  /// Path used for the REPL history file.
  /// Can be overridden or disabled by setting `DENO_REPL_HISTORY` environment variable.
  // NOTE(review): this reads the process environment directly rather than
  // going through the injected sys abstraction like the rest of this type —
  // confirm this is intentional (it bypasses sys-based test overrides).
  pub fn repl_history_file_path(&self) -> Option<PathBuf> {
    if let Some(deno_repl_history) = env::var_os("DENO_REPL_HISTORY") {
      // an explicitly empty value disables history entirely
      if deno_repl_history.is_empty() {
        None
      } else {
        Some(PathBuf::from(deno_repl_history))
      }
    } else {
      Some(self.root.join("deno_history.txt"))
    }
  }
  /// Folder path used for downloading new versions of deno.
  pub fn dl_folder_path(&self) -> PathBuf {
    self.root.join("dl")
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/resolver/cache/emit.rs | libs/resolver/cache/emit.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::hash::Hash;
use std::hash::Hasher;
use std::path::PathBuf;
use anyhow::Error as AnyError;
use anyhow::anyhow;
use deno_unsync::sync::AtomicFlag;
use url::Url;
use super::DiskCache;
use super::DiskCacheSys;
#[allow(clippy::disallowed_types)]
pub type EmitCacheRc<TSys> = deno_maybe_sync::MaybeArc<EmitCache<TSys>>;
#[sys_traits::auto_impl]
pub trait EmitCacheSys: DiskCacheSys + sys_traits::EnvVar {}
/// The cache that stores previously emitted files.
#[derive(Debug)]
pub struct EmitCache<TSys: EmitCacheSys> {
disk_cache: DiskCache<TSys>,
emit_failed_flag: AtomicFlag,
file_serializer: EmitFileSerializer,
mode: Mode,
}
#[derive(Debug)]
enum Mode {
Normal,
Disable,
}
impl<TSys: EmitCacheSys> EmitCache<TSys> {
pub fn new(
sys: &TSys,
disk_cache: DiskCache<TSys>,
cache_version: Cow<'static, str>,
) -> Self {
let emit_cache_mode = sys.env_var_os("DENO_EMIT_CACHE_MODE");
let mode = match emit_cache_mode.as_ref().and_then(|s| s.to_str()) {
Some("normal") | Some("") | None => Mode::Normal,
Some("disable") => Mode::Disable,
_ => {
log::warn!("Unknown DENO_EMIT_CACHE_MODE value, defaulting to normal");
Mode::Normal
}
};
Self {
disk_cache,
emit_failed_flag: Default::default(),
file_serializer: EmitFileSerializer { cache_version },
mode,
}
}
/// Gets the emitted code with embedded sourcemap from the cache.
///
/// The expected source hash is used in order to verify
/// that you're getting a value from the cache that is
/// for the provided source.
///
/// Cached emits from previous CLI releases will not be returned
/// or emits that do not match the source.
pub fn get_emit_code(
&self,
specifier: &Url,
expected_source_hash: u64,
) -> Option<String> {
if matches!(self.mode, Mode::Disable) {
return None;
}
let emit_filename = self.get_emit_filename(specifier)?;
let bytes = self.disk_cache.get(&emit_filename).ok()?;
self
.file_serializer
.deserialize(bytes, expected_source_hash)
}
/// Sets the emit code in the cache.
pub fn set_emit_code(&self, specifier: &Url, source_hash: u64, code: &[u8]) {
if let Err(err) = self.set_emit_code_result(specifier, source_hash, code) {
// might error in cases such as a readonly file system
log::debug!("Error saving emit data ({}): {}", specifier, err);
// assume the cache can't be written to and disable caching to it
self.emit_failed_flag.raise();
}
}
fn set_emit_code_result(
&self,
specifier: &Url,
source_hash: u64,
code: &[u8],
) -> Result<(), AnyError> {
if matches!(self.mode, Mode::Disable) || self.emit_failed_flag.is_raised() {
log::debug!("Skipped emit cache save of {}", specifier);
return Ok(());
}
let emit_filename = self
.get_emit_filename(specifier)
.ok_or_else(|| anyhow!("Could not get emit filename."))?;
let cache_data = self.file_serializer.serialize(code, source_hash);
self.disk_cache.set(&emit_filename, &cache_data)?;
Ok(())
}
fn get_emit_filename(&self, specifier: &Url) -> Option<PathBuf> {
self
.disk_cache
.get_cache_filename_with_extension(specifier, "js")
}
}
const LAST_LINE_PREFIX: &str = "\n// denoCacheMetadata=";
#[derive(Debug)]
struct EmitFileSerializer {
cache_version: Cow<'static, str>,
}
impl EmitFileSerializer {
pub fn deserialize(
&self,
mut bytes: Vec<u8>,
expected_source_hash: u64,
) -> Option<String> {
let last_newline_index = bytes.iter().rposition(|&b| b == b'\n')?;
let (content, last_line) = bytes.split_at(last_newline_index);
let hashes = last_line.strip_prefix(LAST_LINE_PREFIX.as_bytes())?;
let hashes = String::from_utf8_lossy(hashes);
let (source_hash, emit_hash) = hashes.split_once(',')?;
// verify the meta data file is for this source and CLI version
let source_hash = source_hash.parse::<u64>().ok()?;
if source_hash != expected_source_hash {
return None;
}
let emit_hash = emit_hash.parse::<u64>().ok()?;
// prevent using an emit from a different cli version or emits that were tampered with
if emit_hash != self.compute_emit_hash(content) {
return None;
}
// everything looks good, truncate and return it
bytes.truncate(content.len());
String::from_utf8(bytes).ok()
}
pub fn serialize(&self, code: &[u8], source_hash: u64) -> Vec<u8> {
let source_hash = source_hash.to_string();
let emit_hash = self.compute_emit_hash(code).to_string();
let capacity = code.len()
+ LAST_LINE_PREFIX.len()
+ source_hash.len()
+ 1
+ emit_hash.len();
let mut cache_data = Vec::with_capacity(capacity);
cache_data.extend(code);
cache_data.extend(LAST_LINE_PREFIX.as_bytes());
cache_data.extend(source_hash.as_bytes());
cache_data.push(b',');
cache_data.extend(emit_hash.as_bytes());
debug_assert_eq!(cache_data.len(), capacity);
cache_data
}
fn compute_emit_hash(&self, bytes: &[u8]) -> u64 {
// it's ok to use an insecure hash here because
// if someone can change the emit source then they
// can also change the version hash
let mut hasher = twox_hash::XxHash64::default();
bytes.hash(&mut hasher);
// emit should not be re-used between cli versions
self.cache_version.hash(&mut hasher);
hasher.finish()
}
}
#[cfg(test)]
mod test {
use test_util::TempDir;
use super::*;
#[test]
pub fn emit_cache_general_use() {
let temp_dir = TempDir::new();
let disk_cache =
DiskCache::new(sys_traits::impls::RealSys, temp_dir.path().to_path_buf());
let cache = EmitCache {
disk_cache: disk_cache.clone(),
file_serializer: EmitFileSerializer {
cache_version: "1.0.0".into(),
},
emit_failed_flag: Default::default(),
mode: Mode::Normal,
};
let specifier1 = deno_path_util::url_from_file_path(
temp_dir.path().join("file1.ts").as_path(),
)
.unwrap();
let specifier2 = deno_path_util::url_from_file_path(
temp_dir.path().join("file2.ts").as_path(),
)
.unwrap();
assert_eq!(cache.get_emit_code(&specifier1, 1), None);
let emit_code1 = "text1".to_string();
let emit_code2 = "text2".to_string();
cache.set_emit_code(&specifier1, 10, emit_code1.as_bytes());
cache.set_emit_code(&specifier2, 2, emit_code2.as_bytes());
// providing the incorrect source hash
assert_eq!(cache.get_emit_code(&specifier1, 5), None);
// providing the correct source hash
assert_eq!(
cache.get_emit_code(&specifier1, 10),
Some(emit_code1.clone()),
);
assert_eq!(cache.get_emit_code(&specifier2, 2), Some(emit_code2));
// try changing the cli version (should not load previous ones)
let cache = EmitCache {
disk_cache: disk_cache.clone(),
file_serializer: EmitFileSerializer {
cache_version: "2.0.0".into(),
},
emit_failed_flag: Default::default(),
mode: Mode::Normal,
};
assert_eq!(cache.get_emit_code(&specifier1, 10), None);
cache.set_emit_code(&specifier1, 5, emit_code1.as_bytes());
// recreating the cache should still load the data because the CLI version is the same
let cache = EmitCache {
disk_cache,
file_serializer: EmitFileSerializer {
cache_version: "2.0.0".into(),
},
emit_failed_flag: Default::default(),
mode: Mode::Normal,
};
assert_eq!(cache.get_emit_code(&specifier1, 5), Some(emit_code1));
// adding when already exists should not cause issue
let emit_code3 = "asdf".to_string();
cache.set_emit_code(&specifier1, 20, emit_code3.as_bytes());
assert_eq!(cache.get_emit_code(&specifier1, 5), None);
assert_eq!(cache.get_emit_code(&specifier1, 20), Some(emit_code3));
}
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/resolver/cache/disk_cache.rs | libs/resolver/cache/disk_cache.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::ffi::OsStr;
use std::path::Component;
use std::path::Path;
use std::path::PathBuf;
use std::path::Prefix;
use std::str;
use deno_cache_dir::CACHE_PERM;
use deno_cache_dir::url_to_filename;
use deno_path_util::fs::atomic_write_file_with_retries;
use sys_traits::FsRead;
use url::Host;
use url::Url;
#[sys_traits::auto_impl]
pub trait DiskCacheSys:
deno_path_util::fs::AtomicWriteFileWithRetriesSys + FsRead
{
}
#[derive(Debug, Clone)]
pub struct DiskCache<TSys: DiskCacheSys> {
sys: TSys,
pub location: PathBuf,
}
impl<TSys: DiskCacheSys> DiskCache<TSys> {
/// `location` must be an absolute path.
pub fn new(sys: TSys, location: PathBuf) -> Self {
#[cfg(not(target_arch = "wasm32"))]
assert!(location.is_absolute());
Self { sys, location }
}
pub fn get_cache_filename_with_extension(
&self,
url: &Url,
extension: &str,
) -> Option<PathBuf> {
let base = self.get_cache_filename(url)?;
match base.extension() {
None => Some(base.with_extension(extension)),
Some(ext) => {
let original_extension = OsStr::to_str(ext).unwrap();
let final_extension = format!("{original_extension}.{extension}");
Some(base.with_extension(final_extension))
}
}
}
fn get_cache_filename(&self, url: &Url) -> Option<PathBuf> {
let mut out = PathBuf::new();
let scheme = url.scheme();
out.push(scheme);
match scheme {
"wasm" => {
let host = url.host_str().unwrap();
let host_port = match url.port() {
// Windows doesn't support ":" in filenames, so we represent port using a
// special string.
Some(port) => format!("{host}_PORT{port}"),
None => host.to_string(),
};
out.push(host_port);
for path_seg in url.path_segments().unwrap() {
out.push(path_seg);
}
}
"http" | "https" | "data" | "blob" => out = url_to_filename(url).ok()?,
"file" => {
let path = match deno_path_util::url_to_file_path(url) {
Ok(path) => path,
Err(_) => return None,
};
let mut path_components = path.components();
if sys_traits::impls::is_windows() {
if url.path() == "/" {
return None; // not a valid windows path
}
if let Some(Component::Prefix(prefix_component)) =
path_components.next()
{
// Windows doesn't support ":" in filenames, so we need to extract disk prefix
// Example: file:///C:/deno/js/unit_test_runner.ts
// it should produce: file\c\deno\js\unit_test_runner.ts
match prefix_component.kind() {
Prefix::Disk(disk_byte) | Prefix::VerbatimDisk(disk_byte) => {
let disk = (disk_byte as char).to_string();
out.push(disk);
}
Prefix::UNC(server, share)
| Prefix::VerbatimUNC(server, share) => {
out.push("UNC");
let host = Host::parse(server.to_str().unwrap()).unwrap();
let host = host.to_string().replace(':', "_");
out.push(host);
out.push(share);
}
_ => unreachable!(),
}
}
}
// Must be relative, so strip forward slash
let mut remaining_components = path_components.as_path();
if let Ok(stripped) = remaining_components.strip_prefix("/") {
remaining_components = stripped;
};
out = out.join(remaining_components);
}
_ => return None,
};
Some(out)
}
pub fn get(&self, filename: &Path) -> std::io::Result<Vec<u8>> {
let path = self.location.join(filename);
Ok(self.sys.fs_read(path)?.into_owned())
}
pub fn set(&self, filename: &Path, data: &[u8]) -> std::io::Result<()> {
let path = self.location.join(filename);
atomic_write_file_with_retries(&self.sys, &path, data, CACHE_PERM)
}
}
#[cfg(test)]
mod tests {
// ok, testing
#[allow(clippy::disallowed_types)]
use sys_traits::impls::RealSys;
use test_util::TempDir;
use super::*;
#[test]
fn test_set_get_cache_file() {
let temp_dir = TempDir::new();
let sub_dir = temp_dir.path().join("sub_dir");
let cache = DiskCache::new(RealSys, sub_dir.to_path_buf());
let path = PathBuf::from("foo/bar.txt");
cache.set(&path, b"hello").unwrap();
assert_eq!(cache.get(&path).unwrap(), b"hello");
}
#[test]
fn test_get_cache_filename() {
let cache_location = if cfg!(target_os = "windows") {
PathBuf::from(r"C:\deno_dir\")
} else {
PathBuf::from("/deno_dir/")
};
let cache = DiskCache::new(RealSys, cache_location);
let mut test_cases = vec![
(
"http://deno.land/std/http/file_server.ts",
"http/deno.land/d8300752800fe3f0beda9505dc1c3b5388beb1ee45afd1f1e2c9fc0866df15cf",
),
(
"http://localhost:8000/std/http/file_server.ts",
"http/localhost_PORT8000/d8300752800fe3f0beda9505dc1c3b5388beb1ee45afd1f1e2c9fc0866df15cf",
),
(
"https://deno.land/std/http/file_server.ts",
"https/deno.land/d8300752800fe3f0beda9505dc1c3b5388beb1ee45afd1f1e2c9fc0866df15cf",
),
("wasm://wasm/d1c677ea", "wasm/wasm/d1c677ea"),
];
if cfg!(target_os = "windows") {
test_cases.push(("file:///D:/a/1/s/format.ts", "file/D/a/1/s/format.ts"));
// IPv4 localhost
test_cases.push((
"file://127.0.0.1/d$/a/1/s/format.ts",
"file/UNC/127.0.0.1/d$/a/1/s/format.ts",
));
// IPv6 localhost
test_cases.push((
"file://[0:0:0:0:0:0:0:1]/d$/a/1/s/format.ts",
"file/UNC/[__1]/d$/a/1/s/format.ts",
));
// shared folder
test_cases.push((
"file://comp/t-share/a/1/s/format.ts",
"file/UNC/comp/t-share/a/1/s/format.ts",
));
} else {
test_cases.push((
"file:///std/http/file_server.ts",
"file/std/http/file_server.ts",
));
}
for test_case in &test_cases {
let cache_filename =
cache.get_cache_filename(&Url::parse(test_case.0).unwrap());
assert_eq!(cache_filename, Some(PathBuf::from(test_case.1)));
}
}
#[test]
fn test_get_cache_filename_with_extension() {
let p = if cfg!(target_os = "windows") {
"C:\\foo"
} else {
"/foo"
};
let cache = DiskCache::new(RealSys, PathBuf::from(p));
let mut test_cases = vec![
(
"http://deno.land/std/http/file_server.ts",
"js",
"http/deno.land/d8300752800fe3f0beda9505dc1c3b5388beb1ee45afd1f1e2c9fc0866df15cf.js",
),
(
"http://deno.land/std/http/file_server.ts",
"js.map",
"http/deno.land/d8300752800fe3f0beda9505dc1c3b5388beb1ee45afd1f1e2c9fc0866df15cf.js.map",
),
];
if cfg!(target_os = "windows") {
test_cases.push((
"file:///D:/std/http/file_server",
"js",
"file/D/std/http/file_server.js",
));
} else {
test_cases.push((
"file:///std/http/file_server",
"js",
"file/std/http/file_server.js",
));
}
for test_case in &test_cases {
assert_eq!(
cache.get_cache_filename_with_extension(
&Url::parse(test_case.0).unwrap(),
test_case.1
),
Some(PathBuf::from(test_case.2))
)
}
}
#[test]
fn test_get_cache_filename_invalid_urls() {
let cache_location = if cfg!(target_os = "windows") {
PathBuf::from(r"C:\deno_dir\")
} else {
PathBuf::from("/deno_dir/")
};
let cache = DiskCache::new(RealSys, cache_location);
let mut test_cases = vec!["unknown://localhost/test.ts"];
if cfg!(target_os = "windows") {
test_cases.push("file://");
test_cases.push("file:///");
}
for test_case in &test_cases {
let cache_filename =
cache.get_cache_filename(&Url::parse(test_case).unwrap());
assert_eq!(cache_filename, None, "Failed for {:?}", test_case);
}
}
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/resolver/cache/parsed_source.rs | libs/resolver/cache/parsed_source.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use deno_ast::ParsedSource;
use deno_graph::ast::CapturingEsParser;
use deno_graph::ast::DefaultEsParser;
use deno_graph::ast::EsParser;
use deno_graph::ast::ParsedSourceStore;
use deno_media_type::MediaType;
use url::Url;
/// Lazily parses JS/TS sources from a `deno_graph::ModuleGraph` given
/// a `ParsedSourceCache`. Note that deno_graph doesn't necessarily cause
/// files to end up in the `ParsedSourceCache` because it might have all
/// the information it needs via caching in order to skip parsing.
#[derive(Clone, Copy)]
pub struct LazyGraphSourceParser<'a> {
cache: &'a ParsedSourceCache,
graph: &'a deno_graph::ModuleGraph,
}
impl<'a> LazyGraphSourceParser<'a> {
pub fn new(
cache: &'a ParsedSourceCache,
graph: &'a deno_graph::ModuleGraph,
) -> Self {
Self { cache, graph }
}
#[allow(clippy::result_large_err)]
pub fn get_or_parse_source(
&self,
module_specifier: &Url,
) -> Result<Option<ParsedSource>, deno_ast::ParseDiagnostic> {
let Some(deno_graph::Module::Js(module)) = self.graph.get(module_specifier)
else {
return Ok(None);
};
self
.cache
.get_parsed_source_from_js_module(module)
.map(Some)
}
}
#[allow(clippy::disallowed_types)] // ok because we always store source text as Arc<str>
type ArcStr = std::sync::Arc<str>;
#[allow(clippy::disallowed_types)]
pub type ParsedSourceCacheRc = deno_maybe_sync::MaybeArc<ParsedSourceCache>;
#[derive(Debug, Default)]
pub struct ParsedSourceCache {
sources: deno_maybe_sync::MaybeDashMap<Url, ParsedSource>,
}
impl ParsedSourceCache {
#[allow(clippy::result_large_err)]
pub fn get_parsed_source_from_js_module(
&self,
module: &deno_graph::JsModule,
) -> Result<ParsedSource, deno_ast::ParseDiagnostic> {
self.get_matching_parsed_source(
&module.specifier,
module.media_type,
module.source.text.clone(),
)
}
#[allow(clippy::result_large_err)]
pub fn get_matching_parsed_source(
&self,
specifier: &Url,
media_type: MediaType,
source: ArcStr,
) -> Result<ParsedSource, deno_ast::ParseDiagnostic> {
let parser = self.as_capturing_parser();
// this will conditionally parse because it's using a CapturingEsParser
parser.parse_program(deno_graph::ast::ParseOptions {
specifier,
source,
media_type,
scope_analysis: false,
})
}
#[allow(clippy::result_large_err, clippy::disallowed_types)]
pub fn remove_or_parse_module(
&self,
specifier: &Url,
media_type: MediaType,
source: ArcStr,
) -> Result<ParsedSource, deno_ast::ParseDiagnostic> {
if let Some(parsed_source) = self.remove_parsed_source(specifier)
&& parsed_source.media_type() == media_type
&& parsed_source.text().as_ref() == source.as_ref()
{
// note: message used tests
log::debug!("Removed parsed source: {}", specifier);
return Ok(parsed_source);
}
let options = deno_graph::ast::ParseOptions {
specifier,
source,
media_type,
scope_analysis: false,
};
DefaultEsParser.parse_program(options)
}
/// Frees the parsed source from memory.
pub fn free(&self, specifier: &Url) {
self.sources.remove(specifier);
}
/// Fress all parsed sources from memory.
pub fn free_all(&self) {
self.sources.clear();
}
/// Creates a parser that will reuse a ParsedSource from the store
/// if it exists, or else parse.
pub fn as_capturing_parser(&self) -> CapturingEsParser<'_> {
CapturingEsParser::new(None, self)
}
#[allow(clippy::len_without_is_empty)]
pub fn len(&self) -> usize {
self.sources.len()
}
}
/// It's ok that this is racy since in non-LSP situations
/// this will only ever store one form of a parsed source
/// and in LSP settings the concurrency will be enforced
/// at a higher level to ensure this will have the latest
/// parsed source.
impl ParsedSourceStore for ParsedSourceCache {
fn set_parsed_source(
&self,
specifier: Url,
parsed_source: ParsedSource,
) -> Option<ParsedSource> {
self.sources.insert(specifier, parsed_source)
}
fn get_parsed_source(&self, specifier: &Url) -> Option<ParsedSource> {
self.sources.get(specifier).map(|p| p.clone())
}
fn remove_parsed_source(&self, specifier: &Url) -> Option<ParsedSource> {
self.sources.remove(specifier).map(|(_, p)| p)
}
fn get_scope_analysis_parsed_source(
&self,
specifier: &Url,
) -> Option<ParsedSource> {
{
let parsed_source = self.sources.get(specifier)?;
if parsed_source.has_scope_analysis() {
return Some(parsed_source.clone());
}
}
// upgrade to have scope analysis
let (specifier, parsed_source) = self.sources.remove(specifier)?;
let parsed_source = parsed_source.into_with_scope_analysis();
self.sources.insert(specifier, parsed_source.clone());
Some(parsed_source)
}
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/resolver/cache/mod.rs | libs/resolver/cache/mod.rs | // Copyright 2018-2025 the Deno authors. MIT license.
mod deno_dir;
mod disk_cache;
mod emit;
#[cfg(feature = "deno_ast")]
mod parsed_source;
pub use deno_dir::DenoDir;
pub use deno_dir::DenoDirOptions;
pub use deno_dir::DenoDirProvider;
pub use deno_dir::DenoDirProviderRc;
pub use deno_dir::DenoDirSys;
pub use disk_cache::DiskCache;
pub use disk_cache::DiskCacheSys;
pub use emit::EmitCache;
pub use emit::EmitCacheRc;
pub use emit::EmitCacheSys;
#[cfg(feature = "deno_ast")]
pub use parsed_source::LazyGraphSourceParser;
#[cfg(feature = "deno_ast")]
pub use parsed_source::ParsedSourceCache;
#[cfg(feature = "deno_ast")]
pub use parsed_source::ParsedSourceCacheRc;
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/resolver/cjs/mod.rs | libs/resolver/cjs/mod.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use deno_maybe_sync::MaybeDashMap;
use deno_media_type::MediaType;
use node_resolver::InNpmPackageChecker;
use node_resolver::PackageJsonResolverRc;
use node_resolver::ResolutionMode;
use node_resolver::errors::PackageJsonLoadError;
use sys_traits::FsMetadata;
use sys_traits::FsRead;
use url::Url;
pub mod analyzer;
#[allow(clippy::disallowed_types)]
pub type CjsTrackerRc<TInNpmPackageChecker, TSys> =
deno_maybe_sync::MaybeArc<CjsTracker<TInNpmPackageChecker, TSys>>;
/// Keeps track of what module specifiers were resolved as CJS.
///
/// Modules that are `.js`, `.ts`, `.jsx`, and `tsx` are only known to
/// be CJS or ESM after they're loaded based on their contents. So these
/// files will be "maybe CJS" until they're loaded.
#[derive(Debug)]
pub struct CjsTracker<
TInNpmPackageChecker: InNpmPackageChecker,
TSys: FsRead + FsMetadata,
> {
is_cjs_resolver: IsCjsResolver<TInNpmPackageChecker, TSys>,
known: MaybeDashMap<Url, ResolutionMode>,
require_modules: Vec<Url>,
}
impl<TInNpmPackageChecker: InNpmPackageChecker, TSys: FsRead + FsMetadata>
CjsTracker<TInNpmPackageChecker, TSys>
{
pub fn new(
in_npm_pkg_checker: TInNpmPackageChecker,
pkg_json_resolver: PackageJsonResolverRc<TSys>,
mode: IsCjsResolutionMode,
require_modules: Vec<Url>,
) -> Self {
Self {
is_cjs_resolver: IsCjsResolver::new(
in_npm_pkg_checker,
pkg_json_resolver,
mode,
),
known: Default::default(),
require_modules,
}
}
/// Checks whether the file might be treated as CJS, but it's not for sure
/// yet because the source hasn't been loaded to see whether it contains
/// imports or exports.
pub fn is_maybe_cjs(
&self,
specifier: &Url,
media_type: MediaType,
) -> Result<bool, PackageJsonLoadError> {
self.treat_as_cjs_with_is_script(specifier, media_type, None)
}
/// Mark a file as being known CJS or ESM.
pub fn set_is_known_script(&self, specifier: &Url, is_script: bool) {
let new_value = if is_script {
ResolutionMode::Require
} else {
ResolutionMode::Import
};
// block to really ensure dashmap is not borrowed while trying to insert
{
if let Some(value) = self.known.get(specifier) {
// you shouldn't be insert a value in here that's
// already known and is a different value than what
// was previously determined
debug_assert_eq!(*value, new_value);
return;
}
}
self.known.insert(specifier.clone(), new_value);
}
/// Gets whether the file is CJS. If true, this is for sure
/// cjs because `is_script` is provided.
///
/// `is_script` should be `true` when the contents of the file at the
/// provided specifier are known to be a script and not an ES module.
pub fn is_cjs_with_known_is_script(
&self,
specifier: &Url,
media_type: MediaType,
is_script: bool,
) -> Result<bool, PackageJsonLoadError> {
self.treat_as_cjs_with_is_script(specifier, media_type, Some(is_script))
}
fn treat_as_cjs_with_is_script(
&self,
specifier: &Url,
media_type: MediaType,
is_script: Option<bool>,
) -> Result<bool, PackageJsonLoadError> {
let kind = match self
.get_known_mode_with_is_script(specifier, media_type, is_script)
{
Some(kind) => kind,
None => self.is_cjs_resolver.check_based_on_pkg_json(specifier)?,
};
Ok(kind == ResolutionMode::Require)
}
/// Gets the referrer for the specified module specifier.
///
/// Generally the referrer should already be tracked by calling
/// `is_cjs_with_known_is_script` before calling this method.
pub fn get_referrer_kind(&self, specifier: &Url) -> ResolutionMode {
if specifier.scheme() != "file" {
return ResolutionMode::Import;
}
self
.get_known_mode(specifier, MediaType::from_specifier(specifier))
.unwrap_or(ResolutionMode::Import)
}
fn get_known_mode(
&self,
specifier: &Url,
media_type: MediaType,
) -> Option<ResolutionMode> {
self.get_known_mode_with_is_script(specifier, media_type, None)
}
fn get_known_mode_with_is_script(
&self,
specifier: &Url,
media_type: MediaType,
is_script: Option<bool>,
) -> Option<ResolutionMode> {
let is_from_require = self.require_modules.contains(specifier);
self.is_cjs_resolver.get_known_mode_with_is_script(
specifier,
media_type,
is_script,
is_from_require,
&self.known,
)
}
}
#[derive(Debug, Default, Clone, Copy, PartialEq, Eq)]
pub enum IsCjsResolutionMode {
/// Requires an explicit `"type": "commonjs"` in the package.json.
ExplicitTypeCommonJs,
/// Implicitly uses `"type": "commonjs"` if no `"type"` is specified.
ImplicitTypeCommonJs,
/// Does not respect `"type": "commonjs"` and always treats ambiguous files as ESM.
#[default]
Disabled,
}
/// Resolves whether a module is CJS or ESM.
#[derive(Debug)]
pub struct IsCjsResolver<
TInNpmPackageChecker: InNpmPackageChecker,
TSys: FsRead + FsMetadata,
> {
in_npm_pkg_checker: TInNpmPackageChecker,
pkg_json_resolver: PackageJsonResolverRc<TSys>,
mode: IsCjsResolutionMode,
}
impl<TInNpmPackageChecker: InNpmPackageChecker, TSys: FsRead + FsMetadata>
IsCjsResolver<TInNpmPackageChecker, TSys>
{
pub fn new(
in_npm_pkg_checker: TInNpmPackageChecker,
pkg_json_resolver: PackageJsonResolverRc<TSys>,
mode: IsCjsResolutionMode,
) -> Self {
Self {
in_npm_pkg_checker,
pkg_json_resolver,
mode,
}
}
/// Gets the resolution mode for a module in the LSP.
pub fn get_lsp_resolution_mode(
&self,
specifier: &Url,
is_script: Option<bool>,
) -> ResolutionMode {
if specifier.scheme() != "file" {
return ResolutionMode::Import;
}
match MediaType::from_specifier(specifier) {
MediaType::Mts | MediaType::Mjs | MediaType::Dmts => ResolutionMode::Import,
MediaType::Cjs | MediaType::Cts | MediaType::Dcts => ResolutionMode::Require,
MediaType::Dts => {
// dts files are always determined based on the package.json because
// they contain imports/exports even when considered CJS
self.check_based_on_pkg_json(specifier).unwrap_or(ResolutionMode::Import)
}
MediaType::Wasm |
MediaType::Json => ResolutionMode::Import,
MediaType::JavaScript
| MediaType::Jsx
| MediaType::TypeScript
| MediaType::Tsx
// treat these as unknown
| MediaType::Css
| MediaType::Html
| MediaType::Jsonc
| MediaType::Json5
| MediaType::SourceMap
| MediaType::Sql
| MediaType::Unknown => {
match is_script {
Some(true) => self.check_based_on_pkg_json(specifier).unwrap_or(ResolutionMode::Import),
Some(false) | None => ResolutionMode::Import,
}
}
}
}
fn get_known_mode_with_is_script(
&self,
specifier: &Url,
media_type: MediaType,
is_script: Option<bool>,
is_from_require: bool,
known_cache: &MaybeDashMap<Url, ResolutionMode>,
) -> Option<ResolutionMode> {
if specifier.scheme() != "file" {
return Some(ResolutionMode::Import);
}
match media_type {
MediaType::Mts | MediaType::Mjs | MediaType::Dmts => Some(ResolutionMode::Import),
MediaType::Cjs | MediaType::Cts | MediaType::Dcts => Some(ResolutionMode::Require),
MediaType::Dts => {
// dts files are always determined based on the package.json because
// they contain imports/exports even when considered CJS
if let Some(value) = known_cache.get(specifier).map(|v| *v) {
Some(value)
} else {
let value = self.check_based_on_pkg_json(specifier).ok();
if let Some(value) = value {
known_cache.insert(specifier.clone(), value);
}
Some(value.unwrap_or(ResolutionMode::Import))
}
}
MediaType::Wasm |
MediaType::Json => Some(ResolutionMode::Import),
MediaType::JavaScript
| MediaType::Jsx
| MediaType::TypeScript
| MediaType::Tsx
// treat these as unknown
| MediaType::Css
| MediaType::Html
| MediaType::Jsonc
| MediaType::Json5
| MediaType::SourceMap
| MediaType::Sql
| MediaType::Unknown => {
if is_from_require {
return Some(ResolutionMode::Require);
}
if let Some(value) = known_cache.get(specifier).map(|v| *v) {
if value == ResolutionMode::Require && is_script == Some(false) {
// we now know this is actually esm
known_cache.insert(specifier.clone(), ResolutionMode::Import);
Some(ResolutionMode::Import)
} else {
Some(value)
}
} else if is_script == Some(false) {
// we know this is esm
known_cache.insert(specifier.clone(), ResolutionMode::Import);
Some(ResolutionMode::Import)
} else {
None
}
}
}
}
fn check_based_on_pkg_json(
&self,
specifier: &Url,
) -> Result<ResolutionMode, PackageJsonLoadError> {
if self.in_npm_pkg_checker.in_npm_package(specifier) {
let Ok(path) = deno_path_util::url_to_file_path(specifier) else {
return Ok(ResolutionMode::Require);
};
if let Some(pkg_json) =
self.pkg_json_resolver.get_closest_package_json(&path)?
{
let is_file_location_cjs = pkg_json.typ != "module";
Ok(if is_file_location_cjs || path.extension().is_none() {
ResolutionMode::Require
} else {
ResolutionMode::Import
})
} else {
Ok(ResolutionMode::Require)
}
} else if self.mode != IsCjsResolutionMode::Disabled {
let Ok(path) = deno_path_util::url_to_file_path(specifier) else {
return Ok(ResolutionMode::Import);
};
if let Some(pkg_json) =
self.pkg_json_resolver.get_closest_package_json(&path)?
{
let is_cjs_type = pkg_json.typ == "commonjs"
|| self.mode == IsCjsResolutionMode::ImplicitTypeCommonJs
&& pkg_json.typ == "none";
Ok(if is_cjs_type {
ResolutionMode::Require
} else {
ResolutionMode::Import
})
} else if self.mode == IsCjsResolutionMode::ImplicitTypeCommonJs {
Ok(ResolutionMode::Require)
} else {
Ok(ResolutionMode::Import)
}
} else {
Ok(ResolutionMode::Import)
}
}
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/resolver/cjs/analyzer/deno_ast.rs | libs/resolver/cjs/analyzer/deno_ast.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use deno_ast::MediaType;
use deno_ast::ParsedSource;
use deno_error::JsErrorBox;
use deno_graph::ast::ParsedSourceStore;
use url::Url;
use super::ModuleExportAnalyzer;
use crate::cache::ParsedSourceCacheRc;
pub struct DenoAstModuleExportAnalyzer {
parsed_source_cache: ParsedSourceCacheRc,
}
impl DenoAstModuleExportAnalyzer {
pub fn new(parsed_source_cache: ParsedSourceCacheRc) -> Self {
Self {
parsed_source_cache,
}
}
}
#[allow(clippy::disallowed_types)]
type ArcStr = std::sync::Arc<str>;
impl ModuleExportAnalyzer for DenoAstModuleExportAnalyzer {
fn parse_module(
&self,
specifier: Url,
media_type: MediaType,
source: ArcStr,
) -> Result<Box<dyn super::ModuleForExportAnalysis>, JsErrorBox> {
let maybe_parsed_source =
self.parsed_source_cache.remove_parsed_source(&specifier);
let parsed_source = maybe_parsed_source
.map(Ok)
.unwrap_or_else(|| {
deno_ast::parse_program(deno_ast::ParseParams {
specifier,
text: source,
media_type,
capture_tokens: true,
scope_analysis: false,
maybe_syntax: None,
})
})
.map_err(JsErrorBox::from_err)?;
Ok(Box::new(parsed_source))
}
}
/// Adapts `deno_ast::ParsedSource` to the analyzer-agnostic
/// `ModuleForExportAnalysis` interface.
impl super::ModuleForExportAnalysis for ParsedSource {
  fn specifier(&self) -> &Url {
    self.specifier()
  }
  fn compute_is_script(&self) -> bool {
    self.compute_is_script()
  }
  fn analyze_cjs(&self) -> super::ModuleExportsAndReExports {
    // Call the inherent `ParsedSource` method explicitly, then convert
    // its result into the crate-local exports struct.
    let analysis = ParsedSource::analyze_cjs(self);
    super::ModuleExportsAndReExports {
      exports: analysis.exports,
      reexports: analysis.reexports,
    }
  }
  fn analyze_es_runtime_exports(&self) -> super::ModuleExportsAndReExports {
    let analysis = ParsedSource::analyze_es_runtime_exports(self);
    super::ModuleExportsAndReExports {
      exports: analysis.exports,
      reexports: analysis.reexports,
    }
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/resolver/cjs/analyzer/mod.rs | libs/resolver/cjs/analyzer/mod.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use deno_error::JsErrorBox;
use deno_maybe_sync::MaybeSend;
use deno_maybe_sync::MaybeSync;
use deno_media_type::MediaType;
use node_resolver::analyze::CjsAnalysis as ExtNodeCjsAnalysis;
use node_resolver::analyze::CjsAnalysisExports;
use node_resolver::analyze::CjsCodeAnalyzer;
use node_resolver::analyze::EsmAnalysisMode;
use serde::Deserialize;
use serde::Serialize;
use url::Url;
use super::CjsTrackerRc;
use crate::npm::DenoInNpmPackageChecker;
#[cfg(feature = "deno_ast")]
mod deno_ast;
#[cfg(feature = "deno_ast")]
pub use deno_ast::DenoAstModuleExportAnalyzer;
/// Exports and re-exports discovered by statically analyzing a module.
#[derive(Debug, Default, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct ModuleExportsAndReExports {
  // Names exported by the module.
  pub exports: Vec<String>,
  // Re-exports found in the module.
  pub reexports: Vec<String>,
}
/// Result of analyzing a module for CommonJS/ESM classification; this is
/// the value stored in and retrieved from `NodeAnalysisCache`.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub enum DenoCjsAnalysis {
  /// The module was found to be an ES module.
  Esm,
  /// The module was found to be an ES module and
  /// it was analyzed for imports and exports.
  EsmAnalysis(ModuleExportsAndReExports),
  /// The module was CJS.
  Cjs(ModuleExportsAndReExports),
}
#[derive(Debug, Copy, Clone)]
pub struct NodeAnalysisCacheSourceHash(pub u64);
#[allow(clippy::disallowed_types)]
pub type NodeAnalysisCacheRc = deno_maybe_sync::MaybeArc<dyn NodeAnalysisCache>;
/// Cache for CJS analysis results, keyed by specifier plus a hash of the
/// module source text.
pub trait NodeAnalysisCache: MaybeSend + MaybeSync {
  /// Computes the hash used as part of the cache key for `source`.
  fn compute_source_hash(&self, source: &str) -> NodeAnalysisCacheSourceHash;
  /// Looks up a previously stored analysis; `None` on a cache miss.
  fn get_cjs_analysis(
    &self,
    specifier: &Url,
    source_hash: NodeAnalysisCacheSourceHash,
  ) -> Option<DenoCjsAnalysis>;
  /// Stores an analysis so later lookups with the same key can reuse it.
  fn set_cjs_analysis(
    &self,
    specifier: &Url,
    source_hash: NodeAnalysisCacheSourceHash,
    analysis: &DenoCjsAnalysis,
  );
}
/// `NodeAnalysisCache` that never caches: lookups always miss and stores
/// are discarded.
pub struct NullNodeAnalysisCache;
impl NodeAnalysisCache for NullNodeAnalysisCache {
  fn compute_source_hash(&self, _source: &str) -> NodeAnalysisCacheSourceHash {
    // The hash is never used for a lookup here, so a constant suffices.
    NodeAnalysisCacheSourceHash(0)
  }
  fn get_cjs_analysis(
    &self,
    _specifier: &Url,
    _source_hash: NodeAnalysisCacheSourceHash,
  ) -> Option<DenoCjsAnalysis> {
    None
  }
  fn set_cjs_analysis(
    &self,
    _specifier: &Url,
    _source_hash: NodeAnalysisCacheSourceHash,
    _analysis: &DenoCjsAnalysis,
  ) {
  }
}
#[sys_traits::auto_impl]
pub trait DenoCjsCodeAnalyzerSys:
sys_traits::FsRead + sys_traits::FsMetadata + MaybeSend + MaybeSync + 'static
{
}
/// A parsed module that can be queried for its CJS/ESM exports.
pub trait ModuleForExportAnalysis {
  /// The specifier the module was parsed from.
  fn specifier(&self) -> &Url;
  /// Whether the parsed module should be treated as a script rather than
  /// an ES module (feeds `is_cjs_with_known_is_script`).
  fn compute_is_script(&self) -> bool;
  /// Collects the module's CommonJS exports and re-exports.
  fn analyze_cjs(&self) -> ModuleExportsAndReExports;
  /// Collects the module's runtime ES exports and re-exports.
  fn analyze_es_runtime_exports(&self) -> ModuleExportsAndReExports;
}
#[allow(clippy::disallowed_types)]
pub type ModuleExportAnalyzerRc =
deno_maybe_sync::MaybeArc<dyn ModuleExportAnalyzer>;
#[allow(clippy::disallowed_types)]
type ArcStr = std::sync::Arc<str>;
/// Parses module source text into something that can be analyzed for
/// exports (see `ModuleForExportAnalysis`).
pub trait ModuleExportAnalyzer: MaybeSend + MaybeSync {
  fn parse_module(
    &self,
    specifier: Url,
    media_type: MediaType,
    source: ArcStr,
  ) -> Result<Box<dyn ModuleForExportAnalysis>, JsErrorBox>;
}
/// A module export analyzer that panics when asked to parse a module;
/// used when the `deno_ast` feature is not enabled.
pub struct NotImplementedModuleExportAnalyzer;
impl ModuleExportAnalyzer for NotImplementedModuleExportAnalyzer {
  fn parse_module(
    &self,
    _specifier: Url,
    _media_type: MediaType,
    _source: ArcStr,
  ) -> Result<Box<dyn ModuleForExportAnalysis>, JsErrorBox> {
    panic!("Enable the deno_ast feature to get module export analysis.");
  }
}
#[allow(clippy::disallowed_types)]
pub type DenoCjsCodeAnalyzerRc<TSys> =
deno_maybe_sync::MaybeArc<DenoCjsCodeAnalyzer<TSys>>;
pub struct DenoCjsCodeAnalyzer<TSys: DenoCjsCodeAnalyzerSys> {
cache: NodeAnalysisCacheRc,
cjs_tracker: CjsTrackerRc<DenoInNpmPackageChecker, TSys>,
module_export_analyzer: ModuleExportAnalyzerRc,
sys: TSys,
}
impl<TSys: DenoCjsCodeAnalyzerSys> DenoCjsCodeAnalyzer<TSys> {
  pub fn new(
    cache: NodeAnalysisCacheRc,
    cjs_tracker: CjsTrackerRc<DenoInNpmPackageChecker, TSys>,
    module_export_analyzer: ModuleExportAnalyzerRc,
    sys: TSys,
  ) -> Self {
    Self {
      cache,
      cjs_tracker,
      module_export_analyzer,
      sys,
    }
  }
  /// Classifies `source` as CJS or ESM (and collects exports when needed),
  /// consulting and updating the analysis cache.
  async fn inner_cjs_analysis(
    &self,
    specifier: &Url,
    source: &str,
    esm_analysis_mode: EsmAnalysisMode,
  ) -> Result<DenoCjsAnalysis, JsErrorBox> {
    let source = source.strip_prefix('\u{FEFF}').unwrap_or(source); // strip BOM
    let source_hash = self.cache.compute_source_hash(source);
    // Fast path: reuse a cached analysis for this exact source text.
    if let Some(analysis) = self.cache.get_cjs_analysis(specifier, source_hash)
    {
      return Ok(analysis);
    }
    let media_type = MediaType::from_specifier(specifier);
    if media_type == MediaType::Json {
      // JSON modules are treated as CJS with no exports to analyze.
      return Ok(DenoCjsAnalysis::Cjs(Default::default()));
    }
    let cjs_tracker = self.cjs_tracker.clone();
    let is_maybe_cjs = cjs_tracker
      .is_maybe_cjs(specifier, media_type)
      .map_err(JsErrorBox::from_err)?;
    let analysis = if is_maybe_cjs
      || esm_analysis_mode == EsmAnalysisMode::SourceImportsAndExports
    {
      // Parsing is required either to confirm CJS-ness or to collect ESM
      // exports. Build a standalone closure so it can run off-thread.
      let module_export_analyzer = self.module_export_analyzer.clone();
      let analyze = {
        let specifier = specifier.clone();
        let source: ArcStr = source.into();
        move || -> Result<_, JsErrorBox> {
          let parsed_source = module_export_analyzer
            .parse_module(specifier, media_type, source)?;
          let is_script = is_maybe_cjs && parsed_source.compute_is_script();
          let is_cjs = is_maybe_cjs
            && cjs_tracker
              .is_cjs_with_known_is_script(
                parsed_source.specifier(),
                media_type,
                is_script,
              )
              .map_err(JsErrorBox::from_err)?;
          if is_cjs {
            let analysis = parsed_source.analyze_cjs();
            Ok(DenoCjsAnalysis::Cjs(analysis))
          } else {
            match esm_analysis_mode {
              EsmAnalysisMode::SourceOnly => Ok(DenoCjsAnalysis::Esm),
              EsmAnalysisMode::SourceImportsAndExports => {
                Ok(DenoCjsAnalysis::EsmAnalysis(
                  parsed_source.analyze_es_runtime_exports(),
                ))
              }
            }
          }
        }
      };
      // With the "sync" feature the parse runs via spawn_blocking;
      // otherwise it runs inline on the current task.
      #[cfg(feature = "sync")]
      {
        crate::rt::spawn_blocking(analyze).await.unwrap()?
      }
      #[cfg(not(feature = "sync"))]
      analyze()?
    } else {
      DenoCjsAnalysis::Esm
    };
    // Store the result so subsequent calls for the same source are cheap.
    self
      .cache
      .set_cjs_analysis(specifier, source_hash, &analysis);
    Ok(analysis)
  }
}
#[async_trait::async_trait(?Send)]
impl<TSys: DenoCjsCodeAnalyzerSys> CjsCodeAnalyzer
  for DenoCjsCodeAnalyzer<TSys>
{
  /// Analyzes the module, loading the source text from disk when the
  /// caller did not supply it.
  async fn analyze_cjs<'a>(
    &self,
    specifier: &Url,
    source: Option<Cow<'a, str>>,
    esm_analysis_mode: EsmAnalysisMode,
  ) -> Result<ExtNodeCjsAnalysis<'a>, JsErrorBox> {
    // The analysis reported when no source text can be obtained at all.
    fn empty_cjs<'b>() -> ExtNodeCjsAnalysis<'b> {
      ExtNodeCjsAnalysis::Cjs(CjsAnalysisExports {
        exports: vec![],
        reexports: vec![],
      })
    }
    let source = match source {
      Some(source) => source,
      None => {
        // No inline source: try to read the file. A non-file URL or an
        // unreadable file degrades to an empty CJS result.
        let read_from_disk = deno_path_util::url_to_file_path(specifier)
          .ok()
          .and_then(|path| self.sys.fs_read_to_string_lossy(path).ok());
        match read_from_disk {
          Some(text) => text,
          None => return Ok(empty_cjs()),
        }
      }
    };
    let analysis = self
      .inner_cjs_analysis(specifier, &source, esm_analysis_mode)
      .await?;
    // Map the internal (cacheable) analysis into the node_resolver type.
    Ok(match analysis {
      DenoCjsAnalysis::Esm => ExtNodeCjsAnalysis::Esm(source, None),
      DenoCjsAnalysis::EsmAnalysis(analysis) => ExtNodeCjsAnalysis::Esm(
        source,
        Some(CjsAnalysisExports {
          exports: analysis.exports,
          reexports: analysis.reexports,
        }),
      ),
      DenoCjsAnalysis::Cjs(analysis) => {
        ExtNodeCjsAnalysis::Cjs(CjsAnalysisExports {
          exports: analysis.exports,
          reexports: analysis.reexports,
        })
      }
    })
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/resolver/npm/local.rs | libs/resolver/npm/local.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use deno_cache_dir::npm::mixed_case_package_name_decode;
use deno_npm::NpmPackageCacheFolderId;
use deno_semver::StackString;
use deno_semver::package::PackageNv;
#[inline]
pub fn get_package_folder_id_folder_name(
folder_id: &NpmPackageCacheFolderId,
) -> String {
get_package_folder_id_folder_name_from_parts(
&folder_id.nv,
folder_id.copy_index,
)
}
/// Builds the `node_modules/.deno` folder name for a package:
/// `<name>@<version>` with `_<copy_index>` appended for every copy
/// beyond the first.
pub fn get_package_folder_id_folder_name_from_parts(
  nv: &PackageNv,
  copy_index: u8,
) -> String {
  let name = normalize_pkg_name_for_node_modules_deno_folder(&nv.name);
  match copy_index {
    0 => format!("{}@{}", name, nv.version),
    index => format!("{}@{}_{}", name, nv.version, index),
  }
}
/// Parses a folder name produced by `get_package_folder_id_folder_name`
/// back into an `NpmPackageCacheFolderId`, returning `None` when the
/// name doesn't match the `<name>@<version>[_<copy_index>]` format.
pub fn get_package_folder_id_from_folder_name(
  folder_name: &str,
) -> Option<NpmPackageCacheFolderId> {
  // `+` was substituted for `/` in scoped package names; undo that first.
  let folder_name = folder_name.replace('+', "/");
  let (name, ending) = folder_name.rsplit_once('@')?;
  // A leading `_` marks a base32-encoded mixed-case package name.
  let name: StackString = if let Some(encoded_name) = name.strip_prefix('_') {
    StackString::from_string(mixed_case_package_name_decode(encoded_name)?)
  } else {
    name.into()
  };
  // The version may carry a `_<copy_index>` suffix; absent means copy 0.
  let (raw_version, copy_index) = match ending.split_once('_') {
    Some((raw_version, copy_index)) => {
      let copy_index = copy_index.parse::<u8>().ok()?;
      (raw_version, copy_index)
    }
    None => (ending, 0),
  };
  let version = deno_semver::Version::parse_from_npm(raw_version).ok()?;
  Some(NpmPackageCacheFolderId {
    nv: PackageNv { name, version },
    copy_index,
  })
}
/// Normalizes a package name for use at `node_modules/.deno/<pkg-name>@<version>[_<copy_index>]`
pub fn normalize_pkg_name_for_node_modules_deno_folder(
  name: &str,
) -> Cow<'_, str> {
  // Mixed-case names are base32-encoded (with a `_` prefix) so the folder
  // name survives case-insensitive file systems.
  let name = if name == name.to_lowercase() {
    Cow::Borrowed(name)
  } else {
    Cow::Owned(format!("_{}", mixed_case_package_name_encode(name)))
  };
  // Scoped names contain `/`, which cannot appear in a folder name, so
  // swap it for `+` (reversed by `get_package_folder_id_from_folder_name`).
  if !name.starts_with('@') {
    return name;
  }
  Cow::Owned(name.replace('/', "+"))
}
fn mixed_case_package_name_encode(name: &str) -> String {
// use base32 encoding because it's reversible and the character set
// only includes the characters within 0-9 and A-Z so it can be lower cased
base32::encode(
base32::Alphabet::Rfc4648Lower { padding: false },
name.as_bytes(),
)
.to_lowercase()
}
#[cfg(test)]
mod test {
  use deno_npm::NpmPackageCacheFolderId;
  use deno_semver::package::PackageNv;
  use super::*;
  // Round-trips folder ids through name generation and parsing, covering
  // a scoped name with a copy index and a mixed-case (encoded) name.
  #[test]
  fn test_get_package_folder_id_folder_name() {
    let cases = vec![
      (
        NpmPackageCacheFolderId {
          nv: PackageNv::from_str("@types/foo@1.2.3").unwrap(),
          copy_index: 1,
        },
        "@types+foo@1.2.3_1".to_string(),
      ),
      (
        NpmPackageCacheFolderId {
          nv: PackageNv::from_str("JSON@3.2.1").unwrap(),
          copy_index: 0,
        },
        "_jjju6tq@3.2.1".to_string(),
      ),
    ];
    for (input, output) in cases {
      assert_eq!(get_package_folder_id_folder_name(&input), output);
      let folder_id = get_package_folder_id_from_folder_name(&output).unwrap();
      assert_eq!(folder_id, input);
    }
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/resolver/npm/byonm.rs | libs/resolver/npm/byonm.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::path::Path;
use std::path::PathBuf;
use deno_package_json::PackageJson;
use deno_package_json::PackageJsonDepValue;
use deno_package_json::PackageJsonRc;
use deno_path_util::url_to_file_path;
use deno_semver::StackString;
use deno_semver::Version;
use deno_semver::package::PackageReq;
use node_resolver::InNpmPackageChecker;
use node_resolver::NpmPackageFolderResolver;
use node_resolver::PackageJsonResolverRc;
use node_resolver::UrlOrPathRef;
use node_resolver::cache::NodeResolutionSys;
use node_resolver::errors::PackageFolderResolveError;
use node_resolver::errors::PackageFolderResolveIoError;
use node_resolver::errors::PackageJsonLoadError;
use node_resolver::errors::PackageNotFoundError;
use sys_traits::FsCanonicalize;
use sys_traits::FsDirEntry;
use sys_traits::FsMetadata;
use sys_traits::FsRead;
use sys_traits::FsReadDir;
use thiserror::Error;
use url::Url;
use super::local::normalize_pkg_name_for_node_modules_deno_folder;
/// Errors that can occur when resolving an npm package folder in
/// "bring your own node_modules" (BYONM) mode.
#[derive(Debug, Error, deno_error::JsError)]
pub enum ByonmResolvePkgFolderFromDenoReqError {
  #[class(generic)]
  #[error("Could not find \"{}\" in a node_modules folder. Deno expects the node_modules/ directory to be up to date. Did you forget to run `deno install`?", .0)]
  MissingAlias(StackString),
  #[class(inherit)]
  #[error(transparent)]
  PackageJson(#[from] PackageJsonLoadError),
  #[class(generic)]
  #[error("Could not find a matching package for 'npm:{}' in the node_modules directory. Ensure you have all your JSR and npm dependencies listed in your deno.json or package.json, then run `deno install`. Alternatively, turn on auto-install by specifying `\"nodeModulesDir\": \"auto\"` in your deno.json file.", .0)]
  UnmatchedReq(PackageReq),
  #[class(inherit)]
  #[error(transparent)]
  Io(#[from] std::io::Error),
  #[class(generic)]
  #[error("JSR specifiers are not supported in package.json: {req}")]
  JsrReqUnsupported { req: PackageReq },
}
pub struct ByonmNpmResolverCreateOptions<TSys: FsRead + FsMetadata> {
// todo(dsherret): investigate removing this
pub root_node_modules_dir: Option<PathBuf>,
pub sys: NodeResolutionSys<TSys>,
pub pkg_json_resolver: PackageJsonResolverRc<TSys>,
}
#[sys_traits::auto_impl]
pub trait ByonmNpmResolverSys:
FsCanonicalize + FsRead + FsMetadata + FsReadDir
{
}
#[allow(clippy::disallowed_types)]
pub type ByonmNpmResolverRc<TSys> =
deno_maybe_sync::MaybeArc<ByonmNpmResolver<TSys>>;
#[derive(Debug)]
pub struct ByonmNpmResolver<TSys: ByonmNpmResolverSys> {
sys: NodeResolutionSys<TSys>,
pkg_json_resolver: PackageJsonResolverRc<TSys>,
root_node_modules_dir: Option<PathBuf>,
}
// NOTE(review): manual impl rather than `#[derive(Clone)]`; presumably the
// derive's implicit bounds didn't line up with the field types — confirm.
impl<TSys: ByonmNpmResolverSys + Clone> Clone for ByonmNpmResolver<TSys> {
  fn clone(&self) -> Self {
    Self {
      sys: self.sys.clone(),
      pkg_json_resolver: self.pkg_json_resolver.clone(),
      root_node_modules_dir: self.root_node_modules_dir.clone(),
    }
  }
}
impl<TSys: ByonmNpmResolverSys> ByonmNpmResolver<TSys> {
  pub fn new(options: ByonmNpmResolverCreateOptions<TSys>) -> Self {
    Self {
      root_node_modules_dir: options.root_node_modules_dir,
      sys: options.sys,
      pkg_json_resolver: options.pkg_json_resolver,
    }
  }
  /// The root `node_modules` directory, if one was configured.
  pub fn root_node_modules_path(&self) -> Option<&Path> {
    self.root_node_modules_dir.as_deref()
  }
  /// Finds the ancestor package.json that contains the specified dependency.
  pub fn find_ancestor_package_json_with_dep(
    &self,
    dep_name: &str,
    referrer: &Url,
  ) -> Option<PackageJsonRc> {
    let referrer_path = url_to_file_path(referrer).ok()?;
    // Walk the package.json chain from the referrer upward; the first one
    // listing the dependency (dependencies or devDependencies) wins.
    for result in self
      .pkg_json_resolver
      .get_closest_package_jsons(&referrer_path)
    {
      let Ok(pkg_json) = result else {
        continue;
      };
      if let Some(deps) = &pkg_json.dependencies
        && deps.contains_key(dep_name)
      {
        return Some(pkg_json);
      }
      if let Some(deps) = &pkg_json.dev_dependencies
        && deps.contains_key(dep_name)
      {
        return Some(pkg_json);
      }
    }
    None
  }
  /// Resolves the on-disk folder for the package matching `req`, using the
  /// referrer's package.json chain and the existing node_modules layout.
  pub fn resolve_pkg_folder_from_deno_module_req(
    &self,
    req: &PackageReq,
    referrer: &Url,
  ) -> Result<PathBuf, ByonmResolvePkgFolderFromDenoReqError> {
    // Searches `<ancestor>/node_modules/<alias>` upward from `start_dir`.
    fn node_resolve_dir<TSys: FsCanonicalize + FsMetadata>(
      sys: &NodeResolutionSys<TSys>,
      alias: &str,
      start_dir: &Path,
    ) -> std::io::Result<Option<PathBuf>> {
      for ancestor in start_dir.ancestors() {
        let node_modules_folder = ancestor.join("node_modules");
        let sub_dir = join_package_name(Cow::Owned(node_modules_folder), alias);
        if sys.is_dir(&sub_dir) {
          return Ok(Some(
            deno_path_util::fs::canonicalize_path_maybe_not_exists(
              sys, &sub_dir,
            )?,
          ));
        }
      }
      Ok(None)
    }
    // now attempt to resolve if it's found in any package.json
    let maybe_pkg_json_and_alias =
      self.resolve_pkg_json_and_alias_for_req(req, referrer)?;
    match maybe_pkg_json_and_alias {
      Some((pkg_json, alias)) => {
        // now try node resolution
        if let Some(resolved) =
          node_resolve_dir(&self.sys, &alias, pkg_json.dir_path())?
        {
          return Ok(resolved);
        }
        Err(ByonmResolvePkgFolderFromDenoReqError::MissingAlias(alias))
      }
      None => {
        // now check if node_modules/.deno/ matches this constraint
        if let Some(folder) = self.resolve_folder_in_root_node_modules(req) {
          return Ok(folder);
        }
        Err(ByonmResolvePkgFolderFromDenoReqError::UnmatchedReq(
          req.clone(),
        ))
      }
    }
  }
  /// Determines which package.json (and which dependency alias within it)
  /// satisfies `req`, checking the referrer's package.json chain, the
  /// project root package.json, then existing node_modules directories.
  fn resolve_pkg_json_and_alias_for_req(
    &self,
    req: &PackageReq,
    referrer: &Url,
  ) -> Result<
    Option<(PackageJsonRc, StackString)>,
    ByonmResolvePkgFolderFromDenoReqError,
  > {
    // Returns the dependency key in `pkg_json` whose requirement overlaps
    // with `req`, if any.
    fn resolve_alias_from_pkg_json(
      req: &PackageReq,
      pkg_json: &PackageJson,
    ) -> Result<Option<StackString>, ByonmResolvePkgFolderFromDenoReqError>
    {
      let deps = pkg_json.resolve_local_package_json_deps();
      for (key, value) in
        deps.dependencies.iter().chain(deps.dev_dependencies.iter())
      {
        if let Ok(value) = value {
          match value {
            PackageJsonDepValue::File(_) => {
              // skip
            }
            PackageJsonDepValue::JsrReq(req) => {
              return Err(
                ByonmResolvePkgFolderFromDenoReqError::JsrReqUnsupported {
                  req: req.clone(),
                },
              );
            }
            PackageJsonDepValue::Req(dep_req) => {
              if dep_req.name == req.name
                && dep_req.version_req.intersects(&req.version_req)
              {
                return Ok(Some(key.clone()));
              }
            }
            PackageJsonDepValue::Workspace(_workspace) => {
              if key.as_str() == req.name
                && req.version_req.tag() == Some("workspace")
              {
                return Ok(Some(key.clone()));
              }
            }
          }
        }
      }
      Ok(None)
    }
    // attempt to resolve the npm specifier from the referrer's package.json
    let maybe_referrer_path = url_to_file_path(referrer).ok();
    if let Some(file_path) = maybe_referrer_path {
      for result in self.pkg_json_resolver.get_closest_package_jsons(&file_path)
      {
        let pkg_json = result?;
        if let Some(alias) =
          resolve_alias_from_pkg_json(req, pkg_json.as_ref())?
        {
          return Ok(Some((pkg_json, alias)));
        }
      }
    }
    // then fall back to the project's package.json
    if let Some(root_node_modules_dir) = &self.root_node_modules_dir {
      let root_pkg_json_path =
        root_node_modules_dir.parent().unwrap().join("package.json");
      if let Some(pkg_json) = self
        .pkg_json_resolver
        .load_package_json(&root_pkg_json_path)?
        && let Some(alias) =
          resolve_alias_from_pkg_json(req, pkg_json.as_ref())?
      {
        return Ok(Some((pkg_json, alias)));
      }
    }
    // now try to resolve based on the closest node_modules directory
    let maybe_referrer_path = url_to_file_path(referrer).ok();
    // Checks whether `<node_modules>/<req.name>` holds a package whose
    // name and version satisfy `req`.
    let search_node_modules = |node_modules: &Path| {
      if req.version_req.tag().is_some() {
        return None;
      }
      let pkg_folder = node_modules.join(&req.name);
      if let Ok(Some(dep_pkg_json)) = self
        .pkg_json_resolver
        .load_package_json(&pkg_folder.join("package.json"))
        && dep_pkg_json.name.as_deref() == Some(req.name.as_str())
      {
        // A missing or unparsable version is treated as matching.
        let matches_req = dep_pkg_json
          .version
          .as_ref()
          .and_then(|v| Version::parse_from_npm(v).ok())
          .map(|version| req.version_req.matches(&version))
          .unwrap_or(true);
        if matches_req {
          return Some((dep_pkg_json, req.name.clone()));
        }
      }
      None
    };
    if let Some(file_path) = &maybe_referrer_path {
      for dir_path in file_path.as_path().ancestors().skip(1) {
        if let Some(result) =
          search_node_modules(&dir_path.join("node_modules"))
        {
          return Ok(Some(result));
        }
      }
    }
    // and finally check the root node_modules directory
    if let Some(root_node_modules_dir) = &self.root_node_modules_dir {
      // Skip the root when the referrer lives under it (it was already
      // covered by the ancestor walk above).
      let already_searched = maybe_referrer_path
        .as_ref()
        .and_then(|referrer_path| {
          root_node_modules_dir
            .parent()
            .map(|root_dir| referrer_path.starts_with(root_dir))
        })
        .unwrap_or(false);
      if !already_searched
        && let Some(result) = search_node_modules(root_node_modules_dir)
      {
        return Ok(Some(result));
      }
    }
    Ok(None)
  }
  /// Scans `node_modules/.deno/` for the highest-version folder matching
  /// `req`, if any.
  fn resolve_folder_in_root_node_modules(
    &self,
    req: &PackageReq,
  ) -> Option<PathBuf> {
    // now check if node_modules/.deno/ matches this constraint
    let root_node_modules_dir = self.root_node_modules_dir.as_ref()?;
    let node_modules_deno_dir = root_node_modules_dir.join(".deno");
    let Ok(entries) = self.sys.fs_read_dir(&node_modules_deno_dir) else {
      return None;
    };
    let search_prefix = format!(
      "{}@",
      normalize_pkg_name_for_node_modules_deno_folder(&req.name)
    );
    let mut best_version = None;
    // example entries:
    // - @denotest+add@1.0.0
    // - @denotest+add@1.0.0_1
    for entry in entries {
      let Ok(entry) = entry else {
        continue;
      };
      let Ok(file_type) = entry.file_type() else {
        continue;
      };
      if !file_type.is_dir() {
        continue;
      }
      let entry_name = entry.file_name().to_string_lossy().into_owned();
      let Some(version_and_copy_idx) = entry_name.strip_prefix(&search_prefix)
      else {
        continue;
      };
      // Drop any `_<copy_index>` suffix before parsing the version.
      let version = version_and_copy_idx
        .rsplit_once('_')
        .map(|(v, _)| v)
        .unwrap_or(version_and_copy_idx);
      let Ok(version) = Version::parse_from_npm(version) else {
        continue;
      };
      if let Some(tag) = req.version_req.tag() {
        // Tagged requirement: the folder's `.initialized` file holds a
        // comma-separated list of tags — presumably written at install
        // time; confirm against the installer.
        let initialized_file =
          node_modules_deno_dir.join(&entry_name).join(".initialized");
        let Ok(contents) = self.sys.fs_read_to_string_lossy(&initialized_file)
        else {
          continue;
        };
        let mut tags = contents.split(',').map(str::trim);
        if tags.any(|t| t == tag) {
          if let Some((best_version_version, _)) = &best_version {
            if version > *best_version_version {
              best_version = Some((version, entry_name));
            }
          } else {
            best_version = Some((version, entry_name));
          }
        }
      } else if req.version_req.matches(&version) {
        if let Some((best_version_version, _)) = &best_version {
          if version > *best_version_version {
            best_version = Some((version, entry_name));
          }
        } else {
          best_version = Some((version, entry_name));
        }
      }
    }
    best_version.map(|(_version, entry_name)| {
      join_package_name(
        Cow::Owned(node_modules_deno_dir.join(entry_name).join("node_modules")),
        &req.name,
      )
    })
  }
}
impl<TSys: FsCanonicalize + FsMetadata + FsRead + FsReadDir>
  NpmPackageFolderResolver for ByonmNpmResolver<TSys>
{
  /// Resolves `name` by walking `node_modules` directories upward from
  /// the referrer, then canonicalizing the match.
  fn resolve_package_folder_from_package(
    &self,
    name: &str,
    referrer: &UrlOrPathRef,
  ) -> Result<PathBuf, PackageFolderResolveError> {
    fn inner<TSys: FsMetadata>(
      sys: &NodeResolutionSys<TSys>,
      name: &str,
      referrer: &UrlOrPathRef,
    ) -> Result<PathBuf, PackageFolderResolveError> {
      let maybe_referrer_file = referrer.path().ok();
      let maybe_start_folder =
        maybe_referrer_file.as_ref().and_then(|f| f.parent());
      if let Some(start_folder) = maybe_start_folder {
        for current_folder in start_folder.ancestors() {
          // Don't produce node_modules/node_modules when the ancestor is
          // itself a node_modules directory.
          let node_modules_folder = if current_folder.ends_with("node_modules")
          {
            Cow::Borrowed(current_folder)
          } else {
            Cow::Owned(current_folder.join("node_modules"))
          };
          let sub_dir = join_package_name(node_modules_folder, name);
          if sys.is_dir(&sub_dir) {
            return Ok(sub_dir);
          }
        }
      }
      Err(
        PackageNotFoundError {
          package_name: name.to_string(),
          referrer: referrer.display(),
          referrer_extra: None,
        }
        .into(),
      )
    }
    let path = inner(&self.sys, name, referrer)?;
    self.sys.fs_canonicalize(&path).map_err(|err| {
      PackageFolderResolveIoError {
        package_name: name.to_string(),
        referrer: referrer.display(),
        source: err,
      }
      .into()
    })
  }
  /// Resolves a types package's folder the same way as a regular package,
  /// ignoring the version hint.
  fn resolve_types_package_folder(
    &self,
    types_package_name: &str,
    _maybe_package_version: Option<&Version>,
    maybe_referrer: Option<&UrlOrPathRef>,
  ) -> Option<PathBuf> {
    self
      .resolve_package_folder_from_package(types_package_name, maybe_referrer?)
      .ok()
  }
}
#[derive(Debug, Clone)]
pub struct ByonmInNpmPackageChecker;
impl InNpmPackageChecker for ByonmInNpmPackageChecker {
  /// A specifier is inside an npm package when it is a file URL whose
  /// path contains a `node_modules` directory component.
  fn in_npm_package(&self, specifier: &Url) -> bool {
    if specifier.scheme() != "file" {
      return false;
    }
    let lowercase_path = specifier.path().to_ascii_lowercase();
    lowercase_path.contains("/node_modules/")
  }
}
/// Appends each `/`-separated segment of `package_name` to `path`.
/// Pushing segment-by-segment makes the platform's separator (backslash
/// on Windows) be used instead of the `/` from the package name.
fn join_package_name(path: Cow<'_, Path>, package_name: &str) -> PathBuf {
  let mut result = path.into_owned();
  for segment in package_name.split('/') {
    result.push(segment);
  }
  result
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/resolver/npm/mod.rs | libs/resolver/npm/mod.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::fmt::Debug;
use std::path::Path;
use std::path::PathBuf;
use boxed_error::Boxed;
use deno_error::JsError;
use deno_maybe_sync::MaybeSend;
use deno_maybe_sync::MaybeSync;
use deno_maybe_sync::new_rc;
use deno_semver::Version;
use deno_semver::npm::NpmPackageReqReference;
use deno_semver::package::PackageReq;
use deno_semver::package::PackageReqReference;
use node_resolver::InNpmPackageChecker;
use node_resolver::IsBuiltInNodeModuleChecker;
use node_resolver::NodeResolution;
use node_resolver::NodeResolutionKind;
use node_resolver::NodeResolverRc;
use node_resolver::NpmPackageFolderResolver;
use node_resolver::ResolutionMode;
use node_resolver::UrlOrPath;
use node_resolver::UrlOrPathRef;
use node_resolver::errors::NodeJsErrorCode;
use node_resolver::errors::NodeJsErrorCoded;
use node_resolver::errors::NodeResolveError;
use node_resolver::errors::NodeResolveErrorKind;
use node_resolver::errors::PackageFolderResolveErrorKind;
use node_resolver::errors::PackageFolderResolveIoError;
use node_resolver::errors::PackageNotFoundError;
use node_resolver::errors::PackageResolveErrorKind;
use node_resolver::errors::PackageSubpathFromDenoModuleResolveError;
use node_resolver::errors::TypesNotFoundError;
use thiserror::Error;
use url::Url;
pub use self::byonm::ByonmInNpmPackageChecker;
pub use self::byonm::ByonmNpmResolver;
pub use self::byonm::ByonmNpmResolverCreateOptions;
pub use self::byonm::ByonmNpmResolverRc;
pub use self::byonm::ByonmResolvePkgFolderFromDenoReqError;
pub use self::local::get_package_folder_id_folder_name;
pub use self::local::normalize_pkg_name_for_node_modules_deno_folder;
use self::managed::ManagedInNpmPackageChecker;
use self::managed::ManagedInNpmPkgCheckerCreateOptions;
pub use self::managed::ManagedNpmResolver;
use self::managed::ManagedNpmResolverCreateOptions;
pub use self::managed::ManagedNpmResolverRc;
use self::managed::create_managed_in_npm_pkg_checker;
mod byonm;
mod local;
pub mod managed;
#[derive(Debug)]
pub enum CreateInNpmPkgCheckerOptions<'a> {
Managed(ManagedInNpmPkgCheckerCreateOptions<'a>),
Byonm,
}
#[derive(Debug, Clone)]
pub enum DenoInNpmPackageChecker {
Managed(ManagedInNpmPackageChecker),
Byonm(ByonmInNpmPackageChecker),
}
impl DenoInNpmPackageChecker {
  /// Builds either a managed or a BYONM checker depending on the
  /// options variant.
  pub fn new(options: CreateInNpmPkgCheckerOptions) -> Self {
    match options {
      CreateInNpmPkgCheckerOptions::Managed(options) => {
        DenoInNpmPackageChecker::Managed(create_managed_in_npm_pkg_checker(
          options,
        ))
      }
      CreateInNpmPkgCheckerOptions::Byonm => {
        DenoInNpmPackageChecker::Byonm(ByonmInNpmPackageChecker)
      }
    }
  }
}
impl InNpmPackageChecker for DenoInNpmPackageChecker {
  /// Delegates to whichever checker variant this resolver was built with.
  fn in_npm_package(&self, specifier: &Url) -> bool {
    match self {
      Self::Byonm(checker) => checker.in_npm_package(specifier),
      Self::Managed(checker) => checker.in_npm_package(specifier),
    }
  }
}
#[derive(Debug, Error, JsError)]
#[class(generic)]
#[error(
"Could not resolve \"{}\", but found it in a package.json. Deno expects the node_modules/ directory to be up to date. Did you forget to run `deno install`?",
specifier
)]
pub struct NodeModulesOutOfDateError {
pub specifier: String,
}
#[derive(Debug, Error, JsError)]
#[class(generic)]
#[error("Could not find '{}'. Deno expects the node_modules/ directory to be up to date. Did you forget to run `deno install`?", package_json_path.display())]
pub struct MissingPackageNodeModulesFolderError {
pub package_json_path: PathBuf,
// Don't bother displaying this error, so don't name it "source"
pub inner: PackageSubpathFromDenoModuleResolveError,
}
#[derive(Debug, Boxed, JsError)]
pub struct ResolveIfForNpmPackageError(
pub Box<ResolveIfForNpmPackageErrorKind>,
);
#[derive(Debug, Error, JsError)]
pub enum ResolveIfForNpmPackageErrorKind {
#[class(inherit)]
#[error(transparent)]
NodeResolve(#[from] NodeResolveError),
#[class(inherit)]
#[error(transparent)]
NodeModulesOutOfDate(#[from] NodeModulesOutOfDateError),
}
#[derive(Debug, Error, JsError)]
#[error("npm specifiers were requested; but --no-npm is specified")]
#[class("generic")]
pub struct NoNpmError;
#[derive(Debug, JsError)]
#[class(inherit)]
pub struct ResolveNpmReqRefError {
pub npm_req_ref: NpmPackageReqReference,
#[inherit]
pub err: ResolveReqWithSubPathError,
}
impl std::error::Error for ResolveNpmReqRefError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
self.err.source()
}
}
impl std::fmt::Display for ResolveNpmReqRefError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
std::fmt::Display::fmt(&self.err, f)
}
}
#[derive(Debug, Boxed, JsError)]
pub struct ResolveReqWithSubPathError(pub Box<ResolveReqWithSubPathErrorKind>);
impl ResolveReqWithSubPathError {
pub fn maybe_specifier(&self) -> Option<Cow<'_, UrlOrPath>> {
match self.as_kind() {
ResolveReqWithSubPathErrorKind::NoNpm(_) => None,
ResolveReqWithSubPathErrorKind::MissingPackageNodeModulesFolder(err) => {
err.inner.maybe_specifier()
}
ResolveReqWithSubPathErrorKind::ResolvePkgFolderFromDenoReq(err) => {
Some(Cow::Owned(UrlOrPath::Url(err.npm_specifier.clone())))
}
ResolveReqWithSubPathErrorKind::PackageSubpathResolve(err) => {
err.maybe_specifier()
}
}
}
}
#[derive(Debug, Error, JsError)]
pub enum ResolveReqWithSubPathErrorKind {
#[class(inherit)]
#[error(transparent)]
MissingPackageNodeModulesFolder(#[from] MissingPackageNodeModulesFolderError),
#[class(inherit)]
#[error(transparent)]
NoNpm(NoNpmError),
#[class(inherit)]
#[error(transparent)]
ResolvePkgFolderFromDenoReq(
#[from] ContextedResolvePkgFolderFromDenoReqError,
),
#[class(inherit)]
#[error(transparent)]
PackageSubpathResolve(#[from] PackageSubpathFromDenoModuleResolveError),
}
impl ResolveReqWithSubPathErrorKind {
pub fn as_types_not_found(&self) -> Option<&TypesNotFoundError> {
match self {
ResolveReqWithSubPathErrorKind::NoNpm(_) => None,
ResolveReqWithSubPathErrorKind::MissingPackageNodeModulesFolder(_)
| ResolveReqWithSubPathErrorKind::ResolvePkgFolderFromDenoReq(_) => None,
ResolveReqWithSubPathErrorKind::PackageSubpathResolve(
package_subpath_resolve_error,
) => package_subpath_resolve_error.as_types_not_found(),
}
}
pub fn maybe_code(&self) -> Option<NodeJsErrorCode> {
match self {
ResolveReqWithSubPathErrorKind::NoNpm(_) => None,
ResolveReqWithSubPathErrorKind::MissingPackageNodeModulesFolder(_) => {
None
}
ResolveReqWithSubPathErrorKind::ResolvePkgFolderFromDenoReq(_) => None,
ResolveReqWithSubPathErrorKind::PackageSubpathResolve(e) => {
Some(e.code())
}
}
}
}
#[derive(Debug, JsError)]
#[class(inherit)]
pub struct ContextedResolvePkgFolderFromDenoReqError {
pub npm_specifier: Url,
#[inherit]
pub inner: ResolvePkgFolderFromDenoReqError,
}
impl std::error::Error for ContextedResolvePkgFolderFromDenoReqError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
self.inner.source()
}
}
impl std::fmt::Display for ContextedResolvePkgFolderFromDenoReqError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
std::fmt::Display::fmt(&self.inner, f)
}
}
#[derive(Debug, Error, JsError)]
pub enum ResolvePkgFolderFromDenoReqError {
#[class(inherit)]
#[error(transparent)]
Managed(managed::ManagedResolvePkgFolderFromDenoReqError),
#[class(inherit)]
#[error(transparent)]
Byonm(byonm::ByonmResolvePkgFolderFromDenoReqError),
}
pub enum NpmResolverCreateOptions<TSys: NpmResolverSys> {
Managed(ManagedNpmResolverCreateOptions<TSys>),
Byonm(ByonmNpmResolverCreateOptions<TSys>),
}
#[sys_traits::auto_impl]
pub trait NpmResolverSys:
managed::ManagedNpmResolverSys
+ byonm::ByonmNpmResolverSys
+ node_resolver::NodeResolverSys
+ std::fmt::Debug
+ MaybeSend
+ MaybeSync
+ Clone
+ 'static
{
}
#[derive(Debug, Clone)]
pub enum NpmResolver<TSys: NpmResolverSys> {
/// The resolver when "bring your own node_modules" is enabled where Deno
/// does not setup the node_modules directories automatically, but instead
/// uses what already exists on the file system.
Byonm(ByonmNpmResolverRc<TSys>),
Managed(ManagedNpmResolverRc<TSys>),
}
impl<TSys: NpmResolverSys> NpmResolver<TSys> {
pub fn new<TCreateSys: NpmResolverSys>(
options: NpmResolverCreateOptions<TCreateSys>,
) -> NpmResolver<TCreateSys> {
match options {
NpmResolverCreateOptions::Managed(options) => {
NpmResolver::Managed(new_rc(ManagedNpmResolver::<TCreateSys>::new::<
TCreateSys,
>(options)))
}
NpmResolverCreateOptions::Byonm(options) => {
NpmResolver::Byonm(new_rc(ByonmNpmResolver::new(options)))
}
}
}
pub fn is_byonm(&self) -> bool {
matches!(self, NpmResolver::Byonm(_))
}
pub fn is_managed(&self) -> bool {
matches!(self, NpmResolver::Managed(_))
}
pub fn as_managed(&self) -> Option<&ManagedNpmResolverRc<TSys>> {
match self {
NpmResolver::Managed(resolver) => Some(resolver),
NpmResolver::Byonm(_) => None,
}
}
pub fn root_node_modules_path(&self) -> Option<&Path> {
match self {
NpmResolver::Byonm(resolver) => resolver.root_node_modules_path(),
NpmResolver::Managed(resolver) => resolver.root_node_modules_path(),
}
}
pub fn resolve_pkg_folder_from_deno_module_req(
&self,
req: &PackageReq,
referrer: &Url,
) -> Result<PathBuf, ResolvePkgFolderFromDenoReqError> {
match self {
NpmResolver::Byonm(byonm_resolver) => byonm_resolver
.resolve_pkg_folder_from_deno_module_req(req, referrer)
.map_err(ResolvePkgFolderFromDenoReqError::Byonm),
NpmResolver::Managed(managed_resolver) => managed_resolver
.resolve_pkg_folder_from_deno_module_req(req)
.map_err(ResolvePkgFolderFromDenoReqError::Managed),
}
}
}
impl<TSys: NpmResolverSys> NpmPackageFolderResolver for NpmResolver<TSys> {
fn resolve_package_folder_from_package(
&self,
specifier: &str,
referrer: &UrlOrPathRef,
) -> Result<PathBuf, node_resolver::errors::PackageFolderResolveError> {
match self {
NpmResolver::Byonm(byonm_resolver) => {
byonm_resolver.resolve_package_folder_from_package(specifier, referrer)
}
NpmResolver::Managed(managed_resolver) => managed_resolver
.resolve_package_folder_from_package(specifier, referrer),
}
}
fn resolve_types_package_folder(
&self,
types_package_name: &str,
maybe_package_version: Option<&Version>,
maybe_referrer: Option<&UrlOrPathRef>,
) -> Option<PathBuf> {
match self {
NpmResolver::Byonm(byonm_resolver) => byonm_resolver
.resolve_types_package_folder(
types_package_name,
maybe_package_version,
maybe_referrer,
),
NpmResolver::Managed(managed_resolver) => managed_resolver
.resolve_types_package_folder(
types_package_name,
maybe_package_version,
maybe_referrer,
),
}
}
}
pub struct NpmReqResolverOptions<
TInNpmPackageChecker: InNpmPackageChecker,
TIsBuiltInNodeModuleChecker: IsBuiltInNodeModuleChecker,
TNpmPackageFolderResolver: NpmPackageFolderResolver,
TSys: NpmResolverSys,
> {
pub in_npm_pkg_checker: TInNpmPackageChecker,
pub node_resolver: NodeResolverRc<
TInNpmPackageChecker,
TIsBuiltInNodeModuleChecker,
TNpmPackageFolderResolver,
TSys,
>,
pub npm_resolver: NpmResolver<TSys>,
pub sys: TSys,
}
#[allow(clippy::disallowed_types)]
pub type NpmReqResolverRc<
TInNpmPackageChecker,
TIsBuiltInNodeModuleChecker,
TNpmPackageFolderResolver,
TSys,
> = deno_maybe_sync::MaybeArc<
NpmReqResolver<
TInNpmPackageChecker,
TIsBuiltInNodeModuleChecker,
TNpmPackageFolderResolver,
TSys,
>,
>;
#[derive(Debug)]
pub struct NpmReqResolver<
TInNpmPackageChecker: InNpmPackageChecker,
TIsBuiltInNodeModuleChecker: IsBuiltInNodeModuleChecker,
TNpmPackageFolderResolver: NpmPackageFolderResolver,
TSys: NpmResolverSys,
> {
sys: TSys,
in_npm_pkg_checker: TInNpmPackageChecker,
node_resolver: NodeResolverRc<
TInNpmPackageChecker,
TIsBuiltInNodeModuleChecker,
TNpmPackageFolderResolver,
TSys,
>,
npm_resolver: NpmResolver<TSys>,
}
impl<
TInNpmPackageChecker: InNpmPackageChecker,
TIsBuiltInNodeModuleChecker: IsBuiltInNodeModuleChecker,
TNpmPackageFolderResolver: NpmPackageFolderResolver,
TSys: NpmResolverSys,
>
NpmReqResolver<
TInNpmPackageChecker,
TIsBuiltInNodeModuleChecker,
TNpmPackageFolderResolver,
TSys,
>
{
pub fn new(
options: NpmReqResolverOptions<
TInNpmPackageChecker,
TIsBuiltInNodeModuleChecker,
TNpmPackageFolderResolver,
TSys,
>,
) -> Self {
Self {
sys: options.sys,
in_npm_pkg_checker: options.in_npm_pkg_checker,
node_resolver: options.node_resolver,
npm_resolver: options.npm_resolver,
}
}
pub fn resolve_req_reference(
&self,
req_ref: &NpmPackageReqReference,
referrer: &Url,
resolution_mode: ResolutionMode,
resolution_kind: NodeResolutionKind,
) -> Result<UrlOrPath, ResolveNpmReqRefError> {
self.resolve_req_with_sub_path(
req_ref.req(),
req_ref.sub_path(),
referrer,
resolution_mode,
resolution_kind,
)
}
pub fn resolve_req_with_sub_path(
&self,
req: &PackageReq,
sub_path: Option<&str>,
referrer: &Url,
resolution_mode: ResolutionMode,
resolution_kind: NodeResolutionKind,
) -> Result<UrlOrPath, ResolveNpmReqRefError> {
self
.resolve_req_with_sub_path_inner(
req,
sub_path,
referrer,
resolution_mode,
resolution_kind,
)
.map_err(|source| ResolveNpmReqRefError {
npm_req_ref: NpmPackageReqReference::new(PackageReqReference {
req: req.clone(),
sub_path: sub_path.map(|s| s.into()),
}),
err: source,
})
}
fn resolve_req_with_sub_path_inner(
&self,
req: &PackageReq,
sub_path: Option<&str>,
referrer: &Url,
resolution_mode: ResolutionMode,
resolution_kind: NodeResolutionKind,
) -> Result<UrlOrPath, ResolveReqWithSubPathError> {
let package_folder = self
.npm_resolver
.resolve_pkg_folder_from_deno_module_req(req, referrer)
.map_err(|inner| ContextedResolvePkgFolderFromDenoReqError {
npm_specifier: Url::parse(&format!(
"npm:{}{}",
req,
sub_path.map(|s| format!("/{}", s)).unwrap_or_default(),
))
.unwrap(),
inner,
})?;
let resolution_result =
self.node_resolver.resolve_package_subpath_from_deno_module(
&package_folder,
sub_path,
Some(referrer),
resolution_mode,
resolution_kind,
);
match resolution_result {
Ok(url) => Ok(url),
Err(err) => {
if matches!(self.npm_resolver, NpmResolver::Byonm(_)) {
let package_json_path = package_folder.join("package.json");
if !self.sys.fs_exists_no_err(&package_json_path) {
return Err(
MissingPackageNodeModulesFolderError {
package_json_path,
inner: err,
}
.into(),
);
}
}
Err(err.into())
}
}
}
pub fn resolve_if_for_npm_pkg(
&self,
specifier: &str,
referrer: &Url,
resolution_mode: ResolutionMode,
resolution_kind: NodeResolutionKind,
) -> Result<Option<NodeResolution>, ResolveIfForNpmPackageError> {
let resolution_result = self.node_resolver.resolve(
specifier,
referrer,
resolution_mode,
resolution_kind,
);
match resolution_result {
Ok(res) => Ok(Some(res)),
Err(err) => {
let err = err.into_kind();
match err {
NodeResolveErrorKind::RelativeJoin(_)
| NodeResolveErrorKind::PackageImportsResolve(_)
| NodeResolveErrorKind::UnsupportedEsmUrlScheme(_)
| NodeResolveErrorKind::DataUrlReferrer(_)
| NodeResolveErrorKind::PathToUrl(_)
| NodeResolveErrorKind::UrlToFilePath(_)
| NodeResolveErrorKind::TypesNotFound(_)
| NodeResolveErrorKind::UnknownBuiltInNodeModule(_)
| NodeResolveErrorKind::FinalizeResolution(_) => Err(
ResolveIfForNpmPackageErrorKind::NodeResolve(err.into()).into_box(),
),
NodeResolveErrorKind::PackageResolve(err) => {
let err = err.into_kind();
match err {
PackageResolveErrorKind::UrlToFilePath(err) => Err(
ResolveIfForNpmPackageErrorKind::NodeResolve(
NodeResolveErrorKind::UrlToFilePath(err).into_box(),
)
.into_box(),
),
PackageResolveErrorKind::PkgJsonLoad(_)
| PackageResolveErrorKind::InvalidModuleSpecifier(_)
| PackageResolveErrorKind::ExportsResolve(_)
| PackageResolveErrorKind::SubpathResolve(_) => Err(
ResolveIfForNpmPackageErrorKind::NodeResolve(
NodeResolveErrorKind::PackageResolve(err.into()).into(),
)
.into_box(),
),
PackageResolveErrorKind::PackageFolderResolve(err) => {
match err.as_kind() {
PackageFolderResolveErrorKind::PathToUrl(err) => Err(
ResolveIfForNpmPackageErrorKind::NodeResolve(
NodeResolveErrorKind::PathToUrl(err.clone()).into_box(),
)
.into_box(),
),
PackageFolderResolveErrorKind::Io(
PackageFolderResolveIoError { package_name, .. },
)
| PackageFolderResolveErrorKind::PackageNotFound(
PackageNotFoundError { package_name, .. },
) => {
if self.in_npm_pkg_checker.in_npm_package(referrer) {
return Err(
ResolveIfForNpmPackageErrorKind::NodeResolve(
NodeResolveErrorKind::PackageResolve(err.into())
.into(),
)
.into_box(),
);
}
if let NpmResolver::Byonm(byonm_npm_resolver) =
&self.npm_resolver
&& byonm_npm_resolver
.find_ancestor_package_json_with_dep(
package_name,
referrer,
)
.is_some()
{
return Err(
ResolveIfForNpmPackageErrorKind::NodeModulesOutOfDate(
NodeModulesOutOfDateError {
specifier: specifier.to_string(),
},
)
.into_box(),
);
}
Ok(None)
}
PackageFolderResolveErrorKind::ReferrerNotFound(_) => {
if self.in_npm_pkg_checker.in_npm_package(referrer) {
return Err(
ResolveIfForNpmPackageErrorKind::NodeResolve(
NodeResolveErrorKind::PackageResolve(err.into())
.into(),
)
.into_box(),
);
}
Ok(None)
}
}
}
}
}
}
}
}
}
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/resolver/npm/managed/local.rs | libs/resolver/npm/managed/local.rs | // Copyright 2018-2025 the Deno authors. MIT license.
//! Code for local node_modules resolution.
use std::borrow::Cow;
use std::path::Path;
use std::path::PathBuf;
use deno_npm::NpmPackageCacheFolderId;
use deno_npm::NpmPackageId;
use deno_path_util::fs::canonicalize_path_maybe_not_exists;
use deno_path_util::url_from_directory_path;
use deno_semver::Version;
use node_resolver::NpmPackageFolderResolver;
use node_resolver::UrlOrPathRef;
use node_resolver::errors::PackageFolderResolveError;
use node_resolver::errors::PackageFolderResolveIoError;
use node_resolver::errors::PackageNotFoundError;
use node_resolver::errors::ReferrerNotFoundError;
use sys_traits::FsCanonicalize;
use sys_traits::FsMetadata;
use url::Url;
use super::common::join_package_name_to_path;
use super::resolution::NpmResolutionCellRc;
use crate::npm::local::get_package_folder_id_folder_name_from_parts;
use crate::npm::local::get_package_folder_id_from_folder_name;
/// Resolver that creates a local node_modules directory
/// and resolves packages from it.
#[derive(Debug)]
pub struct LocalNpmPackageResolver<TSys: FsCanonicalize + FsMetadata> {
resolution: NpmResolutionCellRc,
sys: TSys,
root_node_modules_path: PathBuf,
root_node_modules_url: Url,
}
impl<TSys: FsCanonicalize + FsMetadata> LocalNpmPackageResolver<TSys> {
#[allow(clippy::too_many_arguments)]
pub fn new(
resolution: NpmResolutionCellRc,
sys: TSys,
node_modules_folder: PathBuf,
) -> Self {
Self {
resolution,
sys,
root_node_modules_url: url_from_directory_path(&node_modules_folder)
.unwrap(),
root_node_modules_path: node_modules_folder,
}
}
pub fn node_modules_path(&self) -> Option<&Path> {
Some(self.root_node_modules_path.as_ref())
}
pub fn maybe_package_folder(&self, id: &NpmPackageId) -> Option<PathBuf> {
let folder_copy_index = self
.resolution
.resolve_pkg_cache_folder_copy_index_from_pkg_id(id)?;
// package is stored at:
// node_modules/.deno/<package_cache_folder_id_folder_name>/node_modules/<package_name>
Some(
self
.root_node_modules_path
.join(".deno")
.join(get_package_folder_id_folder_name_from_parts(
&id.nv,
folder_copy_index,
))
.join("node_modules")
.join(&id.nv.name),
)
}
pub fn resolve_package_cache_folder_id_from_specifier(
&self,
specifier: &Url,
) -> Result<Option<NpmPackageCacheFolderId>, std::io::Error> {
let Some(folder_path) =
self.resolve_package_folder_from_specifier(specifier)?
else {
return Ok(None);
};
// ex. project/node_modules/.deno/preact@10.24.3/node_modules/preact/
let Some(node_modules_ancestor) = folder_path
.ancestors()
.find(|ancestor| ancestor.ends_with("node_modules"))
else {
return Ok(None);
};
let Some(folder_name) =
node_modules_ancestor.parent().and_then(|p| p.file_name())
else {
return Ok(None);
};
Ok(get_package_folder_id_from_folder_name(
&folder_name.to_string_lossy(),
))
}
fn resolve_package_root(&self, path: &Path) -> PathBuf {
let mut last_found = path;
loop {
let parent = last_found.parent().unwrap();
if parent.file_name().unwrap() == "node_modules" {
return last_found.to_path_buf();
} else {
last_found = parent;
}
}
}
fn resolve_folder_for_specifier(
&self,
specifier: &Url,
) -> Result<Option<PathBuf>, std::io::Error> {
let Some(relative_url) =
self.root_node_modules_url.make_relative(specifier)
else {
return Ok(None);
};
if relative_url.starts_with("../") {
return Ok(None);
}
// it's within the directory, so use it
let Some(path) = deno_path_util::url_to_file_path(specifier).ok() else {
return Ok(None);
};
// Canonicalize the path so it's not pointing to the symlinked directory
// in `node_modules` directory of the referrer.
canonicalize_path_maybe_not_exists(&self.sys, &path).map(Some)
}
fn resolve_package_folder_from_specifier(
&self,
specifier: &Url,
) -> Result<Option<PathBuf>, std::io::Error> {
let Some(local_path) = self.resolve_folder_for_specifier(specifier)? else {
return Ok(None);
};
let package_root_path = self.resolve_package_root(&local_path);
Ok(Some(package_root_path))
}
}
impl<TSys: FsCanonicalize + FsMetadata> NpmPackageFolderResolver
for LocalNpmPackageResolver<TSys>
{
fn resolve_package_folder_from_package(
&self,
name: &str,
referrer: &UrlOrPathRef,
) -> Result<PathBuf, PackageFolderResolveError> {
let maybe_local_path = self
.resolve_folder_for_specifier(referrer.url()?)
.map_err(|err| PackageFolderResolveIoError {
package_name: name.to_string(),
referrer: referrer.display(),
source: err,
})?;
let Some(local_path) = maybe_local_path else {
return Err(
ReferrerNotFoundError {
referrer: referrer.display(),
referrer_extra: None,
}
.into(),
);
};
// go from the current path down because it might have bundled dependencies
for current_folder in local_path.ancestors().skip(1) {
let node_modules_folder = if current_folder.ends_with("node_modules") {
Cow::Borrowed(current_folder)
} else {
Cow::Owned(current_folder.join("node_modules"))
};
let sub_dir = join_package_name_to_path(&node_modules_folder, name);
if self.sys.fs_is_dir_no_err(&sub_dir) {
return Ok(self.sys.fs_canonicalize(&sub_dir).map_err(|err| {
PackageFolderResolveIoError {
package_name: name.to_string(),
referrer: referrer.display(),
source: err,
}
})?);
}
if current_folder == self.root_node_modules_path {
break;
}
}
Err(
PackageNotFoundError {
package_name: name.to_string(),
referrer: referrer.display(),
referrer_extra: None,
}
.into(),
)
}
fn resolve_types_package_folder(
&self,
types_package_name: &str,
maybe_package_version: Option<&Version>,
maybe_referrer: Option<&UrlOrPathRef>,
) -> Option<PathBuf> {
if let Some(referrer) = maybe_referrer
&& let Ok(path) =
self.resolve_package_folder_from_package(types_package_name, referrer)
{
Some(path)
} else {
// otherwise, try to find one in the snapshot
let snapshot = self.resolution.snapshot();
let pkg_id = super::common::find_definitely_typed_package_from_snapshot(
types_package_name,
maybe_package_version,
&snapshot,
)?;
self.maybe_package_folder(pkg_id)
}
}
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/resolver/npm/managed/global.rs | libs/resolver/npm/managed/global.rs | // Copyright 2018-2025 the Deno authors. MIT license.
//! Code for global npm cache resolution.
use std::borrow::Cow;
use std::path::PathBuf;
use deno_npm::NpmPackageCacheFolderId;
use deno_npm::NpmPackageId;
use deno_semver::StackString;
use deno_semver::Version;
use deno_semver::package::PackageNv;
use node_resolver::NpmPackageFolderResolver;
use node_resolver::UrlOrPathRef;
use node_resolver::errors::PackageFolderResolveError;
use node_resolver::errors::PackageFolderResolveIoError;
use node_resolver::errors::PackageNotFoundError;
use node_resolver::errors::ReferrerNotFoundError;
use sys_traits::FsCanonicalize;
use sys_traits::FsMetadata;
use url::Url;
use super::NpmCacheDirRc;
use super::resolution::NpmResolutionCellRc;
use crate::npm::managed::common::join_package_name_to_path;
use crate::npmrc::ResolvedNpmRcRc;
/// Resolves packages from the global npm cache.
#[derive(Debug)]
pub struct GlobalNpmPackageResolver<TSys: FsCanonicalize + FsMetadata> {
cache: NpmCacheDirRc,
npm_rc: ResolvedNpmRcRc,
resolution: NpmResolutionCellRc,
sys: TSys,
}
impl<TSys: FsCanonicalize + FsMetadata> GlobalNpmPackageResolver<TSys> {
pub fn new(
cache: NpmCacheDirRc,
npm_rc: ResolvedNpmRcRc,
resolution: NpmResolutionCellRc,
sys: TSys,
) -> Self {
Self {
cache,
npm_rc,
resolution,
sys,
}
}
pub fn maybe_package_folder(&self, id: &NpmPackageId) -> Option<PathBuf> {
let folder_copy_index = self
.resolution
.resolve_pkg_cache_folder_copy_index_from_pkg_id(id)?;
let registry_url = self.npm_rc.get_registry_url(&id.nv.name);
Some(self.cache.package_folder_for_id(
&id.nv.name,
&id.nv.version.to_string(),
folder_copy_index,
registry_url,
))
}
pub fn resolve_package_cache_folder_id_from_specifier(
&self,
specifier: &Url,
) -> Result<Option<NpmPackageCacheFolderId>, std::io::Error> {
Ok(self.resolve_package_cache_folder_id_from_specifier_inner(specifier))
}
fn resolve_package_cache_folder_id_from_specifier_inner(
&self,
specifier: &Url,
) -> Option<NpmPackageCacheFolderId> {
self
.cache
.resolve_package_folder_id_from_specifier(specifier)
.and_then(|cache_id| {
Some(NpmPackageCacheFolderId {
nv: PackageNv {
name: StackString::from_string(cache_id.name),
version: Version::parse_from_npm(&cache_id.version).ok()?,
},
copy_index: cache_id.copy_index,
})
})
}
}
impl<TSys: FsCanonicalize + FsMetadata> NpmPackageFolderResolver
for GlobalNpmPackageResolver<TSys>
{
fn resolve_package_folder_from_package(
&self,
name: &str,
referrer: &UrlOrPathRef,
) -> Result<PathBuf, PackageFolderResolveError> {
use deno_npm::resolution::PackageNotFoundFromReferrerError;
let Some(referrer_cache_folder_id) = self
.resolve_package_cache_folder_id_from_specifier_inner(referrer.url()?)
else {
return Err(
ReferrerNotFoundError {
referrer: referrer.display(),
referrer_extra: None,
}
.into(),
);
};
let resolve_result = self
.resolution
.resolve_package_from_package(name, &referrer_cache_folder_id);
match resolve_result {
Ok(pkg) => match self.maybe_package_folder(&pkg.id) {
Some(folder) => Ok(folder),
None => Err(
PackageNotFoundError {
package_name: name.to_string(),
referrer: referrer.display(),
referrer_extra: Some(format!(
"{} -> {}",
referrer_cache_folder_id,
pkg.id.as_serialized()
)),
}
.into(),
),
},
Err(err) => match *err {
PackageNotFoundFromReferrerError::Referrer(cache_folder_id) => Err(
ReferrerNotFoundError {
referrer: referrer.display(),
referrer_extra: Some(cache_folder_id.to_string()),
}
.into(),
),
PackageNotFoundFromReferrerError::Package {
name,
referrer: cache_folder_id_referrer,
} => {
// check for any bundled dependencies within the package
if let Ok(referrer_path) = referrer.path() {
let cache_location = self.cache.get_cache_location();
for current_folder in referrer_path
.ancestors()
.skip(1)
.take_while(|path| path.starts_with(&cache_location))
{
let node_modules_folder =
if current_folder.ends_with("node_modules") {
Cow::Borrowed(current_folder)
} else {
Cow::Owned(current_folder.join("node_modules"))
};
let sub_dir =
join_package_name_to_path(&node_modules_folder, &name);
if self.sys.fs_is_dir_no_err(&sub_dir) {
return Ok(self.sys.fs_canonicalize(&sub_dir).map_err(
|err| PackageFolderResolveIoError {
package_name: name.to_string(),
referrer: referrer.display(),
source: err,
},
)?);
}
}
}
Err(
PackageNotFoundError {
package_name: name,
referrer: referrer.display(),
referrer_extra: Some(cache_folder_id_referrer.to_string()),
}
.into(),
)
}
},
}
}
fn resolve_types_package_folder(
&self,
types_package_name: &str,
maybe_package_version: Option<&Version>,
_maybe_referrer: Option<&UrlOrPathRef>,
) -> Option<PathBuf> {
let snapshot = self.resolution.snapshot();
let pkg_id = super::common::find_definitely_typed_package_from_snapshot(
types_package_name,
maybe_package_version,
&snapshot,
)?;
self.maybe_package_folder(pkg_id)
}
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/resolver/npm/managed/resolution.rs | libs/resolver/npm/managed/resolution.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use deno_npm::NpmPackageCacheFolderId;
use deno_npm::NpmPackageId;
use deno_npm::NpmResolutionPackage;
use deno_npm::NpmSystemInfo;
use deno_npm::resolution::NpmPackagesPartitioned;
use deno_npm::resolution::NpmResolutionSnapshot;
use deno_npm::resolution::PackageCacheFolderIdNotFoundError;
use deno_npm::resolution::PackageNotFoundFromReferrerError;
use deno_npm::resolution::PackageNvNotFoundError;
use deno_npm::resolution::PackageReqNotFoundError;
use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq;
use deno_unsync::sync::AtomicFlag;
use parking_lot::RwLock;
#[allow(clippy::disallowed_types)]
pub type NpmResolutionCellRc = deno_maybe_sync::MaybeArc<NpmResolutionCell>;
/// Handles updating and storing npm resolution in memory.
///
/// This does not interact with the file system.
#[derive(Default)]
pub struct NpmResolutionCell {
snapshot: RwLock<NpmResolutionSnapshot>,
is_pending: deno_unsync::sync::AtomicFlag,
}
impl std::fmt::Debug for NpmResolutionCell {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let snapshot = self.snapshot.read();
f.debug_struct("NpmResolution")
.field("snapshot", &snapshot.as_valid_serialized().as_serialized())
.finish()
}
}
impl NpmResolutionCell {
pub fn from_serialized(
initial_snapshot: Option<ValidSerializedNpmResolutionSnapshot>,
) -> Self {
let snapshot =
NpmResolutionSnapshot::new(initial_snapshot.unwrap_or_default());
Self::new(snapshot)
}
pub fn new(initial_snapshot: NpmResolutionSnapshot) -> Self {
Self {
snapshot: RwLock::new(initial_snapshot),
is_pending: AtomicFlag::lowered(),
}
}
pub fn resolve_pkg_cache_folder_copy_index_from_pkg_id(
&self,
id: &NpmPackageId,
) -> Option<u8> {
self
.snapshot
.read()
.package_from_id(id)
.map(|p| p.copy_index)
}
pub fn resolve_pkg_id_from_pkg_cache_folder_id(
&self,
id: &NpmPackageCacheFolderId,
) -> Result<NpmPackageId, PackageCacheFolderIdNotFoundError> {
self
.snapshot
.read()
.resolve_pkg_from_pkg_cache_folder_id(id)
.map(|pkg| pkg.id.clone())
}
pub fn resolve_package_from_package(
&self,
name: &str,
referrer: &NpmPackageCacheFolderId,
) -> Result<NpmResolutionPackage, Box<PackageNotFoundFromReferrerError>> {
self
.snapshot
.read()
.resolve_package_from_package(name, referrer)
.cloned()
}
/// Resolve a node package from a deno module.
pub fn resolve_pkg_id_from_pkg_req(
&self,
req: &PackageReq,
) -> Result<NpmPackageId, PackageReqNotFoundError> {
self
.snapshot
.read()
.resolve_pkg_from_pkg_req(req)
.map(|pkg| pkg.id.clone())
}
pub fn resolve_pkg_reqs_from_pkg_id(
&self,
id: &NpmPackageId,
) -> Vec<PackageReq> {
let snapshot = self.snapshot.read();
let mut pkg_reqs = snapshot
.package_reqs()
.iter()
.filter(|(_, nv)| *nv == &id.nv)
.map(|(req, _)| req.clone())
.collect::<Vec<_>>();
pkg_reqs.sort(); // be deterministic
pkg_reqs
}
pub fn resolve_pkg_id_from_deno_module(
&self,
id: &PackageNv,
) -> Result<NpmPackageId, PackageNvNotFoundError> {
self
.snapshot
.read()
.resolve_package_from_deno_module(id)
.map(|pkg| pkg.id.clone())
}
pub fn package_reqs(&self) -> Vec<(PackageReq, PackageNv)> {
self
.snapshot
.read()
.package_reqs()
.iter()
.map(|(k, v)| (k.clone(), v.clone()))
.collect()
}
pub fn top_level_packages(&self) -> Vec<NpmPackageId> {
self
.snapshot
.read()
.top_level_packages()
.cloned()
.collect::<Vec<_>>()
}
pub fn any_top_level_package(
&self,
check: impl Fn(&NpmPackageId) -> bool,
) -> bool {
self.snapshot.read().top_level_packages().any(check)
}
pub fn all_system_packages(
&self,
system_info: &NpmSystemInfo,
) -> Vec<NpmResolutionPackage> {
self.snapshot.read().all_system_packages(system_info)
}
pub fn all_system_packages_partitioned(
&self,
system_info: &NpmSystemInfo,
) -> NpmPackagesPartitioned {
self
.snapshot
.read()
.all_system_packages_partitioned(system_info)
}
pub fn snapshot(&self) -> NpmResolutionSnapshot {
self.snapshot.read().clone()
}
pub fn serialized_valid_snapshot(
&self,
) -> ValidSerializedNpmResolutionSnapshot {
self.snapshot.read().as_valid_serialized()
}
pub fn serialized_valid_snapshot_for_system(
&self,
system_info: &NpmSystemInfo,
) -> ValidSerializedNpmResolutionSnapshot {
self
.snapshot
.read()
.as_valid_serialized_for_system(system_info)
}
pub fn subset(&self, package_reqs: &[PackageReq]) -> NpmResolutionSnapshot {
self.snapshot.read().subset(package_reqs)
}
pub fn set_snapshot(&self, snapshot: NpmResolutionSnapshot) {
*self.snapshot.write() = snapshot;
}
/// Checks if the resolution is "pending" meaning that its
/// current state requires an npm install to get it up
/// to date. This occurs when the workspace config changes
/// and deno_lockfile has incompletely updated the npm
/// snapshot.
pub fn is_pending(&self) -> bool {
self.is_pending.is_raised()
}
pub fn mark_pending(&self) {
self.is_pending.raise();
}
pub fn mark_not_pending(&self) {
self.is_pending.lower();
}
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/resolver/npm/managed/mod.rs | libs/resolver/npm/managed/mod.rs | // Copyright 2018-2025 the Deno authors. MIT license.
mod common;
mod global;
mod local;
mod resolution;
use std::path::Path;
use std::path::PathBuf;
use deno_npm::NpmPackageCacheFolderId;
use deno_npm::NpmPackageId;
use deno_npm::NpmSystemInfo;
use deno_npm::resolution::PackageCacheFolderIdNotFoundError;
use deno_npm::resolution::PackageNvNotFoundError;
use deno_npm::resolution::PackageReqNotFoundError;
use deno_path_util::fs::canonicalize_path_maybe_not_exists;
use deno_semver::Version;
use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq;
use node_resolver::InNpmPackageChecker;
use node_resolver::NpmPackageFolderResolver;
use node_resolver::UrlOrPathRef;
use sys_traits::FsCanonicalize;
use sys_traits::FsMetadata;
use url::Url;
use self::common::NpmPackageFsResolver;
use self::global::GlobalNpmPackageResolver;
use self::local::LocalNpmPackageResolver;
pub use self::resolution::NpmResolutionCell;
pub use self::resolution::NpmResolutionCellRc;
use crate::NpmCacheDirRc;
use crate::npmrc::ResolvedNpmRcRc;
#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum ResolvePkgFolderFromDenoModuleError {
#[class(inherit)]
#[error(transparent)]
PackageNvNotFound(#[from] PackageNvNotFoundError),
#[class(inherit)]
#[error(transparent)]
ResolvePkgFolderFromPkgId(#[from] ResolvePkgFolderFromPkgIdError),
}
#[derive(Debug, thiserror::Error, deno_error::JsError)]
#[error(transparent)]
pub enum ResolvePkgFolderFromPkgIdError {
#[class(inherit)]
#[error(transparent)]
NotFound(#[from] NpmManagedPackageFolderNotFoundError),
#[class(inherit)]
#[error(transparent)]
FailedCanonicalizing(#[from] FailedCanonicalizingError),
}
#[derive(Debug, thiserror::Error, deno_error::JsError)]
#[class(generic)]
#[error("Package folder not found for '{0}'")]
pub struct NpmManagedPackageFolderNotFoundError(deno_semver::StackString);
#[derive(Debug, thiserror::Error, deno_error::JsError)]
#[class(generic)]
#[error("Failed canonicalizing '{}'", path.display())]
pub struct FailedCanonicalizingError {
path: PathBuf,
#[source]
source: std::io::Error,
}
#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum ManagedResolvePkgFolderFromDenoReqError {
#[class(inherit)]
#[error(transparent)]
PackageReqNotFound(#[from] PackageReqNotFoundError),
#[class(inherit)]
#[error(transparent)]
ResolvePkgFolderFromPkgId(#[from] ResolvePkgFolderFromPkgIdError),
}
#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum ResolvePkgIdFromSpecifierError {
#[class(inherit)]
#[error(transparent)]
Io(#[from] std::io::Error),
#[class(inherit)]
#[error(transparent)]
NotFound(#[from] PackageCacheFolderIdNotFoundError),
}
pub struct ManagedNpmResolverCreateOptions<TSys: ManagedNpmResolverSys> {
pub npm_cache_dir: NpmCacheDirRc,
pub sys: TSys,
pub maybe_node_modules_path: Option<PathBuf>,
pub npm_system_info: NpmSystemInfo,
pub npmrc: ResolvedNpmRcRc,
pub npm_resolution: NpmResolutionCellRc,
}
#[sys_traits::auto_impl]
pub trait ManagedNpmResolverSys: FsCanonicalize + FsMetadata + Clone {}
#[allow(clippy::disallowed_types)]
pub type ManagedNpmResolverRc<TSys> =
deno_maybe_sync::MaybeArc<ManagedNpmResolver<TSys>>;
#[derive(Debug)]
pub struct ManagedNpmResolver<TSys: ManagedNpmResolverSys> {
fs_resolver: NpmPackageFsResolver<TSys>,
npm_cache_dir: NpmCacheDirRc,
resolution: NpmResolutionCellRc,
sys: TSys,
}
impl<TSys: ManagedNpmResolverSys> ManagedNpmResolver<TSys> {
pub fn new<TCreateSys: ManagedNpmResolverSys>(
options: ManagedNpmResolverCreateOptions<TCreateSys>,
) -> ManagedNpmResolver<TCreateSys> {
let fs_resolver = match options.maybe_node_modules_path {
Some(node_modules_folder) => {
NpmPackageFsResolver::Local(LocalNpmPackageResolver::new(
options.npm_resolution.clone(),
options.sys.clone(),
node_modules_folder,
))
}
None => NpmPackageFsResolver::Global(GlobalNpmPackageResolver::new(
options.npm_cache_dir.clone(),
options.npmrc.clone(),
options.npm_resolution.clone(),
options.sys.clone(),
)),
};
ManagedNpmResolver {
fs_resolver,
npm_cache_dir: options.npm_cache_dir,
sys: options.sys,
resolution: options.npm_resolution,
}
}
#[inline]
pub fn root_node_modules_path(&self) -> Option<&Path> {
self.fs_resolver.node_modules_path()
}
pub fn global_cache_root_path(&self) -> &Path {
self.npm_cache_dir.root_dir()
}
pub fn global_cache_root_url(&self) -> &Url {
self.npm_cache_dir.root_dir_url()
}
pub fn resolution(&self) -> &NpmResolutionCell {
self.resolution.as_ref()
}
/// Checks if the provided package req's folder is cached.
pub fn is_pkg_req_folder_cached(&self, req: &PackageReq) -> bool {
self
.resolution
.resolve_pkg_id_from_pkg_req(req)
.ok()
.and_then(|id| self.resolve_pkg_folder_from_pkg_id(&id).ok())
.map(|folder| self.sys.fs_exists_no_err(folder))
.unwrap_or(false)
}
pub fn resolve_pkg_folder_from_pkg_id(
&self,
package_id: &NpmPackageId,
) -> Result<PathBuf, ResolvePkgFolderFromPkgIdError> {
let path = self
.fs_resolver
.maybe_package_folder(package_id)
.ok_or_else(|| {
NpmManagedPackageFolderNotFoundError(package_id.as_serialized())
})?;
// todo(dsherret): investigate if this canonicalization is always
// necessary. For example, maybe it's not necessary for the global cache
let path = canonicalize_path_maybe_not_exists(&self.sys, &path).map_err(
|source| FailedCanonicalizingError {
path: path.to_path_buf(),
source,
},
)?;
log::debug!(
"Resolved package folder of {} to {}",
package_id.as_serialized(),
path.display()
);
Ok(path)
}
pub fn resolve_pkg_folder_from_deno_module(
&self,
nv: &PackageNv,
) -> Result<PathBuf, ResolvePkgFolderFromDenoModuleError> {
let pkg_id = self.resolution.resolve_pkg_id_from_deno_module(nv)?;
Ok(self.resolve_pkg_folder_from_pkg_id(&pkg_id)?)
}
pub fn resolve_pkg_id_from_deno_module_req(
&self,
req: &PackageReq,
) -> Result<NpmPackageId, PackageReqNotFoundError> {
self.resolution.resolve_pkg_id_from_pkg_req(req)
}
pub fn resolve_pkg_folder_from_deno_module_req(
&self,
req: &PackageReq,
) -> Result<PathBuf, ManagedResolvePkgFolderFromDenoReqError> {
let pkg_id = self.resolution.resolve_pkg_id_from_pkg_req(req)?;
Ok(self.resolve_pkg_folder_from_pkg_id(&pkg_id)?)
}
#[inline]
pub fn resolve_package_cache_folder_id_from_specifier(
&self,
specifier: &Url,
) -> Result<Option<NpmPackageCacheFolderId>, std::io::Error> {
self
.fs_resolver
.resolve_package_cache_folder_id_from_specifier(specifier)
}
/// Resolves the package id from the provided specifier.
pub fn resolve_pkg_id_from_specifier(
  &self,
  specifier: &Url,
) -> Result<Option<NpmPackageId>, ResolvePkgIdFromSpecifierError> {
  // First map the specifier to a cache folder id (None means the
  // specifier is not inside a cached npm package), then map that folder
  // id to the resolved package id.
  match self
    .fs_resolver
    .resolve_package_cache_folder_id_from_specifier(specifier)?
  {
    Some(cache_folder_id) => {
      let pkg_id = self
        .resolution
        .resolve_pkg_id_from_pkg_cache_folder_id(&cache_folder_id)?;
      Ok(Some(pkg_id))
    }
    None => Ok(None),
  }
}
}
impl<TSys: ManagedNpmResolverSys> NpmPackageFolderResolver
for ManagedNpmResolver<TSys>
{
/// Resolves a bare package specifier to its package folder, delegating
/// to the underlying fs resolver and logging the result.
fn resolve_package_folder_from_package(
&self,
specifier: &str,
referrer: &UrlOrPathRef,
) -> Result<PathBuf, node_resolver::errors::PackageFolderResolveError> {
let path = self
.fs_resolver
.resolve_package_folder_from_package(specifier, referrer)?;
log::debug!(
"Resolved {} from {} to {}",
specifier,
referrer.display(),
path.display()
);
Ok(path)
}
/// Resolves the folder of a `@types/*` package, delegating to the
/// underlying fs resolver.
fn resolve_types_package_folder(
&self,
types_package_name: &str,
maybe_package_version: Option<&Version>,
maybe_referrer: Option<&UrlOrPathRef>,
) -> Option<PathBuf> {
self.fs_resolver.resolve_types_package_folder(
types_package_name,
maybe_package_version,
maybe_referrer,
)
}
}
/// Checks whether a specifier lives inside the managed npm directory
/// (either a local node_modules folder or the global cache root).
#[derive(Debug, Clone)]
pub struct ManagedInNpmPackageChecker {
// directory URL that all npm specifiers live under; expected to end in
// '/' (see the debug_assert in create_managed_in_npm_pkg_checker)
root_dir: Url,
}
impl InNpmPackageChecker for ManagedInNpmPackageChecker {
  /// Anything underneath the root directory URL counts as an npm package.
  fn in_npm_package(&self, specifier: &Url) -> bool {
    specifier.as_str().starts_with(self.root_dir.as_str())
  }
}
/// Options for `create_managed_in_npm_pkg_checker`.
#[derive(Debug)]
pub struct ManagedInNpmPkgCheckerCreateOptions<'a> {
/// Root URL of the global npm cache directory.
pub root_cache_dir_url: &'a Url,
/// Path to a local node_modules directory, when one is being used.
pub maybe_node_modules_path: Option<&'a Path>,
}
/// Builds a checker rooted at the local node_modules directory when one
/// is configured, otherwise at the global cache root URL.
pub fn create_managed_in_npm_pkg_checker(
  options: ManagedInNpmPkgCheckerCreateOptions,
) -> ManagedInNpmPackageChecker {
  let root_dir = if let Some(node_modules_folder) =
    options.maybe_node_modules_path
  {
    deno_path_util::url_from_directory_path(node_modules_folder).unwrap()
  } else {
    options.root_cache_dir_url.clone()
  };
  // prefix matching in in_npm_package relies on the trailing slash
  debug_assert!(root_dir.as_str().ends_with('/'));
  ManagedInNpmPackageChecker { root_dir }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/resolver/npm/managed/common.rs | libs/resolver/npm/managed/common.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::path::Path;
use std::path::PathBuf;
use deno_npm::NpmPackageCacheFolderId;
use deno_npm::NpmPackageId;
use deno_npm::resolution::NpmResolutionSnapshot;
use deno_semver::Version;
use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq;
use node_resolver::NpmPackageFolderResolver;
use node_resolver::UrlOrPathRef;
use sys_traits::FsCanonicalize;
use sys_traits::FsMetadata;
use url::Url;
/// File-system-backed package resolver: either a local `node_modules`
/// layout or the global cache layout.
#[derive(Debug)]
pub enum NpmPackageFsResolver<TSys: FsCanonicalize + FsMetadata> {
Local(super::local::LocalNpmPackageResolver<TSys>),
Global(super::global::GlobalNpmPackageResolver<TSys>),
}
impl<TSys: FsCanonicalize + FsMetadata> NpmPackageFsResolver<TSys> {
  /// The local node_modules folder (only for the local resolver).
  pub fn node_modules_path(&self) -> Option<&Path> {
    if let Self::Local(resolver) = self {
      resolver.node_modules_path()
    } else {
      None
    }
  }

  /// Looks up the folder for a package id, delegating to whichever
  /// underlying resolver (local or global) this is.
  pub fn maybe_package_folder(
    &self,
    package_id: &NpmPackageId,
  ) -> Option<PathBuf> {
    match self {
      Self::Local(resolver) => resolver.maybe_package_folder(package_id),
      Self::Global(resolver) => resolver.maybe_package_folder(package_id),
    }
  }

  /// Maps a specifier back to its package cache folder id, delegating to
  /// the underlying resolver.
  pub fn resolve_package_cache_folder_id_from_specifier(
    &self,
    specifier: &Url,
  ) -> Result<Option<NpmPackageCacheFolderId>, std::io::Error> {
    match self {
      Self::Local(resolver) => {
        resolver.resolve_package_cache_folder_id_from_specifier(specifier)
      }
      Self::Global(resolver) => {
        resolver.resolve_package_cache_folder_id_from_specifier(specifier)
      }
    }
  }
}
// Delegating trait impl: every method dispatches to the local or global
// resolver variant unchanged.
impl<TSys: FsCanonicalize + FsMetadata> NpmPackageFolderResolver
for NpmPackageFsResolver<TSys>
{
fn resolve_package_folder_from_package(
&self,
specifier: &str,
referrer: &UrlOrPathRef,
) -> Result<PathBuf, node_resolver::errors::PackageFolderResolveError> {
match self {
NpmPackageFsResolver::Local(r) => {
r.resolve_package_folder_from_package(specifier, referrer)
}
NpmPackageFsResolver::Global(r) => {
r.resolve_package_folder_from_package(specifier, referrer)
}
}
}
fn resolve_types_package_folder(
&self,
types_package_name: &str,
maybe_package_version: Option<&Version>,
maybe_referrer: Option<&UrlOrPathRef>,
) -> Option<PathBuf> {
match self {
NpmPackageFsResolver::Local(r) => r.resolve_types_package_folder(
types_package_name,
maybe_package_version,
maybe_referrer,
),
NpmPackageFsResolver::Global(r) => r.resolve_types_package_folder(
types_package_name,
maybe_package_version,
maybe_referrer,
),
}
}
}
/// Joins a (possibly scoped, `/`-separated) npm package name onto `path`.
///
/// Joining one segment at a time ensures the platform separator is used
/// (backslashes on windows) rather than embedding literal `/` characters.
pub fn join_package_name_to_path(path: &Path, package_name: &str) -> PathBuf {
  package_name
    .split('/')
    .fold(path.to_path_buf(), |acc, part| acc.join(part))
}
/// Finds the best matching `@types/*` package id in the snapshot by
/// running `find_definitely_typed_package` over the snapshot's resolved
/// package requirements.
pub fn find_definitely_typed_package_from_snapshot<'a>(
types_package_name: &str,
maybe_package_version: Option<&Version>,
snapshot: &'a NpmResolutionSnapshot,
) -> Option<&'a NpmPackageId> {
let (_, nv) = find_definitely_typed_package(
types_package_name,
maybe_package_version,
snapshot.package_reqs().iter(),
)?;
// failure to map the nv back to a package id yields None
snapshot.resolve_package_id_from_deno_module(nv).ok()
}
/// Attempt to choose the "best" `@types/*` package
/// if possible. If multiple versions exist, try to match
/// the major and minor versions of the `@types` package with the
/// actual package, falling back to the highest @types version present.
pub fn find_definitely_typed_package<'a>(
  types_package_name: &str,
  maybe_package_version: Option<&Version>,
  packages: impl IntoIterator<Item = (&'a PackageReq, &'a PackageNv)>,
) -> Option<(&'a PackageReq, &'a PackageNv)> {
  // Highest-versioned matching @types package overall (fallback).
  let mut highest: Option<(&PackageReq, &PackageNv)> = None;
  // Highest-versioned @types package whose major/minor/pre match the
  // actual package's version (preferred result).
  let mut best: Option<(&PackageReq, &PackageNv)> = None;
  for (req, type_nv) in packages {
    if type_nv.name != types_package_name {
      continue;
    }
    if let Some(package_version) = maybe_package_version
      && type_nv.version.major == package_version.major
      && type_nv.version.minor == package_version.minor
      && type_nv.version.pre == package_version.pre
    {
      // Among candidates major/minor/pre are fixed, so comparing full
      // versions already keeps the highest patch; the separate
      // `best_patch` tracker previously kept here was redundant.
      let should_replace = match &best {
        Some((_, best_nv)) => type_nv.version > best_nv.version,
        None => true,
      };
      if should_replace {
        best = Some((req, type_nv));
      }
    }
    let should_replace_highest = match &highest {
      Some((_, highest_nv)) => type_nv.version > highest_nv.version,
      None => true,
    };
    if should_replace_highest {
      highest = Some((req, type_nv));
    }
  }
  best.or(highest)
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/node_resolver/errors.rs | libs/node_resolver/errors.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::fmt::Write;
use std::path::PathBuf;
use boxed_error::Boxed;
use deno_error::JsError;
use deno_package_json::MissingPkgJsonNameError;
use deno_path_util::UrlToFilePathError;
use thiserror::Error;
use url::Url;
use crate::NodeResolutionKind;
use crate::ResolutionMode;
use crate::path::UrlOrPath;
/// Node.js-compatible error codes attached to resolution errors
/// (mirrors Node's `ERR_*` codes, plus a Deno-specific addition).
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[allow(non_camel_case_types)]
pub enum NodeJsErrorCode {
ERR_INVALID_MODULE_SPECIFIER,
ERR_INVALID_PACKAGE_CONFIG,
ERR_INVALID_PACKAGE_TARGET,
ERR_MODULE_NOT_FOUND,
ERR_PACKAGE_IMPORT_NOT_DEFINED,
ERR_PACKAGE_PATH_NOT_EXPORTED,
ERR_UNKNOWN_FILE_EXTENSION,
ERR_UNSUPPORTED_DIR_IMPORT,
ERR_UNSUPPORTED_ESM_URL_SCHEME,
ERR_INVALID_FILE_URL_PATH,
ERR_UNKNOWN_BUILTIN_MODULE,
/// Deno specific since Node doesn't support type checking TypeScript.
ERR_TYPES_NOT_FOUND,
}
impl std::fmt::Display for NodeJsErrorCode {
  /// Displays the code exactly as its `ERR_*` string.
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    f.write_str(self.as_str())
  }
}
impl NodeJsErrorCode {
/// Returns the canonical `ERR_*` string for this code; each variant
/// maps to the string form of its own name.
pub fn as_str(&self) -> &'static str {
use NodeJsErrorCode::*;
match self {
ERR_INVALID_MODULE_SPECIFIER => "ERR_INVALID_MODULE_SPECIFIER",
ERR_INVALID_PACKAGE_CONFIG => "ERR_INVALID_PACKAGE_CONFIG",
ERR_INVALID_PACKAGE_TARGET => "ERR_INVALID_PACKAGE_TARGET",
ERR_MODULE_NOT_FOUND => "ERR_MODULE_NOT_FOUND",
ERR_PACKAGE_IMPORT_NOT_DEFINED => "ERR_PACKAGE_IMPORT_NOT_DEFINED",
ERR_PACKAGE_PATH_NOT_EXPORTED => "ERR_PACKAGE_PATH_NOT_EXPORTED",
ERR_UNKNOWN_FILE_EXTENSION => "ERR_UNKNOWN_FILE_EXTENSION",
ERR_UNSUPPORTED_DIR_IMPORT => "ERR_UNSUPPORTED_DIR_IMPORT",
ERR_UNSUPPORTED_ESM_URL_SCHEME => "ERR_UNSUPPORTED_ESM_URL_SCHEME",
ERR_TYPES_NOT_FOUND => "ERR_TYPES_NOT_FOUND",
ERR_INVALID_FILE_URL_PATH => "ERR_INVALID_FILE_URL_PATH",
ERR_UNKNOWN_BUILTIN_MODULE => "ERR_UNKNOWN_BUILTIN_MODULE",
}
}
}
impl From<NodeJsErrorCode> for deno_error::PropertyValue {
  /// Exposes the code as its string form for the JS-facing property.
  fn from(value: NodeJsErrorCode) -> Self {
    let code_str = value.as_str();
    deno_error::PropertyValue::String(code_str.into())
  }
}
/// Trait for resolution errors that carry a Node.js error code.
pub trait NodeJsErrorCoded {
/// The Node.js `ERR_*` code associated with this error.
fn code(&self) -> NodeJsErrorCode;
}
/// A syntactically or semantically invalid module specifier
/// (Node's `ERR_INVALID_MODULE_SPECIFIER`).
#[derive(Debug, Clone, Error, JsError)]
#[error(
"[{}] Invalid module '{}' {}{}",
self.code(),
request,
reason,
maybe_referrer.as_ref().map(|referrer| format!(" imported from '{}'", referrer)).unwrap_or_default()
)]
#[class(type)]
#[property("code" = self.code())]
pub struct InvalidModuleSpecifierError {
/// The specifier that was requested.
pub request: String,
/// Human-readable explanation of why the specifier is invalid.
pub reason: Cow<'static, str>,
/// Display string of the importer, when known.
pub maybe_referrer: Option<String>,
}
impl NodeJsErrorCoded for InvalidModuleSpecifierError {
// always maps to ERR_INVALID_MODULE_SPECIFIER
fn code(&self) -> NodeJsErrorCode {
NodeJsErrorCode::ERR_INVALID_MODULE_SPECIFIER
}
}
#[derive(Debug, Boxed, JsError)]
pub struct LegacyResolveError(pub Box<LegacyResolveErrorKind>);
impl LegacyResolveError {
pub fn specifier(&self) -> &UrlOrPath {
match self.as_kind() {
LegacyResolveErrorKind::TypesNotFound(err) => &err.0.code_specifier,
LegacyResolveErrorKind::ModuleNotFound(err) => &err.specifier,
}
}
}
#[derive(Debug, Error, JsError)]
pub enum LegacyResolveErrorKind {
#[class(inherit)]
#[error(transparent)]
TypesNotFound(#[from] TypesNotFoundError),
#[class(inherit)]
#[error(transparent)]
ModuleNotFound(#[from] ModuleNotFoundError),
}
impl NodeJsErrorCoded for LegacyResolveError {
fn code(&self) -> NodeJsErrorCode {
match self.as_kind() {
LegacyResolveErrorKind::TypesNotFound(e) => e.code(),
LegacyResolveErrorKind::ModuleNotFound(e) => e.code(),
}
}
}
#[derive(Debug, Error, JsError)]
#[error(
"Could not find package '{}' from referrer '{}'{}.",
package_name,
referrer,
referrer_extra.as_ref().map(|r| format!(" ({})", r)).unwrap_or_default()
)]
#[class(generic)]
#[property("code" = self.code())]
pub struct PackageNotFoundError {
pub package_name: String,
pub referrer: UrlOrPath,
/// Extra information about the referrer.
pub referrer_extra: Option<String>,
}
impl NodeJsErrorCoded for PackageNotFoundError {
fn code(&self) -> NodeJsErrorCode {
NodeJsErrorCode::ERR_MODULE_NOT_FOUND
}
}
#[derive(Debug, Error, JsError)]
#[error(
"Could not find referrer npm package '{}'{}.",
referrer,
referrer_extra.as_ref().map(|r| format!(" ({})", r)).unwrap_or_default()
)]
#[class(generic)]
#[property("code" = self.code())]
pub struct ReferrerNotFoundError {
pub referrer: UrlOrPath,
/// Extra information about the referrer.
pub referrer_extra: Option<String>,
}
impl NodeJsErrorCoded for ReferrerNotFoundError {
fn code(&self) -> NodeJsErrorCode {
NodeJsErrorCode::ERR_MODULE_NOT_FOUND
}
}
#[derive(Debug, Error, JsError)]
#[class(inherit)]
#[error("Failed resolving '{package_name}' from referrer '{referrer}'.")]
#[property("code" = self.code())]
pub struct PackageFolderResolveIoError {
pub package_name: String,
pub referrer: UrlOrPath,
#[source]
#[inherit]
pub source: std::io::Error,
}
impl NodeJsErrorCoded for PackageFolderResolveIoError {
fn code(&self) -> NodeJsErrorCode {
NodeJsErrorCode::ERR_MODULE_NOT_FOUND
}
}
impl NodeJsErrorCoded for PackageFolderResolveErrorKind {
fn code(&self) -> NodeJsErrorCode {
match self {
PackageFolderResolveErrorKind::PackageNotFound(e) => e.code(),
PackageFolderResolveErrorKind::ReferrerNotFound(e) => e.code(),
PackageFolderResolveErrorKind::Io(e) => e.code(),
PackageFolderResolveErrorKind::PathToUrl(_) => {
NodeJsErrorCode::ERR_INVALID_FILE_URL_PATH
}
}
}
}
#[derive(Debug, Boxed, JsError)]
pub struct PackageFolderResolveError(pub Box<PackageFolderResolveErrorKind>);
#[derive(Debug, Error, JsError)]
pub enum PackageFolderResolveErrorKind {
#[class(inherit)]
#[error(transparent)]
PackageNotFound(#[from] PackageNotFoundError),
#[class(inherit)]
#[error(transparent)]
ReferrerNotFound(#[from] ReferrerNotFoundError),
#[class(inherit)]
#[error(transparent)]
Io(#[from] PackageFolderResolveIoError),
#[class(inherit)]
#[error(transparent)]
#[property("code" = self.code())]
PathToUrl(#[from] deno_path_util::PathToUrlError),
}
#[derive(Debug, Boxed, JsError)]
pub struct PackageSubpathFromDenoModuleResolveError(
pub Box<PackageSubpathFromDenoModuleResolveErrorKind>,
);
impl NodeJsErrorCoded for PackageSubpathFromDenoModuleResolveError {
fn code(&self) -> NodeJsErrorCode {
match self.as_kind() {
PackageSubpathFromDenoModuleResolveErrorKind::SubPath(e) => e.code(),
PackageSubpathFromDenoModuleResolveErrorKind::FinalizeResolution(e) => {
e.code()
}
}
}
}
impl PackageSubpathFromDenoModuleResolveError {
pub fn as_types_not_found(&self) -> Option<&TypesNotFoundError> {
match self.as_kind() {
PackageSubpathFromDenoModuleResolveErrorKind::SubPath(e) => {
e.as_types_not_found()
}
PackageSubpathFromDenoModuleResolveErrorKind::FinalizeResolution(e) => {
e.as_types_not_found()
}
}
}
pub fn maybe_specifier(&self) -> Option<Cow<'_, UrlOrPath>> {
match self.as_kind() {
PackageSubpathFromDenoModuleResolveErrorKind::SubPath(e) => {
e.maybe_specifier()
}
PackageSubpathFromDenoModuleResolveErrorKind::FinalizeResolution(e) => {
e.maybe_specifier()
}
}
}
pub fn into_node_resolve_error(self) -> NodeResolveError {
match self.into_kind() {
PackageSubpathFromDenoModuleResolveErrorKind::SubPath(e) => {
NodeResolveErrorKind::PackageResolve(
PackageResolveErrorKind::SubpathResolve(e).into_box(),
)
}
PackageSubpathFromDenoModuleResolveErrorKind::FinalizeResolution(e) => {
NodeResolveErrorKind::FinalizeResolution(e)
}
}
.into_box()
}
}
#[derive(Debug, Error, JsError)]
pub enum PackageSubpathFromDenoModuleResolveErrorKind {
#[class(inherit)]
#[error(transparent)]
SubPath(#[from] PackageSubpathResolveError),
#[class(inherit)]
#[error(transparent)]
FinalizeResolution(#[from] FinalizeResolutionError),
}
#[derive(Debug, Boxed, JsError)]
pub struct PackageSubpathResolveError(pub Box<PackageSubpathResolveErrorKind>);
impl NodeJsErrorCoded for PackageSubpathResolveError {
fn code(&self) -> NodeJsErrorCode {
match self.as_kind() {
PackageSubpathResolveErrorKind::PkgJsonLoad(e) => e.code(),
PackageSubpathResolveErrorKind::Exports(e) => e.code(),
PackageSubpathResolveErrorKind::LegacyResolve(e) => e.code(),
}
}
}
impl PackageSubpathResolveError {
pub fn as_types_not_found(&self) -> Option<&TypesNotFoundError> {
self.as_kind().as_types_not_found()
}
pub fn maybe_specifier(&self) -> Option<Cow<'_, UrlOrPath>> {
match self.as_kind() {
PackageSubpathResolveErrorKind::PkgJsonLoad(_) => None,
PackageSubpathResolveErrorKind::Exports(err) => err.maybe_specifier(),
PackageSubpathResolveErrorKind::LegacyResolve(err) => {
Some(Cow::Borrowed(err.specifier()))
}
}
}
}
#[derive(Debug, Error, JsError)]
pub enum PackageSubpathResolveErrorKind {
#[class(inherit)]
#[error(transparent)]
PkgJsonLoad(#[from] PackageJsonLoadError),
#[class(inherit)]
#[error(transparent)]
Exports(PackageExportsResolveError),
#[class(inherit)]
#[error(transparent)]
LegacyResolve(LegacyResolveError),
}
impl PackageSubpathResolveErrorKind {
pub fn as_types_not_found(&self) -> Option<&TypesNotFoundError> {
match self {
PackageSubpathResolveErrorKind::PkgJsonLoad(_) => None,
PackageSubpathResolveErrorKind::Exports(err) => match err.as_kind() {
PackageExportsResolveErrorKind::PackagePathNotExported(_) => None,
PackageExportsResolveErrorKind::PackageTargetResolve(err) => {
match err.as_kind() {
PackageTargetResolveErrorKind::TypesNotFound(not_found) => {
Some(not_found)
}
PackageTargetResolveErrorKind::NotFound(_)
| PackageTargetResolveErrorKind::InvalidPackageTarget(_)
| PackageTargetResolveErrorKind::InvalidModuleSpecifier(_)
| PackageTargetResolveErrorKind::PackageResolve(_)
| PackageTargetResolveErrorKind::UnknownBuiltInNodeModule(_)
| PackageTargetResolveErrorKind::UrlToFilePath(_) => None,
}
}
},
PackageSubpathResolveErrorKind::LegacyResolve(err) => match err.as_kind()
{
LegacyResolveErrorKind::TypesNotFound(not_found) => Some(not_found),
LegacyResolveErrorKind::ModuleNotFound(_) => None,
},
}
}
}
#[derive(Debug, Error, JsError)]
#[class(generic)]
#[error(
"Target '{}' not found from '{}'{}{}.",
target,
pkg_json_path.display(),
maybe_referrer.as_ref().map(|r|
format!(
" from{} referrer {}",
match resolution_mode {
ResolutionMode::Import => "",
ResolutionMode::Require => " cjs",
},
r
)
).unwrap_or_default(),
match resolution_kind {
NodeResolutionKind::Execution => "",
NodeResolutionKind::Types => " for types",
}
)]
#[property("code" = self.code())]
pub struct PackageTargetNotFoundError {
pub pkg_json_path: PathBuf,
pub target: String,
pub maybe_resolved: Option<UrlOrPath>,
pub maybe_referrer: Option<UrlOrPath>,
pub resolution_mode: ResolutionMode,
pub resolution_kind: NodeResolutionKind,
}
impl NodeJsErrorCoded for PackageTargetNotFoundError {
fn code(&self) -> NodeJsErrorCode {
NodeJsErrorCode::ERR_MODULE_NOT_FOUND
}
}
impl NodeJsErrorCoded for PackageTargetResolveErrorKind {
fn code(&self) -> NodeJsErrorCode {
match self {
PackageTargetResolveErrorKind::NotFound(e) => e.code(),
PackageTargetResolveErrorKind::InvalidPackageTarget(e) => e.code(),
PackageTargetResolveErrorKind::InvalidModuleSpecifier(e) => e.code(),
PackageTargetResolveErrorKind::PackageResolve(e) => e.code(),
PackageTargetResolveErrorKind::TypesNotFound(e) => e.code(),
PackageTargetResolveErrorKind::UnknownBuiltInNodeModule(e) => e.code(),
PackageTargetResolveErrorKind::UrlToFilePath(_) => {
NodeJsErrorCode::ERR_INVALID_FILE_URL_PATH
}
}
}
}
#[derive(Debug, Boxed, JsError)]
pub struct PackageTargetResolveError(pub Box<PackageTargetResolveErrorKind>);
impl PackageTargetResolveError {
pub fn maybe_specifier(&self) -> Option<Cow<'_, UrlOrPath>> {
match self.as_kind() {
PackageTargetResolveErrorKind::NotFound(err) => {
err.maybe_resolved.as_ref().map(Cow::Borrowed)
}
PackageTargetResolveErrorKind::PackageResolve(err) => {
err.maybe_specifier()
}
PackageTargetResolveErrorKind::TypesNotFound(err) => {
Some(Cow::Borrowed(&err.0.code_specifier))
}
PackageTargetResolveErrorKind::UnknownBuiltInNodeModule(err) => {
err.maybe_specifier().map(|u| Cow::Owned(UrlOrPath::Url(u)))
}
PackageTargetResolveErrorKind::UrlToFilePath(e) => {
Some(Cow::Owned(UrlOrPath::Url(e.0.clone())))
}
PackageTargetResolveErrorKind::InvalidPackageTarget(_)
| PackageTargetResolveErrorKind::InvalidModuleSpecifier(_) => None,
}
}
}
#[derive(Debug, Error, JsError)]
pub enum PackageTargetResolveErrorKind {
#[class(inherit)]
#[error(transparent)]
NotFound(#[from] PackageTargetNotFoundError),
#[class(inherit)]
#[error(transparent)]
InvalidPackageTarget(#[from] InvalidPackageTargetError),
#[class(inherit)]
#[error(transparent)]
InvalidModuleSpecifier(#[from] InvalidModuleSpecifierError),
#[class(inherit)]
#[error(transparent)]
PackageResolve(#[from] PackageResolveError),
#[class(inherit)]
#[error(transparent)]
TypesNotFound(#[from] TypesNotFoundError),
#[class(inherit)]
#[error(transparent)]
UnknownBuiltInNodeModule(#[from] UnknownBuiltInNodeModuleError),
#[class(inherit)]
#[error(transparent)]
#[property("code" = self.code())]
UrlToFilePath(#[from] deno_path_util::UrlToFilePathError),
}
impl PackageTargetResolveErrorKind {
  /// Returns the inner `TypesNotFoundError` when this is the
  /// `TypesNotFound` variant; `None` for every other variant.
  pub fn as_types_not_found(&self) -> Option<&TypesNotFoundError> {
    if let Self::TypesNotFound(not_found) = self {
      Some(not_found)
    } else {
      None
    }
  }
}
impl NodeJsErrorCoded for PackageExportsResolveErrorKind {
fn code(&self) -> NodeJsErrorCode {
match self {
PackageExportsResolveErrorKind::PackagePathNotExported(e) => e.code(),
PackageExportsResolveErrorKind::PackageTargetResolve(e) => e.code(),
}
}
}
#[derive(Debug, Boxed, JsError)]
pub struct PackageExportsResolveError(pub Box<PackageExportsResolveErrorKind>);
impl PackageExportsResolveError {
pub fn maybe_specifier(&self) -> Option<Cow<'_, UrlOrPath>> {
match self.as_kind() {
PackageExportsResolveErrorKind::PackagePathNotExported(_) => None,
PackageExportsResolveErrorKind::PackageTargetResolve(err) => {
err.maybe_specifier()
}
}
}
}
#[derive(Debug, Error, JsError)]
pub enum PackageExportsResolveErrorKind {
#[class(inherit)]
#[error(transparent)]
PackagePathNotExported(#[from] PackagePathNotExportedError),
#[class(inherit)]
#[error(transparent)]
PackageTargetResolve(#[from] PackageTargetResolveError),
}
#[derive(Debug, Error, JsError)]
#[error(
"[{}] Could not find types for '{}'{}",
self.code(),
self.0.code_specifier,
self.0.maybe_referrer.as_ref().map(|r| format!(" imported from '{}'", r)).unwrap_or_default(),
)]
#[class(generic)]
#[property("code" = self.code())]
pub struct TypesNotFoundError(pub Box<TypesNotFoundErrorData>);
#[derive(Debug)]
pub struct TypesNotFoundErrorData {
pub code_specifier: UrlOrPath,
pub maybe_referrer: Option<UrlOrPath>,
}
impl NodeJsErrorCoded for TypesNotFoundError {
fn code(&self) -> NodeJsErrorCode {
NodeJsErrorCode::ERR_TYPES_NOT_FOUND
}
}
#[derive(Debug, Error, JsError)]
#[class(inherit)]
#[error("[{}] Invalid package config. {}", self.code(), .0)]
pub struct PackageJsonLoadError(pub deno_package_json::PackageJsonLoadError);
impl NodeJsErrorCoded for PackageJsonLoadError {
fn code(&self) -> NodeJsErrorCode {
NodeJsErrorCode::ERR_INVALID_PACKAGE_CONFIG
}
}
#[derive(Debug, Error, JsError)]
#[class(type)]
#[error(
"[{}] Package import specifier \"{}\" is not defined{}{}",
self.code(),
name,
package_json_path.as_ref().map(|p| format!(" in package {}", p.display())).unwrap_or_default(),
maybe_referrer.as_ref().map(|r| format!(" imported from '{}'", r)).unwrap_or_default(),
)]
#[property("code" = self.code())]
pub struct PackageImportNotDefinedError {
pub name: String,
pub package_json_path: Option<PathBuf>,
pub maybe_referrer: Option<UrlOrPath>,
}
impl NodeJsErrorCoded for PackageImportNotDefinedError {
fn code(&self) -> NodeJsErrorCode {
NodeJsErrorCode::ERR_PACKAGE_IMPORT_NOT_DEFINED
}
}
#[derive(Debug, Boxed, JsError)]
pub struct PackageImportsResolveError(pub Box<PackageImportsResolveErrorKind>);
impl PackageImportsResolveError {
pub fn maybe_specifier(&self) -> Option<Cow<'_, UrlOrPath>> {
match self.as_kind() {
PackageImportsResolveErrorKind::Target(err) => err.maybe_specifier(),
PackageImportsResolveErrorKind::PkgJsonLoad(_)
| PackageImportsResolveErrorKind::InvalidModuleSpecifier(_)
| PackageImportsResolveErrorKind::NotDefined(_) => None,
}
}
}
#[derive(Debug, Error, JsError)]
pub enum PackageImportsResolveErrorKind {
#[class(inherit)]
#[error(transparent)]
PkgJsonLoad(PackageJsonLoadError),
#[class(inherit)]
#[error(transparent)]
InvalidModuleSpecifier(#[from] InvalidModuleSpecifierError),
#[class(inherit)]
#[error(transparent)]
NotDefined(#[from] PackageImportNotDefinedError),
#[class(inherit)]
#[error(transparent)]
Target(#[from] PackageTargetResolveError),
}
impl PackageImportsResolveErrorKind {
  /// Only target-resolution failures can carry a types-not-found error;
  /// every other variant yields `None`.
  pub fn as_types_not_found(&self) -> Option<&TypesNotFoundError> {
    if let Self::Target(err) = self {
      err.as_types_not_found()
    } else {
      None
    }
  }
}
impl NodeJsErrorCoded for PackageImportsResolveErrorKind {
fn code(&self) -> NodeJsErrorCode {
match self {
Self::PkgJsonLoad(e) => e.code(),
Self::InvalidModuleSpecifier(e) => e.code(),
Self::NotDefined(e) => e.code(),
Self::Target(e) => e.code(),
}
}
}
impl NodeJsErrorCoded for PackageResolveErrorKind {
fn code(&self) -> NodeJsErrorCode {
match self {
PackageResolveErrorKind::PkgJsonLoad(e) => e.code(),
PackageResolveErrorKind::InvalidModuleSpecifier(e) => e.code(),
PackageResolveErrorKind::PackageFolderResolve(e) => e.code(),
PackageResolveErrorKind::ExportsResolve(e) => e.code(),
PackageResolveErrorKind::SubpathResolve(e) => e.code(),
PackageResolveErrorKind::UrlToFilePath(_) => {
NodeJsErrorCode::ERR_INVALID_FILE_URL_PATH
}
}
}
}
#[derive(Debug, Boxed, JsError)]
pub struct PackageResolveError(pub Box<PackageResolveErrorKind>);
impl PackageResolveError {
pub fn maybe_specifier(&self) -> Option<Cow<'_, UrlOrPath>> {
match self.as_kind() {
PackageResolveErrorKind::ExportsResolve(err) => err.maybe_specifier(),
PackageResolveErrorKind::SubpathResolve(err) => err.maybe_specifier(),
PackageResolveErrorKind::UrlToFilePath(err) => {
Some(Cow::Owned(UrlOrPath::Url(err.0.clone())))
}
PackageResolveErrorKind::PkgJsonLoad(_)
| PackageResolveErrorKind::InvalidModuleSpecifier(_)
| PackageResolveErrorKind::PackageFolderResolve(_) => None,
}
}
}
#[derive(Debug, Error, JsError)]
pub enum PackageResolveErrorKind {
#[class(inherit)]
#[error(transparent)]
PkgJsonLoad(#[from] PackageJsonLoadError),
#[class(inherit)]
#[error(transparent)]
InvalidModuleSpecifier(#[from] InvalidModuleSpecifierError),
#[class(inherit)]
#[error(transparent)]
PackageFolderResolve(#[from] PackageFolderResolveError),
#[class(inherit)]
#[error(transparent)]
ExportsResolve(#[from] PackageExportsResolveError),
#[class(inherit)]
#[error(transparent)]
SubpathResolve(#[from] PackageSubpathResolveError),
#[class(inherit)]
#[error(transparent)]
#[property("code" = self.code())]
UrlToFilePath(#[from] UrlToFilePathError),
}
impl PackageResolveErrorKind {
pub fn as_types_not_found(&self) -> Option<&TypesNotFoundError> {
match self {
PackageResolveErrorKind::PkgJsonLoad(_)
| PackageResolveErrorKind::InvalidModuleSpecifier(_)
| PackageResolveErrorKind::PackageFolderResolve(_)
| PackageResolveErrorKind::ExportsResolve(_)
| PackageResolveErrorKind::UrlToFilePath(_) => None,
PackageResolveErrorKind::SubpathResolve(err) => err.as_types_not_found(),
}
}
}
#[derive(Debug, Error, JsError)]
#[class(generic)]
#[error("Failed joining '{path}' from '{base}'.")]
pub struct NodeResolveRelativeJoinError {
pub path: String,
pub base: Url,
#[source]
pub source: url::ParseError,
}
#[derive(Debug, Error, JsError)]
#[class(generic)]
#[error("Failed resolving specifier from data url referrer.")]
pub struct DataUrlReferrerError {
#[source]
pub source: url::ParseError,
}
#[derive(Debug, Boxed, JsError)]
pub struct NodeResolveError(pub Box<NodeResolveErrorKind>);
impl NodeResolveError {
pub fn maybe_specifier(&self) -> Option<Cow<'_, UrlOrPath>> {
match self.as_kind() {
NodeResolveErrorKind::PathToUrl(err) => {
Some(Cow::Owned(UrlOrPath::Path(err.0.clone())))
}
NodeResolveErrorKind::UrlToFilePath(err) => {
Some(Cow::Owned(UrlOrPath::Url(err.0.clone())))
}
NodeResolveErrorKind::PackageImportsResolve(err) => err.maybe_specifier(),
NodeResolveErrorKind::PackageResolve(err) => err.maybe_specifier(),
NodeResolveErrorKind::TypesNotFound(err) => {
Some(Cow::Borrowed(&err.0.code_specifier))
}
NodeResolveErrorKind::UnknownBuiltInNodeModule(err) => {
err.maybe_specifier().map(|u| Cow::Owned(UrlOrPath::Url(u)))
}
NodeResolveErrorKind::FinalizeResolution(err) => err.maybe_specifier(),
NodeResolveErrorKind::UnsupportedEsmUrlScheme(_)
| NodeResolveErrorKind::DataUrlReferrer(_)
| NodeResolveErrorKind::RelativeJoin(_) => None,
}
}
}
#[derive(Debug, Error, JsError)]
pub enum NodeResolveErrorKind {
#[class(inherit)]
#[error(transparent)]
RelativeJoin(#[from] NodeResolveRelativeJoinError),
#[class(inherit)]
#[error(transparent)]
PathToUrl(#[from] deno_path_util::PathToUrlError),
#[class(inherit)]
#[error(transparent)]
UrlToFilePath(#[from] deno_path_util::UrlToFilePathError),
#[class(inherit)]
#[error(transparent)]
PackageImportsResolve(#[from] PackageImportsResolveError),
#[class(inherit)]
#[error(transparent)]
UnsupportedEsmUrlScheme(#[from] UnsupportedEsmUrlSchemeError),
#[class(inherit)]
#[error(transparent)]
DataUrlReferrer(#[from] DataUrlReferrerError),
#[class(inherit)]
#[error(transparent)]
PackageResolve(#[from] PackageResolveError),
#[class(inherit)]
#[error(transparent)]
TypesNotFound(#[from] TypesNotFoundError),
#[class(inherit)]
#[error(transparent)]
UnknownBuiltInNodeModule(#[from] UnknownBuiltInNodeModuleError),
#[class(inherit)]
#[error(transparent)]
FinalizeResolution(#[from] FinalizeResolutionError),
}
impl NodeResolveErrorKind {
pub fn as_types_not_found(&self) -> Option<&TypesNotFoundError> {
match self {
NodeResolveErrorKind::TypesNotFound(not_found) => Some(not_found),
NodeResolveErrorKind::PackageImportsResolve(err) => {
err.as_kind().as_types_not_found()
}
NodeResolveErrorKind::PackageResolve(package_resolve_error) => {
package_resolve_error.as_types_not_found()
}
NodeResolveErrorKind::UnsupportedEsmUrlScheme(_)
| NodeResolveErrorKind::DataUrlReferrer(_)
| NodeResolveErrorKind::FinalizeResolution(_)
| NodeResolveErrorKind::RelativeJoin(_)
| NodeResolveErrorKind::PathToUrl(_)
| NodeResolveErrorKind::UnknownBuiltInNodeModule(_)
| NodeResolveErrorKind::UrlToFilePath(_) => None,
}
}
pub fn maybe_code(&self) -> Option<NodeJsErrorCode> {
match self {
NodeResolveErrorKind::RelativeJoin(_) => None,
NodeResolveErrorKind::PathToUrl(_) => None,
NodeResolveErrorKind::UrlToFilePath(_) => None,
NodeResolveErrorKind::PackageImportsResolve(e) => Some(e.code()),
NodeResolveErrorKind::UnsupportedEsmUrlScheme(e) => Some(e.code()),
NodeResolveErrorKind::DataUrlReferrer(_) => None,
NodeResolveErrorKind::PackageResolve(e) => Some(e.code()),
NodeResolveErrorKind::TypesNotFound(e) => Some(e.code()),
NodeResolveErrorKind::UnknownBuiltInNodeModule(e) => Some(e.code()),
NodeResolveErrorKind::FinalizeResolution(e) => Some(e.code()),
}
}
}
#[derive(Debug, Boxed, JsError)]
pub struct FinalizeResolutionError(pub Box<FinalizeResolutionErrorKind>);
impl FinalizeResolutionError {
pub fn maybe_specifier(&self) -> Option<Cow<'_, UrlOrPath>> {
match self.as_kind() {
FinalizeResolutionErrorKind::ModuleNotFound(err) => {
Some(Cow::Borrowed(&err.specifier))
}
FinalizeResolutionErrorKind::TypesNotFound(err) => {
Some(Cow::Borrowed(&err.0.code_specifier))
}
FinalizeResolutionErrorKind::UnsupportedDirImport(err) => {
Some(Cow::Borrowed(&err.dir_url))
}
FinalizeResolutionErrorKind::PackageSubpathResolve(err) => {
err.maybe_specifier()
}
FinalizeResolutionErrorKind::InvalidModuleSpecifierError(_)
| FinalizeResolutionErrorKind::UrlToFilePath(_) => None,
}
}
pub fn as_types_not_found(&self) -> Option<&TypesNotFoundError> {
match self.as_kind() {
FinalizeResolutionErrorKind::TypesNotFound(err) => Some(err),
FinalizeResolutionErrorKind::PackageSubpathResolve(err) => {
err.as_types_not_found()
}
FinalizeResolutionErrorKind::ModuleNotFound(_)
| FinalizeResolutionErrorKind::UnsupportedDirImport(_)
| FinalizeResolutionErrorKind::InvalidModuleSpecifierError(_)
| FinalizeResolutionErrorKind::UrlToFilePath(_) => None,
}
}
}
#[derive(Debug, Error, JsError)]
pub enum FinalizeResolutionErrorKind {
#[class(inherit)]
#[error(transparent)]
InvalidModuleSpecifierError(#[from] InvalidModuleSpecifierError),
#[class(inherit)]
#[error(transparent)]
ModuleNotFound(#[from] ModuleNotFoundError),
#[class(inherit)]
#[error(transparent)]
TypesNotFound(#[from] TypesNotFoundError),
#[class(inherit)]
#[error(transparent)]
PackageSubpathResolve(#[from] PackageSubpathResolveError),
#[class(inherit)]
#[error(transparent)]
UnsupportedDirImport(#[from] UnsupportedDirImportError),
#[class(inherit)]
#[error(transparent)]
#[property("code" = self.code())]
UrlToFilePath(#[from] deno_path_util::UrlToFilePathError),
}
impl NodeJsErrorCoded for FinalizeResolutionErrorKind {
fn code(&self) -> NodeJsErrorCode {
match self {
FinalizeResolutionErrorKind::InvalidModuleSpecifierError(e) => e.code(),
FinalizeResolutionErrorKind::ModuleNotFound(e) => e.code(),
FinalizeResolutionErrorKind::TypesNotFound(e) => e.code(),
FinalizeResolutionErrorKind::PackageSubpathResolve(e) => e.code(),
FinalizeResolutionErrorKind::UnsupportedDirImport(e) => e.code(),
FinalizeResolutionErrorKind::UrlToFilePath(_) => {
NodeJsErrorCode::ERR_INVALID_FILE_URL_PATH
}
}
}
}
/// Equivalent of Node.js's `ERR_MODULE_NOT_FOUND` error.
///
/// `suggested_ext` optionally carries a file extension (without the dot)
/// that would have made the specifier resolve; when present, the message
/// includes a "Did you mean…" hint.
#[derive(Debug, Error, JsError)]
#[class(generic)]
#[error(
  "[{}] Cannot find module '{}'{}{}",
  self.code(),
  specifier,
  maybe_referrer.as_ref().map(|referrer| format!(" imported from '{}'", referrer)).unwrap_or_default(),
  suggested_ext.as_ref().map(|m| format!("\nDid you mean to import with the \".{}\" extension?", m)).unwrap_or_default()
)]
#[property("code" = self.code())]
pub struct ModuleNotFoundError {
  pub specifier: UrlOrPath,
  pub maybe_referrer: Option<UrlOrPath>,
  pub suggested_ext: Option<&'static str>,
}
impl NodeJsErrorCoded for ModuleNotFoundError {
  fn code(&self) -> NodeJsErrorCode {
    NodeJsErrorCode::ERR_MODULE_NOT_FOUND
  }
}
/// Equivalent of Node.js's `ERR_UNSUPPORTED_DIR_IMPORT` error: an ESM
/// import resolved to a directory, which ES resolution does not allow.
///
/// `suggestion` optionally carries a concrete specifier to import instead.
#[derive(Debug, Error, JsError)]
#[class(generic)]
#[error(
  "[{}] Directory import '{}' is not supported resolving ES modules{}{}",
  self.code(),
  dir_url,
  maybe_referrer.as_ref().map(|referrer| format!(" imported from '{}'", referrer)).unwrap_or_default(),
  suggestion.as_ref().map(|suggestion| format!("\nDid you mean to import '{suggestion}'?")).unwrap_or_default(),
)]
#[property("code" = self.code())]
pub struct UnsupportedDirImportError {
  pub dir_url: UrlOrPath,
  pub maybe_referrer: Option<UrlOrPath>,
  pub suggestion: Option<String>,
}
impl NodeJsErrorCoded for UnsupportedDirImportError {
  fn code(&self) -> NodeJsErrorCode {
    NodeJsErrorCode::ERR_UNSUPPORTED_DIR_IMPORT
  }
}
/// Equivalent of Node.js's `ERR_INVALID_PACKAGE_TARGET` error: a
/// package.json "exports"/"imports" entry mapped to an invalid target.
///
/// `Display` is implemented by hand (below) because the message shape
/// varies with `sub_path` and `is_import`.
#[derive(Debug, JsError)]
#[class(generic)]
#[property("code" = self.code())]
pub struct InvalidPackageTargetError {
  pub pkg_json_path: PathBuf,
  pub sub_path: String,
  pub target: String,
  pub is_import: bool,
  pub maybe_referrer: Option<UrlOrPath>,
}
impl std::error::Error for InvalidPackageTargetError {}
impl std::fmt::Display for InvalidPackageTargetError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let rel_error = !self.is_import
&& !self.target.is_empty()
&& !self.target.starts_with("./");
f.write_char('[')?;
f.write_str(self.code().as_str())?;
f.write_char(']')?;
if self.sub_path == "." {
assert!(!self.is_import);
write!(
f,
" Invalid \"exports\" main target {} defined in the package config {}",
self.target,
self.pkg_json_path.display()
)?;
} else {
let ie = if self.is_import { "imports" } else { "exports" };
write!(
f,
" Invalid \"{}\" target {} defined for '{}' in the package config {}",
ie,
self.target,
self.sub_path,
self.pkg_json_path.display()
)?;
};
if let Some(referrer) = &self.maybe_referrer {
write!(f, " imported from '{}'", referrer)?;
}
if rel_error {
write!(f, "; target must start with \"./\"")?;
}
Ok(())
}
}
impl NodeJsErrorCoded for InvalidPackageTargetError {
  /// Always `ERR_INVALID_PACKAGE_TARGET`, matching Node.js.
  fn code(&self) -> NodeJsErrorCode {
    NodeJsErrorCode::ERR_INVALID_PACKAGE_TARGET
  }
}
/// Equivalent of Node.js's `ERR_PACKAGE_PATH_NOT_EXPORTED` error: the
/// requested subpath is not listed in the package's "exports".
#[derive(Debug, JsError)]
#[class(generic)]
#[property("code" = self.code())]
pub struct PackagePathNotExportedError {
  pub pkg_json_path: PathBuf,
  pub subpath: String,
  pub maybe_referrer: Option<UrlOrPath>,
  // whether resolution was for execution or for types; only changes the
  // message wording (see Display below)
  pub resolution_kind: NodeResolutionKind,
}
impl NodeJsErrorCoded for PackagePathNotExportedError {
  fn code(&self) -> NodeJsErrorCode {
    NodeJsErrorCode::ERR_PACKAGE_PATH_NOT_EXPORTED
  }
}
impl std::error::Error for PackagePathNotExportedError {}
impl std::fmt::Display for PackagePathNotExportedError {
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    // "[ERR_PACKAGE_PATH_NOT_EXPORTED]" prefix, matching Node's format
    f.write_char('[')?;
    f.write_str(self.code().as_str())?;
    f.write_char(']')?;
    let types_msg = match self.resolution_kind {
      NodeResolutionKind::Execution => String::new(),
      NodeResolutionKind::Types => " for types".to_string(),
    };
    // "." means the package's main entry was requested
    if self.subpath == "." {
      write!(
        f,
        " No \"exports\" main defined{} in '{}'",
        types_msg,
        self.pkg_json_path.display()
      )?;
    } else {
      write!(
        f,
        " Package subpath '{}' is not defined{} by \"exports\" in '{}'",
        self.subpath,
        types_msg,
        self.pkg_json_path.display()
      )?;
    };
    if let Some(referrer) = &self.maybe_referrer {
      write!(f, " imported from '{}'", referrer)?;
    }
    Ok(())
  }
}
/// Equivalent of Node.js's `ERR_UNSUPPORTED_ESM_URL_SCHEME` error: the
/// specifier parsed to a URL with a scheme the ESM loader cannot handle.
///
/// NOTE(review): the `len() == 2` check presumably detects a Windows
/// drive-letter prefix being mis-parsed as a URL scheme — confirm how
/// `url_scheme` is populated at the call sites.
#[derive(Debug, Clone, Error, JsError)]
#[class(type)]
#[error(
  "[{}] Only file and data URLs are supported by the default ESM loader.{} Received protocol '{}'",
  self.code(),
  // wording fixed to match Node.js: "absolute paths must be valid file:// URLs"
  // (was "absolute path must be valid file:// URLS")
  if cfg!(windows) && url_scheme.len() == 2 { " On Windows, absolute paths must be valid file:// URLs."} else { "" },
  url_scheme
)]
#[property("code" = self.code())]
pub struct UnsupportedEsmUrlSchemeError {
  pub url_scheme: String,
}
impl NodeJsErrorCoded for UnsupportedEsmUrlSchemeError {
  fn code(&self) -> NodeJsErrorCode {
    NodeJsErrorCode::ERR_UNSUPPORTED_ESM_URL_SCHEME
  }
}
/// The package.json that a binary export should be resolved from does
/// not exist on disk.
#[derive(Debug, Error, JsError)]
#[class(generic)]
#[error("Failed resolving binary export. '{}' did not exist", pkg_json_path.display())]
pub struct MissingPkgJsonError {
  pub pkg_json_path: PathBuf,
}
#[derive(Debug, Error, JsError)]
pub enum ResolvePkgJsonBinExportError {
#[class(inherit)]
#[error(transparent)]
ResolvePkgNpmBinaryCommands(#[from] ResolvePkgNpmBinaryCommandsError),
#[class(generic)]
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | true |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/node_resolver/analyze.rs | libs/node_resolver/analyze.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::collections::BTreeSet;
use std::collections::HashSet;
use std::path::Path;
use std::path::PathBuf;
use deno_error::JsErrorBox;
use deno_path_util::url_to_file_path;
use futures::FutureExt;
use futures::StreamExt;
use futures::future::LocalBoxFuture;
use futures::stream::FuturesUnordered;
use once_cell::sync::Lazy;
use serde::Deserialize;
use serde::Serialize;
use url::Url;
use crate::InNpmPackageChecker;
use crate::IsBuiltInNodeModuleChecker;
use crate::NodeResolutionKind;
use crate::NodeResolverSys;
use crate::NpmPackageFolderResolver;
use crate::PackageJsonResolverRc;
use crate::PathClean;
use crate::ResolutionMode;
use crate::UrlOrPath;
use crate::UrlOrPathRef;
use crate::errors::ModuleNotFoundError;
use crate::resolution::NodeResolverRc;
use crate::resolution::parse_npm_pkg_name;
/// Result of analyzing a module that was expected to be CommonJS.
#[derive(Debug, Clone)]
pub enum CjsAnalysis<'a> {
  /// File was found to be an ES module and the translator should
  /// load the code as ESM.
  Esm(Cow<'a, str>, Option<CjsAnalysisExports>),
  /// File is CommonJS; carries its discovered exports and re-exports.
  Cjs(CjsAnalysisExports),
}
/// Export names and re-export specifiers discovered by static analysis.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CjsAnalysisExports {
  /// Named exports found in the module.
  pub exports: Vec<String>,
  /// Module specifiers this module re-exports from.
  pub reexports: Vec<String>,
}
/// What parts of an ES module should be analyzed.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum EsmAnalysisMode {
  /// Only capture the source text.
  SourceOnly,
  /// Capture the source text plus its imports and exports.
  SourceImportsAndExports,
}
/// Code analyzer for CJS and ESM files.
#[async_trait::async_trait(?Send)]
pub trait CjsCodeAnalyzer {
  /// Analyzes CommonJs code for exports and reexports, which is
  /// then used to determine the wrapper ESM module exports.
  ///
  /// Note that the source is provided by the caller when the caller
  /// already has it. If the source is needed by the implementation,
  /// then it can use the provided source, or otherwise load it if
  /// necessary.
  async fn analyze_cjs<'a>(
    &self,
    specifier: &Url,
    maybe_source: Option<Cow<'a, str>>,
    // how much analysis to do if the file turns out to be ESM
    esm_analysis_mode: EsmAnalysisMode,
  ) -> Result<CjsAnalysis<'a>, JsErrorBox>;
}
/// Outcome of [`CjsModuleExportAnalyzer::analyze_all_exports`].
pub enum ResolvedCjsAnalysis<'a> {
  /// Entry module turned out to be ESM; its source is returned as-is.
  Esm(Cow<'a, str>),
  /// Entry module is CJS; the recursively collected export names
  /// (a `BTreeSet` so the iteration order is deterministic).
  Cjs(BTreeSet<String>),
}
/// System-trait bound required by [`CjsModuleExportAnalyzer`].
#[sys_traits::auto_impl]
pub trait CjsModuleExportAnalyzerSys: NodeResolverSys {}
/// Reference-counted alias for [`CjsModuleExportAnalyzer`]
/// (`deno_maybe_sync::MaybeArc`, so `Rc` or `Arc` depending on build
/// configuration).
#[allow(clippy::disallowed_types)]
pub type CjsModuleExportAnalyzerRc<
  TCjsCodeAnalyzer,
  TInNpmPackageChecker,
  TIsBuiltInNodeModuleChecker,
  TNpmPackageFolderResolver,
  TSys,
> = deno_maybe_sync::MaybeArc<
  CjsModuleExportAnalyzer<
    TCjsCodeAnalyzer,
    TInNpmPackageChecker,
    TIsBuiltInNodeModuleChecker,
    TNpmPackageFolderResolver,
    TSys,
  >,
>;
/// Recursively analyzes a CommonJS module graph to determine the set of
/// names an ESM wrapper module should re-export.
pub struct CjsModuleExportAnalyzer<
  TCjsCodeAnalyzer: CjsCodeAnalyzer,
  TInNpmPackageChecker: InNpmPackageChecker,
  TIsBuiltInNodeModuleChecker: IsBuiltInNodeModuleChecker,
  TNpmPackageFolderResolver: NpmPackageFolderResolver,
  TSys: CjsModuleExportAnalyzerSys,
> {
  /// Performs the per-file static analysis.
  cjs_code_analyzer: TCjsCodeAnalyzer,
  /// Used to stop the ancestor-directory fallback once outside node_modules.
  in_npm_pkg_checker: TInNpmPackageChecker,
  node_resolver: NodeResolverRc<
    TInNpmPackageChecker,
    TIsBuiltInNodeModuleChecker,
    TNpmPackageFolderResolver,
    TSys,
  >,
  /// Resolves a bare specifier to its package folder.
  npm_resolver: TNpmPackageFolderResolver,
  pkg_json_resolver: PackageJsonResolverRc<TSys>,
  sys: TSys,
}
impl<
  TCjsCodeAnalyzer: CjsCodeAnalyzer,
  TInNpmPackageChecker: InNpmPackageChecker,
  TIsBuiltInNodeModuleChecker: IsBuiltInNodeModuleChecker,
  TNpmPackageFolderResolver: NpmPackageFolderResolver,
  TSys: CjsModuleExportAnalyzerSys,
>
  CjsModuleExportAnalyzer<
    TCjsCodeAnalyzer,
    TInNpmPackageChecker,
    TIsBuiltInNodeModuleChecker,
    TNpmPackageFolderResolver,
    TSys,
  >
{
  /// Creates a new analyzer from its component analyzer/resolvers.
  pub fn new(
    cjs_code_analyzer: TCjsCodeAnalyzer,
    in_npm_pkg_checker: TInNpmPackageChecker,
    node_resolver: NodeResolverRc<
      TInNpmPackageChecker,
      TIsBuiltInNodeModuleChecker,
      TNpmPackageFolderResolver,
      TSys,
    >,
    npm_resolver: TNpmPackageFolderResolver,
    pkg_json_resolver: PackageJsonResolverRc<TSys>,
    sys: TSys,
  ) -> Self {
    Self {
      cjs_code_analyzer,
      in_npm_pkg_checker,
      node_resolver,
      npm_resolver,
      pkg_json_resolver,
      sys,
    }
  }
  /// Analyzes `entry_specifier` (with optional pre-loaded `source`).
  ///
  /// If it turns out to be ESM, the source is returned unchanged;
  /// otherwise its exports plus the exports of everything it re-exports
  /// (collected recursively) are returned. Only the first error (sorted
  /// by message, for determinism) is surfaced.
  pub async fn analyze_all_exports<'a>(
    &self,
    entry_specifier: &Url,
    source: Option<Cow<'a, str>>,
  ) -> Result<ResolvedCjsAnalysis<'a>, TranslateCjsToEsmError> {
    let analysis = self
      .cjs_code_analyzer
      .analyze_cjs(entry_specifier, source, EsmAnalysisMode::SourceOnly)
      .await
      .map_err(TranslateCjsToEsmError::CjsCodeAnalysis)?;
    let analysis = match analysis {
      CjsAnalysis::Esm(source, _) => {
        return Ok(ResolvedCjsAnalysis::Esm(source));
      }
      CjsAnalysis::Cjs(analysis) => analysis,
    };
    // use a BTreeSet to make the output deterministic for v8's code cache
    let mut all_exports = analysis.exports.into_iter().collect::<BTreeSet<_>>();
    if !analysis.reexports.is_empty() {
      let mut errors = Vec::new();
      self
        .analyze_reexports(
          entry_specifier,
          analysis.reexports,
          &mut all_exports,
          &mut errors,
        )
        .await;
      // surface errors afterwards in a deterministic way
      if !errors.is_empty() {
        errors.sort_by_cached_key(|e| e.to_string());
        return Err(TranslateCjsToEsmError::ExportAnalysis(errors.remove(0)));
      }
    }
    // Analyzes the transitive re-export graph concurrently (FuturesUnordered),
    // de-duplicating visited specifiers, and merges every discovered export
    // (except "default") into `all_exports`.
    Ok(ResolvedCjsAnalysis::Cjs(all_exports))
  }
  #[allow(clippy::needless_lifetimes)]
  async fn analyze_reexports<'a>(
    &'a self,
    entry_specifier: &url::Url,
    reexports: Vec<String>,
    all_exports: &mut BTreeSet<String>,
    // this goes through the modules concurrently, so collect
    // the errors in order to be deterministic
    errors: &mut Vec<JsErrorBox>,
  ) {
    struct Analysis {
      reexport_specifier: url::Url,
      analysis: CjsAnalysis<'static>,
    }
    type AnalysisFuture<'a> = LocalBoxFuture<'a, Result<Analysis, JsErrorBox>>;
    // guards against cycles and duplicate work in the re-export graph
    let mut handled_reexports: HashSet<Url> = HashSet::default();
    handled_reexports.insert(entry_specifier.clone());
    let mut analyze_futures: FuturesUnordered<AnalysisFuture<'a>> =
      FuturesUnordered::new();
    let cjs_code_analyzer = &self.cjs_code_analyzer;
    let mut handle_reexports =
      |referrer: url::Url,
       reexports: Vec<String>,
       analyze_futures: &mut FuturesUnordered<AnalysisFuture<'a>>,
       errors: &mut Vec<JsErrorBox>| {
        // 1. Resolve the re-exports and start a future to analyze each one
        for reexport in reexports {
          let result = self
            .resolve(
              &reexport,
              &referrer,
              // FIXME(bartlomieju): check if these conditions are okay, probably
              // should be `deno-require`, because `deno` is already used in `esm_resolver.rs`
              &[
                Cow::Borrowed("deno"),
                Cow::Borrowed("node"),
                Cow::Borrowed("require"),
                Cow::Borrowed("default"),
              ],
              NodeResolutionKind::Execution,
            )
            .and_then(|value| {
              value
                .map(|url_or_path| url_or_path.into_url())
                .transpose()
                .map_err(JsErrorBox::from_err)
            });
          let reexport_specifier = match result {
            Ok(Some(specifier)) => specifier,
            // Ok(None) means the package wasn't found; skip silently
            Ok(None) => continue,
            Err(err) => {
              errors.push(err);
              continue;
            }
          };
          if !handled_reexports.insert(reexport_specifier.clone()) {
            continue;
          }
          let referrer = referrer.clone();
          let future = async move {
            let analysis = cjs_code_analyzer
              .analyze_cjs(
                &reexport_specifier,
                None,
                EsmAnalysisMode::SourceImportsAndExports,
              )
              .await
              .map_err(|source| {
                JsErrorBox::from_err(CjsAnalysisCouldNotLoadError {
                  reexport,
                  reexport_specifier: reexport_specifier.clone(),
                  referrer: referrer.clone(),
                  source,
                })
              })?;
            Ok(Analysis {
              reexport_specifier,
              analysis,
            })
          }
          .boxed_local();
          analyze_futures.push(future);
        }
      };
    handle_reexports(
      entry_specifier.clone(),
      reexports,
      &mut analyze_futures,
      errors,
    );
    while let Some(analysis_result) = analyze_futures.next().await {
      // 2. Look at the analysis result and resolve its exports and re-exports
      let Analysis {
        reexport_specifier,
        analysis,
      } = match analysis_result {
        Ok(analysis) => analysis,
        Err(err) => {
          errors.push(err);
          continue;
        }
      };
      match analysis {
        CjsAnalysis::Cjs(analysis) | CjsAnalysis::Esm(_, Some(analysis)) => {
          if !analysis.reexports.is_empty() {
            handle_reexports(
              reexport_specifier.clone(),
              analysis.reexports,
              &mut analyze_futures,
              errors,
            );
          }
          // "default" is handled separately by the wrapper module
          all_exports.extend(
            analysis
              .exports
              .into_iter()
              .filter(|e| e.as_str() != "default"),
          );
        }
        CjsAnalysis::Esm(_, None) => {
          // should not hit this due to EsmAnalysisMode::SourceImportsAndExports
          debug_assert!(false);
        }
      }
    }
  }
  /// Simplified require-style resolution used only for re-export
  /// analysis. Returns `Ok(None)` when the package cannot be found so
  /// the caller can skip the re-export instead of erroring.
  // todo(dsherret): what is going on here? Isn't this a bunch of duplicate code?
  fn resolve(
    &self,
    specifier: &str,
    referrer: &Url,
    conditions: &[Cow<'static, str>],
    resolution_kind: NodeResolutionKind,
  ) -> Result<Option<UrlOrPath>, JsErrorBox> {
    // absolute-path specifiers are not implemented here
    if specifier.starts_with('/') {
      todo!();
    }
    let referrer = UrlOrPathRef::from_url(referrer);
    let referrer_path = referrer.path().unwrap();
    // relative specifiers resolve against the referrer's directory
    if specifier.starts_with("./") || specifier.starts_with("../") {
      if let Some(parent) = referrer_path.parent() {
        return self
          .file_extension_probe(parent.join(specifier), referrer_path)
          .map(|p| Some(UrlOrPath::Path(p)));
      } else {
        todo!();
      }
    }
    // We've got a bare specifier or maybe bare_specifier/blah.js"
    let (package_specifier, package_subpath, _is_scoped) =
      parse_npm_pkg_name(specifier, &referrer).map_err(JsErrorBox::from_err)?;
    let module_dir = match self
      .npm_resolver
      .resolve_package_folder_from_package(package_specifier, &referrer)
    {
      Err(err)
        if matches!(
          err.as_kind(),
          crate::errors::PackageFolderResolveErrorKind::PackageNotFound(..)
        ) =>
      {
        return Ok(None);
      }
      other => other.map_err(JsErrorBox::from_err)?,
    };
    let package_json_path = module_dir.join("package.json");
    let maybe_package_json = self
      .pkg_json_resolver
      .load_package_json(&package_json_path)
      .map_err(JsErrorBox::from_err)?;
    if let Some(package_json) = maybe_package_json {
      // "exports" takes precedence over main-field resolution
      if let Some(exports) = &package_json.exports {
        return Some(
          self
            .node_resolver
            .package_exports_resolve(
              &package_json_path,
              &package_subpath,
              exports,
              Some(&referrer),
              ResolutionMode::Require,
              conditions,
              resolution_kind,
            )
            .map_err(JsErrorBox::from_err),
        )
        .transpose();
      }
      // old school
      if package_subpath != "." {
        let d = module_dir.join(package_subpath.as_ref());
        if self.sys.fs_is_dir_no_err(&d) {
          // subdir might have a package.json that specifies the entrypoint
          let package_json_path = d.join("package.json");
          let maybe_package_json = self
            .pkg_json_resolver
            .load_package_json(&package_json_path)
            .map_err(JsErrorBox::from_err)?;
          if let Some(package_json) = maybe_package_json
            && let Some(main) =
              self.node_resolver.legacy_fallback_resolve(&package_json)
          {
            return Ok(Some(UrlOrPath::Path(d.join(main).clean())));
          }
          return Ok(Some(UrlOrPath::Path(d.join("index.js").clean())));
        }
        return self
          .file_extension_probe(d, referrer_path)
          .map(|p| Some(UrlOrPath::Path(p)));
      } else if let Some(main) =
        self.node_resolver.legacy_fallback_resolve(&package_json)
      {
        return Ok(Some(UrlOrPath::Path(module_dir.join(main).clean())));
      } else {
        return Ok(Some(UrlOrPath::Path(module_dir.join("index.js").clean())));
      }
    }
    // as a fallback, attempt to resolve it via the ancestor directories
    let mut last = referrer_path;
    while let Some(parent) = last.parent() {
      if !self.in_npm_pkg_checker.in_npm_package_at_dir_path(parent) {
        break;
      }
      let path = if parent.ends_with("node_modules") {
        parent.join(specifier)
      } else {
        parent.join("node_modules").join(specifier)
      };
      if let Ok(path) = self.file_extension_probe(path, referrer_path) {
        return Ok(Some(UrlOrPath::Path(path)));
      }
      last = parent;
    }
    Err(JsErrorBox::from_err(ModuleNotFoundError {
      specifier: UrlOrPath::Path(PathBuf::from(specifier)),
      maybe_referrer: Some(UrlOrPath::Path(referrer_path.to_path_buf())),
      suggested_ext: None,
    }))
  }
  /// CJS-style extension probing: tries `p` itself, then `p.js`,
  /// `p/index.js` (when `p` is a directory), and finally `p.json`;
  /// errors with `ModuleNotFoundError` when nothing matches.
  fn file_extension_probe(
    &self,
    p: PathBuf,
    referrer: &Path,
  ) -> Result<PathBuf, JsErrorBox> {
    let p = p.clean();
    if self.sys.fs_exists_no_err(&p) {
      let file_name = p.file_name().unwrap();
      // a sibling "<name>.js" file wins over a same-named directory
      let p_js =
        p.with_file_name(format!("{}.js", file_name.to_str().unwrap()));
      if self.sys.fs_is_file_no_err(&p_js) {
        return Ok(p_js);
      } else if self.sys.fs_is_dir_no_err(&p) {
        return Ok(p.join("index.js"));
      } else {
        return Ok(p);
      }
    } else if let Some(file_name) = p.file_name() {
      {
        let p_js =
          p.with_file_name(format!("{}.js", file_name.to_str().unwrap()));
        if self.sys.fs_is_file_no_err(&p_js) {
          return Ok(p_js);
        }
      }
      {
        let p_json =
          p.with_file_name(format!("{}.json", file_name.to_str().unwrap()));
        if self.sys.fs_is_file_no_err(&p_json) {
          return Ok(p_json);
        }
      }
    }
    Err(JsErrorBox::from_err(ModuleNotFoundError {
      specifier: UrlOrPath::Path(p),
      maybe_referrer: Some(UrlOrPath::Path(referrer.to_path_buf())),
      suggested_ext: None,
    }))
  }
}
/// Error produced while translating a CJS module to an ESM wrapper.
#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum TranslateCjsToEsmError {
  /// Static analysis of the entry module itself failed.
  #[class(inherit)]
  #[error(transparent)]
  CjsCodeAnalysis(JsErrorBox),
  /// Analyzing one of the (transitive) re-exported modules failed.
  #[class(inherit)]
  #[error(transparent)]
  ExportAnalysis(JsErrorBox),
}
/// A re-exported module could not be loaded or analyzed.
#[derive(Debug, thiserror::Error, deno_error::JsError)]
#[class(generic)]
#[error(
  "Could not load '{reexport}' ({reexport_specifier}) referenced from {referrer}"
)]
pub struct CjsAnalysisCouldNotLoadError {
  /// The re-export specifier as written in the source.
  reexport: String,
  /// The specifier it resolved to.
  reexport_specifier: Url,
  /// The module containing the re-export.
  referrer: Url,
  #[source]
  source: JsErrorBox,
}
/// System-trait bound required by [`NodeCodeTranslator`].
#[sys_traits::auto_impl]
pub trait NodeCodeTranslatorSys: CjsModuleExportAnalyzerSys {}
/// Reference-counted alias for [`NodeCodeTranslator`].
#[allow(clippy::disallowed_types)]
pub type NodeCodeTranslatorRc<
  TCjsCodeAnalyzer,
  TInNpmPackageChecker,
  TIsBuiltInNodeModuleChecker,
  TNpmPackageFolderResolver,
  TSys,
> = deno_maybe_sync::MaybeArc<
  NodeCodeTranslator<
    TCjsCodeAnalyzer,
    TInNpmPackageChecker,
    TIsBuiltInNodeModuleChecker,
    TNpmPackageFolderResolver,
    TSys,
  >,
>;
/// Translates CommonJS modules into ESM wrapper modules using a
/// [`CjsModuleExportAnalyzer`].
pub struct NodeCodeTranslator<
  TCjsCodeAnalyzer: CjsCodeAnalyzer,
  TInNpmPackageChecker: InNpmPackageChecker,
  TIsBuiltInNodeModuleChecker: IsBuiltInNodeModuleChecker,
  TNpmPackageFolderResolver: NpmPackageFolderResolver,
  TSys: NodeCodeTranslatorSys,
> {
  module_export_analyzer: CjsModuleExportAnalyzerRc<
    TCjsCodeAnalyzer,
    TInNpmPackageChecker,
    TIsBuiltInNodeModuleChecker,
    TNpmPackageFolderResolver,
    TSys,
  >,
  // when Disabled, sources are passed through untranslated
  mode: NodeCodeTranslatorMode,
}
/// Controls whether CJS → ESM wrapper translation happens at all.
#[derive(Debug, Default, Clone, Copy)]
pub enum NodeCodeTranslatorMode {
  /// Pass sources through untouched.
  Disabled,
  /// Normal operation: analyze and wrap CJS modules.
  #[default]
  ModuleLoader,
}
impl<
  TCjsCodeAnalyzer: CjsCodeAnalyzer,
  TInNpmPackageChecker: InNpmPackageChecker,
  TIsBuiltInNodeModuleChecker: IsBuiltInNodeModuleChecker,
  TNpmPackageFolderResolver: NpmPackageFolderResolver,
  TSys: NodeCodeTranslatorSys,
>
  NodeCodeTranslator<
    TCjsCodeAnalyzer,
    TInNpmPackageChecker,
    TIsBuiltInNodeModuleChecker,
    TNpmPackageFolderResolver,
    TSys,
  >
{
  /// Creates a translator around the given export analyzer with the
  /// given mode.
  pub fn new(
    module_export_analyzer: CjsModuleExportAnalyzerRc<
      TCjsCodeAnalyzer,
      TInNpmPackageChecker,
      TIsBuiltInNodeModuleChecker,
      TNpmPackageFolderResolver,
      TSys,
    >,
    mode: NodeCodeTranslatorMode,
  ) -> Self {
    Self {
      module_export_analyzer,
      mode,
    }
  }
  /// Translates given CJS module into ESM. This function will perform static
  /// analysis on the file to find defined exports and reexports.
  ///
  /// For all discovered reexports the analysis will be performed recursively.
  ///
  /// If successful a source code for equivalent ES module is returned.
  pub async fn translate_cjs_to_esm<'a>(
    &self,
    entry_specifier: &Url,
    source: Option<Cow<'a, str>>,
  ) -> Result<Cow<'a, str>, TranslateCjsToEsmError> {
    // NOTE(review): when translation is disabled this panics if `source`
    // is None — presumably callers always pass the source in that mode;
    // confirm at the call sites.
    let all_exports = if matches!(self.mode, NodeCodeTranslatorMode::Disabled) {
      return Ok(source.unwrap());
    } else {
      let analysis = self
        .module_export_analyzer
        .analyze_all_exports(entry_specifier, source)
        .await?;
      match analysis {
        ResolvedCjsAnalysis::Esm(source) => return Ok(source),
        ResolvedCjsAnalysis::Cjs(all_exports) => all_exports,
      }
    };
    Ok(Cow::Owned(exports_to_wrapper_module(
      entry_specifier,
      &all_exports,
    )))
  }
}
/// Names that cannot be emitted as plain `export const <name>`
/// identifiers in the generated wrapper module; such exports go through
/// a temporary variable instead (see `add_export`).
///
/// NOTE(review): the list is broader than current ECMAScript reserved
/// words (it includes legacy words like "byte" and "goto") — presumably
/// intentionally conservative.
static RESERVED_WORDS: Lazy<HashSet<&str>> = Lazy::new(|| {
  HashSet::from([
    "abstract",
    "arguments",
    "async",
    "await",
    "boolean",
    "break",
    "byte",
    "case",
    "catch",
    "char",
    "class",
    "const",
    "continue",
    "debugger",
    "default",
    "delete",
    "do",
    "double",
    "else",
    "enum",
    "eval",
    "export",
    "extends",
    "false",
    "final",
    "finally",
    "float",
    "for",
    "function",
    "get",
    "goto",
    "if",
    "implements",
    "import",
    "in",
    "instanceof",
    "int",
    "interface",
    "let",
    "long",
    "mod",
    "native",
    "new",
    "null",
    "package",
    "private",
    "protected",
    "public",
    "return",
    "set",
    "short",
    "static",
    "super",
    "switch",
    "synchronized",
    "this",
    "throw",
    "throws",
    "transient",
    "true",
    "try",
    "typeof",
    "var",
    "void",
    "volatile",
    "while",
    "with",
    "yield",
  ])
});
fn exports_to_wrapper_module(
entry_specifier: &Url,
all_exports: &BTreeSet<String>,
) -> String {
let quoted_entry_specifier_text = to_double_quote_string(
url_to_file_path(entry_specifier).unwrap().to_str().unwrap(),
);
let export_names_with_quoted = all_exports
.iter()
.map(|export| (export.as_str(), to_double_quote_string(export)))
.collect::<Vec<_>>();
capacity_builder::StringBuilder::<String>::build(|builder| {
let mut temp_var_count = 0;
builder.append(
r#"import { createRequire as __internalCreateRequire, Module as __internalModule } from "node:module";
const require = __internalCreateRequire(import.meta.url);
let mod;
if (import.meta.main) {
mod = __internalModule._load("#,
);
builder.append("ed_entry_specifier_text);
builder.append(
r#", null, true)
} else {
mod = require("#,
);
builder.append("ed_entry_specifier_text);
builder.append(r#");
}
"#);
for (export_name, quoted_name) in &export_names_with_quoted {
if !matches!(*export_name, "default" | "module.exports") {
add_export(
builder,
export_name,
quoted_name,
|builder| {
builder.append("mod[");
builder.append(quoted_name);
builder.append("]");
},
&mut temp_var_count,
);
}
}
builder.append("export default mod;\n");
add_export(
builder,
"module.exports",
"\"module.exports\"",
|builder| builder.append("mod"),
&mut temp_var_count,
);
}).unwrap()
}
/// Appends one `export` statement for `name` to the wrapper module
/// being built.
///
/// Names that are reserved words or not valid identifiers are bound to a
/// generated `__deno_export_N__` temporary and re-exported under their
/// string name (`export { tmp as "the-name" }`).
fn add_export<'a>(
  builder: &mut capacity_builder::StringBuilder<'a, String>,
  name: &'a str,
  quoted_name: &'a str,
  build_initializer: impl FnOnce(&mut capacity_builder::StringBuilder<'a, String>),
  temp_var_count: &mut usize,
) {
  // strict ASCII-only identifier check; anything unusual falls back to
  // the string-name export path below
  fn is_valid_var_decl(name: &str) -> bool {
    // it's ok to be super strict here
    if name.is_empty() {
      return false;
    }
    if let Some(first) = name.chars().next()
      && !first.is_ascii_alphabetic()
      && first != '_'
      && first != '$'
    {
      return false;
    }
    name
      .chars()
      .all(|c| c.is_ascii_alphanumeric() || c == '_' || c == '$')
  }
  // TODO(bartlomieju): Node actually checks if a given export exists in `exports` object,
  // but it might not be necessary here since our analysis is more detailed?
  if RESERVED_WORDS.contains(name) || !is_valid_var_decl(name) {
    *temp_var_count += 1;
    // we can't create an identifier with a reserved word or invalid identifier name,
    // so assign it to a temporary variable that won't have a conflict, then re-export
    // it as a string
    builder.append("const __deno_export_");
    builder.append(*temp_var_count);
    builder.append("__ = ");
    build_initializer(builder);
    builder.append(";\nexport { __deno_export_");
    builder.append(*temp_var_count);
    builder.append("__ as ");
    builder.append(quoted_name);
    builder.append(" };\n");
  } else {
    builder.append("export const ");
    builder.append(name);
    builder.append(" = ");
    build_initializer(builder);
    builder.append(";\n");
  }
}
/// JSON-escapes `text` and wraps it in double quotes, yielding a valid
/// JS string literal.
fn to_double_quote_string(text: &str) -> String {
  // serde can handle this for us
  serde_json::to_string(text).unwrap()
}
#[cfg(test)]
mod tests {
  use pretty_assertions::assert_eq;
  use super::*;
  #[test]
  fn test_exports_to_wrapper_module() {
    let url = Url::parse("file:///test/test.ts").unwrap();
    // includes invalid identifiers ("3d", "dashed-export") and the
    // reserved word "static" to exercise the temp-var export path
    let exports = BTreeSet::from(
      ["static", "server", "app", "dashed-export", "3d"].map(|s| s.to_string()),
    );
    let text = exports_to_wrapper_module(&url, &exports);
    assert_eq!(
      text,
      r#"import { createRequire as __internalCreateRequire, Module as __internalModule } from "node:module";
const require = __internalCreateRequire(import.meta.url);
let mod;
if (import.meta.main) {
  mod = __internalModule._load("/test/test.ts", null, true)
} else {
  mod = require("/test/test.ts");
}
const __deno_export_1__ = mod["3d"];
export { __deno_export_1__ as "3d" };
export const app = mod["app"];
const __deno_export_2__ = mod["dashed-export"];
export { __deno_export_2__ as "dashed-export" };
export const server = mod["server"];
const __deno_export_3__ = mod["static"];
export { __deno_export_3__ as "static" };
export default mod;
const __deno_export_4__ = mod;
export { __deno_export_4__ as "module.exports" };
"#
    );
  }
  #[test]
  fn test_to_double_quote_string() {
    assert_eq!(to_double_quote_string("test"), "\"test\"");
    // control characters and quotes must be escaped JSON-style
    assert_eq!(
      to_double_quote_string("\r\n\t\"test"),
      "\"\\r\\n\\t\\\"test\""
    );
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/node_resolver/path.rs | libs/node_resolver/path.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::path::Component;
use std::path::Path;
use std::path::PathBuf;
use url::Url;
/// Either a URL or a file system path, so conversion between the two can
/// be deferred until one form is actually needed.
#[derive(Debug, Clone)]
pub enum UrlOrPath {
  Url(Url),
  Path(PathBuf),
}
impl UrlOrPath {
  /// Whether this refers to the local file system (any `Path`, or a URL
  /// with the `file` scheme).
  pub fn is_file(&self) -> bool {
    if let Self::Url(url) = self {
      url.scheme() == "file"
    } else {
      true
    }
  }
  /// Whether this is a `node:` builtin-module URL.
  pub fn is_node_url(&self) -> bool {
    match self {
      Self::Path(_) => false,
      Self::Url(url) => url.scheme() == "node",
    }
  }
  /// Converts into a path, translating a `file:` URL when necessary.
  pub fn into_path(
    self,
  ) -> Result<PathBuf, deno_path_util::UrlToFilePathError> {
    match self {
      Self::Path(path) => deno_path_util::url_to_file_path_and_back(&path),
      Self::Url(url) => deno_path_util::url_to_file_path(&url),
    }
  }
  /// Converts into a URL, translating a path when necessary.
  pub fn into_url(self) -> Result<Url, deno_path_util::PathToUrlError> {
    match self {
      Self::Path(path) => deno_path_util::url_from_file_path(&path),
      Self::Url(url) => Ok(url),
    }
  }
  /// Borrowed textual form: the URL string, or the (lossily decoded) path.
  pub fn to_string_lossy(&self) -> Cow<'_, str> {
    match self {
      Self::Path(path) => path.to_string_lossy(),
      Self::Url(url) => Cow::Borrowed(url.as_str()),
    }
  }
}
impl std::fmt::Display for UrlOrPath {
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    match self {
      UrlOrPath::Url(url) => url.fmt(f),
      UrlOrPath::Path(path) => {
        // prefer displaying a url
        match deno_path_util::url_from_file_path(path) {
          Ok(url) => url.fmt(f),
          Err(_) => {
            // fall back to the raw path when it can't be made into a URL
            write!(f, "{}", path.display())
          }
        }
      }
    }
  }
}
/// A lazily-converting borrow of a URL or path: whichever form it was
/// constructed from is stored, and the other form is computed (and
/// memoized) on first request.
pub struct UrlOrPathRef<'a> {
  url: once_cell::unsync::OnceCell<Cow<'a, Url>>,
  path: once_cell::unsync::OnceCell<Cow<'a, Path>>,
}
impl<'a> UrlOrPathRef<'a> {
  pub fn from_path(path: &'a Path) -> Self {
    Self {
      url: Default::default(),
      path: once_cell::unsync::OnceCell::with_value(Cow::Borrowed(path)),
    }
  }
  pub fn from_url(url: &'a Url) -> Self {
    Self {
      path: Default::default(),
      url: once_cell::unsync::OnceCell::with_value(Cow::Borrowed(url)),
    }
  }
  /// Returns the URL form, converting from the path on first call.
  pub fn url(&self) -> Result<&Url, deno_path_util::PathToUrlError> {
    self
      .url
      .get_or_try_init(|| {
        deno_path_util::url_from_file_path(self.path.get().unwrap())
          .map(Cow::Owned)
      })
      .map(|v| v.as_ref())
  }
  /// Returns the path form, converting from the URL on first call.
  pub fn path(&self) -> Result<&Path, deno_path_util::UrlToFilePathError> {
    self
      .path
      .get_or_try_init(|| {
        deno_path_util::url_to_file_path(self.url.get().unwrap())
          .map(Cow::Owned)
      })
      .map(|v| v.as_ref())
  }
  /// Returns an owned value suitable for display, preferring the URL form.
  pub fn display(&self) -> UrlOrPath {
    // prefer url
    if let Ok(url) = self.url() {
      UrlOrPath::Url(url.clone())
    } else {
      // this will always be set if url is None
      UrlOrPath::Path(self.path.get().unwrap().to_path_buf())
    }
  }
}
/// Extension to path_clean::PathClean
pub trait PathClean<T> {
  /// Returns a normalized copy of the path (`.` removed, `..` resolved).
  fn clean(&self) -> T;
}
impl PathClean<PathBuf> for PathBuf {
  fn clean(&self) -> PathBuf {
    // Returns false when the cleaned path still contains a "\." or "\.."
    // segment, i.e. path_clean left Windows-style dot components behind.
    fn is_clean_path(path: &Path) -> bool {
      let path = path.to_string_lossy();
      let mut current_index = 0;
      while let Some(index) = path[current_index..].find("\\.") {
        let trailing_index = index + current_index + 2;
        let mut trailing_chars = path[trailing_index..].chars();
        match trailing_chars.next() {
          // "\.." followed by a separator or end of string
          Some('.') => match trailing_chars.next() {
            Some('/') | Some('\\') | None => {
              return false;
            }
            _ => {}
          },
          // "\." followed by a separator
          Some('/') | Some('\\') => {
            return false;
          }
          _ => {}
        }
        current_index = trailing_index;
      }
      true
    }
    let path = path_clean::PathClean::clean(self);
    if cfg!(windows) && !is_clean_path(&path) {
      // temporary workaround because path_clean::PathClean::clean is
      // not good enough on windows
      let mut components = Vec::new();
      for component in path.components() {
        match component {
          Component::CurDir => {
            // skip
          }
          Component::ParentDir => {
            let maybe_last_component = components.pop();
            if !matches!(maybe_last_component, Some(Component::Normal(_))) {
              // ".." walked above a root/prefix — treated as a bug
              panic!("Error normalizing: {}", path.display());
            }
          }
          Component::Normal(_) | Component::RootDir | Component::Prefix(_) => {
            components.push(component);
          }
        }
      }
      components.into_iter().collect::<PathBuf>()
    } else {
      path
    }
  }
}
#[cfg(test)]
mod test {
  // Windows-only: exercises the manual component-normalization branch.
  #[cfg(windows)]
  #[test]
  fn test_path_clean() {
    use super::*;
    run_test("C:\\test\\./file.txt", "C:\\test\\file.txt");
    run_test("C:\\test\\../other/file.txt", "C:\\other\\file.txt");
    run_test("C:\\test\\../other\\file.txt", "C:\\other\\file.txt");
    fn run_test(input: &str, expected: &str) {
      assert_eq!(PathBuf::from(input).clean(), PathBuf::from(expected));
    }
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/node_resolver/lib.rs | libs/node_resolver/lib.rs | // Copyright 2018-2025 the Deno authors. MIT license.
#![deny(clippy::print_stderr)]
#![deny(clippy::print_stdout)]
// CJS analysis / CJS-to-ESM wrapper translation.
pub mod analyze;
mod builtin_modules;
pub mod cache;
pub mod errors;
mod npm;
mod package_json;
mod path;
mod resolution;
// Built-in node module detection.
pub use builtin_modules::DENO_SUPPORTED_BUILTIN_NODE_MODULES;
pub use builtin_modules::DenoIsBuiltInNodeModuleChecker;
pub use builtin_modules::IsBuiltInNodeModuleChecker;
// Resolution caching.
pub use cache::NodeResolutionCache;
pub use cache::NodeResolutionCacheRc;
pub use deno_package_json::PackageJson;
pub use npm::InNpmPackageChecker;
pub use npm::NpmPackageFolderResolver;
// package.json loading/caching.
pub use package_json::PackageJsonCacheRc;
pub use package_json::PackageJsonResolver;
pub use package_json::PackageJsonResolverRc;
pub use package_json::PackageJsonThreadLocalCache;
// URL/path helpers.
pub use path::PathClean;
pub use path::UrlOrPath;
pub use path::UrlOrPathRef;
// Core Node-compatible resolution.
pub use resolution::BinValue;
pub use resolution::IMPORT_CONDITIONS;
pub use resolution::NodeConditionOptions;
pub use resolution::NodeResolution;
pub use resolution::NodeResolutionKind;
pub use resolution::NodeResolver;
pub use resolution::NodeResolverOptions;
pub use resolution::NodeResolverRc;
pub use resolution::NodeResolverSys;
pub use resolution::REQUIRE_CONDITIONS;
pub use resolution::ResolutionMode;
pub use resolution::parse_npm_pkg_name;
pub use resolution::resolve_specifier_into_node_modules;
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/node_resolver/package_json.rs | libs/node_resolver/package_json.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::cell::RefCell;
use std::collections::HashMap;
use std::path::Path;
use std::path::PathBuf;
use deno_package_json::PackageJson;
use deno_package_json::PackageJsonCacheResult;
use deno_package_json::PackageJsonRc;
use sys_traits::FsMetadata;
use sys_traits::FsRead;
use crate::errors::PackageJsonLoadError;
/// Object-safe package.json cache: the `deno_package_json` cache
/// interface plus the debug/send/sync bounds this crate requires.
pub trait NodePackageJsonCache:
  deno_package_json::PackageJsonCache
  + std::fmt::Debug
  + deno_maybe_sync::MaybeSend
  + deno_maybe_sync::MaybeSync
{
  /// Upcasts to the underlying `deno_package_json` cache trait object.
  fn as_deno_package_json_cache(
    &self,
  ) -> &dyn deno_package_json::PackageJsonCache;
}
// Blanket impl: anything meeting the bounds is a NodePackageJsonCache.
impl<T> NodePackageJsonCache for T
where
  T: deno_package_json::PackageJsonCache
    + std::fmt::Debug
    + deno_maybe_sync::MaybeSend
    + deno_maybe_sync::MaybeSync,
{
  fn as_deno_package_json_cache(
    &self,
  ) -> &dyn deno_package_json::PackageJsonCache {
    self
  }
}
/// Reference-counted package.json cache trait object.
#[allow(clippy::disallowed_types)]
pub type PackageJsonCacheRc =
  deno_maybe_sync::MaybeArc<dyn NodePackageJsonCache>;
// Per-thread storage backing `PackageJsonThreadLocalCache`.
thread_local! {
  static CACHE: RefCell<HashMap<PathBuf, PackageJsonRc>> = RefCell::new(HashMap::new());
}
/// A `PackageJsonCache` implementation that stores entries in a thread
/// local, so no locking is needed.
#[derive(Debug)]
pub struct PackageJsonThreadLocalCache;
impl PackageJsonThreadLocalCache {
  /// Clears the current thread's cache.
  pub fn clear() {
    CACHE.with_borrow_mut(|cache| cache.clear());
  }
}
impl deno_package_json::PackageJsonCache for PackageJsonThreadLocalCache {
  /// Looks up a previously loaded package.json; uncached paths are
  /// reported as `NotCached` so the loader reads from disk.
  fn get(&self, path: &Path) -> PackageJsonCacheResult {
    CACHE.with_borrow(|cache| {
      if let Some(value) = cache.get(path) {
        PackageJsonCacheResult::Hit(Some(value.clone()))
      } else {
        PackageJsonCacheResult::NotCached
      }
    })
  }
  /// Stores a loaded package.json; load misses (`None`) are deliberately
  /// not cached.
  fn set(&self, path: PathBuf, package_json: Option<PackageJsonRc>) {
    if let Some(package_json) = package_json {
      CACHE.with_borrow_mut(|cache| {
        cache.insert(path, package_json);
      });
    }
  }
}
// Shared handle to a `PackageJsonResolver`.
#[allow(clippy::disallowed_types)]
pub type PackageJsonResolverRc<TSys> =
  deno_maybe_sync::MaybeArc<PackageJsonResolver<TSys>>;

/// Loads package.json files through the provided file system abstraction,
/// optionally memoizing loads through a shared cache.
#[derive(Debug)]
pub struct PackageJsonResolver<TSys: FsRead + FsMetadata> {
  sys: TSys,
  // Cache consulted and populated by `load_package_json`; `None`
  // disables caching entirely.
  loader_cache: Option<PackageJsonCacheRc>,
}
impl<TSys: FsRead + FsMetadata> PackageJsonResolver<TSys> {
pub fn new(sys: TSys, loader_cache: Option<PackageJsonCacheRc>) -> Self {
Self { sys, loader_cache }
}
pub fn get_closest_package_json(
&self,
file_path: &Path,
) -> Result<Option<PackageJsonRc>, PackageJsonLoadError> {
self.get_closest_package_jsons(file_path).next().transpose()
}
/// Gets the closest package.json files, iterating from the
/// nearest directory to the furthest ancestor directory.
pub fn get_closest_package_jsons<'a>(
&'a self,
file_path: &'a Path,
) -> ClosestPackageJsonsIterator<'a, TSys> {
ClosestPackageJsonsIterator {
current_path: file_path,
resolver: self,
}
}
pub fn load_package_json(
&self,
path: &Path,
) -> Result<Option<PackageJsonRc>, PackageJsonLoadError> {
let result = PackageJson::load_from_path(
&self.sys,
self
.loader_cache
.as_deref()
.map(|cache| cache.as_deno_package_json_cache()),
path,
);
match result {
Ok(pkg_json) => Ok(pkg_json),
Err(err) => Err(PackageJsonLoadError(err)),
}
}
}
/// Iterator over the package.json files found in the ancestor
/// directories of a starting path, nearest directory first.
pub struct ClosestPackageJsonsIterator<'a, TSys: FsRead + FsMetadata> {
  current_path: &'a Path,
  resolver: &'a PackageJsonResolver<TSys>,
}

impl<'a, TSys: FsRead + FsMetadata> Iterator
  for ClosestPackageJsonsIterator<'a, TSys>
{
  type Item = Result<PackageJsonRc, PackageJsonLoadError>;

  fn next(&mut self) -> Option<Self::Item> {
    // Walk up one directory per step; iteration stops once the
    // filesystem root's parent is reached.
    loop {
      let dir = self.current_path.parent()?;
      self.current_path = dir;
      match self.resolver.load_package_json(&dir.join("package.json")) {
        Ok(Some(pkg_json)) => return Some(Ok(pkg_json)),
        Ok(None) => {
          // no package.json in this directory; keep walking up
        }
        Err(err) => return Some(Err(err)),
      }
    }
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/node_resolver/builtin_modules.rs | libs/node_resolver/builtin_modules.rs | // Copyright 2018-2025 the Deno authors. MIT license.
/// Checks whether a bare specifier names a built-in Node module.
pub trait IsBuiltInNodeModuleChecker: std::fmt::Debug {
  /// e.g. `is_builtin_node_module("assert")`
  fn is_builtin_node_module(&self, module_name: &str) -> bool;
}

/// An implementation of IsBuiltInNodeModuleChecker that uses
/// the list of built-in node_modules that are supported by Deno
/// in the `deno_node` crate (ext/node).
#[derive(Debug)]
pub struct DenoIsBuiltInNodeModuleChecker;

impl IsBuiltInNodeModuleChecker for DenoIsBuiltInNodeModuleChecker {
  #[inline(always)]
  fn is_builtin_node_module(&self, module_name: &str) -> bool {
    // Binary search is valid because the module list is kept sorted
    // (enforced by the test module in this file).
    DENO_SUPPORTED_BUILTIN_NODE_MODULES
      .binary_search(&module_name)
      .is_ok()
  }
}
/// Collection of built-in node_modules supported by Deno.
///
/// This slice must remain sorted: `DenoIsBuiltInNodeModuleChecker`
/// performs a binary search over it.
pub static DENO_SUPPORTED_BUILTIN_NODE_MODULES: &[&str] = &[
  // NOTE(bartlomieju): keep this list in sync with `ext/node/polyfills/01_require.js`
  "_http_agent",
  "_http_common",
  "_http_outgoing",
  "_http_server",
  "_stream_duplex",
  "_stream_passthrough",
  "_stream_readable",
  "_stream_transform",
  "_stream_writable",
  "_tls_common",
  "_tls_wrap",
  "assert",
  "assert/strict",
  "async_hooks",
  "buffer",
  "child_process",
  "cluster",
  "console",
  "constants",
  "crypto",
  "dgram",
  "diagnostics_channel",
  "dns",
  "dns/promises",
  "domain",
  "events",
  "fs",
  "fs/promises",
  "http",
  "http2",
  "https",
  "inspector",
  "inspector/promises",
  "module",
  "net",
  "os",
  "path",
  "path/posix",
  "path/win32",
  "perf_hooks",
  "process",
  "punycode",
  "querystring",
  "readline",
  "readline/promises",
  "repl",
  "sqlite",
  "stream",
  "stream/consumers",
  "stream/promises",
  "stream/web",
  "string_decoder",
  "sys",
  "test",
  "timers",
  "timers/promises",
  "tls",
  "tty",
  "url",
  "util",
  "util/types",
  "v8",
  "vm",
  "wasi",
  "worker_threads",
  "zlib",
];
#[cfg(test)]
mod test {
  use super::*;

  /// `DenoIsBuiltInNodeModuleChecker` binary-searches the module list,
  /// which is only valid when it is sorted. Check strict ordering so
  /// accidental duplicate entries are also rejected (the previous
  /// sort-a-copy comparison allocated and let duplicates pass).
  #[test]
  fn test_builtins_are_sorted() {
    assert!(
      DENO_SUPPORTED_BUILTIN_NODE_MODULES
        .windows(2)
        .all(|pair| pair[0] < pair[1]),
      "DENO_SUPPORTED_BUILTIN_NODE_MODULES must be strictly sorted (no duplicates)"
    );
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/node_resolver/resolution.rs | libs/node_resolver/resolution.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::collections::BTreeMap;
use std::fmt::Debug;
use std::io::BufReader;
use std::io::Read;
use std::path::Path;
use std::path::PathBuf;
use anyhow::Error as AnyError;
use anyhow::bail;
use deno_media_type::MediaType;
use deno_package_json::PackageJson;
use deno_package_json::PackageJsonRc;
use deno_path_util::url_to_file_path;
use deno_semver::Version;
use deno_semver::VersionReq;
use lazy_regex::Lazy;
use regex::Regex;
use serde_json::Map;
use serde_json::Value;
use sys_traits::FileType;
use sys_traits::FsCanonicalize;
use sys_traits::FsDirEntry;
use sys_traits::FsMetadata;
use sys_traits::FsOpen;
use sys_traits::FsRead;
use sys_traits::FsReadDir;
use sys_traits::OpenOptions;
use url::Url;
use crate::InNpmPackageChecker;
use crate::IsBuiltInNodeModuleChecker;
use crate::NpmPackageFolderResolver;
use crate::PackageJsonResolverRc;
use crate::PathClean;
use crate::cache::NodeResolutionSys;
use crate::errors;
use crate::errors::DataUrlReferrerError;
use crate::errors::FinalizeResolutionError;
use crate::errors::InvalidModuleSpecifierError;
use crate::errors::InvalidPackageTargetError;
use crate::errors::LegacyResolveError;
use crate::errors::MissingPkgJsonError;
use crate::errors::ModuleNotFoundError;
use crate::errors::NodeJsErrorCode;
use crate::errors::NodeJsErrorCoded;
use crate::errors::NodeResolveError;
use crate::errors::NodeResolveRelativeJoinError;
use crate::errors::PackageExportsResolveError;
use crate::errors::PackageImportNotDefinedError;
use crate::errors::PackageImportsResolveError;
use crate::errors::PackageImportsResolveErrorKind;
use crate::errors::PackagePathNotExportedError;
use crate::errors::PackageResolveError;
use crate::errors::PackageSubpathFromDenoModuleResolveError;
use crate::errors::PackageSubpathResolveError;
use crate::errors::PackageSubpathResolveErrorKind;
use crate::errors::PackageTargetNotFoundError;
use crate::errors::PackageTargetResolveError;
use crate::errors::PackageTargetResolveErrorKind;
use crate::errors::ResolvePkgJsonBinExportError;
use crate::errors::ResolvePkgNpmBinaryCommandsError;
use crate::errors::TypesNotFoundError;
use crate::errors::TypesNotFoundErrorData;
use crate::errors::UnknownBuiltInNodeModuleError;
use crate::errors::UnsupportedDirImportError;
use crate::errors::UnsupportedEsmUrlSchemeError;
use crate::path::UrlOrPath;
use crate::path::UrlOrPathRef;
// Default export/import map conditions used for ESM (`import`) resolution.
pub static IMPORT_CONDITIONS: &[Cow<'static, str>] = &[
  Cow::Borrowed("deno"),
  Cow::Borrowed("node"),
  Cow::Borrowed("import"),
];
// Default conditions used for CommonJS (`require`) resolution.
pub static REQUIRE_CONDITIONS: &[Cow<'static, str>] =
  &[Cow::Borrowed("require"), Cow::Borrowed("node")];
// Condition set containing only "types".
static TYPES_ONLY_CONDITIONS: &[Cow<'static, str>] = &[Cow::Borrowed("types")];

/// Options controlling which package.json `exports`/`imports`
/// conditions are used during resolution.
#[derive(Debug, Default, Clone)]
pub struct NodeConditionOptions {
  /// Extra user-supplied conditions; these are checked before the
  /// default (or overridden) conditions.
  pub conditions: Vec<Cow<'static, str>>,
  /// Provide a value to override the default import conditions.
  ///
  /// Defaults to `["deno", "node", "import"]`
  pub import_conditions_override: Option<Vec<Cow<'static, str>>>,
  /// Provide a value to override the default require conditions.
  ///
  /// Defaults to `["require", "node"]`
  pub require_conditions_override: Option<Vec<Cow<'static, str>>>,
}
/// Precomputed condition lists for import and require resolution,
/// built once from `NodeConditionOptions`.
#[derive(Debug, Clone)]
struct ConditionResolver {
  import_conditions: Cow<'static, [Cow<'static, str>]>,
  require_conditions: Cow<'static, [Cow<'static, str>]>,
}

impl ConditionResolver {
  pub fn new(options: NodeConditionOptions) -> Self {
    // Builds a final condition list: user conditions first, followed by
    // either the override or the built-in defaults.
    fn combine_conditions(
      user_conditions: Cow<'_, [Cow<'static, str>]>,
      override_default: Option<Vec<Cow<'static, str>>>,
      default_conditions: &'static [Cow<'static, str>],
    ) -> Cow<'static, [Cow<'static, str>]> {
      let defaults = match override_default {
        Some(overridden) => Cow::Owned(overridden),
        None => Cow::Borrowed(default_conditions),
      };
      if user_conditions.is_empty() {
        return defaults;
      }
      let mut combined =
        Vec::with_capacity(user_conditions.len() + defaults.len());
      // `into_owned` moves owned data and clones borrowed data, which
      // matches appending each Cow variant by hand.
      combined.extend(user_conditions.into_owned());
      combined.extend(defaults.into_owned());
      Cow::Owned(combined)
    }

    Self {
      import_conditions: combine_conditions(
        Cow::Borrowed(&options.conditions),
        options.import_conditions_override,
        IMPORT_CONDITIONS,
      ),
      require_conditions: combine_conditions(
        Cow::Owned(options.conditions),
        options.require_conditions_override,
        REQUIRE_CONDITIONS,
      ),
    }
  }

  /// Returns the condition list for the given resolution mode.
  pub fn resolve(
    &self,
    resolution_mode: ResolutionMode,
  ) -> &[Cow<'static, str>] {
    match resolution_mode {
      ResolutionMode::Import => &self.import_conditions,
      ResolutionMode::Require => &self.require_conditions,
    }
  }

  /// The conditions used for `require` resolution.
  pub fn require_conditions(&self) -> &[Cow<'static, str>] {
    &self.require_conditions
  }
}
/// Whether a specifier is being resolved as an ES module import or a
/// CommonJS require.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum ResolutionMode {
  Import,
  Require,
}

impl ResolutionMode {
  /// The built-in default conditions for this mode (does not include
  /// any user-configured conditions or overrides).
  pub fn default_conditions(&self) -> &'static [Cow<'static, str>] {
    match self {
      ResolutionMode::Import => IMPORT_CONDITIONS,
      ResolutionMode::Require => REQUIRE_CONDITIONS,
    }
  }

  /// Converts from the equivalent `deno_graph` enum.
  #[cfg(feature = "graph")]
  pub fn from_deno_graph(mode: deno_graph::source::ResolutionMode) -> Self {
    use deno_graph::source::ResolutionMode::*;
    match mode {
      Import => Self::Import,
      Require => Self::Require,
    }
  }
}
/// What kind of file the resolution is targeting.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum NodeResolutionKind {
  /// Resolving code for execution.
  Execution,
  /// Resolving type declarations.
  Types,
}

impl NodeResolutionKind {
  /// Returns true when resolving for types.
  pub fn is_types(&self) -> bool {
    *self == NodeResolutionKind::Types
  }

  /// Converts from the equivalent `deno_graph` enum.
  #[cfg(feature = "graph")]
  pub fn from_deno_graph(kind: deno_graph::source::ResolutionKind) -> Self {
    match kind {
      deno_graph::source::ResolutionKind::Execution => Self::Execution,
      deno_graph::source::ResolutionKind::Types => Self::Types,
    }
  }
}
/// The outcome of a successful Node resolution: either a resolved
/// module location or the name of a built-in module.
#[derive(Debug)]
pub enum NodeResolution {
  Module(UrlOrPath),
  BuiltIn(String),
}

impl NodeResolution {
  /// Converts the resolution into a `Url`, prefixing built-in module
  /// names with the `node:` scheme when not already present.
  pub fn into_url(self) -> Result<Url, NodeResolveError> {
    match self {
      Self::Module(url_or_path) => Ok(url_or_path.into_url()?),
      Self::BuiltIn(specifier) => {
        let parsed = if specifier.starts_with("node:") {
          Url::parse(&specifier)
        } else {
          Url::parse(&format!("node:{specifier}"))
        };
        // `node:<module name>` always forms a valid URL.
        Ok(parsed.unwrap())
      }
    }
  }
}
/// A resolved local file path plus whether its existence has already
/// been verified on disk (letting `finalize_resolution` skip the
/// file-type checks).
struct LocalPath {
  path: PathBuf,
  known_exists: bool,
}

// Internal resolution result: either a non-file URL or a local path.
enum LocalUrlOrPath {
  Url(Url),
  Path(LocalPath),
}

impl LocalUrlOrPath {
  // Converts into the public `UrlOrPath`, dropping the `known_exists` flag.
  pub fn into_url_or_path(self) -> UrlOrPath {
    match self {
      LocalUrlOrPath::Url(url) => UrlOrPath::Url(url),
      LocalUrlOrPath::Path(local_path) => UrlOrPath::Path(local_path.path),
    }
  }
}

/// This struct helps ensure we remember to probe for
/// declaration files and to prevent accidentally probing
/// multiple times.
struct MaybeTypesResolvedUrl(LocalUrlOrPath);

/// Kind of method that resolution succeeded with.
enum ResolvedMethod {
  Url,
  RelativeOrAbsolute,
  PackageImports,
  PackageExports,
  PackageSubPath,
}
/// Construction options for `NodeResolver`.
#[derive(Debug, Default, Clone)]
pub struct NodeResolverOptions {
  pub conditions: NodeConditionOptions,
  // When true, browser-flavored default conditions are used and the
  // package.json `browser` field is preferred.
  pub is_browser_platform: bool,
  pub bundle_mode: bool,
  /// TypeScript version to use for typesVersions resolution and
  /// `types@req` exports resolution.
  pub typescript_version: Option<Version>,
}

// Internal subset of `NodeResolverOptions` retained by the resolver.
#[derive(Debug)]
struct ResolutionConfig {
  pub bundle_mode: bool,
  pub prefer_browser_field: bool,
  pub typescript_version: Option<Version>,
}

/// File system capabilities required by `NodeResolver`.
#[sys_traits::auto_impl]
pub trait NodeResolverSys:
  FsCanonicalize + FsMetadata + FsRead + FsReadDir + FsOpen
{
}
// Shared handle to a `NodeResolver`.
#[allow(clippy::disallowed_types)]
pub type NodeResolverRc<
  TInNpmPackageChecker,
  TIsBuiltInNodeModuleChecker,
  TNpmPackageFolderResolver,
  TSys,
> = deno_maybe_sync::MaybeArc<
  NodeResolver<
    TInNpmPackageChecker,
    TIsBuiltInNodeModuleChecker,
    TNpmPackageFolderResolver,
    TSys,
  >,
>;

/// Implements Node.js-style module resolution on top of the provided
/// npm package checker/folder-resolver and file system abstractions.
#[derive(Debug)]
pub struct NodeResolver<
  TInNpmPackageChecker: InNpmPackageChecker,
  TIsBuiltInNodeModuleChecker: IsBuiltInNodeModuleChecker,
  TNpmPackageFolderResolver: NpmPackageFolderResolver,
  TSys: NodeResolverSys,
> {
  in_npm_pkg_checker: TInNpmPackageChecker,
  is_built_in_node_module_checker: TIsBuiltInNodeModuleChecker,
  npm_pkg_folder_resolver: TNpmPackageFolderResolver,
  pkg_json_resolver: PackageJsonResolverRc<TSys>,
  sys: NodeResolutionSys<TSys>,
  // Precomputed import/require condition lists (see `ConditionResolver`).
  condition_resolver: ConditionResolver,
  resolution_config: ResolutionConfig,
}
impl<
TInNpmPackageChecker: InNpmPackageChecker,
TIsBuiltInNodeModuleChecker: IsBuiltInNodeModuleChecker,
TNpmPackageFolderResolver: NpmPackageFolderResolver,
TSys: NodeResolverSys,
>
NodeResolver<
TInNpmPackageChecker,
TIsBuiltInNodeModuleChecker,
TNpmPackageFolderResolver,
TSys,
>
{
pub fn new(
in_npm_pkg_checker: TInNpmPackageChecker,
is_built_in_node_module_checker: TIsBuiltInNodeModuleChecker,
npm_pkg_folder_resolver: TNpmPackageFolderResolver,
pkg_json_resolver: PackageJsonResolverRc<TSys>,
sys: NodeResolutionSys<TSys>,
options: NodeResolverOptions,
) -> Self {
Self {
in_npm_pkg_checker,
is_built_in_node_module_checker,
npm_pkg_folder_resolver,
pkg_json_resolver,
sys,
condition_resolver: ConditionResolver::new(NodeConditionOptions {
conditions: options.conditions.conditions,
import_conditions_override: options
.conditions
.import_conditions_override
.or_else(|| {
if options.is_browser_platform {
Some(vec![Cow::Borrowed("browser"), Cow::Borrowed("import")])
} else {
None
}
}),
require_conditions_override: options
.conditions
.require_conditions_override
.or_else(|| {
if options.is_browser_platform {
Some(vec![Cow::Borrowed("browser"), Cow::Borrowed("require")])
} else {
None
}
}),
}),
resolution_config: ResolutionConfig {
bundle_mode: options.bundle_mode,
prefer_browser_field: options.is_browser_platform,
typescript_version: options.typescript_version,
},
}
}
  /// The condition list used for `require` resolution (including any
  /// user-supplied conditions and overrides).
  pub fn require_conditions(&self) -> &[Cow<'static, str>] {
    self.condition_resolver.require_conditions()
  }

  /// Returns true when the specifier points inside an npm package.
  pub fn in_npm_package(&self, specifier: &Url) -> bool {
    self.in_npm_pkg_checker.in_npm_package(specifier)
  }

  /// Returns true when `specifier` names a built-in Node module
  /// (e.g. `"assert"`).
  #[inline(always)]
  pub fn is_builtin_node_module(&self, specifier: &str) -> bool {
    self
      .is_built_in_node_module_checker
      .is_builtin_node_module(specifier)
  }
  /// This function is an implementation of `defaultResolve` in
  /// `lib/internal/modules/esm/resolve.js` from Node.
  ///
  /// Resolves `specifier` relative to `referrer`, returning either a
  /// resolved module location or a built-in module name.
  pub fn resolve(
    &self,
    specifier: &str,
    referrer: &Url,
    resolution_mode: ResolutionMode,
    resolution_kind: NodeResolutionKind,
  ) -> Result<NodeResolution, NodeResolveError> {
    // Note: if we are here, then the referrer is an esm module
    // TODO(bartlomieju): skipped "policy" part as we don't plan to support it
    if self.is_builtin_node_module(specifier) {
      return Ok(NodeResolution::BuiltIn(specifier.to_string()));
    }
    if let Ok(url) = Url::parse(specifier) {
      // Fully-qualified specifiers: data URLs and `node:` URLs
      // short-circuit; any other non-file scheme is rejected.
      if url.scheme() == "data" {
        return Ok(NodeResolution::Module(UrlOrPath::Url(url)));
      }
      if let Some(module_name) =
        self.get_module_name_from_builtin_node_module_url(&url)?
      {
        return Ok(NodeResolution::BuiltIn(module_name.to_string()));
      }
      let protocol = url.scheme();
      if protocol != "file" && protocol != "data" {
        return Err(
          UnsupportedEsmUrlSchemeError {
            url_scheme: protocol.to_string(),
          }
          .into(),
        );
      }
      // todo(dsherret): this seems wrong
      if referrer.scheme() == "data" {
        let url = referrer
          .join(specifier)
          .map_err(|source| DataUrlReferrerError { source })?;
        return Ok(NodeResolution::Module(UrlOrPath::Url(url)));
      }
    }
    // Resolve via relative path / package imports / package exports,
    // then run the final existence/file-type checks.
    let conditions = self.condition_resolver.resolve(resolution_mode);
    let referrer = UrlOrPathRef::from_url(referrer);
    let (url, resolved_kind) = self.module_resolve(
      specifier,
      &referrer,
      resolution_mode,
      conditions,
      resolution_kind,
    )?;
    let url_or_path = self.finalize_resolution(
      url,
      resolved_kind,
      resolution_mode,
      conditions,
      resolution_kind,
      Some(&referrer),
    )?;
    let resolve_response = NodeResolution::Module(url_or_path);
    // TODO(bartlomieju): skipped checking errors for commonJS resolution and
    // "preserveSymlinksMain"/"preserveSymlinks" options.
    Ok(resolve_response)
  }
fn module_resolve(
&self,
specifier: &str,
referrer: &UrlOrPathRef,
resolution_mode: ResolutionMode,
conditions: &[Cow<'static, str>],
resolution_kind: NodeResolutionKind,
) -> Result<(MaybeTypesResolvedUrl, ResolvedMethod), NodeResolveError> {
if should_be_treated_as_relative_or_absolute_path(specifier) {
let referrer_url = referrer.url()?;
let url = node_join_url(referrer_url, specifier).map_err(|err| {
NodeResolveRelativeJoinError {
path: specifier.to_string(),
base: referrer_url.clone(),
source: err,
}
})?;
let url = self.maybe_resolve_types(
LocalUrlOrPath::Url(url),
Some(referrer),
resolution_mode,
conditions,
resolution_kind,
)?;
Ok((url, ResolvedMethod::RelativeOrAbsolute))
} else if specifier.starts_with('#') {
let pkg_config = self
.pkg_json_resolver
.get_closest_package_json(referrer.path()?)
.map_err(PackageImportsResolveErrorKind::PkgJsonLoad)
.map_err(|err| PackageImportsResolveError(Box::new(err)))?;
Ok((
self.package_imports_resolve_internal(
specifier,
Some(referrer),
resolution_mode,
pkg_config.as_deref(),
conditions,
resolution_kind,
)?,
ResolvedMethod::PackageImports,
))
} else if let Ok(url) = Url::parse(specifier) {
let url_or_path = self.maybe_resolve_types(
LocalUrlOrPath::Url(url),
Some(referrer),
resolution_mode,
conditions,
resolution_kind,
)?;
Ok((url_or_path, ResolvedMethod::Url))
} else {
Ok(self.package_resolve(
specifier,
referrer,
resolution_mode,
conditions,
resolution_kind,
)?)
}
}
fn finalize_resolution(
&self,
resolved: MaybeTypesResolvedUrl,
resolved_method: ResolvedMethod,
resolution_mode: ResolutionMode,
conditions: &[Cow<'static, str>],
resolution_kind: NodeResolutionKind,
maybe_referrer: Option<&UrlOrPathRef>,
) -> Result<UrlOrPath, FinalizeResolutionError> {
let encoded_sep_re = lazy_regex::regex!(r"%2F|%2C");
let resolved = resolved.0;
let text = match &resolved {
LocalUrlOrPath::Url(url) => Cow::Borrowed(url.as_str()),
LocalUrlOrPath::Path(LocalPath { path, .. }) => path.to_string_lossy(),
};
if encoded_sep_re.is_match(&text) {
return Err(
errors::InvalidModuleSpecifierError {
request: text.into_owned(),
reason: Cow::Borrowed(
"must not include encoded \"/\" or \"\\\\\" characters",
),
maybe_referrer: maybe_referrer.map(|r| match r.path() {
// in this case, prefer showing the path string
Ok(path) => path.display().to_string(),
Err(_) => r.display().to_string(),
}),
}
.into(),
);
}
let (path, maybe_url) = match resolved {
LocalUrlOrPath::Url(url) => {
if url.scheme() == "file" {
(url_to_file_path(&url)?, Some(url))
} else {
return Ok(UrlOrPath::Url(url));
}
}
LocalUrlOrPath::Path(LocalPath { path, known_exists }) => {
if known_exists {
// no need to do the finalization checks
return Ok(UrlOrPath::Path(path));
} else {
(path, None)
}
}
};
// TODO(bartlomieju): currently not supported
// if (getOptionValue('--experimental-specifier-resolution') === 'node') {
// ...
// }
let p_str = path.to_str().unwrap();
let path = match p_str.strip_suffix('/') {
Some(s) => Cow::Borrowed(Path::new(s)),
None => Cow::Owned(path),
};
let maybe_file_type = self.sys.get_file_type(&path);
match maybe_file_type {
Ok(FileType::Dir) => {
if resolution_mode == ResolutionMode::Import
&& !self.resolution_config.bundle_mode
{
let suggestion = self.directory_import_suggestion(&path);
Err(
UnsupportedDirImportError {
dir_url: UrlOrPath::Path(path.into_owned()),
maybe_referrer: maybe_referrer.map(|r| r.display()),
suggestion,
}
.into(),
)
} else {
// prefer the file over the directory
let path_with_ext = with_known_extension(&path, "js");
if self.sys.is_file(&path_with_ext) {
Ok(UrlOrPath::Path(path_with_ext))
} else {
let (resolved_url, resolved_method) = self
.resolve_package_dir_subpath(
&path,
".",
maybe_referrer,
resolution_mode,
conditions,
resolution_kind,
)?;
self.finalize_resolution(
resolved_url,
resolved_method,
resolution_mode,
conditions,
resolution_kind,
maybe_referrer,
)
}
}
}
Ok(FileType::File) => {
// prefer returning the url to avoid re-allocating in the CLI crate
Ok(
maybe_url
.map(UrlOrPath::Url)
.unwrap_or(UrlOrPath::Path(path.into_owned())),
)
}
_ => {
if let Err(e) = maybe_file_type
&& (resolution_mode == ResolutionMode::Require
|| self.resolution_config.bundle_mode)
&& e.kind() == std::io::ErrorKind::NotFound
{
let file_with_ext = with_known_extension(&path, "js");
if self.sys.is_file(&file_with_ext) {
return Ok(UrlOrPath::Path(file_with_ext));
}
}
Err(
ModuleNotFoundError {
suggested_ext: self
.module_not_found_ext_suggestion(&path, resolved_method),
specifier: UrlOrPath::Path(path.into_owned()),
maybe_referrer: maybe_referrer.map(|r| r.display()),
}
.into(),
)
}
}
}
fn module_not_found_ext_suggestion(
&self,
path: &Path,
resolved_method: ResolvedMethod,
) -> Option<&'static str> {
fn should_probe(path: &Path, resolved_method: ResolvedMethod) -> bool {
if MediaType::from_path(path) != MediaType::Unknown {
return false;
}
match resolved_method {
ResolvedMethod::Url
| ResolvedMethod::RelativeOrAbsolute
| ResolvedMethod::PackageSubPath => true,
ResolvedMethod::PackageImports | ResolvedMethod::PackageExports => {
false
}
}
}
if should_probe(path, resolved_method) {
["js", "mjs", "cjs"]
.into_iter()
.find(|ext| self.sys.is_file(&with_known_extension(path, ext)))
} else {
None
}
}
fn directory_import_suggestion(
&self,
dir_import_path: &Path,
) -> Option<String> {
let dir_index_paths = ["index.mjs", "index.js", "index.cjs"]
.into_iter()
.map(|file_name| dir_import_path.join(file_name));
let file_paths = [
with_known_extension(dir_import_path, "js"),
with_known_extension(dir_import_path, "mjs"),
with_known_extension(dir_import_path, "cjs"),
];
dir_index_paths
.chain(file_paths)
.chain(
std::iter::once_with(|| {
// check if this directory has a package.json
let package_json_path = dir_import_path.join("package.json");
let pkg_json = self
.pkg_json_resolver
.load_package_json(&package_json_path)
.ok()??;
let main = pkg_json.main.as_ref()?;
Some(dir_import_path.join(main))
})
.flatten(),
)
.map(|p| deno_path_util::normalize_path(Cow::Owned(p)))
.find(|p| self.sys.is_file(p))
.and_then(|suggested_file_path| {
let pkg_json = self
.pkg_json_resolver
.get_closest_package_jsons(&suggested_file_path)
.filter_map(|pkg_json| pkg_json.ok())
.find(|p| p.name.is_some())?;
let pkg_name = pkg_json.name.as_ref()?;
let sub_path = suggested_file_path
.strip_prefix(pkg_json.dir_path())
.ok()?
.to_string_lossy()
.replace("\\", "/");
Some(format!("{}/{}", pkg_name, sub_path))
})
}
pub fn resolve_package_subpath_from_deno_module(
&self,
package_dir: &Path,
package_subpath: Option<&str>,
maybe_referrer: Option<&Url>,
resolution_mode: ResolutionMode,
resolution_kind: NodeResolutionKind,
) -> Result<UrlOrPath, PackageSubpathFromDenoModuleResolveError> {
// todo(dsherret): don't allocate a string here (maybe use an
// enum that says the subpath is not prefixed with a ./)
let package_subpath = package_subpath
.map(|s| Cow::Owned(format!("./{s}")))
.unwrap_or_else(|| Cow::Borrowed("."));
let maybe_referrer = maybe_referrer.map(UrlOrPathRef::from_url);
let conditions = self.condition_resolver.resolve(resolution_mode);
let (resolved_url, resolved_method) = self.resolve_package_dir_subpath(
package_dir,
&package_subpath,
maybe_referrer.as_ref(),
resolution_mode,
conditions,
resolution_kind,
)?;
let url_or_path = self.finalize_resolution(
resolved_url,
resolved_method,
resolution_mode,
conditions,
resolution_kind,
maybe_referrer.as_ref(),
)?;
Ok(url_or_path)
}
pub fn resolve_binary_export(
&self,
package_folder: &Path,
sub_path: Option<&str>,
) -> Result<BinValue, ResolvePkgJsonBinExportError> {
let (pkg_json, items) = self
.resolve_npm_binary_commands_for_package_with_pkg_json(package_folder)?;
let path =
resolve_bin_entry_value(&pkg_json, &items, sub_path).map_err(|err| {
ResolvePkgJsonBinExportError::InvalidBinProperty {
message: err.to_string(),
}
})?;
Ok(path.clone())
}
pub fn resolve_npm_binary_commands_for_package(
&self,
package_folder: &Path,
) -> Result<BTreeMap<String, BinValue>, ResolvePkgNpmBinaryCommandsError> {
let (_pkg_json, items) = self
.resolve_npm_binary_commands_for_package_with_pkg_json(package_folder)?;
Ok(items)
}
fn resolve_npm_binary_commands_for_package_with_pkg_json(
&self,
package_folder: &Path,
) -> Result<
(PackageJsonRc, BTreeMap<String, BinValue>),
ResolvePkgNpmBinaryCommandsError,
> {
let pkg_json_path = package_folder.join("package.json");
let Some(package_json) =
self.pkg_json_resolver.load_package_json(&pkg_json_path)?
else {
return Err(ResolvePkgNpmBinaryCommandsError::MissingPkgJson(
MissingPkgJsonError { pkg_json_path },
));
};
let bins = package_json.resolve_bins()?;
// TODO(bartlomieju): skipped checking errors for commonJS resolution and
// "preserveSymlinksMain"/"preserveSymlinks" options.
let items = match bins {
deno_package_json::PackageJsonBins::Directory(path_buf) => {
self.resolve_npm_commands_from_bin_dir(&path_buf)
}
deno_package_json::PackageJsonBins::Bins(items) => items
.into_iter()
.filter_map(|(command, path)| {
let bin_value = bin_value_from_file(&path, &self.sys)?;
Some((command, bin_value))
})
.collect(),
};
Ok((package_json, items))
}
pub fn resolve_npm_commands_from_bin_dir(
&self,
bin_dir: &Path,
) -> BTreeMap<String, BinValue> {
log::debug!("Resolving npm commands in '{}'.", bin_dir.display());
let mut result = BTreeMap::new();
match self.sys.fs_read_dir(bin_dir) {
Ok(entries) => {
for entry in entries {
let Ok(entry) = entry else {
continue;
};
if let Some((command, bin_value)) =
self.resolve_bin_dir_entry_command(entry)
{
result.insert(command, bin_value);
}
}
}
Err(err) => {
log::debug!("Failed read_dir for '{}': {:#}", bin_dir.display(), err);
}
}
result
}
fn resolve_bin_dir_entry_command(
&self,
entry: TSys::ReadDirEntry,
) -> Option<(String, BinValue)> {
if entry.path().extension().is_some() {
return None; // only look at files without extensions (even on Windows)
}
let file_type = entry.file_type().ok()?;
let path = if file_type.is_file() {
entry.path()
} else if file_type.is_symlink() {
Cow::Owned(self.sys.fs_canonicalize(entry.path()).ok()?)
} else {
return None;
};
let command_name = entry.file_name().to_string_lossy().into_owned();
let bin_value = bin_value_from_file(&path, &self.sys)?;
Some((command_name, bin_value))
}
  /// Resolves an npm package folder path from the specified referrer.
  pub fn resolve_package_folder_from_package(
    &self,
    specifier: &str,
    referrer: &UrlOrPathRef,
  ) -> Result<PathBuf, errors::PackageFolderResolveError> {
    // Thin delegation to the configured npm package folder resolver.
    self
      .npm_pkg_folder_resolver
      .resolve_package_folder_from_package(specifier, referrer)
  }
fn maybe_resolve_types(
&self,
url: LocalUrlOrPath,
maybe_referrer: Option<&UrlOrPathRef>,
resolution_mode: ResolutionMode,
conditions: &[Cow<'static, str>],
resolution_kind: NodeResolutionKind,
) -> Result<MaybeTypesResolvedUrl, TypesNotFoundError> {
if resolution_kind.is_types() {
let file_path = match url {
LocalUrlOrPath::Url(url) => {
match deno_path_util::url_to_file_path(&url) {
Ok(path) => LocalPath {
path,
known_exists: false,
},
Err(_) => {
return Ok(MaybeTypesResolvedUrl(LocalUrlOrPath::Url(url)));
}
}
}
LocalUrlOrPath::Path(path) => path,
};
self.path_to_declaration_path(
file_path,
maybe_referrer,
resolution_mode,
conditions,
)
} else {
Ok(MaybeTypesResolvedUrl(url))
}
}
/// Checks if the resolved file has a corresponding declaration file.
fn path_to_declaration_path(
&self,
local_path: LocalPath,
maybe_referrer: Option<&UrlOrPathRef>,
resolution_mode: ResolutionMode,
conditions: &[Cow<'static, str>],
) -> Result<MaybeTypesResolvedUrl, TypesNotFoundError> {
fn probe_extensions<TSys: FsMetadata>(
sys: &NodeResolutionSys<TSys>,
path: &Path,
media_type: MediaType,
resolution_mode: ResolutionMode,
) -> Option<PathBuf> {
let mut searched_for_d_mts = false;
let mut searched_for_d_cts = false;
if media_type == MediaType::Mjs {
let d_mts_path = with_known_extension(path, "d.mts");
if sys.exists_(&d_mts_path) {
return Some(d_mts_path);
}
searched_for_d_mts = true;
} else if media_type == MediaType::Cjs {
let d_cts_path = with_known_extension(path, "d.cts");
if sys.exists_(&d_cts_path) {
return Some(d_cts_path);
}
searched_for_d_cts = true;
}
let dts_path = with_known_extension(path, "d.ts");
if sys.exists_(&dts_path) {
return Some(dts_path);
}
let specific_dts_path = match resolution_mode {
ResolutionMode::Require if !searched_for_d_cts => {
Some(with_known_extension(path, "d.cts"))
}
ResolutionMode::Import if !searched_for_d_mts => {
Some(with_known_extension(path, "d.mts"))
}
_ => None, // already searched above
};
if let Some(specific_dts_path) = specific_dts_path
&& sys.exists_(&specific_dts_path)
{
return Some(specific_dts_path);
}
let ts_path = with_known_extension(path, "ts");
if sys.is_file(&ts_path) {
return Some(ts_path);
}
None
}
let media_type = MediaType::from_path(&local_path.path);
if media_type.is_declaration() {
return Ok(MaybeTypesResolvedUrl(LocalUrlOrPath::Path(local_path)));
}
if let Some(path) =
probe_extensions(&self.sys, &local_path.path, media_type, resolution_mode)
{
return Ok(MaybeTypesResolvedUrl(LocalUrlOrPath::Path(LocalPath {
path,
known_exists: true,
})));
}
if self.sys.is_dir(&local_path.path) {
let resolution_result = self.resolve_package_dir_subpath(
&local_path.path,
/* sub path */ ".",
maybe_referrer,
resolution_mode,
conditions,
NodeResolutionKind::Types,
);
if let Ok((url_or_path, _)) = resolution_result {
return Ok(url_or_path);
}
let index_path = local_path.path.join("index.js");
if let Some(path) = probe_extensions(
&self.sys,
&index_path,
MediaType::from_path(&index_path),
resolution_mode,
) {
return Ok(MaybeTypesResolvedUrl(LocalUrlOrPath::Path(LocalPath {
path,
known_exists: true,
})));
}
}
// allow resolving .ts-like or .css files for types resolution
if media_type.is_typed() || media_type == MediaType::Css {
return Ok(MaybeTypesResolvedUrl(LocalUrlOrPath::Path(local_path)));
}
Err(TypesNotFoundError(Box::new(TypesNotFoundErrorData {
code_specifier: UrlOrPathRef::from_path(&local_path.path).display(),
maybe_referrer: maybe_referrer.map(|r| r.display()),
})))
}
#[allow(clippy::too_many_arguments)]
pub fn resolve_package_import(
&self,
name: &str,
maybe_referrer: Option<&UrlOrPathRef>,
referrer_pkg_json: Option<&PackageJson>,
resolution_mode: ResolutionMode,
resolution_kind: NodeResolutionKind,
) -> Result<UrlOrPath, PackageImportsResolveError> {
self
.package_imports_resolve_internal(
name,
maybe_referrer,
resolution_mode,
referrer_pkg_json,
self.condition_resolver.resolve(resolution_mode),
resolution_kind,
)
.map(|url| url.0.into_url_or_path())
}
#[allow(clippy::too_many_arguments)]
fn package_imports_resolve_internal(
&self,
name: &str,
maybe_referrer: Option<&UrlOrPathRef>,
resolution_mode: ResolutionMode,
referrer_pkg_json: Option<&PackageJson>,
conditions: &[Cow<'static, str>],
resolution_kind: NodeResolutionKind,
) -> Result<MaybeTypesResolvedUrl, PackageImportsResolveError> {
if name == "#" || name.ends_with('/') {
let reason = "is not a valid internal imports specifier name";
return Err(
errors::InvalidModuleSpecifierError {
request: name.to_string(),
reason: Cow::Borrowed(reason),
maybe_referrer: maybe_referrer.map(to_specifier_display_string),
}
.into(),
);
}
if let Some(pkg_json) = &referrer_pkg_json
&& let Some(resolved_import) = resolve_pkg_json_import(pkg_json, name)
{
let maybe_resolved = self.resolve_package_target(
&pkg_json.path,
resolved_import.target,
resolved_import.sub_path,
resolved_import.package_sub_path,
maybe_referrer,
resolution_mode,
resolved_import.is_pattern,
true,
conditions,
resolution_kind,
)?;
if let Some(resolved) = maybe_resolved {
return Ok(resolved);
}
}
Err(
PackageImportNotDefinedError {
name: name.to_string(),
package_json_path: referrer_pkg_json.map(|p| p.path.clone()),
maybe_referrer: maybe_referrer.map(|r| r.display()),
}
.into(),
)
}
#[allow(clippy::too_many_arguments)]
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | true |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/node_resolver/npm.rs | libs/node_resolver/npm.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::path::Path;
use std::path::PathBuf;
use deno_path_util::url_from_directory_path;
use deno_path_util::url_from_file_path;
use deno_semver::Version;
use url::Url;
use crate::errors;
use crate::path::PathClean;
use crate::path::UrlOrPathRef;
/// Resolves npm package folders on disk.
pub trait NpmPackageFolderResolver {
  /// Resolves an npm package folder path from the specified referrer.
  fn resolve_package_folder_from_package(
    &self,
    specifier: &str,
    referrer: &UrlOrPathRef,
  ) -> Result<PathBuf, errors::PackageFolderResolveError>;

  /// Finds the `@types` package closest to the provided `@types` package name
  /// and version of the original package.
  ///
  /// Returns `None` when no matching `@types` folder is found.
  fn resolve_types_package_folder(
    &self,
    types_package_name: &str,
    maybe_package_version: Option<&Version>,
    maybe_referrer: Option<&UrlOrPathRef>,
  ) -> Option<PathBuf>;
}
/// Checks if a provided specifier is in an npm package.
pub trait InNpmPackageChecker {
  fn in_npm_package(&self, specifier: &Url) -> bool;

  /// Like [`InNpmPackageChecker::in_npm_package`], but takes a directory
  /// path; returns `false` when the path cannot be converted to a URL.
  fn in_npm_package_at_dir_path(&self, path: &Path) -> bool {
    let specifier = match url_from_directory_path(&path.to_path_buf().clean()) {
      Ok(p) => p,
      Err(_) => return false,
    };
    self.in_npm_package(&specifier)
  }

  /// Like [`InNpmPackageChecker::in_npm_package`], but takes a file
  /// path; returns `false` when the path cannot be converted to a URL.
  fn in_npm_package_at_file_path(&self, path: &Path) -> bool {
    let specifier = match url_from_file_path(&path.to_path_buf().clean()) {
      Ok(p) => p,
      Err(_) => return false,
    };
    self.in_npm_package(&specifier)
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/node_resolver/cache.rs | libs/node_resolver/cache.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::cell::RefCell;
use std::collections::HashMap;
use std::path::Path;
use std::path::PathBuf;
use sys_traits::BaseFsCanonicalize;
use sys_traits::BaseFsOpen;
use sys_traits::BaseFsRead;
use sys_traits::BaseFsReadDir;
use sys_traits::FileType;
use sys_traits::FsCanonicalize;
use sys_traits::FsMetadata;
use sys_traits::FsMetadataValue;
use sys_traits::FsOpen;
use sys_traits::FsRead;
use sys_traits::FsReadDir;
/// Cache of file-system lookups made during node resolution.
pub trait NodeResolutionCache:
  std::fmt::Debug + deno_maybe_sync::MaybeSend + deno_maybe_sync::MaybeSync
{
  /// Returns the cached canonicalization result, or `None` on cache miss.
  fn get_canonicalized(
    &self,
    path: &Path,
  ) -> Option<Result<PathBuf, std::io::Error>>;
  /// Records the result of canonicalizing `from`.
  fn set_canonicalized(&self, from: PathBuf, to: &std::io::Result<PathBuf>);
  /// Outer `None` = cache miss; inner `None` records a previous failed
  /// metadata lookup (surfaced as NotFound).
  fn get_file_type(&self, path: &Path) -> Option<Option<FileType>>;
  /// Records the file type for `path` (`None` = lookup failed).
  fn set_file_type(&self, path: PathBuf, value: Option<FileType>);
}
thread_local! {
  // path -> canonicalized path; `None` records a NotFound result
  static CANONICALIZED_CACHE: RefCell<HashMap<PathBuf, Option<PathBuf>>> = RefCell::new(HashMap::new());
  // path -> file type; `None` records a failed metadata lookup
  static FILE_TYPE_CACHE: RefCell<HashMap<PathBuf, Option<FileType>>> = RefCell::new(HashMap::new());
}

// We use thread local caches here because it's just more convenient
// and easily allows workers to have separate caches.
#[derive(Debug)]
pub struct NodeResolutionThreadLocalCache;

impl NodeResolutionThreadLocalCache {
  /// Clears both caches for the current thread.
  pub fn clear() {
    CANONICALIZED_CACHE.with_borrow_mut(|cache| cache.clear());
    FILE_TYPE_CACHE.with_borrow_mut(|cache| cache.clear());
  }
}
impl NodeResolutionCache for NodeResolutionThreadLocalCache {
  fn get_canonicalized(
    &self,
    path: &Path,
  ) -> Option<Result<PathBuf, std::io::Error>> {
    // a cached `None` records a previous NotFound result
    CANONICALIZED_CACHE.with_borrow(|cache| {
      cache.get(path).map(|entry| match entry {
        Some(resolved) => Ok(resolved.clone()),
        None => Err(std::io::Error::new(
          std::io::ErrorKind::NotFound,
          "Not found.",
        )),
      })
    })
  }

  fn set_canonicalized(&self, from: PathBuf, to: &std::io::Result<PathBuf>) {
    CANONICALIZED_CACHE.with_borrow_mut(|cache| match to {
      Ok(resolved) => {
        cache.insert(from, Some(resolved.clone()));
      }
      // only NotFound failures are safe to memoize; other errors may
      // be transient
      Err(err) if err.kind() == std::io::ErrorKind::NotFound => {
        cache.insert(from, None);
      }
      Err(_) => {}
    });
  }

  fn get_file_type(&self, path: &Path) -> Option<Option<FileType>> {
    FILE_TYPE_CACHE.with_borrow(|cache| Some(cache.get(path)?.clone()))
  }

  fn set_file_type(&self, path: PathBuf, value: Option<FileType>) {
    FILE_TYPE_CACHE.with_borrow_mut(|cache| {
      cache.insert(path, value);
    });
  }
}
#[allow(clippy::disallowed_types)]
pub type NodeResolutionCacheRc =
  deno_maybe_sync::MaybeArc<dyn NodeResolutionCache>;

/// File-system wrapper that routes lookups through an optional
/// [`NodeResolutionCache`].
#[derive(Debug, Default)]
pub struct NodeResolutionSys<TSys> {
  sys: TSys,
  cache: Option<NodeResolutionCacheRc>,
}

// NOTE(review): a derived Clone would add the same `TSys: Clone` bound;
// the manual impl is kept as-is.
impl<TSys: Clone> Clone for NodeResolutionSys<TSys> {
  fn clone(&self) -> Self {
    Self {
      sys: self.sys.clone(),
      cache: self.cache.clone(),
    }
  }
}
impl<TSys: FsMetadata> NodeResolutionSys<TSys> {
pub fn new(sys: TSys, store: Option<NodeResolutionCacheRc>) -> Self {
Self { sys, cache: store }
}
pub fn is_file(&self, path: &Path) -> bool {
match self.get_file_type(path) {
Ok(file_type) => file_type.is_file(),
Err(_) => false,
}
}
pub fn is_dir(&self, path: &Path) -> bool {
match self.get_file_type(path) {
Ok(file_type) => file_type.is_dir(),
Err(_) => false,
}
}
pub fn exists_(&self, path: &Path) -> bool {
self.get_file_type(path).is_ok()
}
pub fn get_file_type(&self, path: &Path) -> std::io::Result<FileType> {
{
if let Some(maybe_value) =
self.cache.as_ref().and_then(|c| c.get_file_type(path))
{
return match maybe_value {
Some(value) => Ok(value),
None => Err(std::io::Error::new(
std::io::ErrorKind::NotFound,
"Not found.",
)),
};
}
}
match self.sys.fs_metadata(path) {
Ok(metadata) => {
if let Some(cache) = &self.cache {
cache.set_file_type(path.to_path_buf(), Some(metadata.file_type()));
}
Ok(metadata.file_type())
}
Err(err) => {
if let Some(cache) = &self.cache {
cache.set_file_type(path.to_path_buf(), None);
}
Err(err)
}
}
}
}
impl<TSys: FsCanonicalize> BaseFsCanonicalize for NodeResolutionSys<TSys> {
  fn base_fs_canonicalize(&self, from: &Path) -> std::io::Result<PathBuf> {
    // consult the cache first; a hit may be a cached NotFound error
    if let Some(cached) =
      self.cache.as_ref().and_then(|c| c.get_canonicalized(from))
    {
      return cached;
    }
    let result = self.sys.base_fs_canonicalize(from);
    if let Some(cache) = &self.cache {
      cache.set_canonicalized(from.to_path_buf(), &result);
    }
    result
  }
}
impl<TSys: FsReadDir> BaseFsReadDir for NodeResolutionSys<TSys> {
  type ReadDirEntry = TSys::ReadDirEntry;

  // pass-through: directory listings are not cached
  #[inline(always)]
  fn base_fs_read_dir(
    &self,
    path: &Path,
  ) -> std::io::Result<
    Box<dyn Iterator<Item = std::io::Result<Self::ReadDirEntry>>>,
  > {
    self.sys.base_fs_read_dir(path)
  }
}

impl<TSys: FsRead> BaseFsRead for NodeResolutionSys<TSys> {
  // pass-through: file contents are not cached
  #[inline(always)]
  fn base_fs_read(
    &self,
    path: &Path,
  ) -> std::io::Result<std::borrow::Cow<'static, [u8]>> {
    self.sys.base_fs_read(path)
  }
}
impl<TSys: FsOpen> BaseFsOpen for NodeResolutionSys<TSys> {
  type File = TSys::File;

  // pass-through: open results are not cached; marked inline(always)
  // for consistency with the sibling delegating impls above
  #[inline(always)]
  fn base_fs_open(
    &self,
    path: &Path,
    flags: &sys_traits::OpenOptions,
  ) -> std::io::Result<Self::File> {
    self.sys.base_fs_open(path, flags)
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/package_json/lib.rs | libs/package_json/lib.rs | // Copyright 2018-2025 the Deno authors. MIT license.
#![deny(clippy::print_stderr)]
#![deny(clippy::print_stdout)]
#![deny(clippy::unused_async)]
#![deny(clippy::unnecessary_wraps)]
use std::collections::BTreeMap;
use std::io::ErrorKind;
use std::path::Path;
use std::path::PathBuf;
use boxed_error::Boxed;
use deno_error::JsError;
use deno_semver::StackString;
use deno_semver::VersionReq;
use deno_semver::VersionReqSpecifierParseError;
use deno_semver::npm::NpmVersionReqParseError;
use deno_semver::package::PackageReq;
use indexmap::IndexMap;
use serde::Serialize;
use serde_json::Map;
use serde_json::Value;
use sys_traits::FsRead;
use thiserror::Error;
use url::Url;
#[allow(clippy::disallowed_types)]
pub type PackageJsonRc = deno_maybe_sync::MaybeArc<PackageJson>;
#[allow(clippy::disallowed_types)]
pub type PackageJsonDepsRc = deno_maybe_sync::MaybeArc<PackageJsonDeps>;
#[allow(clippy::disallowed_types)]
type PackageJsonDepsRcCell = deno_maybe_sync::MaybeOnceLock<PackageJsonDepsRc>;
/// Result of a [`PackageJsonCache`] lookup.
pub enum PackageJsonCacheResult {
  /// Previously looked up; `None` means no package.json exists there.
  Hit(Option<PackageJsonRc>),
  /// Path not looked up yet.
  NotCached,
}

/// Cache of deserialized package.json files keyed by path.
pub trait PackageJsonCache {
  fn get(&self, path: &Path) -> PackageJsonCacheResult;
  fn set(&self, path: PathBuf, package_json: Option<PackageJsonRc>);
}
/// Resolved `"bin"` entries of a package.json.
#[derive(Debug, Clone)]
pub enum PackageJsonBins {
  /// From `"directories": { "bin": ... }` — presumably each file in the
  /// directory is a bin (npm's `directories.bin`); see `resolve_bins`.
  Directory(PathBuf),
  /// Explicit bin name -> script path mappings.
  Bins(BTreeMap<String, PathBuf>),
}

/// The package.json had no `"name"` field when one was required.
#[derive(Debug, Clone, Error, JsError, PartialEq, Eq)]
#[class(generic)]
#[error("'{}' did not have a name", pkg_json_path.display())]
pub struct MissingPkgJsonNameError {
  pkg_json_path: PathBuf,
}

/// Boxed error for a dependency entry that could not be parsed.
#[derive(Debug, Clone, JsError, PartialEq, Eq, Boxed)]
pub struct PackageJsonDepValueParseError(
  pub Box<PackageJsonDepValueParseErrorKind>,
);

#[derive(Debug, Error, Clone, JsError, PartialEq, Eq)]
pub enum PackageJsonDepValueParseErrorKind {
  /// Invalid npm version requirement.
  #[class(inherit)]
  #[error(transparent)]
  VersionReq(#[from] NpmVersionReqParseError),
  /// Invalid version requirement in a `jsr:` specifier.
  #[class(inherit)]
  #[error(transparent)]
  JsrVersionReq(#[from] VersionReqSpecifierParseError),
  /// Unsupported specifier scheme (git/http/https).
  #[class(type)]
  #[error("Not implemented scheme '{scheme}'")]
  Unsupported { scheme: String },
}
/// Parsed `workspace:` protocol requirement.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum PackageJsonDepWorkspaceReq {
  /// "workspace:~"
  Tilde,
  /// "workspace:^"
  Caret,
  /// "workspace:x.y.z", "workspace:*", "workspace:^x.y.z"
  VersionReq(VersionReq),
}

/// A single parsed dependency entry value from a package.json.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum PackageJsonDepValue {
  /// `file:` specifier (path stored without the scheme).
  File(String),
  /// Regular npm requirement (possibly via an `npm:` alias).
  Req(PackageReq),
  /// `workspace:` protocol requirement.
  Workspace(PackageJsonDepWorkspaceReq),
  /// `jsr:` specifier requirement.
  JsrReq(PackageReq),
}
impl PackageJsonDepValue {
pub fn parse(
key: &str,
value: &str,
) -> Result<Self, PackageJsonDepValueParseError> {
/// Gets the name and raw version constraint for a registry info or
/// package.json dependency entry taking into account npm package aliases.
fn parse_dep_entry_name_and_raw_version<'a>(
key: &'a str,
value: &'a str,
) -> (&'a str, &'a str) {
if let Some(package_and_version) = value.strip_prefix("npm:") {
if let Some((name, version)) = package_and_version.rsplit_once('@') {
// if empty, then the name was scoped and there's no version
if name.is_empty() {
(package_and_version, "*")
} else {
(name, version)
}
} else {
(package_and_version, "*")
}
} else {
(key, value)
}
}
if let Some(workspace_key) = value.strip_prefix("workspace:") {
let workspace_req = match workspace_key {
"~" => PackageJsonDepWorkspaceReq::Tilde,
"^" => PackageJsonDepWorkspaceReq::Caret,
_ => PackageJsonDepWorkspaceReq::VersionReq(
VersionReq::parse_from_npm(workspace_key)?,
),
};
return Ok(Self::Workspace(workspace_req));
} else if let Some(raw_jsr_req) = value.strip_prefix("jsr:") {
let (name, version_req) =
parse_dep_entry_name_and_raw_version(key, raw_jsr_req);
let result = VersionReq::parse_from_specifier(version_req);
match result {
Ok(version_req) => {
return Ok(Self::JsrReq(PackageReq {
name: name.into(),
version_req,
}));
}
Err(err) => {
return Err(
PackageJsonDepValueParseErrorKind::JsrVersionReq(err).into_box(),
);
}
}
}
if value.starts_with("git:")
|| value.starts_with("http:")
|| value.starts_with("https:")
{
return Err(
PackageJsonDepValueParseErrorKind::Unsupported {
scheme: value.split(':').next().unwrap().to_string(),
}
.into_box(),
);
}
if let Some(path) = value.strip_prefix("file:") {
return Ok(Self::File(path.to_string()));
}
let (name, version_req) = parse_dep_entry_name_and_raw_version(key, value);
let result = VersionReq::parse_from_npm(version_req);
match result {
Ok(version_req) => Ok(Self::Req(PackageReq {
name: name.into(),
version_req,
})),
Err(err) => {
Err(PackageJsonDepValueParseErrorKind::VersionReq(err).into_box())
}
}
}
}
/// Map of dependency alias -> parsed value (or the parse error).
pub type PackageJsonDepsMap = IndexMap<
  StackString,
  Result<PackageJsonDepValue, PackageJsonDepValueParseError>,
>;

/// Parsed `dependencies` and `devDependencies` of a package.json.
#[derive(Debug, Clone)]
pub struct PackageJsonDeps {
  pub dependencies: PackageJsonDepsMap,
  pub dev_dependencies: PackageJsonDepsMap,
}

impl PackageJsonDeps {
  /// Gets a package.json dependency entry by alias.
  ///
  /// `dependencies` takes precedence over `dev_dependencies`.
  pub fn get(
    &self,
    alias: &str,
  ) -> Option<&Result<PackageJsonDepValue, PackageJsonDepValueParseError>> {
    self
      .dependencies
      .get(alias)
      .or_else(|| self.dev_dependencies.get(alias))
  }
}
/// Errors that can occur loading a package.json file.
#[derive(Debug, Error, JsError)]
pub enum PackageJsonLoadError {
  /// Reading the file failed (NotFound is handled separately by
  /// `load_from_path` and does not produce this).
  #[class(inherit)]
  #[error("Failed reading '{}'.", .path.display())]
  Io {
    path: PathBuf,
    #[source]
    #[inherit]
    source: std::io::Error,
  },
  /// The file contents were not valid JSON.
  #[class(inherit)]
  #[error("Malformed package.json '{}'.", .path.display())]
  Deserialize {
    path: PathBuf,
    #[source]
    #[inherit]
    source: serde_json::Error,
  },
  /// `"exports"` mixed subpath keys (starting with '.') with condition
  /// name keys.
  #[error(
    "\"exports\" cannot contain some keys starting with '.' and some not.\nThe exports object must either be an object of package subpath keys\nor an object of main entry condition name keys only."
  )]
  #[class(type)]
  InvalidExports,
}
/// Deserialized package.json file.
///
/// Unknown fields are dropped and values of unexpected types are
/// normalized to `None` (see `load_from_value`).
#[derive(Clone, Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct PackageJson {
  pub exports: Option<Map<String, Value>>,
  pub imports: Option<Map<String, Value>>,
  pub bin: Option<Value>,
  pub main: Option<String>,
  pub module: Option<String>,
  pub browser: Option<String>,
  pub name: Option<String>,
  pub version: Option<String>,
  // path to the package.json file itself (not its directory)
  #[serde(skip)]
  pub path: PathBuf,
  // "module", "commonjs", or "none" when absent/unrecognized
  #[serde(rename = "type")]
  pub typ: String,
  pub types: Option<String>,
  pub types_versions: Option<Map<String, Value>>,
  pub dependencies: Option<IndexMap<String, String>>,
  pub bundle_dependencies: Option<Vec<String>>,
  pub dev_dependencies: Option<IndexMap<String, String>>,
  pub peer_dependencies: Option<IndexMap<String, String>>,
  pub peer_dependencies_meta: Option<Value>,
  pub optional_dependencies: Option<IndexMap<String, String>>,
  pub directories: Option<Map<String, Value>>,
  pub scripts: Option<IndexMap<String, String>>,
  pub workspaces: Option<Vec<String>>,
  pub os: Option<Vec<String>>,
  pub cpu: Option<Vec<String>>,
  // lazily-computed parse of dependencies/dev_dependencies
  #[serde(skip_serializing)]
  resolved_deps: PackageJsonDepsRcCell,
}
impl PackageJson {
/// Loads (and optionally caches) the package.json at `path`.
///
/// Returns `Ok(None)` when the file does not exist; other read errors
/// and malformed JSON are returned as errors.
pub fn load_from_path(
  sys: &impl FsRead,
  maybe_cache: Option<&dyn PackageJsonCache>,
  path: &Path,
) -> Result<Option<PackageJsonRc>, PackageJsonLoadError> {
  let cache_entry = maybe_cache
    .map(|c| c.get(path))
    .unwrap_or(PackageJsonCacheResult::NotCached);
  match cache_entry {
    PackageJsonCacheResult::Hit(item) => Ok(item),
    PackageJsonCacheResult::NotCached => {
      match sys.fs_read_to_string_lossy(path) {
        Ok(file_text) => {
          let pkg_json =
            PackageJson::load_from_string(path.to_path_buf(), &file_text)?;
          let pkg_json = deno_maybe_sync::new_rc(pkg_json);
          if let Some(cache) = maybe_cache {
            cache.set(path.to_path_buf(), Some(pkg_json.clone()));
          }
          Ok(Some(pkg_json))
        }
        // a missing file is not an error; cache the negative result
        Err(err) if err.kind() == ErrorKind::NotFound => {
          if let Some(cache) = maybe_cache {
            cache.set(path.to_path_buf(), None);
          }
          Ok(None)
        }
        Err(err) => Err(PackageJsonLoadError::Io {
          path: path.to_path_buf(),
          source: err,
        }),
      }
    }
  }
}
pub fn load_from_string(
path: PathBuf,
source: &str,
) -> Result<PackageJson, PackageJsonLoadError> {
if source.trim().is_empty() {
return Ok(PackageJson {
path,
main: None,
name: None,
version: None,
module: None,
browser: None,
typ: "none".to_string(),
types: None,
types_versions: None,
exports: None,
imports: None,
bin: None,
dependencies: None,
bundle_dependencies: None,
dev_dependencies: None,
peer_dependencies: None,
peer_dependencies_meta: None,
optional_dependencies: None,
directories: None,
scripts: None,
workspaces: None,
os: None,
cpu: None,
resolved_deps: Default::default(),
});
}
let package_json: Value = serde_json::from_str(source).map_err(|err| {
PackageJsonLoadError::Deserialize {
path: path.clone(),
source: err,
}
})?;
Self::load_from_value(path, package_json)
}
/// Builds a [`PackageJson`] from an already-parsed JSON value.
///
/// Values of unexpected types are dropped (treated as absent) for
/// forwards compatibility; only `"exports"` can produce an error.
pub fn load_from_value(
  path: PathBuf,
  package_json: serde_json::Value,
) -> Result<PackageJson, PackageJsonLoadError> {
  // maps a `"key": "value"` object, stringifying numeric values and
  // skipping entries of any other type
  fn parse_string_map(
    value: serde_json::Value,
  ) -> Option<IndexMap<String, String>> {
    if let Value::Object(map) = value {
      let mut result = IndexMap::with_capacity(map.len());
      for (k, v) in map {
        if let Some(v) = map_string(v) {
          result.insert(k, v);
        }
      }
      Some(result)
    } else {
      None
    }
  }

  fn map_object(value: serde_json::Value) -> Option<Map<String, Value>> {
    match value {
      Value::Object(v) => Some(v),
      _ => None,
    }
  }

  // accepts strings and numbers (numbers are stringified)
  fn map_string(value: serde_json::Value) -> Option<String> {
    match value {
      Value::String(v) => Some(v),
      Value::Number(v) => Some(v.to_string()),
      _ => None,
    }
  }

  fn map_array(value: serde_json::Value) -> Option<Vec<Value>> {
    match value {
      Value::Array(v) => Some(v),
      _ => None,
    }
  }

  // maps an array, keeping only string/number elements
  fn parse_string_array(value: serde_json::Value) -> Option<Vec<String>> {
    let value = map_array(value)?;
    let mut result = Vec::with_capacity(value.len());
    for v in value {
      if let Some(v) = map_string(v) {
        result.push(v);
      }
    }
    Some(result)
  }

  // a non-object root is treated as an empty package.json
  let mut package_json = match package_json {
    Value::Object(o) => o,
    _ => Default::default(),
  };
  let imports_val = package_json.remove("imports");
  let main_val = package_json.remove("main");
  let module_val = package_json.remove("module");
  let browser_val = package_json.remove("browser");
  let name_val = package_json.remove("name");
  let version_val = package_json.remove("version");
  let type_val = package_json.remove("type");
  let bin = package_json.remove("bin");
  // `"exports": "./main.js"` (and similar sugar) is normalized to
  // `{ ".": ... }`
  let exports = package_json
    .remove("exports")
    .map(|exports| {
      if is_conditional_exports_main_sugar(&exports)? {
        let mut map = Map::new();
        map.insert(".".to_string(), exports);
        Ok::<_, PackageJsonLoadError>(Some(map))
      } else {
        Ok(map_object(exports))
      }
    })
    .transpose()?
    .flatten();
  let imports = imports_val.and_then(map_object);
  let main = main_val.and_then(map_string);
  let name = name_val.and_then(map_string);
  let version = version_val.and_then(map_string);
  let module = module_val.and_then(map_string);
  let browser = browser_val.and_then(map_string);
  let dependencies = package_json
    .remove("dependencies")
    .and_then(parse_string_map);
  let dev_dependencies = package_json
    .remove("devDependencies")
    .and_then(parse_string_map);
  // "bundleDependencies" takes precedence over the legacy
  // "bundledDependencies" spelling
  let bundle_dependencies = package_json
    .remove("bundleDependencies")
    .or_else(|| package_json.remove("bundledDependencies"))
    .and_then(parse_string_array);
  let peer_dependencies = package_json
    .remove("peerDependencies")
    .and_then(parse_string_map);
  let peer_dependencies_meta = package_json.remove("peerDependenciesMeta");
  let optional_dependencies = package_json
    .remove("optionalDependencies")
    .and_then(parse_string_map);
  let directories: Option<Map<String, Value>> =
    package_json.remove("directories").and_then(map_object);
  let scripts: Option<IndexMap<String, String>> =
    package_json.remove("scripts").and_then(parse_string_map);
  // Ignore unknown types for forwards compatibility
  let typ = if let Some(t) = type_val {
    if let Some(t) = t.as_str() {
      if t != "module" && t != "commonjs" {
        "none".to_string()
      } else {
        t.to_string()
      }
    } else {
      "none".to_string()
    }
  } else {
    "none".to_string()
  };
  // for typescript, it looks for "typings" first, then "types"
  let types = package_json
    .remove("typings")
    .or_else(|| package_json.remove("types"))
    .and_then(map_string);
  let types_versions =
    package_json.remove("typesVersions").and_then(map_object);
  let workspaces = package_json
    .remove("workspaces")
    .and_then(parse_string_array);
  let os = package_json.remove("os").and_then(parse_string_array);
  let cpu = package_json.remove("cpu").and_then(parse_string_array);
  Ok(PackageJson {
    path,
    main,
    name,
    version,
    module,
    browser,
    typ,
    types,
    types_versions,
    exports,
    imports,
    bin,
    dependencies,
    dev_dependencies,
    bundle_dependencies,
    peer_dependencies,
    peer_dependencies_meta,
    optional_dependencies,
    directories,
    scripts,
    workspaces,
    os,
    cpu,
    resolved_deps: Default::default(),
  })
}
/// File URL for this package.json's path.
pub fn specifier(&self) -> Url {
  deno_path_util::url_from_file_path(&self.path).unwrap()
}

/// Directory containing this package.json.
pub fn dir_path(&self) -> &Path {
  self.path.parent().unwrap()
}

/// Resolve the package.json's dependencies.
pub fn resolve_local_package_json_deps(&self) -> &PackageJsonDepsRc {
  fn get_map(deps: Option<&IndexMap<String, String>>) -> PackageJsonDepsMap {
    let Some(deps) = deps else {
      return Default::default();
    };
    let mut result = IndexMap::with_capacity(deps.len());
    for (key, value) in deps {
      // first entry for a duplicate alias wins
      result
        .entry(StackString::from(key.as_str()))
        .or_insert_with(|| PackageJsonDepValue::parse(key, value));
    }
    result
  }

  // parsed lazily and memoized on first access
  self.resolved_deps.get_or_init(|| {
    PackageJsonDepsRc::new(PackageJsonDeps {
      dependencies: get_map(self.dependencies.as_ref()),
      dev_dependencies: get_map(self.dev_dependencies.as_ref()),
    })
  })
}
/// Default bin name for a string `"bin"` entry: the package name, or
/// for scoped packages ("@scope/name") the part after the last '/'.
///
/// Errors when the package.json has no `"name"` field.
pub fn resolve_default_bin_name(
  &self,
) -> Result<&str, MissingPkgJsonNameError> {
  let Some(name) = &self.name else {
    return Err(MissingPkgJsonNameError {
      pkg_json_path: self.path.clone(),
    });
  };
  // `rsplit('/').next()` finds the last segment without walking the
  // whole string like `split("/").last()` does; a char pattern is also
  // cheaper than a &str pattern. `next()` on a split is always `Some`.
  let name = name.rsplit('/').next().unwrap();
  Ok(name)
}
/// Resolves the package's bin entries.
///
/// Precedence: string `"bin"` (named after the package), object
/// `"bin"`, then `"directories": { "bin": ... }`; otherwise empty.
pub fn resolve_bins(
  &self,
) -> Result<PackageJsonBins, MissingPkgJsonNameError> {
  match &self.bin {
    Some(Value::String(path)) => {
      let name = self.resolve_default_bin_name()?;
      Ok(PackageJsonBins::Bins(BTreeMap::from([(
        name.to_string(),
        self.dir_path().join(path),
      )])))
    }
    Some(Value::Object(o)) => Ok(PackageJsonBins::Bins(
      o.iter()
        .filter_map(|(key, value)| {
          // non-string values are silently skipped
          let Value::String(path) = value else {
            return None;
          };
          Some((key.clone(), self.dir_path().join(path)))
        })
        .collect::<BTreeMap<_, _>>(),
    )),
    _ => {
      let bin_directory =
        self.directories.as_ref().and_then(|d| d.get("bin"));
      match bin_directory {
        Some(Value::String(bin_dir)) => {
          let bin_dir = self.dir_path().join(bin_dir);
          Ok(PackageJsonBins::Directory(bin_dir))
        }
        _ => Ok(PackageJsonBins::Bins(Default::default())),
      }
    }
  }
}
}
/// Returns whether `"exports"` uses the "main sugar" form (a bare
/// string/array, or an object keyed only by condition names rather
/// than `.`-prefixed subpaths). Mixing the two key styles is an error.
fn is_conditional_exports_main_sugar(
  exports: &Value,
) -> Result<bool, PackageJsonLoadError> {
  if exports.is_string() || exports.is_array() {
    return Ok(true);
  }
  // null and non-objects are not sugar
  let Some(exports_obj) = exports.as_object() else {
    return Ok(false);
  };
  let mut keys = exports_obj.keys();
  let Some(first_key) = keys.next() else {
    // an empty object is treated as not sugar
    return Ok(false);
  };
  // the first key decides the style; every later key must agree
  let is_conditional_sugar = first_key.is_empty() || !first_key.starts_with('.');
  for key in keys {
    if (key.is_empty() || !key.starts_with('.')) != is_conditional_sugar {
      return Err(PackageJsonLoadError::InvalidExports);
    }
  }
  Ok(is_conditional_sugar)
}
// Unit tests for package.json loading and dependency parsing.
#[cfg(test)]
mod test {
  use std::error::Error;
  use std::path::PathBuf;

  use pretty_assertions::assert_eq;

  use super::*;

  // regression test: `"exports": null` must be treated as no exports
  #[test]
  fn null_exports_should_not_crash() {
    let package_json = PackageJson::load_from_string(
      PathBuf::from("/package.json"),
      r#"{ "exports": null }"#,
    )
    .unwrap();
    assert!(package_json.exports.is_none());
  }

  // flattens dependencies + devDependencies into one map and unboxes
  // parse errors for easier assertions
  fn get_local_package_json_version_reqs_for_tests(
    package_json: &PackageJson,
  ) -> IndexMap<
    String,
    Result<PackageJsonDepValue, PackageJsonDepValueParseErrorKind>,
  > {
    let deps = package_json.resolve_local_package_json_deps();
    deps
      .dependencies
      .clone()
      .into_iter()
      .chain(deps.dev_dependencies.clone())
      .map(|(k, v)| {
        (
          k.to_string(),
          match v {
            Ok(v) => Ok(v),
            Err(err) => Err(err.into_kind()),
          },
        )
      })
      .collect::<IndexMap<_, _>>()
  }

  #[test]
  fn test_get_local_package_json_version_reqs() {
    let mut package_json =
      PackageJson::load_from_string(PathBuf::from("/package.json"), "{}")
        .unwrap();
    package_json.dependencies = Some(IndexMap::from([
      ("test".to_string(), "^1.2".to_string()),
      ("other".to_string(), "npm:package@~1.3".to_string()),
    ]));
    package_json.dev_dependencies = Some(IndexMap::from([
      ("package_b".to_string(), "~2.2".to_string()),
      ("other".to_string(), "^3.2".to_string()),
    ]));
    let deps = package_json.resolve_local_package_json_deps();
    assert_eq!(
      deps
        .dependencies
        .clone()
        .into_iter()
        .map(|d| (d.0, d.1.unwrap()))
        .collect::<Vec<_>>(),
      Vec::from([
        (
          "test".into(),
          PackageJsonDepValue::Req(PackageReq::from_str("test@^1.2").unwrap())
        ),
        (
          "other".into(),
          PackageJsonDepValue::Req(
            PackageReq::from_str("package@~1.3").unwrap()
          )
        ),
      ])
    );
    assert_eq!(
      deps
        .dev_dependencies
        .clone()
        .into_iter()
        .map(|d| (d.0, d.1.unwrap()))
        .collect::<Vec<_>>(),
      Vec::from([
        (
          "package_b".into(),
          PackageJsonDepValue::Req(
            PackageReq::from_str("package_b@~2.2").unwrap()
          )
        ),
        (
          "other".into(),
          PackageJsonDepValue::Req(PackageReq::from_str("other@^3.2").unwrap())
        ),
      ])
    );
  }

  #[test]
  fn test_get_local_package_json_version_reqs_errors_non_npm_specifier() {
    let mut package_json =
      PackageJson::load_from_string(PathBuf::from("/package.json"), "{}")
        .unwrap();
    package_json.dependencies = Some(IndexMap::from([(
      "test".to_string(),
      "%*(#$%()".to_string(),
    )]));
    let map = get_local_package_json_version_reqs_for_tests(&package_json);
    assert_eq!(map.len(), 1);
    let err = map.get("test").unwrap().as_ref().unwrap_err();
    assert_eq!(format!("{}", err), "Invalid version requirement");
    assert_eq!(
      format!("{}", err.source().unwrap()),
      concat!("Unexpected character.\n", " %*(#$%()\n", " ~")
    );
  }

  #[test]
  fn test_get_local_package_json_version_reqs_range() {
    let mut package_json =
      PackageJson::load_from_string(PathBuf::from("/package.json"), "{}")
        .unwrap();
    package_json.dependencies = Some(IndexMap::from([(
      "test".to_string(),
      "1.x - 1.3".to_string(),
    )]));
    let map = get_local_package_json_version_reqs_for_tests(&package_json);
    assert_eq!(
      map,
      IndexMap::from([(
        "test".to_string(),
        Ok(PackageJsonDepValue::Req(PackageReq {
          name: "test".into(),
          version_req: VersionReq::parse_from_npm("1.x - 1.3").unwrap()
        }))
      )])
    );
  }

  #[test]
  fn test_get_local_package_json_version_reqs_jsr() {
    let mut package_json =
      PackageJson::load_from_string(PathBuf::from("/package.json"), "{}")
        .unwrap();
    package_json.dependencies = Some(IndexMap::from([(
      "@denotest/foo".to_string(),
      "jsr:^1.2".to_string(),
    )]));
    let map = get_local_package_json_version_reqs_for_tests(&package_json);
    assert_eq!(
      map,
      IndexMap::from([(
        "@denotest/foo".to_string(),
        Ok(PackageJsonDepValue::JsrReq(PackageReq {
          name: "@denotest/foo".into(),
          version_req: VersionReq::parse_from_specifier("^1.2").unwrap()
        }))
      )])
    );
  }

  // workspace:/file: entries parse to their own variants and
  // git:/http:/https: entries produce Unsupported errors
  #[test]
  fn test_get_local_package_json_version_reqs_skips_certain_specifiers() {
    let mut package_json =
      PackageJson::load_from_string(PathBuf::from("/package.json"), "{}")
        .unwrap();
    package_json.dependencies = Some(IndexMap::from([
      ("test".to_string(), "1".to_string()),
      (
        "work-test-version-req".to_string(),
        "workspace:1.1.1".to_string(),
      ),
      ("work-test-star".to_string(), "workspace:*".to_string()),
      ("work-test-tilde".to_string(), "workspace:~".to_string()),
      ("work-test-caret".to_string(), "workspace:^".to_string()),
      ("file-test".to_string(), "file:something".to_string()),
      ("git-test".to_string(), "git:something".to_string()),
      ("http-test".to_string(), "http://something".to_string()),
      ("https-test".to_string(), "https://something".to_string()),
    ]));
    let result = get_local_package_json_version_reqs_for_tests(&package_json);
    assert_eq!(
      result,
      IndexMap::from([
        (
          "test".to_string(),
          Ok(PackageJsonDepValue::Req(
            PackageReq::from_str("test@1").unwrap()
          ))
        ),
        (
          "work-test-star".to_string(),
          Ok(PackageJsonDepValue::Workspace(
            PackageJsonDepWorkspaceReq::VersionReq(
              VersionReq::parse_from_npm("*").unwrap()
            )
          ))
        ),
        (
          "work-test-version-req".to_string(),
          Ok(PackageJsonDepValue::Workspace(
            PackageJsonDepWorkspaceReq::VersionReq(
              VersionReq::parse_from_npm("1.1.1").unwrap()
            )
          ))
        ),
        (
          "work-test-tilde".to_string(),
          Ok(PackageJsonDepValue::Workspace(
            PackageJsonDepWorkspaceReq::Tilde
          ))
        ),
        (
          "work-test-caret".to_string(),
          Ok(PackageJsonDepValue::Workspace(
            PackageJsonDepWorkspaceReq::Caret
          ))
        ),
        (
          "file-test".to_string(),
          Ok(PackageJsonDepValue::File("something".to_string())),
        ),
        (
          "git-test".to_string(),
          Err(PackageJsonDepValueParseErrorKind::Unsupported {
            scheme: "git".to_string()
          }),
        ),
        (
          "http-test".to_string(),
          Err(PackageJsonDepValueParseErrorKind::Unsupported {
            scheme: "http".to_string()
          }),
        ),
        (
          "https-test".to_string(),
          Err(PackageJsonDepValueParseErrorKind::Unsupported {
            scheme: "https".to_string()
          }),
        ),
      ])
    );
  }

  // round-trip: deserializing then serializing yields the same JSON
  #[test]
  fn test_deserialize_serialize() {
    let json_value = serde_json::json!({
      "name": "test",
      "version": "1",
      "exports": {
        ".": "./main.js",
      },
      "bin": "./main.js",
      "types": "./types.d.ts",
      "typesVersions": {
        "<4.0": { "index.d.ts": ["index.v3.d.ts"] }
      },
      "imports": {
        "#test": "./main.js",
      },
      "main": "./main.js",
      "module": "./module.js",
      "browser": "./browser.js",
      "type": "module",
      "dependencies": {
        "name": "1.2",
      },
      "directories": {
        "bin": "./bin",
      },
      "devDependencies": {
        "name": "1.2",
      },
      "scripts": {
        "test": "echo \"Error: no test specified\" && exit 1",
      },
      "workspaces": ["asdf", "asdf2"],
      "cpu": ["x86_64"],
      "os": ["win32"],
      "optionalDependencies": {
        "optional": "1.1"
      },
      "bundleDependencies": [
        "name"
      ],
      "peerDependencies": {
        "peer": "1.0"
      },
      "peerDependenciesMeta": {
        "peer": {
          "optional": true
        }
      },
    });
    let package_json = PackageJson::load_from_value(
      PathBuf::from("/package.json"),
      json_value.clone(),
    )
    .unwrap();
    let serialized_value = serde_json::to_value(&package_json).unwrap();
    assert_eq!(serialized_value, json_value);
  }

  // https://github.com/denoland/deno/issues/26031
  #[test]
  fn test_exports_error() {
    let json_value = serde_json::json!({
      "name": "test",
      "version": "1",
      "exports": { ".": "./a", "a": "./a" },
    });
    assert!(matches!(
      PackageJson::load_from_value(
        PathBuf::from("/package.json"),
        json_value.clone(),
      ),
      Err(PackageJsonLoadError::InvalidExports)
    ));
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/config/lib.rs | libs/config/lib.rs | // Copyright 2018-2025 the Deno authors. MIT license.
#![deny(clippy::print_stderr)]
#![deny(clippy::print_stdout)]
#![deny(clippy::unused_async)]
#![deny(clippy::unnecessary_wraps)]
#[cfg(feature = "deno_json")]
pub mod deno_json;
#[cfg(feature = "deno_json")]
pub mod glob;
#[cfg(feature = "deno_json")]
pub mod import_map;
#[cfg(feature = "deno_json")]
mod util;
#[cfg(feature = "workspace")]
pub mod workspace;
#[cfg(feature = "deno_json")]
pub use deno_path_util::UrlToFilePathError;
#[cfg(feature = "deno_json")]
pub use util::ParseDateOrDurationError;
#[cfg(feature = "deno_json")]
pub use util::ParseIso8601DurationError;
#[cfg(feature = "deno_json")]
pub use util::parse_minutes_duration_or_date;
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/config/import_map.rs | libs/config/import_map.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::collections::HashSet;
use deno_semver::jsr::JsrDepPackageReq;
use deno_semver::jsr::JsrPackageReqReference;
use deno_semver::npm::NpmPackageReqReference;
/// Attempts to resolve any `npm:` and `jsr:` dependencies
/// in the import map's imports and scopes.
pub fn import_map_deps(
  import_map: &serde_json::Value,
) -> HashSet<JsrDepPackageReq> {
  let top_level = imports_values(import_map.get("imports"));
  let scoped = scope_values(import_map.get("scopes"));
  values_to_set(top_level.into_iter().chain(scoped))
}
/// Collects the string values of an `"imports"` object; anything that
/// is not an object yields an empty list.
pub(crate) fn imports_values(
  value: Option<&serde_json::Value>,
) -> Vec<&String> {
  match value.and_then(|v| v.as_object()) {
    Some(obj) => obj
      .values()
      .filter_map(|value| match value {
        serde_json::Value::String(value) => Some(value),
        _ => None,
      })
      .collect(),
    None => Vec::new(),
  }
}
/// Collects the string values of every scope's imports object.
pub(crate) fn scope_values(value: Option<&serde_json::Value>) -> Vec<&String> {
  value
    .and_then(|v| v.as_object())
    .map(|obj| obj.values().flat_map(|v| imports_values(Some(v))).collect())
    .unwrap_or_default()
}
/// Parses each value into a dependency requirement, dropping values
/// that are not `jsr:`/`npm:` package specifiers.
pub(crate) fn values_to_set<'a>(
  values: impl Iterator<Item = &'a String>,
) -> HashSet<JsrDepPackageReq> {
  values
    .filter_map(|value| value_to_dep_req(value))
    .collect()
}
/// Parses a specifier as a `jsr:` requirement first, then as `npm:`;
/// returns `None` for anything else.
pub(crate) fn value_to_dep_req(value: &str) -> Option<JsrDepPackageReq> {
  if let Ok(req_ref) = JsrPackageReqReference::from_str(value) {
    return Some(JsrDepPackageReq::jsr(req_ref.into_inner().req));
  }
  NpmPackageReqReference::from_str(value)
    .ok()
    .map(|req_ref| JsrDepPackageReq::npm(req_ref.into_inner().req))
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/config/util.rs | libs/config/util.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use chrono::DateTime;
use chrono::Duration;
use thiserror::Error;
use crate::deno_json::NewestDependencyDate;
/// Returns whether an I/O error can be safely skipped while walking
/// the file system (missing/forbidden/invalid paths).
pub fn is_skippable_io_error(e: &std::io::Error) -> bool {
  use std::io::ErrorKind::*;
  // skip over invalid filenames on windows (ERROR_INVALID_NAME)
  const ERROR_INVALID_NAME: i32 = 123;
  // errno 20 (ENOTDIR) on unix: a path component was not a directory
  const NOT_A_DIRECTORY: i32 = 20;
  if cfg!(windows) && e.raw_os_error() == Some(ERROR_INVALID_NAME) {
    return true;
  }
  if matches!(e.kind(), InvalidInput | PermissionDenied | NotFound) {
    // ok keep going
    return true;
  }
  cfg!(unix) && e.raw_os_error() == Some(NOT_A_DIRECTORY)
}
/// Error returned by [`parse_minutes_duration_or_date`] when the input is
/// neither an integer minute count, an RFC3339 datetime, nor an ISO-8601
/// duration.
#[derive(Debug, Error)]
pub enum ParseDateOrDurationError {
  /// The input was all digits but failed to parse as an `i64`.
  #[error("failed parsing integer to minutes")]
  InvalidMinutes(#[source] std::num::ParseIntError),
  /// The input did not look like a duration, so the RFC3339 parse error
  /// is reported.
  #[error("expected minutes, RFC3339 datetime, or ISO-8601 duration")]
  InvalidDateTime(#[source] chrono::ParseError),
  /// The input started like a duration ('P') but failed to parse.
  #[error("expected minutes, RFC3339 datetime, or ISO-8601 duration")]
  InvalidDuration(#[source] ParseIso8601DurationError),
}
/// Parses a string that may be an integer minute count, an ISO-8601
/// duration, or an RFC3339 date into a [`NewestDependencyDate`].
pub fn parse_minutes_duration_or_date(
  sys: &impl sys_traits::SystemTimeNow,
  s: &str,
) -> Result<NewestDependencyDate, ParseDateOrDurationError> {
  match s {
    // "0" is treated as disabled so a user with a wrong system
    // clock doesn't run into issues
    "0" => Ok(NewestDependencyDate::Disabled),
    other => parse_enabled_minutes_duration_or_date(sys, other)
      .map(NewestDependencyDate::Enabled),
  }
}
/// Resolves an enabled (non-"0") value to a concrete UTC cutoff instant.
///
/// Forms are tried in order:
/// 1. all-ASCII-digit string: minutes subtracted from "now"
/// 2. RFC3339 datetime (plus lenient variants: offset without a colon,
///    optional seconds)
/// 3. plain `YYYY-MM-DD` date, treated as midnight UTC
/// 4. ISO-8601 duration subtracted from "now"
fn parse_enabled_minutes_duration_or_date(
  sys: &impl sys_traits::SystemTimeNow,
  s: &str,
) -> Result<chrono::DateTime<chrono::Utc>, ParseDateOrDurationError> {
  // a bare integer is interpreted as "this many minutes ago"
  if s.chars().all(|c| c.is_ascii_digit()) {
    let now = chrono::DateTime::<chrono::Utc>::from(sys.sys_time_now());
    let minutes: i64 = s
      .parse()
      .map_err(ParseDateOrDurationError::InvalidMinutes)?;
    return Ok(now - chrono::Duration::minutes(minutes));
  }
  // keep the RFC3339 error around; it's the error surfaced when the input
  // doesn't parse as a duration either
  let datetime_parse_err = match DateTime::parse_from_rfc3339(s) {
    Ok(dt) => return Ok(dt.with_timezone(&chrono::Utc)),
    Err(err) => err,
  };
  // accept offsets without colon (e.g., +0900) and optional seconds
  if let Ok(dt) = DateTime::parse_from_str(s, "%Y-%m-%dT%H:%M:%S%z")
    .or_else(|_| DateTime::parse_from_str(s, "%Y-%m-%dT%H:%M%z"))
  {
    return Ok(dt.with_timezone(&chrono::Utc));
  }
  // accept simple date format (YYYY-MM-DD) and treat as midnight UTC
  if let Ok(date) = chrono::NaiveDate::parse_from_str(s, "%Y-%m-%d") {
    return Ok(date.and_hms_opt(0, 0, 0).unwrap().and_utc());
  }
  // try duration
  match parse_iso8601_duration(s) {
    Ok(duration) => {
      let now = chrono::DateTime::<chrono::Utc>::from(sys.sys_time_now());
      Ok(now - duration)
    }
    // no leading 'P' means the input never looked like a duration, so
    // report the datetime parse failure instead
    Err(ParseIso8601DurationError::MissingP) => Err(
      ParseDateOrDurationError::InvalidDateTime(datetime_parse_err),
    ),
    Err(err) => Err(ParseDateOrDurationError::InvalidDuration(err)),
  }
}
/// Errors produced by [`parse_iso8601_duration`].
#[derive(Debug, Error)]
pub enum ParseIso8601DurationError {
  #[error("empty duration string")]
  Empty,
  #[error("duration must start with 'P'")]
  MissingP,
  #[error("expected designators after 'P'")]
  MissingDesignator,
  #[error("expected time component after 'T'")]
  MissingTimeAfterT,
  #[error("duplicate 'T' designator")]
  DuplicateT,
  #[error("expected number")]
  ExpectedNumber,
  #[error("expected digits after decimal point")]
  ExpectedFraction,
  #[error("missing unit designator")]
  MissingUnit,
  #[error("invalid integer number")]
  InvalidNumber,
  #[error("invalid fractional seconds")]
  InvalidFractionalSeconds,
  // months/years have no fixed length, so they are rejected outright
  #[error("months not supported")]
  MonthsNotSupported,
  #[error("years not supported")]
  YearsNotSupported,
  #[error("weeks must be the only component (use PnW)")]
  WeeksMustBeAlone,
  #[error("fractional value is only allowed for seconds (S)")]
  FractionalNotAllowed,
  #[error("invalid unit designator")]
  InvalidUnit,
  #[error("duration overflow")]
  Overflow,
}
/// Parses ISO-8601 durations of the form:
/// PnW | PnDTnHnMnS | PnD | PTnH | PTnM | PTnS | combinations
/// Notes:
/// - Supports optional leading '-' for negative durations.
/// - Supports weeks (W), days (D), hours (H), minutes (M in the *time* part), seconds (S).
/// - DOES NOT support years or months (ambiguous length); will error if present.
fn parse_iso8601_duration(
  input: &str,
) -> Result<Duration, ParseIso8601DurationError> {
  if input.is_empty() {
    return Err(ParseIso8601DurationError::Empty);
  }
  // accept optional '+' or '-' sign
  let (s, neg) = match input.strip_prefix(['-', '+']) {
    Some(rest) if input.starts_with('-') => (rest, true),
    Some(rest) => (rest, false),
    None => (input, false),
  };
  let Some(s) = s.strip_prefix('P') else {
    return Err(ParseIso8601DurationError::MissingP);
  };
  if s.is_empty() {
    return Err(ParseIso8601DurationError::MissingDesignator);
  }
  // weeks-only short form: PnW
  if let Some(num) = s.strip_suffix(['W', 'w']) {
    let weeks: i64 = num
      .parse()
      .map_err(|_| ParseIso8601DurationError::InvalidNumber)?;
    let days = weeks
      .checked_mul(7)
      .ok_or(ParseIso8601DurationError::Overflow)?;
    let d = Duration::days(days);
    return Ok(if neg { -d } else { d });
  }
  // scan repeated `<number><unit>` designators, switching from the date
  // part to the time part when a 'T' is seen
  let bytes = s.as_bytes();
  let mut i = 0usize;
  let mut in_time = false;
  let mut total = Duration::zero();
  while i < bytes.len() {
    if !in_time && bytes[i] == b'T' {
      in_time = true;
      i += 1;
      if i == bytes.len() {
        return Err(ParseIso8601DurationError::MissingTimeAfterT);
      }
      continue;
    } else if in_time && bytes[i] == b'T' {
      return Err(ParseIso8601DurationError::DuplicateT);
    }
    // parse integer part
    let start = i;
    while i < bytes.len() && bytes[i].is_ascii_digit() {
      i += 1;
    }
    if start == i {
      return Err(ParseIso8601DurationError::ExpectedNumber);
    }
    // optional fractional part ONLY allowed for seconds
    let mut frac_start = i;
    let mut frac_len = 0usize;
    if i < bytes.len() && bytes[i] == b'.' {
      i += 1; // skip '.'
      frac_start = i;
      while i < bytes.len() && bytes[i].is_ascii_digit() {
        i += 1;
        frac_len += 1;
      }
      if frac_len == 0 {
        return Err(ParseIso8601DurationError::ExpectedFraction);
      }
    }
    // end of the integer slice (just before '.' if present)
    let int_end = if frac_len > 0 { frac_start - 1 } else { i };
    if i >= bytes.len() {
      return Err(ParseIso8601DurationError::MissingUnit);
    }
    // units are matched case-insensitively (e.g. "pt5m" works)
    let mut unit = bytes[i] as char;
    unit.make_ascii_uppercase();
    i += 1;
    // integral value
    let int_val: i64 = s[start..int_end]
      .parse()
      .map_err(|_| ParseIso8601DurationError::InvalidNumber)?;
    // add to total
    let add = match (in_time, unit, frac_len) {
      (false, 'D', 0) => Duration::days(int_val),
      (false, 'M', _) => {
        return Err(ParseIso8601DurationError::MonthsNotSupported);
      }
      (false, 'Y', _) => {
        return Err(ParseIso8601DurationError::YearsNotSupported);
      }
      (false, 'W', _) => {
        return Err(ParseIso8601DurationError::WeeksMustBeAlone);
      }
      (true, 'H', 0) => Duration::hours(int_val),
      (true, 'M', 0) => Duration::minutes(int_val),
      // Seconds may be fractional: PT1.5S
      (true, 'S', _) => {
        let mut d = Duration::seconds(int_val);
        if frac_len > 0 {
          let frac_str = &s[frac_start..(frac_start + frac_len)];
          // take up to 9 digits for nanoseconds, right-pad with zeros
          let n = frac_str.chars().take(9).collect::<String>();
          let scale = 9 - n.len();
          let base: i64 = n
            .parse()
            .map_err(|_| ParseIso8601DurationError::InvalidFractionalSeconds)?;
          let nanos = base
            .checked_mul(10_i64.pow(scale as u32))
            .ok_or(ParseIso8601DurationError::Overflow)?;
          d = d
            .checked_add(&Duration::nanoseconds(nanos))
            .ok_or(ParseIso8601DurationError::Overflow)?;
        }
        d
      }
      // any other time-unit with a fraction is invalid because only seconds allow fractions
      (true, _, f) if f > 0 => {
        return Err(ParseIso8601DurationError::FractionalNotAllowed);
      }
      // unknown/invalid unit in time section (without fraction)
      (true, _, _) => return Err(ParseIso8601DurationError::InvalidUnit),
      _ => return Err(ParseIso8601DurationError::InvalidUnit),
    };
    total = total
      .checked_add(&add)
      .ok_or(ParseIso8601DurationError::Overflow)?;
  }
  Ok(if neg { -total } else { total })
}
// Tests for the duration/date parsing helpers above.
#[cfg(test)]
mod tests {
  use std::time::SystemTime;
  use chrono::TimeZone;
  use chrono::Utc;
  use super::*;
  #[cfg(windows)]
  #[test]
  fn is_skippable_io_error_win_invalid_filename() {
    // 123 is ERROR_INVALID_NAME on Windows
    let error = std::io::Error::from_raw_os_error(123);
    assert!(super::is_skippable_io_error(&error));
  }
  #[test]
  fn ok_basic() {
    assert_eq!(
      parse_iso8601_duration("P3DT4H12M6S").unwrap(),
      Duration::days(3)
        + Duration::hours(4)
        + Duration::minutes(12)
        + Duration::seconds(6)
    );
    assert_eq!(
      parse_iso8601_duration("+PT90S").unwrap(),
      Duration::seconds(90)
    );
    assert_eq!(parse_iso8601_duration("P2W").unwrap(), Duration::days(14));
    assert_eq!(
      parse_iso8601_duration("PT1.5S").unwrap(),
      Duration::milliseconds(1500)
    );
    assert_eq!(
      parse_iso8601_duration("-PT5M").unwrap(),
      -Duration::minutes(5)
    );
  }
  #[test]
  fn errs() {
    assert!(parse_iso8601_duration("P1Y").is_err());
    assert!(parse_iso8601_duration("P1M").is_err());
    assert!(parse_iso8601_duration("PT").is_err());
    assert!(parse_iso8601_duration("PT1.2M").is_err()); // fractional minutes rejected
    assert!(parse_iso8601_duration("P1WT1H").is_err()); // W must be alone
  }
  #[test]
  fn test_parse_minutes_duration_or_date() {
    // fixed "now" so relative inputs produce deterministic cutoffs
    struct TestEnv;
    impl sys_traits::SystemTimeNow for TestEnv {
      fn sys_time_now(&self) -> SystemTime {
        let datetime = Utc.with_ymd_and_hms(2025, 6, 1, 0, 0, 0).unwrap();
        SystemTime::from(datetime)
      }
    }
    // zero becomes disabled to prevent issues with clock drift
    assert_eq!(
      parse_minutes_duration_or_date(&TestEnv, "0").unwrap(),
      NewestDependencyDate::Disabled
    );
    assert_eq!(
      parse_minutes_duration_or_date(&TestEnv, "120").unwrap(),
      NewestDependencyDate::Enabled(
        Utc.with_ymd_and_hms(2025, 5, 31, 22, 0, 0).unwrap()
      )
    );
    assert_eq!(
      parse_minutes_duration_or_date(&TestEnv, "2025-01-01").unwrap(),
      NewestDependencyDate::Enabled(
        Utc.with_ymd_and_hms(2025, 1, 1, 0, 0, 0).unwrap()
      )
    );
    assert_eq!(
      parse_minutes_duration_or_date(&TestEnv, "2025-01-01").unwrap(),
      NewestDependencyDate::Enabled(
        Utc.with_ymd_and_hms(2025, 1, 1, 0, 0, 0).unwrap()
      )
    );
    assert_eq!(
      parse_minutes_duration_or_date(&TestEnv, "2025-09-16T12:50:10+00:00")
        .unwrap(),
      NewestDependencyDate::Enabled(
        Utc.with_ymd_and_hms(2025, 9, 16, 12, 50, 10).unwrap()
      )
    );
    assert_eq!(
      parse_minutes_duration_or_date(&TestEnv, "P2D").unwrap(),
      NewestDependencyDate::Enabled(
        Utc.with_ymd_and_hms(2025, 5, 30, 0, 0, 0).unwrap()
      )
    );
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/config/deno_json/permissions.rs | libs/config/deno_json/permissions.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use indexmap::IndexMap;
use serde::Deserialize;
use url::Url;
use super::UndefinedPermissionError;
/// A single permission value from config: everything, an explicit list,
/// or nothing.
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub enum PermissionConfigValue {
  // granted without restriction (`true` in config)
  All,
  // granted for the listed values only (non-empty array in config)
  Some(Vec<String>),
  // not granted (`false`, `null`, or an empty array in config)
  None,
}
impl<'de> serde::Deserialize<'de> for PermissionConfigValue {
  /// Accepts a boolean (`true` => `All`, `false` => `None`), an array of
  /// strings (empty array => `None`), or `null` (=> `None`).
  fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
  where
    D: serde::Deserializer<'de>,
  {
    struct Visitor;
    impl<'d> serde::de::Visitor<'d> for Visitor {
      type Value = PermissionConfigValue;
      fn expecting(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(f, "either an array or bool")
      }
      fn visit_bool<E>(self, v: bool) -> Result<Self::Value, E>
      where
        E: serde::de::Error,
      {
        if v {
          Ok(PermissionConfigValue::All)
        } else {
          Ok(PermissionConfigValue::None)
        }
      }
      fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
      where
        A: serde::de::SeqAccess<'d>,
      {
        let mut out = Vec::with_capacity(seq.size_hint().unwrap_or(8));
        while let Some(element) = seq.next_element::<String>()? {
          out.push(element);
        }
        // an empty list grants nothing, so normalize it to `None`
        if out.is_empty() {
          Ok(PermissionConfigValue::None)
        } else {
          Ok(PermissionConfigValue::Some(out))
        }
      }
      fn visit_unit<E>(self) -> Result<Self::Value, E>
      where
        E: serde::de::Error,
      {
        Ok(PermissionConfigValue::None)
      }
    }
    deserializer.deserialize_any(Visitor)
  }
}
/// Allow/deny pair for a permission that has no `ignore` list.
#[derive(Deserialize, Default, Clone, Debug, PartialEq, Eq, Hash)]
#[serde(default, deny_unknown_fields)]
pub struct AllowDenyPermissionConfig {
  pub allow: Option<PermissionConfigValue>,
  pub deny: Option<PermissionConfigValue>,
}
impl AllowDenyPermissionConfig {
  /// Returns `true` when neither `allow` nor `deny` was specified.
  pub fn is_none(&self) -> bool {
    matches!(
      self,
      Self {
        allow: None,
        deny: None
      }
    )
  }
}
/// Allow/deny/ignore triple for permissions that support an `ignore` list.
#[derive(Deserialize, Default, Clone, Debug, PartialEq, Eq, Hash)]
#[serde(default, deny_unknown_fields)]
pub struct AllowDenyIgnorePermissionConfig {
  pub allow: Option<PermissionConfigValue>,
  pub deny: Option<PermissionConfigValue>,
  pub ignore: Option<PermissionConfigValue>,
}
impl AllowDenyIgnorePermissionConfig {
  /// Returns `true` when none of `allow`, `deny`, or `ignore` was specified.
  pub fn is_none(&self) -> bool {
    matches!(
      self,
      Self {
        allow: None,
        deny: None,
        ignore: None
      }
    )
  }
}
/// Raw JSON shapes accepted for an allow/deny permission entry.
#[derive(Deserialize)]
#[serde(untagged)]
pub enum AllowDenyPermissionConfigValue {
  Boolean(bool),
  AllowList(Vec<String>),
  Object(AllowDenyPermissionConfig),
}
/// Normalizes the three accepted JSON shapes (bool, array, object) into an
/// [`AllowDenyPermissionConfig`].
fn deserialize_allow_deny<'de, D: serde::Deserializer<'de>>(
  de: D,
) -> Result<AllowDenyPermissionConfig, D::Error> {
  let value = AllowDenyPermissionConfigValue::deserialize(de)?;
  Ok(match value {
    AllowDenyPermissionConfigValue::Boolean(b) => {
      let allow = if b {
        PermissionConfigValue::All
      } else {
        PermissionConfigValue::None
      };
      AllowDenyPermissionConfig {
        allow: Some(allow),
        deny: None,
      }
    }
    AllowDenyPermissionConfigValue::AllowList(list) => {
      // an empty allow list means nothing is allowed
      let allow = if list.is_empty() {
        PermissionConfigValue::None
      } else {
        PermissionConfigValue::Some(list)
      };
      AllowDenyPermissionConfig {
        allow: Some(allow),
        deny: None,
      }
    }
    AllowDenyPermissionConfigValue::Object(obj) => obj,
  })
}
/// Raw JSON shapes accepted for an allow/deny/ignore permission entry.
#[derive(Deserialize)]
#[serde(untagged)]
pub enum AllowDenyIgnorePermissionConfigValue {
  Boolean(bool),
  AllowList(Vec<String>),
  Object(AllowDenyIgnorePermissionConfig),
}
/// Normalizes the three accepted JSON shapes (bool, array, object) into an
/// [`AllowDenyIgnorePermissionConfig`].
fn deserialize_allow_deny_ignore<'de, D: serde::Deserializer<'de>>(
  de: D,
) -> Result<AllowDenyIgnorePermissionConfig, D::Error> {
  let value = AllowDenyIgnorePermissionConfigValue::deserialize(de)?;
  Ok(match value {
    AllowDenyIgnorePermissionConfigValue::Boolean(b) => {
      let allow = if b {
        PermissionConfigValue::All
      } else {
        PermissionConfigValue::None
      };
      AllowDenyIgnorePermissionConfig {
        allow: Some(allow),
        deny: None,
        ignore: None,
      }
    }
    AllowDenyIgnorePermissionConfigValue::AllowList(list) => {
      // an empty allow list means nothing is allowed
      let allow = if list.is_empty() {
        PermissionConfigValue::None
      } else {
        PermissionConfigValue::Some(list)
      };
      AllowDenyIgnorePermissionConfig {
        allow: Some(allow),
        deny: None,
        ignore: None,
      }
    }
    AllowDenyIgnorePermissionConfigValue::Object(obj) => obj,
  })
}
/// Either a reference to a named permission set or an inline
/// permissions object.
#[derive(Clone, Debug, PartialEq, Eq, Deserialize, Hash)]
#[serde(untagged)]
pub enum PermissionNameOrObject {
  Name(String),
  Object(Box<PermissionsObject>),
}
/// A permissions object together with the URL of the config file that
/// declared it.
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub struct PermissionsObjectWithBase {
  pub base: Url,
  pub permissions: PermissionsObject,
}
/// Full permission settings for a single named permission set.
#[derive(Clone, Debug, PartialEq, Eq, Deserialize, Default, Hash)]
#[serde(default, deny_unknown_fields)]
pub struct PermissionsObject {
  #[serde(default)]
  pub all: Option<bool>,
  #[serde(default, deserialize_with = "deserialize_allow_deny_ignore")]
  pub read: AllowDenyIgnorePermissionConfig,
  #[serde(default, deserialize_with = "deserialize_allow_deny")]
  pub write: AllowDenyPermissionConfig,
  #[serde(default, deserialize_with = "deserialize_allow_deny")]
  pub import: AllowDenyPermissionConfig,
  #[serde(default, deserialize_with = "deserialize_allow_deny_ignore")]
  pub env: AllowDenyIgnorePermissionConfig,
  #[serde(default, deserialize_with = "deserialize_allow_deny")]
  pub net: AllowDenyPermissionConfig,
  #[serde(default, deserialize_with = "deserialize_allow_deny")]
  pub run: AllowDenyPermissionConfig,
  #[serde(default, deserialize_with = "deserialize_allow_deny")]
  pub ffi: AllowDenyPermissionConfig,
  #[serde(default, deserialize_with = "deserialize_allow_deny")]
  pub sys: AllowDenyPermissionConfig,
}
impl PermissionsObject {
  /// Returns true if the permissions object is empty (no permissions are set).
  pub fn is_empty(&self) -> bool {
    // every field's "nothing set" state is exactly its derived default,
    // so an empty object equals `Self::default()`
    *self == Self::default()
  }
}
/// The named permission sets declared in a config file, in declaration
/// order.
#[derive(Clone, Debug, Default)]
pub struct PermissionsConfig {
  pub sets: IndexMap<String, PermissionsObjectWithBase>,
}
impl PermissionsConfig {
  /// Parses the raw `permissions` JSON value into named permission sets,
  /// attaching `base` (the config file URL) to each set.
  pub fn parse(
    value: serde_json::Value,
    base: &Url,
  ) -> Result<Self, serde_json::Error> {
    let sets: IndexMap<String, PermissionsObject> =
      serde_json::from_value(value)?;
    Ok(Self {
      sets: sets
        .into_iter()
        .map(|(k, permissions)| {
          (
            k,
            PermissionsObjectWithBase {
              base: base.clone(),
              permissions,
            },
          )
        })
        .collect(),
    })
  }
  /// Looks up a named permission set, erroring when it is not defined.
  pub fn get(
    &self,
    name: &str,
  ) -> Result<&PermissionsObjectWithBase, UndefinedPermissionError> {
    match self.sets.get(name) {
      Some(value) => Ok(value),
      None => Err(UndefinedPermissionError(name.to_string())),
    }
  }
  /// Merges a workspace member's permission sets over this (root) config's.
  pub fn merge(self, member: Self) -> Self {
    let mut sets = self.sets;
    for (key, value) in member.sets {
      // When the same key exists in the root and the member, we overwrite
      // with the member instead of merging because we don't want someone looking
      // at a member config file and not realizing the permissions are extended
      // in the root. In the future, we may add an explicit "extends" concept in
      // permissions in order to support this scenario.
      sets.insert(key, value);
    }
    Self { sets }
  }
}
// Deserialization tests for the permission config shapes above.
#[cfg(test)]
mod test {
  use pretty_assertions::assert_eq;
  use serde_json::json;
  use super::*;
  #[test]
  fn deserialize() {
    // booleans expand to All / None
    assert_eq!(
      serde_json::from_value::<PermissionsObject>(json!({
        "all": true,
        "read": true,
        "write": true,
        "import": true,
        "env": true,
        "net": true,
        "run": true,
        "ffi": true,
        "sys": false,
      }))
      .unwrap(),
      PermissionsObject {
        all: Some(true),
        read: AllowDenyIgnorePermissionConfig {
          allow: Some(PermissionConfigValue::All),
          deny: None,
          ignore: None,
        },
        write: AllowDenyPermissionConfig {
          allow: Some(PermissionConfigValue::All),
          deny: None,
        },
        import: AllowDenyPermissionConfig {
          allow: Some(PermissionConfigValue::All),
          deny: None,
        },
        env: AllowDenyIgnorePermissionConfig {
          allow: Some(PermissionConfigValue::All),
          deny: None,
          ignore: None,
        },
        net: AllowDenyPermissionConfig {
          allow: Some(PermissionConfigValue::All),
          deny: None,
        },
        run: AllowDenyPermissionConfig {
          allow: Some(PermissionConfigValue::All),
          deny: None,
        },
        ffi: AllowDenyPermissionConfig {
          allow: Some(PermissionConfigValue::All),
          deny: None,
        },
        sys: AllowDenyPermissionConfig {
          allow: Some(PermissionConfigValue::None),
          deny: None,
        }
      }
    );
    // non-empty arrays become explicit allow lists
    assert_eq!(
      serde_json::from_value::<PermissionsObject>(json!({
        "read": ["test"],
        "write": ["test"],
        "import": ["test"],
        "env": ["test"],
        "net": ["test"],
        "run": ["test"],
        "ffi": ["test"],
        "sys": ["test"],
      }))
      .unwrap(),
      PermissionsObject {
        all: None,
        read: AllowDenyIgnorePermissionConfig {
          allow: Some(PermissionConfigValue::Some(vec!["test".to_string()])),
          deny: None,
          ignore: None,
        },
        write: AllowDenyPermissionConfig {
          allow: Some(PermissionConfigValue::Some(vec!["test".to_string()])),
          deny: None,
        },
        import: AllowDenyPermissionConfig {
          allow: Some(PermissionConfigValue::Some(vec!["test".to_string()])),
          deny: None,
        },
        env: AllowDenyIgnorePermissionConfig {
          allow: Some(PermissionConfigValue::Some(vec!["test".to_string()])),
          deny: None,
          ignore: None,
        },
        net: AllowDenyPermissionConfig {
          allow: Some(PermissionConfigValue::Some(vec!["test".to_string()])),
          deny: None,
        },
        run: AllowDenyPermissionConfig {
          allow: Some(PermissionConfigValue::Some(vec!["test".to_string()])),
          deny: None,
        },
        ffi: AllowDenyPermissionConfig {
          allow: Some(PermissionConfigValue::Some(vec!["test".to_string()])),
          deny: None,
        },
        sys: AllowDenyPermissionConfig {
          allow: Some(PermissionConfigValue::Some(vec!["test".to_string()])),
          deny: None,
        }
      }
    );
    // object form supports deny/ignore; empty arrays normalize to None
    assert_eq!(
      serde_json::from_value::<PermissionsObject>(json!({
        "read": {
          "allow": ["test"],
          "deny": ["test-deny"],
          "ignore": ["test-ignore"],
        },
        "write": [],
        "sys": {
          "allow": []
        }
      }))
      .unwrap(),
      PermissionsObject {
        all: None,
        read: AllowDenyIgnorePermissionConfig {
          allow: Some(PermissionConfigValue::Some(vec!["test".to_string()])),
          deny: Some(PermissionConfigValue::Some(vec![
            "test-deny".to_string()
          ])),
          ignore: Some(PermissionConfigValue::Some(vec![
            "test-ignore".to_string()
          ]))
        },
        write: AllowDenyPermissionConfig {
          allow: Some(PermissionConfigValue::None),
          deny: None
        },
        sys: AllowDenyPermissionConfig {
          allow: Some(PermissionConfigValue::None),
          deny: None
        },
        ..Default::default()
      }
    );
    // boolean `allow` inside the object form
    assert_eq!(
      serde_json::from_value::<PermissionsObject>(json!({
        "read": {
          "allow": true,
          "deny": ["test-deny"],
          "ignore": ["test-ignore"],
        },
      }))
      .unwrap(),
      PermissionsObject {
        all: None,
        read: AllowDenyIgnorePermissionConfig {
          allow: Some(PermissionConfigValue::All),
          deny: Some(PermissionConfigValue::Some(vec![
            "test-deny".to_string()
          ])),
          ignore: Some(PermissionConfigValue::Some(vec![
            "test-ignore".to_string()
          ])),
        },
        ..Default::default()
      }
    );
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/config/deno_json/ts.rs | libs/config/deno_json/ts.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use serde::Deserialize;
use serde::Serialize;
use serde::Serializer;
use serde_json::Value;
/// The JSX-related compiler options as they appear in a raw tsconfig.
#[derive(Debug, Default, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct RawJsxCompilerOptions {
  pub jsx: Option<String>,
  pub jsx_import_source: Option<String>,
  pub jsx_import_source_types: Option<String>,
}
/// The transpile options that are significant out of a user provided tsconfig
/// file, that we want to deserialize out of the final config for a transpile.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct EmitConfigOptions {
  pub check_js: bool,
  pub experimental_decorators: bool,
  pub emit_decorator_metadata: bool,
  pub imports_not_used_as_values: String,
  pub inline_source_map: bool,
  pub inline_sources: bool,
  pub source_map: bool,
  pub jsx: String,
  pub jsx_factory: String,
  pub jsx_fragment_factory: String,
  pub jsx_import_source: Option<String>,
  pub jsx_precompile_skip_elements: Option<Vec<String>>,
}
/// A structure for managing the configuration of TypeScript
/// (a thin wrapper around a raw JSON value).
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct CompilerOptions(pub Value);
impl Default for CompilerOptions {
fn default() -> Self {
Self(serde_json::Value::Object(Default::default()))
}
}
impl CompilerOptions {
  /// Create a new `CompilerOptions` with the base being the `value` supplied.
  pub fn new(value: Value) -> Self {
    CompilerOptions(value)
  }
  /// Merges another `CompilerOptions` into this one; on conflicting keys
  /// the incoming value wins (see `json_merge`).
  pub fn merge_mut(&mut self, value: CompilerOptions) {
    json_merge(&mut self.0, value.0);
  }
  /// Merge a serde_json value into the configuration.
  pub fn merge_object_mut(
    &mut self,
    value: serde_json::Map<String, serde_json::Value>,
  ) {
    json_merge(&mut self.0, serde_json::Value::Object(value));
  }
}
impl Serialize for CompilerOptions {
  /// Serializes the wrapped JSON value directly (its object keys are
  /// ordered by the underlying map).
  fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
  where
    S: Serializer,
  {
    self.0.serialize(serializer)
  }
}
/// Deep-merges `b` into `a`: when both sides are objects the keys are
/// merged recursively; any other pairing overwrites the destination.
fn json_merge(a: &mut Value, b: Value) {
  if let Value::Object(b_map) = b {
    if let Value::Object(a_map) = a {
      for (key, value) in b_map {
        json_merge(a_map.entry(key).or_insert(Value::Null), value);
      }
      return;
    }
    *a = Value::Object(b_map);
  } else {
    *a = b;
  }
}
#[cfg(test)]
mod tests {
  use serde_json::json;
  use super::*;
  #[test]
  fn test_json_merge() {
    // `b` overwrites shared scalar keys while keys unique to `a` remain
    let mut value_a = json!({
      "a": true,
      "b": "c"
    });
    let value_b = json!({
      "b": "d",
      "e": false,
    });
    json_merge(&mut value_a, value_b);
    assert_eq!(
      value_a,
      json!({
        "a": true,
        "b": "d",
        "e": false,
      })
    );
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/config/deno_json/mod.rs | libs/config/deno_json/mod.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::collections::BTreeMap;
use std::collections::HashSet;
use std::path::Path;
use std::path::PathBuf;
use boxed_error::Boxed;
use deno_error::JsError;
use deno_path_util::url_from_file_path;
use deno_path_util::url_parent;
use deno_path_util::url_to_file_path;
use deno_semver::jsr::JsrDepPackageReq;
use import_map::ImportMapWithDiagnostics;
use indexmap::IndexMap;
use jsonc_parser::ParseResult;
use serde::Deserialize;
use serde::Deserializer;
use serde::Serialize;
use serde::de;
use serde::de::Unexpected;
use serde::de::Visitor;
use serde::ser::Error;
use serde_json::Value;
use serde_json::json;
use sys_traits::FsRead;
use thiserror::Error;
use url::Url;
use crate::UrlToFilePathError;
use crate::glob::FilePatterns;
use crate::glob::PathOrPatternSet;
use crate::import_map::imports_values;
use crate::import_map::scope_values;
use crate::import_map::value_to_dep_req;
use crate::import_map::values_to_set;
use crate::util::is_skippable_io_error;
mod permissions;
mod ts;
pub use permissions::AllowDenyIgnorePermissionConfig;
pub use permissions::AllowDenyIgnorePermissionConfigValue;
pub use permissions::AllowDenyPermissionConfig;
pub use permissions::AllowDenyPermissionConfigValue;
pub use permissions::PermissionConfigValue;
pub use permissions::PermissionNameOrObject;
pub use permissions::PermissionsConfig;
pub use permissions::PermissionsObject;
pub use permissions::PermissionsObjectWithBase;
pub use ts::CompilerOptions;
pub use ts::EmitConfigOptions;
pub use ts::RawJsxCompilerOptions;
/// Which npm packages' lifecycle scripts are allowed to run: all of them
/// or an explicit list.
#[derive(Clone, Debug, Hash, PartialEq)]
pub enum AllowScriptsValueConfig {
  All,
  Limited(Vec<JsrDepPackageReq>),
}
impl Default for AllowScriptsValueConfig {
fn default() -> Self {
Self::Limited(Vec::new())
}
}
impl<'de> Deserialize<'de> for AllowScriptsValueConfig {
  /// Accepts a boolean (`true` => all, `false` => none) or an array of
  /// package requirement strings.
  fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
  where
    D: Deserializer<'de>,
  {
    struct ApprovedScriptsValueConfigVisitor;
    impl<'de> Visitor<'de> for ApprovedScriptsValueConfigVisitor {
      type Value = AllowScriptsValueConfig;
      fn expecting(
        &self,
        formatter: &mut std::fmt::Formatter,
      ) -> std::fmt::Result {
        formatter.write_str("a boolean or an array of strings")
      }
      fn visit_bool<E>(self, v: bool) -> Result<Self::Value, E>
      where
        E: de::Error,
      {
        // `false` is represented as an empty allow list
        if v {
          Ok(AllowScriptsValueConfig::All)
        } else {
          Ok(AllowScriptsValueConfig::Limited(Vec::new()))
        }
      }
      fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
      where
        A: de::SeqAccess<'de>,
      {
        let mut items = Vec::new();
        while let Some(item) = seq.next_element::<JsrDepPackageReq>()? {
          items.push(item);
        }
        Ok(AllowScriptsValueConfig::Limited(items))
      }
    }
    deserializer.deserialize_any(ApprovedScriptsValueConfigVisitor)
  }
}
/// Allow/deny configuration for running npm lifecycle scripts.
#[derive(Clone, Debug, Default, Deserialize, Hash, PartialEq)]
#[serde(default, deny_unknown_fields)]
pub struct AllowScriptsConfig {
  #[serde(default)]
  pub allow: AllowScriptsValueConfig,
  #[serde(default)]
  pub deny: Vec<JsrDepPackageReq>,
}
/// Lint rule selection: tag groups plus explicit include/exclude rule
/// lists.
#[derive(Clone, Debug, Default, Deserialize, Hash, PartialEq)]
#[serde(default, deny_unknown_fields)]
pub struct LintRulesConfig {
  pub tags: Option<Vec<String>>,
  pub include: Option<Vec<String>>,
  pub exclude: Option<Vec<String>>,
}
/// Raised when a named permission set is referenced but not declared.
#[derive(Debug, JsError, Error)]
#[class(generic)]
#[error("Could not find permission set '{0}' in deno.json")]
pub struct UndefinedPermissionError(String);
/// Boxed error wrapper for the `into_resolved` config conversions.
#[derive(Debug, JsError, Boxed)]
pub struct IntoResolvedError(pub Box<IntoResolvedErrorKind>);
/// Failure modes when resolving serialized config values into absolute
/// file patterns and permissions.
#[derive(Debug, Error, JsError)]
pub enum IntoResolvedErrorKind {
  #[class(uri)]
  #[error(transparent)]
  UrlParse(#[from] url::ParseError),
  #[class(inherit)]
  #[error(transparent)]
  UrlToFilePath(#[from] UrlToFilePathError),
  #[class(inherit)]
  #[error("Invalid include: {0}")]
  InvalidInclude(crate::glob::PathOrPatternParseError),
  #[class(inherit)]
  #[error("Invalid exclude: {0}")]
  InvalidExclude(crate::glob::FromExcludeRelativePathOrPatternsError),
  #[class(inherit)]
  #[error(transparent)]
  UndefinedPermission(#[from] UndefinedPermissionError),
}
#[derive(Debug, Error, JsError)]
#[class(generic)]
#[error("Failed deserilaizing \"compilerOptions\".\"types\" in {}", self.specifier)]
pub struct CompilerOptionTypesDeserializeError {
specifier: Url,
#[source]
source: serde_json::Error,
}
/// Raw (relative, unexpanded) include/exclude file patterns as they
/// appear in the config file.
#[derive(Clone, Debug, Default, Deserialize, PartialEq)]
#[serde(default, deny_unknown_fields)]
struct SerializedFilesConfig {
  pub include: Option<Vec<String>>,
  pub exclude: Vec<String>,
}
impl SerializedFilesConfig {
  /// Converts the relative include/exclude patterns into absolute
  /// [`FilePatterns`] rooted at the config file's directory.
  pub fn into_resolved(
    self,
    config_file_specifier: &Url,
  ) -> Result<FilePatterns, IntoResolvedError> {
    let config_dir = url_to_file_path(&url_parent(config_file_specifier))?;
    Ok(FilePatterns {
      base: config_dir.clone(),
      include: match self.include {
        Some(i) => Some(
          PathOrPatternSet::from_include_relative_path_or_patterns(
            &config_dir,
            &i,
          )
          .map_err(IntoResolvedErrorKind::InvalidInclude)?,
        ),
        None => None,
      },
      exclude: PathOrPatternSet::from_exclude_relative_path_or_patterns(
        &config_dir,
        &self.exclude,
      )
      .map_err(IntoResolvedErrorKind::InvalidExclude)?,
    })
  }
}
/// `lint` config representation for serde
///
/// fields `include` and `exclude` are expanded from [SerializedFilesConfig].
#[derive(Clone, Debug, Default, Deserialize, PartialEq)]
#[serde(default, deny_unknown_fields)]
struct SerializedLintConfig {
  pub rules: LintRulesConfig,
  pub include: Option<Vec<String>>,
  pub exclude: Vec<String>,
  // kept only so a removed-in-Deno-2 key can be detected and warned about
  #[serde(rename = "files")]
  pub deprecated_files: serde_json::Value,
  pub report: Option<String>,
  pub plugins: Vec<String>,
}
impl SerializedLintConfig {
  /// Resolves the serialized lint config against the config file location,
  /// warning when the removed "files" key is present.
  pub fn into_resolved(
    self,
    config_file_specifier: &Url,
  ) -> Result<LintConfig, IntoResolvedError> {
    let (include, exclude) = (self.include, self.exclude);
    let files = SerializedFilesConfig { include, exclude };
    if !self.deprecated_files.is_null() {
      log::warn!(
        "Warning: \"files\" configuration in \"lint\" was removed in Deno 2, use \"include\" and \"exclude\" instead."
      );
    }
    Ok(LintConfig {
      options: LintOptionsConfig {
        rules: self.rules,
        // each plugin specifier keeps the config file URL as its base
        plugins: self
          .plugins
          .into_iter()
          .map(|specifier| LintPluginConfig {
            specifier,
            base: config_file_specifier.clone(),
          })
          .collect(),
      },
      files: files.into_resolved(config_file_specifier)?,
    })
  }
}
/// A lint plugin specifier plus the config file URL it was declared in.
#[derive(Clone, Debug, Hash, PartialEq)]
pub struct LintPluginConfig {
  pub specifier: String,
  pub base: Url,
}
/// Resolved lint options: rule selection and plugins.
#[derive(Clone, Debug, Default, Hash, PartialEq)]
pub struct LintOptionsConfig {
  pub rules: LintRulesConfig,
  pub plugins: Vec<LintPluginConfig>,
}
/// Fully-resolved lint configuration: options plus the file patterns the
/// linter should run over.
#[derive(Clone, Debug, Hash, PartialEq)]
pub struct LintConfig {
  pub options: LintOptionsConfig,
  pub files: FilePatterns,
}
impl LintConfig {
pub fn new_with_base(base: PathBuf) -> Self {
// note: don't create Default implementations of these
// config structs because the base of FilePatterns matters
Self {
options: Default::default(),
files: FilePatterns::new_with_base(base),
}
}
}
/// How markdown prose should be wrapped by the formatter.
#[derive(Clone, Copy, Debug, Serialize, Deserialize, Hash, PartialEq)]
#[serde(deny_unknown_fields, rename_all = "camelCase")]
pub enum ProseWrap {
  Always,
  Never,
  Preserve,
}
/// When object literal properties should be quoted.
#[derive(Clone, Copy, Debug, Serialize, Deserialize, Hash, PartialEq)]
#[serde(deny_unknown_fields, rename_all = "camelCase")]
pub enum QuoteProps {
  AsNeeded,
  Consistent,
  Preserve,
}
/// Which line ending the formatter should emit.
#[derive(Clone, Copy, Debug, Serialize, Deserialize, Hash, PartialEq)]
#[serde(deny_unknown_fields, rename_all = "camelCase")]
pub enum NewLineKind {
  Auto,
  #[serde(rename = "lf")]
  LineFeed,
  #[serde(rename = "crlf")]
  CarriageReturnLineFeed,
  System,
}
/// Whether to use braces around single-statement bodies.
#[derive(Clone, Copy, Debug, Serialize, Deserialize, Hash, PartialEq)]
#[serde(deny_unknown_fields, rename_all = "camelCase")]
pub enum UseBraces {
  Maintain,
  WhenNotSingleLine,
  Always,
  PreferNone,
}
/// Where an opening brace should be placed.
#[derive(Clone, Copy, Debug, Serialize, Deserialize, Hash, PartialEq)]
#[serde(deny_unknown_fields, rename_all = "camelCase")]
pub enum BracePosition {
  Maintain,
  SameLine,
  NextLine,
  SameLineUnlessHanging,
}
/// Where a single-statement body should be placed.
#[derive(Clone, Copy, Debug, Serialize, Deserialize, Hash, PartialEq)]
#[serde(deny_unknown_fields, rename_all = "camelCase")]
pub enum SingleBodyPosition {
  Maintain,
  SameLine,
  NextLine,
}
/// Where `else`/`catch`-style continuations should be placed.
#[derive(Clone, Copy, Debug, Serialize, Deserialize, Hash, PartialEq)]
#[serde(deny_unknown_fields, rename_all = "camelCase")]
pub enum NextControlFlowPosition {
  Maintain,
  SameLine,
  NextLine,
}
/// When trailing commas should be emitted.
#[derive(Clone, Copy, Debug, Serialize, Deserialize, Hash, PartialEq)]
#[serde(deny_unknown_fields, rename_all = "camelCase")]
pub enum TrailingCommas {
  Always,
  Never,
  OnlyMultiLine,
}
/// Where a binary operator should go when an expression wraps.
#[derive(Clone, Copy, Debug, Serialize, Deserialize, Hash, PartialEq)]
#[serde(deny_unknown_fields, rename_all = "camelCase")]
pub enum OperatorPosition {
  Maintain,
  SameLine,
  NextLine,
}
/// Where a bracket should be placed.
#[derive(Clone, Copy, Debug, Serialize, Deserialize, Hash, PartialEq)]
#[serde(deny_unknown_fields, rename_all = "camelCase")]
pub enum BracketPosition {
  Maintain,
  SameLine,
  NextLine,
}
/// Whether multi-line expressions should be wrapped in parentheses.
#[derive(Clone, Copy, Debug, Serialize, Deserialize, Hash, PartialEq)]
#[serde(deny_unknown_fields, rename_all = "camelCase")]
pub enum MultiLineParens {
  Never,
  Prefer,
  Always,
}
/// Which separator to use between type literal members.
#[derive(Clone, Copy, Debug, Serialize, Deserialize, Hash, PartialEq)]
#[serde(deny_unknown_fields, rename_all = "camelCase")]
pub enum SeparatorKind {
  SemiColon,
  Comma,
}
/// All formatter options; every field is optional so that unset options
/// fall through to the formatter's defaults.
#[derive(Clone, Debug, Default, Serialize, Deserialize, Hash, PartialEq)]
#[serde(default, deny_unknown_fields, rename_all = "camelCase")]
pub struct FmtOptionsConfig {
  pub use_tabs: Option<bool>,
  pub line_width: Option<u32>,
  pub indent_width: Option<u8>,
  pub single_quote: Option<bool>,
  pub prose_wrap: Option<ProseWrap>,
  pub semi_colons: Option<bool>,
  pub quote_props: Option<QuoteProps>,
  pub new_line_kind: Option<NewLineKind>,
  pub use_braces: Option<UseBraces>,
  pub brace_position: Option<BracePosition>,
  pub single_body_position: Option<SingleBodyPosition>,
  pub next_control_flow_position: Option<NextControlFlowPosition>,
  pub trailing_commas: Option<TrailingCommas>,
  pub operator_position: Option<OperatorPosition>,
  pub jsx_bracket_position: Option<BracketPosition>,
  pub jsx_force_new_lines_surrounding_content: Option<bool>,
  pub jsx_multi_line_parens: Option<MultiLineParens>,
  pub type_literal_separator_kind: Option<SeparatorKind>,
  pub space_around: Option<bool>,
  pub space_surrounding_properties: Option<bool>,
}
impl FmtOptionsConfig {
pub fn is_empty(&self) -> bool {
self.use_tabs.is_none()
&& self.line_width.is_none()
&& self.indent_width.is_none()
&& self.single_quote.is_none()
&& self.prose_wrap.is_none()
&& self.semi_colons.is_none()
&& self.quote_props.is_none()
&& self.new_line_kind.is_none()
&& self.use_braces.is_none()
&& self.brace_position.is_none()
&& self.single_body_position.is_none()
&& self.next_control_flow_position.is_none()
&& self.trailing_commas.is_none()
&& self.operator_position.is_none()
&& self.jsx_bracket_position.is_none()
&& self.jsx_force_new_lines_surrounding_content.is_none()
&& self.jsx_multi_line_parens.is_none()
&& self.type_literal_separator_kind.is_none()
&& self.space_around.is_none()
&& self.space_surrounding_properties.is_none()
}
}
/// Choose between flat and nested fmt options.
///
/// `options` has precedence over `deprecated_options`.
/// When `deprecated_options` is present, a warning is logged.
///
/// Caveat: due to default values, it's not possible to distinguish between
/// an empty configuration and a configuration with default values.
/// `{ "fmt": {} }` is equivalent to `{ "fmt": { "options": {} } }`
/// and it wouldn't be able to emit a warning for
/// `{ "fmt": { "options": {}, "semiColons": "false" } }`.
///
/// # Arguments
///
/// * `options` - Flat options.
/// * `deprecated_options` - Nested files configuration ("option").
fn choose_fmt_options(
  options: FmtOptionsConfig,
  deprecated_options: FmtOptionsConfig,
) -> FmtOptionsConfig {
  const DEPRECATED_OPTIONS: &str =
    "Warning: \"options\" configuration is deprecated";
  const FLAT_OPTION: &str = "\"flat\" options";
  if !options.is_empty() {
    // flat options win; warn if the deprecated form was also provided
    if !deprecated_options.is_empty() {
      log::warn!("{DEPRECATED_OPTIONS} and ignored by {FLAT_OPTION}.");
    }
    options
  } else if !deprecated_options.is_empty() {
    log::warn!("{DEPRECATED_OPTIONS}. Please use {FLAT_OPTION} instead.");
    deprecated_options
  } else {
    // neither was provided (or both were empty)
    FmtOptionsConfig::default()
  }
}
/// `fmt` config representation for serde
///
/// fields from `use_tabs`..`semi_colons` are expanded from [FmtOptionsConfig].
/// fields `include` and `exclude` are expanded from [SerializedFilesConfig].
#[derive(Clone, Debug, Default, Deserialize, PartialEq)]
#[serde(default, deny_unknown_fields, rename_all = "camelCase")]
struct SerializedFmtConfig {
pub use_tabs: Option<bool>,
pub line_width: Option<u32>,
pub indent_width: Option<u8>,
pub single_quote: Option<bool>,
pub prose_wrap: Option<ProseWrap>,
pub semi_colons: Option<bool>,
pub quote_props: Option<QuoteProps>,
pub new_line_kind: Option<NewLineKind>,
pub use_braces: Option<UseBraces>,
pub brace_position: Option<BracePosition>,
pub single_body_position: Option<SingleBodyPosition>,
pub next_control_flow_position: Option<NextControlFlowPosition>,
pub trailing_commas: Option<TrailingCommas>,
pub operator_position: Option<OperatorPosition>,
#[serde(rename = "jsx.bracketPosition")]
pub jsx_bracket_position: Option<BracketPosition>,
#[serde(rename = "jsx.forceNewLinesSurroundingContent")]
pub jsx_force_new_lines_surrounding_content: Option<bool>,
#[serde(rename = "jsx.multiLineParens")]
pub jsx_multi_line_parens: Option<MultiLineParens>,
#[serde(rename = "typeLiteral.separatorKind")]
pub type_literal_separator_kind: Option<SeparatorKind>,
pub space_around: Option<bool>,
pub space_surrounding_properties: Option<bool>,
#[serde(rename = "options")]
pub deprecated_options: FmtOptionsConfig,
pub include: Option<Vec<String>>,
pub exclude: Vec<String>,
#[serde(rename = "files")]
pub deprecated_files: serde_json::Value,
}
impl SerializedFmtConfig {
pub fn into_resolved(
self,
config_file_specifier: &Url,
) -> Result<FmtConfig, IntoResolvedError> {
let (include, exclude) = (self.include, self.exclude);
let files = SerializedFilesConfig { include, exclude };
let options = FmtOptionsConfig {
use_tabs: self.use_tabs,
line_width: self.line_width,
indent_width: self.indent_width,
single_quote: self.single_quote,
prose_wrap: self.prose_wrap,
semi_colons: self.semi_colons,
quote_props: self.quote_props,
new_line_kind: self.new_line_kind,
use_braces: self.use_braces,
brace_position: self.brace_position,
single_body_position: self.single_body_position,
next_control_flow_position: self.next_control_flow_position,
trailing_commas: self.trailing_commas,
operator_position: self.operator_position,
jsx_bracket_position: self.jsx_bracket_position,
jsx_force_new_lines_surrounding_content: self
.jsx_force_new_lines_surrounding_content,
jsx_multi_line_parens: self.jsx_multi_line_parens,
type_literal_separator_kind: self.type_literal_separator_kind,
space_around: self.space_around,
space_surrounding_properties: self.space_surrounding_properties,
};
if !self.deprecated_files.is_null() {
log::warn!(
"Warning: \"files\" configuration in \"fmt\" was removed in Deno 2, use \"include\" and \"exclude\" instead."
);
}
Ok(FmtConfig {
options: choose_fmt_options(options, self.deprecated_options),
files: files.into_resolved(config_file_specifier)?,
})
}
}
/// Resolved `fmt` configuration: the formatting options plus the file
/// patterns they apply to.
#[derive(Clone, Debug, Hash, PartialEq)]
pub struct FmtConfig {
  pub options: FmtOptionsConfig,
  pub files: FilePatterns,
}
impl FmtConfig {
  /// Create an all-default config scoped to the given base directory.
  pub fn new_with_base(base: PathBuf) -> Self {
    let files = FilePatterns::new_with_base(base);
    Self {
      options: Default::default(),
      files,
    }
  }
}
/// The `exports` map of a config file along with the base url used to
/// resolve the relative export paths.
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct ExportsConfig {
  base: Url,
  map: IndexMap<String, String>,
}
impl ExportsConfig {
  /// Consume the config, yielding the raw export-name -> path map.
  pub fn into_map(self) -> IndexMap<String, String> {
    self.map
  }
  /// Look up the unresolved path for an export name.
  pub fn get(&self, export_name: &str) -> Option<&String> {
    self.map.get(export_name)
  }
  /// Look up an export and join its path onto the base url.
  pub fn get_resolved(
    &self,
    export_name: &str,
  ) -> Result<Option<Url>, url::ParseError> {
    self
      .get(export_name)
      .map(|name| self.base.join(name))
      .transpose()
  }
}
/// `test` config representation for serde
///
/// fields `include` and `exclude` are expanded from [SerializedFilesConfig].
#[derive(Clone, Debug, Default, Deserialize, PartialEq)]
#[serde(default, deny_unknown_fields)]
struct SerializedTestConfig {
  pub include: Option<Vec<String>>,
  pub exclude: Vec<String>,
  #[serde(rename = "files")]
  pub deprecated_files: serde_json::Value,
  pub permissions: Option<PermissionNameOrObject>,
}
impl SerializedTestConfig {
  /// Resolve into a [TestConfig], expanding file patterns and resolving a
  /// named permission set (if any) via the provided `permissions` config.
  pub fn into_resolved(
    self,
    config_file_specifier: &Url,
    permissions: &PermissionsConfig,
  ) -> Result<TestConfig, IntoResolvedError> {
    // "files" was removed in Deno 2; warn but otherwise ignore it
    if !self.deprecated_files.is_null() {
      log::warn!(
        "Warning: \"files\" configuration in \"test\" was removed in Deno 2, use \"include\" and \"exclude\" instead."
      );
    }
    let files = SerializedFilesConfig {
      include: self.include,
      exclude: self.exclude,
    }
    .into_resolved(config_file_specifier)?;
    let permissions = match self.permissions {
      None => None,
      // a name references a permission set declared elsewhere in the config
      Some(PermissionNameOrObject::Name(name)) => {
        Some(Box::new(permissions.get(&name)?.clone()))
      }
      // an inline object is anchored to this config file's specifier
      Some(PermissionNameOrObject::Object(object)) => {
        Some(Box::new(PermissionsObjectWithBase {
          base: config_file_specifier.clone(),
          permissions: *object,
        }))
      }
    };
    Ok(TestConfig { files, permissions })
  }
}
/// Resolved `test` configuration: the file patterns to test plus an
/// optional permission set.
#[derive(Clone, Debug, Hash, PartialEq)]
pub struct TestConfig {
  pub files: FilePatterns,
  pub permissions: Option<Box<PermissionsObjectWithBase>>,
}
impl TestConfig {
  /// Create a default config scoped to the given base directory.
  pub fn new_with_base(base: PathBuf) -> Self {
    Self {
      permissions: None,
      files: FilePatterns::new_with_base(base),
    }
  }
}
/// `publish` config representation for serde
///
/// fields `include` and `exclude` are expanded from [SerializedFilesConfig].
#[derive(Clone, Debug, Default, Deserialize, PartialEq)]
#[serde(default, deny_unknown_fields)]
struct SerializedPublishConfig {
  pub include: Option<Vec<String>>,
  pub exclude: Vec<String>,
}
impl SerializedPublishConfig {
  /// Resolve into a [PublishConfig], expanding the file patterns relative
  /// to `config_file_specifier`.
  pub fn into_resolved(
    self,
    config_file_specifier: &Url,
  ) -> Result<PublishConfig, IntoResolvedError> {
    let files = SerializedFilesConfig {
      include: self.include,
      exclude: self.exclude,
    }
    .into_resolved(config_file_specifier)?;
    Ok(PublishConfig { files })
  }
}
/// Resolved `publish` configuration: the file patterns to publish.
#[derive(Clone, Debug, Hash, PartialEq)]
pub struct PublishConfig {
  pub files: FilePatterns,
}
impl PublishConfig {
  /// Create a default config scoped to the given base directory.
  pub fn new_with_base(base: PathBuf) -> Self {
    let files = FilePatterns::new_with_base(base);
    Self { files }
  }
}
/// `bench` config representation for serde
///
/// fields `include` and `exclude` are expanded from [SerializedFilesConfig].
#[derive(Clone, Debug, Default, Deserialize, PartialEq)]
#[serde(default, deny_unknown_fields)]
struct SerializedBenchConfig {
  pub include: Option<Vec<String>>,
  pub exclude: Vec<String>,
  #[serde(rename = "files")]
  pub deprecated_files: serde_json::Value,
  pub permissions: Option<PermissionNameOrObject>,
}
impl SerializedBenchConfig {
  /// Resolve into a [BenchConfig], expanding file patterns and resolving a
  /// named permission set (if any) via the provided `permissions` config.
  pub fn into_resolved(
    self,
    config_file_specifier: &Url,
    permissions: &PermissionsConfig,
  ) -> Result<BenchConfig, IntoResolvedError> {
    // "files" was removed in Deno 2; warn but otherwise ignore it
    if !self.deprecated_files.is_null() {
      log::warn!(
        "Warning: \"files\" configuration in \"bench\" was removed in Deno 2, use \"include\" and \"exclude\" instead."
      );
    }
    let files = SerializedFilesConfig {
      include: self.include,
      exclude: self.exclude,
    }
    .into_resolved(config_file_specifier)?;
    let permissions = match self.permissions {
      None => None,
      // a name references a permission set declared elsewhere in the config
      Some(PermissionNameOrObject::Name(name)) => {
        Some(Box::new(permissions.get(&name)?.clone()))
      }
      // an inline object is anchored to this config file's specifier
      Some(PermissionNameOrObject::Object(object)) => {
        Some(Box::new(PermissionsObjectWithBase {
          base: config_file_specifier.clone(),
          permissions: *object,
        }))
      }
    };
    Ok(BenchConfig { files, permissions })
  }
}
/// Resolved `bench` configuration: the file patterns to benchmark plus an
/// optional permission set.
#[derive(Clone, Debug, PartialEq)]
pub struct BenchConfig {
  pub files: FilePatterns,
  pub permissions: Option<Box<PermissionsObjectWithBase>>,
}
impl BenchConfig {
  /// Create a default config scoped to the given base directory.
  pub fn new_with_base(base: PathBuf) -> Self {
    Self {
      permissions: None,
      files: FilePatterns::new_with_base(base),
    }
  }
}
/// `compile` config representation for serde
#[derive(Clone, Debug, Default, Deserialize, PartialEq)]
#[serde(default, deny_unknown_fields)]
struct SerializedCompileConfig {
  pub permissions: Option<PermissionNameOrObject>,
}
impl SerializedCompileConfig {
  /// Resolve into a [CompileConfig], resolving a named permission set
  /// (if any) via the provided `permissions` config.
  pub fn into_resolved(
    self,
    config_file_specifier: &Url,
    permissions: &PermissionsConfig,
  ) -> Result<CompileConfig, IntoResolvedError> {
    let permissions = match self.permissions {
      None => None,
      // a name references a permission set declared elsewhere in the config
      Some(PermissionNameOrObject::Name(name)) => {
        Some(Box::new(permissions.get(&name)?.clone()))
      }
      // an inline object is anchored to this config file's specifier
      Some(PermissionNameOrObject::Object(object)) => {
        Some(Box::new(PermissionsObjectWithBase {
          base: config_file_specifier.clone(),
          permissions: *object,
        }))
      }
    };
    Ok(CompileConfig { permissions })
  }
}
/// Resolved `compile` configuration: an optional permission set to embed.
#[derive(Clone, Default, Debug, PartialEq)]
pub struct CompileConfig {
  pub permissions: Option<Box<PermissionsObjectWithBase>>,
}
/// The `lock` entry of a deno.json: a boolean toggle, a bare path, or an
/// object with an optional path and `frozen` flag.
#[derive(Clone, Debug, Deserialize, PartialEq)]
#[serde(untagged)]
pub enum LockConfig {
  Bool(bool),
  PathBuf(PathBuf),
  Object {
    path: Option<PathBuf>,
    frozen: Option<bool>,
  },
}
impl LockConfig {
  /// Whether the lockfile was explicitly marked frozen
  /// (`{ "lock": { "frozen": true } }`).
  pub fn frozen(&self) -> bool {
    matches!(
      self,
      Self::Object {
        frozen: Some(true),
        ..
      }
    )
  }
}
/// Error raised when the `workspace` entry of a config file fails to
/// deserialize.
#[derive(Debug, Error, JsError)]
#[class(inherit)]
#[error("Failed to parse \"workspace\" configuration.")]
pub struct WorkspaceConfigParseError(#[source] serde_json::Error);
/// The `workspace` entry of a config file.
#[derive(Clone, Debug, Deserialize, PartialEq)]
#[serde(deny_unknown_fields)]
pub struct WorkspaceConfig {
  // member entries as written in the config file
  // (presumably relative paths/patterns — resolved elsewhere)
  pub members: Vec<String>,
}
/// Error raised when the `link` entry of a config file fails to deserialize.
#[derive(Debug, Error, JsError)]
#[class(inherit)]
#[error("Failed to parse \"link\" configuration.")]
pub struct LinkConfigParseError(#[source] serde_json::Error);
/// A single entry of the `tasks` map: an optional command string with
/// optional dependency task names and a description.
#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
pub struct TaskDefinition {
  pub command: Option<String>,
  // names of tasks that must run before this one (defaults to empty)
  #[serde(default)]
  pub dependencies: Vec<String>,
  #[serde(default)]
  pub description: Option<String>,
}
#[cfg(test)]
impl From<&str> for TaskDefinition {
  /// Test-only convenience: treat a bare string as a command-only task.
  fn from(value: &str) -> Self {
    TaskDefinition {
      command: Some(value.to_owned()),
      dependencies: Vec::new(),
      description: None,
    }
  }
}
impl TaskDefinition {
  /// Custom deserializer for the `tasks` map that accepts either a plain
  /// command string or a full task object for each entry, preserving the
  /// insertion order of the map.
  pub fn deserialize_tasks<'de, D>(
    deserializer: D,
  ) -> Result<IndexMap<String, TaskDefinition>, D::Error>
  where
    D: Deserializer<'de>,
  {
    use std::fmt;

    use serde::de::MapAccess;
    use serde::de::Visitor;

    struct TasksVisitor;

    impl<'de> Visitor<'de> for TasksVisitor {
      type Value = IndexMap<String, TaskDefinition>;

      fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        formatter.write_str("a map of task definitions")
      }

      fn visit_map<M>(self, mut access: M) -> Result<Self::Value, M::Error>
      where
        M: MapAccess<'de>,
      {
        let mut map = IndexMap::with_capacity(access.size_hint().unwrap_or(4));

        while let Some((key, value)) =
          access.next_entry::<String, serde_json::Value>()?
        {
          let task_def = match value {
            // shorthand: a bare string is the command with no deps
            serde_json::Value::String(command) => TaskDefinition {
              command: Some(command),
              dependencies: Vec::new(),
              description: None,
            },
            // longhand: delegate to the derived TaskDefinition impl
            serde_json::Value::Object(_) => {
              serde_json::from_value(value).map_err(serde::de::Error::custom)?
            }
            // anything else (number, bool, array, null) is rejected
            _ => {
              return Err(serde::de::Error::custom("invalid task definition"));
            }
          };
          map.insert(key, task_def);
        }

        Ok(map)
      }
    }

    deserializer.deserialize_map(TasksVisitor)
  }
}
/// Boxed wrapper around [ConfigFileReadErrorKind] so the error stays
/// pointer-sized in `Result`s.
#[derive(Debug, JsError, Boxed)]
pub struct ConfigFileReadError(pub Box<ConfigFileReadErrorKind>);
impl ConfigFileReadError {
  /// True when reading failed specifically because the file does not exist.
  pub fn is_not_found(&self) -> bool {
    matches!(
      self.as_kind(),
      ConfigFileReadErrorKind::FailedReading { source, .. }
        if source.kind() == std::io::ErrorKind::NotFound
    )
  }
}
/// The reasons reading/parsing a config file can fail; each variant's
/// `#[error]` string is the user-facing message.
#[derive(Debug, Error, JsError)]
pub enum ConfigFileReadErrorKind {
  #[class(type)]
  #[error("Could not convert config file path to specifier. Path: {0}")]
  PathToUrl(PathBuf),
  #[class(inherit)]
  #[error(transparent)]
  UrlToFilePathError(#[from] UrlToFilePathError),
  // I/O failure while reading the file contents
  #[class(inherit)]
  #[error("Error reading config file '{specifier}'.")]
  FailedReading {
    specifier: Url,
    #[source]
    #[inherit]
    source: std::io::Error,
  },
  // the file was read but is not valid JSON(C)
  #[class(type)]
  #[error("Unable to parse config file JSON {specifier}.")]
  Parse {
    specifier: Url,
    #[source]
    source: Box<jsonc_parser::errors::ParseError>,
  },
  // valid JSON, but not the expected shape for a config file
  #[class(inherit)]
  #[error("Failed deserializing config file '{specifier}'.")]
  Deserialize {
    specifier: Url,
    #[source]
    #[inherit]
    source: serde_json::Error,
  },
  #[class(type)]
  #[error("Config file JSON should be an object '{specifier}'.")]
  NotObject { specifier: Url },
}
/// Error raised when the `nodeModulesDir` entry fails to deserialize.
#[derive(Debug, Error, JsError)]
#[class(type)]
#[error("Unsupported \"nodeModulesDir\" value.")]
pub struct NodeModulesDirParseError {
  #[source]
  pub source: serde_json::Error,
}
/// Whether a newest-dependency-date cutoff is in effect.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum NewestDependencyDate {
  /// Use this date as the cutoff.
  Enabled(chrono::DateTime<chrono::Utc>),
  /// Disable using a minimum dependency date.
  Disabled,
}
impl NewestDependencyDate {
  /// Convert to an `Option`, where `None` means disabled.
  pub fn into_option(self) -> Option<chrono::DateTime<chrono::Utc>> {
    if let Self::Enabled(date_time) = self {
      Some(date_time)
    } else {
      None
    }
  }
}
/// Raw serde form of the `minimumDependencyAge` object before the `age`
/// value is interpreted (it may be a string or number; kept as a raw
/// `serde_json::Value` here and parsed later).
#[derive(Debug, Default, Clone, Deserialize)]
#[serde(deny_unknown_fields, rename_all = "camelCase")]
struct RawMinimumDependencyAgeConfig {
  pub age: serde_json::Value,
  #[serde(default)]
  pub exclude: Vec<String>,
}
/// Resolved `minimumDependencyAge` configuration.
#[derive(Debug, Default, Clone)]
pub struct MinimumDependencyAgeConfig {
  // `None` when no cutoff was configured
  pub age: Option<NewestDependencyDate>,
  // packages exempt from the age check
  pub exclude: Vec<String>,
}
/// The ways a `minimumDependencyAge` value can fail to parse; each
/// variant's `#[error]` string is the user-facing message.
#[derive(Debug, Error, JsError)]
#[class(type)]
pub enum MinimumDependencyAgeParseError {
  #[error("Unsupported \"minimumDependencyAge\" value.")]
  ParseDateOrDuration(
    #[from]
    #[source]
    crate::ParseDateOrDurationError,
  ),
  #[error(
    "Unsupported \"minimumDependencyAge\" value. Expected a string or number."
  )]
  ExpectedStringOrNumber,
  #[error(
    "Unsupported \"minimumDependencyAge\" value. Could not convert number to i64."
  )]
  InvalidNumber,
  #[error("Unsupported \"minimumDependencyAge\" object.")]
  UnsupportedObject(#[source] serde_json::Error),
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize)]
#[serde(rename_all = "kebab-case")]
pub enum NodeModulesDirMode {
Auto,
Manual,
None,
}
impl<'de> Deserialize<'de> for NodeModulesDirMode {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
struct NodeModulesDirModeVisitor;
impl Visitor<'_> for NodeModulesDirModeVisitor {
type Value = NodeModulesDirMode;
fn expecting(
&self,
formatter: &mut std::fmt::Formatter,
) -> std::fmt::Result {
formatter.write_str(r#""auto", "manual", or "none""#)
}
fn visit_str<E>(self, value: &str) -> Result<NodeModulesDirMode, E>
where
E: de::Error,
{
match value {
"auto" => Ok(NodeModulesDirMode::Auto),
"manual" => Ok(NodeModulesDirMode::Manual),
"none" => Ok(NodeModulesDirMode::None),
_ => Err(de::Error::invalid_value(Unexpected::Str(value), &self)),
}
}
fn visit_bool<E>(self, value: bool) -> Result<NodeModulesDirMode, E>
where
E: de::Error,
{
if value {
Ok(NodeModulesDirMode::Auto)
} else {
Ok(NodeModulesDirMode::None)
}
}
}
deserializer.deserialize_any(NodeModulesDirModeVisitor)
}
}
impl std::fmt::Display for NodeModulesDirMode {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.as_str())
}
}
impl NodeModulesDirMode {
pub fn as_str(self) -> &'static str {
match self {
NodeModulesDirMode::Auto => "auto",
NodeModulesDirMode::Manual => "manual",
NodeModulesDirMode::None => "none",
}
}
pub fn uses_node_modules_dir(self) -> bool {
matches!(self, Self::Manual | Self::Auto)
}
}
/// The `deploy` entry of a config file: an organization and optional app.
#[derive(Clone, Debug, Deserialize, PartialEq)]
#[serde(deny_unknown_fields)]
pub struct DeployConfig {
  pub org: String,
  pub app: Option<String>,
}
/// The raw top-level shape of a deno.json file as deserialized by serde,
/// before any per-section resolution. Each section is kept as an untyped
/// `Value` and interpreted lazily by the corresponding resolver.
#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ConfigFileJson {
  pub compiler_options: Option<Value>,
  pub import_map: Option<String>,
  pub imports: Option<Value>,
  pub scopes: Option<Value>,
  pub lint: Option<Value>,
  pub fmt: Option<Value>,
  pub tasks: Option<Value>,
  pub test: Option<Value>,
  pub bench: Option<Value>,
  pub compile: Option<Value>,
  pub lock: Option<Value>,
  pub exclude: Option<Value>,
  pub minimum_dependency_age: Option<Value>,
  pub node_modules_dir: Option<Value>,
  pub vendor: Option<bool>,
  pub license: Option<Value>,
  pub permissions: Option<Value>,
  pub publish: Option<Value>,
  pub deploy: Option<Value>,
  pub allow_scripts: Option<Value>,
  pub name: Option<String>,
  pub version: Option<String>,
  pub workspace: Option<Value>,
  pub links: Option<Value>,
  // old name for "links"
  #[serde(rename = "patch")]
  pub(crate) deprecated_patch: Option<Value>,
  // old name for "workspace"
  #[serde(rename = "workspaces")]
  pub(crate) deprecated_workspaces: Option<Vec<String>>,
  pub exports: Option<Value>,
  #[serde(default)]
  pub unstable: Vec<String>,
}
/// Cache for parsed deno.json files, keyed by file path, so a config is
/// only read and parsed once per process.
pub trait DenoJsonCache {
  fn get(&self, path: &Path) -> Option<ConfigFileRc>;
  fn set(&self, path: PathBuf, deno_json: ConfigFileRc);
}
/// Errors produced while interpreting a parsed config file; each variant's
/// `#[error]` string is the user-facing message.
#[derive(Debug, Error, JsError)]
pub enum ConfigFileError {
  #[class(type)]
  #[error(
    "Only file: specifiers are supported for security reasons in import maps stored in a deno.json. To use a remote import map, use the --import-map flag and \"deno.importMap\" in the language server config"
  )]
  OnlyFileSpecifiersSupported,
  #[class(inherit)]
  #[error(transparent)]
  UrlToFilePath(#[from] UrlToFilePathError),
  #[class(inherit)]
  #[error(transparent)]
  UrlParse(#[from] url::ParseError),
  #[class(inherit)]
  #[error(transparent)]
  SerdeJson(#[from] serde_json::Error),
  #[class(inherit)]
  #[error(transparent)]
  ImportMap(#[from] import_map::ImportMapError),
  // note: no #[from] here — callers wrap io errors explicitly
  #[class(inherit)]
  #[error(transparent)]
  Io(std::io::Error),
}
#[derive(Debug, Error, JsError)]
pub enum ConfigFileExportsError {
#[class(type)]
#[error("The {0} must not be empty. Use '.' if you meant the root export.")]
KeyMustNotBeEmpty(Cow<'static, str>),
#[class(type)]
#[error("The {key} must start with './'. Did you mean '{suggestion}'?")]
KeyMustStartWithDotSlash {
key: Cow<'static, str>,
suggestion: String,
},
#[class(type)]
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | true |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/config/workspace/discovery.rs | libs/config/workspace/discovery.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::collections::BTreeMap;
use std::collections::HashMap;
use std::collections::HashSet;
use std::path::Path;
use std::path::PathBuf;
use deno_maybe_sync::new_rc;
use deno_package_json::PackageJson;
use deno_package_json::PackageJsonLoadError;
use deno_package_json::PackageJsonRc;
use deno_path_util::url_from_directory_path;
use deno_path_util::url_from_file_path;
use deno_path_util::url_parent;
use deno_path_util::url_to_file_path;
use indexmap::IndexSet;
use sys_traits::FsMetadata;
use sys_traits::FsRead;
use sys_traits::FsReadDir;
use url::Url;
use super::ResolveWorkspaceLinkError;
use super::ResolveWorkspaceLinkErrorKind;
use super::ResolveWorkspaceMemberError;
use super::ResolveWorkspaceMemberErrorKind;
use super::UrlRc;
use super::VendorEnablement;
use super::WorkspaceDiscoverError;
use super::WorkspaceDiscoverErrorKind;
use super::WorkspaceDiscoverOptions;
use super::WorkspaceDiscoverStart;
use super::WorkspaceRc;
use crate::deno_json::ConfigFile;
use crate::deno_json::ConfigFileRc;
use crate::glob::FileCollector;
use crate::glob::FilePatterns;
use crate::glob::PathOrPattern;
use crate::glob::PathOrPatternSet;
use crate::glob::is_glob_pattern;
use crate::util::is_skippable_io_error;
use crate::workspace::ConfigReadError;
use crate::workspace::Workspace;
/// A discovered configuration file: either a deno.json or a package.json.
#[derive(Debug)]
pub enum DenoOrPkgJson {
  Deno(ConfigFileRc),
  PkgJson(PackageJsonRc),
}
impl DenoOrPkgJson {
  /// The url of the underlying configuration file.
  pub fn specifier(&self) -> Cow<'_, Url> {
    match self {
      DenoOrPkgJson::Deno(config_file) => {
        Cow::Borrowed(&config_file.specifier)
      }
      DenoOrPkgJson::PkgJson(pkg_json) => Cow::Owned(pkg_json.specifier()),
    }
  }
}
/// The configuration file(s) found in a single directory: a lone deno.json
/// or package.json, or both side by side.
#[derive(Debug)]
pub enum ConfigFolder {
  Single(DenoOrPkgJson),
  Both {
    deno_json: ConfigFileRc,
    pkg_json: PackageJsonRc,
  },
}
impl ConfigFolder {
  /// Url of the directory the config file(s) live in.
  pub fn folder_url(&self) -> Url {
    match self {
      Self::Both { deno_json, .. } => url_parent(&deno_json.specifier),
      Self::Single(DenoOrPkgJson::Deno(deno_json)) => {
        url_parent(&deno_json.specifier)
      }
      Self::Single(DenoOrPkgJson::PkgJson(pkg_json)) => {
        url_from_directory_path(pkg_json.path.parent().unwrap()).unwrap()
      }
    }
  }

  /// Whether either config file declares workspace members.
  pub fn has_workspace_members(&self) -> bool {
    self
      .deno_json()
      .is_some_and(|deno_json| deno_json.json.workspace.is_some())
      || self
        .pkg_json()
        .is_some_and(|pkg_json| pkg_json.workspaces.is_some())
  }

  /// The deno.json, if this folder has one.
  pub fn deno_json(&self) -> Option<&ConfigFileRc> {
    match self {
      Self::Single(DenoOrPkgJson::Deno(deno_json))
      | Self::Both { deno_json, .. } => Some(deno_json),
      _ => None,
    }
  }

  /// The package.json, if this folder has one.
  pub fn pkg_json(&self) -> Option<&PackageJsonRc> {
    match self {
      Self::Single(DenoOrPkgJson::PkgJson(pkg_json))
      | Self::Both { pkg_json, .. } => Some(pkg_json),
      _ => None,
    }
  }

  /// Combine the optionally-found config files into a folder entry,
  /// returning `None` when neither exists.
  pub fn from_maybe_both(
    maybe_deno_json: Option<ConfigFileRc>,
    maybe_pkg_json: Option<PackageJsonRc>,
  ) -> Option<Self> {
    match (maybe_deno_json, maybe_pkg_json) {
      (None, None) => None,
      (Some(deno_json), Some(pkg_json)) => Some(Self::Both {
        deno_json,
        pkg_json,
      }),
      (Some(deno_json), None) => {
        Some(Self::Single(DenoOrPkgJson::Deno(deno_json)))
      }
      (None, Some(pkg_json)) => {
        Some(Self::Single(DenoOrPkgJson::PkgJson(pkg_json)))
      }
    }
  }
}
/// The result of workspace discovery: either no config was found (with the
/// vendor dir that would apply anyway) or a resolved workspace.
#[derive(Debug)]
pub enum ConfigFileDiscovery {
  None { maybe_vendor_dir: Option<PathBuf> },
  Workspace { workspace: WorkspaceRc },
}
impl ConfigFileDiscovery {
  /// The specifier of the workspace root's config file, preferring the
  /// deno.json over the package.json.
  fn root_config_specifier(&self) -> Option<Cow<'_, Url>> {
    let Self::Workspace { workspace, .. } = self else {
      return None;
    };
    let root_folder_configs = workspace.root_folder_configs();
    if let Some(deno_json) = &root_folder_configs.deno_json {
      Some(Cow::Borrowed(&deno_json.specifier))
    } else if let Some(pkg_json) = &root_folder_configs.pkg_json {
      Some(Cow::Owned(pkg_json.specifier()))
    } else {
      None
    }
  }
}
/// The primary config specifier for a folder; when both files exist the
/// deno.json wins.
fn config_folder_config_specifier(res: &ConfigFolder) -> Cow<'_, Url> {
  match res {
    ConfigFolder::Both { deno_json, .. } => {
      Cow::Borrowed(&deno_json.specifier)
    }
    ConfigFolder::Single(config) => config.specifier(),
  }
}
/// Discover the workspace configuration for one or more start points.
///
/// With multiple start dirs, each is discovered independently (sharing a
/// `checked` set to avoid re-walking ancestors) and they must all agree on
/// at most one workspace; finding two different workspaces is an error.
pub fn discover_workspace_config_files<
  TSys: FsRead + FsMetadata + FsReadDir,
>(
  sys: &TSys,
  start: WorkspaceDiscoverStart,
  opts: &WorkspaceDiscoverOptions,
) -> Result<ConfigFileDiscovery, WorkspaceDiscoverError> {
  match start {
    WorkspaceDiscoverStart::Paths(dirs) => match dirs.len() {
      // no start dirs: nothing to discover
      0 => Ok(ConfigFileDiscovery::None {
        maybe_vendor_dir: resolve_vendor_dir(
          None,
          opts.maybe_vendor_override.as_ref(),
        ),
      }),
      1 => {
        let dir = &dirs[0];
        let start = DirOrConfigFile::Dir(dir);
        discover_workspace_config_files_for_single_dir(sys, start, opts, None)
      }
      _ => {
        let mut checked = HashSet::default();
        let mut final_workspace = ConfigFileDiscovery::None {
          maybe_vendor_dir: resolve_vendor_dir(
            None,
            opts.maybe_vendor_override.as_ref(),
          ),
        };
        for dir in dirs {
          let workspace = discover_workspace_config_files_for_single_dir(
            sys,
            DirOrConfigFile::Dir(dir),
            opts,
            Some(&mut checked),
          )?;
          if let Some(root_config_specifier) = workspace.root_config_specifier()
          {
            // two dirs resolving to different workspaces is an error
            if let Some(final_workspace_config_specifier) =
              final_workspace.root_config_specifier()
            {
              return Err(WorkspaceDiscoverError(
                WorkspaceDiscoverErrorKind::MultipleWorkspaces {
                  base_workspace_url: final_workspace_config_specifier
                    .into_owned(),
                  other_workspace_url: root_config_specifier.into_owned(),
                }
                .into(),
              ));
            }
            final_workspace = workspace;
          }
        }
        Ok(final_workspace)
      }
    },
    WorkspaceDiscoverStart::ConfigFile(file) => {
      let start = DirOrConfigFile::ConfigFile(file);
      discover_workspace_config_files_for_single_dir(sys, start, opts, None)
    }
  }
}
/// A discovery start point: a directory to walk up from, or an explicitly
/// specified config file path.
#[derive(Debug, Clone, Copy)]
enum DirOrConfigFile<'a> {
  Dir(&'a Path),
  ConfigFile(&'a Path),
}
/// Discover the workspace for a single start directory or config file by
/// walking up the ancestor chain, collecting config folders, and stopping
/// at the first folder that declares workspace members.
///
/// `checked` (when provided by the multi-dir caller) records already
/// visited directories so shared ancestors aren't re-walked.
fn discover_workspace_config_files_for_single_dir<
  TSys: FsRead + FsMetadata + FsReadDir,
>(
  sys: &TSys,
  start: DirOrConfigFile,
  opts: &WorkspaceDiscoverOptions,
  mut checked: Option<&mut HashSet<PathBuf>>,
) -> Result<ConfigFileDiscovery, WorkspaceDiscoverError> {
  // truncates a path at its first node_modules component so discovery
  // never starts from inside a node_modules folder
  fn strip_up_to_node_modules(path: &Path) -> PathBuf {
    path
      .components()
      .take_while(|component| match component {
        std::path::Component::Normal(name) => {
          name.to_string_lossy() != "node_modules"
        }
        _ => true,
      })
      .collect()
  }

  if opts.workspace_cache.is_some() {
    // it doesn't really make sense to use a workspace cache without config
    // caches because that would mean the configs might change between calls
    // causing strange behavior, so panic if someone does this
    assert!(
      opts.deno_json_cache.is_some() && opts.pkg_json_cache.is_some(),
      "Using a workspace cache requires setting the deno.json and package.json caches"
    );
  }

  let start_dir: Option<&Path>;
  let mut first_config_folder_url: Option<Url> = None;
  let mut found_config_folders: HashMap<_, ConfigFolder> = HashMap::new();
  let config_file_names =
    ConfigFile::resolve_config_file_names(opts.additional_config_file_names);
  // loads the package.json in a folder, treating skippable I/O errors
  // (e.g. permission issues) as "not found"
  let load_pkg_json_in_folder = |folder_path: &Path| {
    if opts.discover_pkg_json {
      let pkg_json_path = folder_path.join("package.json");
      match PackageJson::load_from_path(
        sys,
        opts.pkg_json_cache,
        &pkg_json_path,
      ) {
        Ok(pkg_json) => {
          if pkg_json.is_some() {
            log::debug!(
              "package.json file found at '{}'",
              pkg_json_path.display()
            );
          }
          Ok(pkg_json)
        }
        Err(PackageJsonLoadError::Io { source, .. })
          if is_skippable_io_error(&source) =>
        {
          Ok(None)
        }
        Err(err) => Err(err),
      }
    } else {
      Ok(None)
    }
  };
  // loads both possible config files in a folder as a ConfigFolder
  let load_config_folder = |folder_path: &Path| -> Result<_, ConfigReadError> {
    let maybe_config_file = ConfigFile::maybe_find_in_folder(
      sys,
      opts.deno_json_cache,
      folder_path,
      &config_file_names,
    )?;
    let maybe_pkg_json = load_pkg_json_in_folder(folder_path)?;
    Ok(ConfigFolder::from_maybe_both(
      maybe_config_file,
      maybe_pkg_json,
    ))
  };
  match start {
    DirOrConfigFile::Dir(dir) => {
      start_dir = Some(dir);
    }
    DirOrConfigFile::ConfigFile(file) => {
      let specifier = url_from_file_path(file)?;
      let config_file = new_rc(
        ConfigFile::from_specifier(sys, specifier.clone())
          .map_err(ConfigReadError::DenoJsonRead)?,
      );

      // see what config would be loaded if we just specified the parent directory
      let natural_config_folder_result =
        load_config_folder(file.parent().unwrap());
      let matching_config_folder = match natural_config_folder_result {
        Ok(Some(natual_config_folder)) => {
          if natual_config_folder
            .deno_json()
            .is_some_and(|d| d.specifier == config_file.specifier)
          {
            Some(natual_config_folder)
          } else {
            None
          }
        }
        Ok(None) | Err(_) => None,
      };

      let parent_dir_url = url_parent(&config_file.specifier);
      let config_folder = match matching_config_folder {
        Some(config_folder) => config_folder,
        None => {
          // when loading the directory we would have loaded something else, so
          // don't try to load a workspace and don't store this information in
          // the workspace cache
          let config_folder =
            ConfigFolder::Single(DenoOrPkgJson::Deno(config_file));
          if config_folder.has_workspace_members() {
            return handle_workspace_folder_with_members(
              sys,
              config_folder,
              Some(&parent_dir_url),
              opts,
              found_config_folders,
              &load_config_folder,
            );
          }

          let maybe_vendor_dir = resolve_vendor_dir(
            config_folder.deno_json().map(|d| d.as_ref()),
            opts.maybe_vendor_override.as_ref(),
          );
          let links = resolve_link_config_folders(
            sys,
            &config_folder,
            load_config_folder,
          )?;
          return Ok(ConfigFileDiscovery::Workspace {
            workspace: new_rc(Workspace::new(
              config_folder,
              Default::default(),
              links,
              maybe_vendor_dir,
            )),
          });
        }
      };

      // a cached workspace for this config's directory short-circuits discovery
      if let Some(workspace_cache) = &opts.workspace_cache
        && let Some(workspace) = workspace_cache.get(&config_file.dir_path())
      {
        if cfg!(debug_assertions) {
          let expected_vendor_dir = resolve_vendor_dir(
            config_folder.deno_json().map(|d| d.as_ref()),
            opts.maybe_vendor_override.as_ref(),
          );
          debug_assert_eq!(
            expected_vendor_dir, workspace.vendor_dir,
            "should not be using a different vendor dir across calls"
          );
        }

        return Ok(ConfigFileDiscovery::Workspace {
          workspace: workspace.clone(),
        });
      }

      if config_folder.has_workspace_members() {
        return handle_workspace_folder_with_members(
          sys,
          config_folder,
          Some(&parent_dir_url),
          opts,
          found_config_folders,
          &load_config_folder,
        );
      }

      found_config_folders.insert(parent_dir_url.clone(), config_folder);
      first_config_folder_url = Some(parent_dir_url);
      // start searching for a workspace in the parent directory
      start_dir = file.parent().and_then(|p| p.parent());
    }
  }

  // do not auto-discover inside the node_modules folder (ex. when a
  // user is running something directly within there)
  let start_dir = start_dir.map(strip_up_to_node_modules);
  for current_dir in start_dir.iter().flat_map(|p| p.ancestors()) {
    if let Some(checked) = checked.as_mut()
      && !checked.insert(current_dir.to_path_buf())
    {
      // already visited here, so exit
      return Ok(ConfigFileDiscovery::None {
        maybe_vendor_dir: resolve_vendor_dir(
          None,
          opts.maybe_vendor_override.as_ref(),
        ),
      });
    }

    // a cached workspace with members at this ancestor wins immediately
    if let Some(workspace_with_members) = opts
      .workspace_cache
      .and_then(|c| c.get(current_dir))
      .filter(|w| w.config_folders.len() > 1)
    {
      if cfg!(debug_assertions) {
        let expected_vendor_dir = resolve_vendor_dir(
          workspace_with_members.root_deno_json().map(|d| d.as_ref()),
          opts.maybe_vendor_override.as_ref(),
        );
        debug_assert_eq!(
          expected_vendor_dir, workspace_with_members.vendor_dir,
          "should not be using a different vendor dir across calls"
        );
      }

      return handle_workspace_with_members(
        sys,
        workspace_with_members,
        first_config_folder_url.as_ref(),
        found_config_folders,
        opts,
        load_config_folder,
      );
    }

    let maybe_config_folder = load_config_folder(current_dir)?;
    let Some(root_config_folder) = maybe_config_folder else {
      continue;
    };
    if root_config_folder.has_workspace_members() {
      return handle_workspace_folder_with_members(
        sys,
        root_config_folder,
        first_config_folder_url.as_ref(),
        opts,
        found_config_folders,
        &load_config_folder,
      );
    }

    let config_folder_url = root_config_folder.folder_url();
    if first_config_folder_url.is_none() {
      if let Some(workspace_cache) = &opts.workspace_cache
        && let Some(workspace) = workspace_cache.get(current_dir)
      {
        if cfg!(debug_assertions) {
          let expected_vendor_dir = resolve_vendor_dir(
            root_config_folder.deno_json().map(|d| d.as_ref()),
            opts.maybe_vendor_override.as_ref(),
          );
          debug_assert_eq!(
            expected_vendor_dir, workspace.vendor_dir,
            "should not be using a different vendor dir across calls"
          );
        }

        return Ok(ConfigFileDiscovery::Workspace {
          workspace: workspace.clone(),
        });
      }

      first_config_folder_url = Some(config_folder_url.clone());
    }
    found_config_folders.insert(config_folder_url, root_config_folder);
  }

  // no ancestor declared workspace members: the first config folder found
  // becomes a single-folder workspace
  if let Some(first_config_folder_url) = first_config_folder_url {
    let config_folder = found_config_folders
      .remove(&first_config_folder_url)
      .unwrap();
    let maybe_vendor_dir = resolve_vendor_dir(
      config_folder.deno_json().map(|d| d.as_ref()),
      opts.maybe_vendor_override.as_ref(),
    );
    let link =
      resolve_link_config_folders(sys, &config_folder, load_config_folder)?;
    let workspace = new_rc(Workspace::new(
      config_folder,
      Default::default(),
      link,
      maybe_vendor_dir,
    ));
    if let Some(cache) = opts.workspace_cache {
      cache.set(workspace.root_dir_path(), workspace.clone());
    }
    Ok(ConfigFileDiscovery::Workspace { workspace })
  } else {
    Ok(ConfigFileDiscovery::None {
      maybe_vendor_dir: resolve_vendor_dir(
        None,
        opts.maybe_vendor_override.as_ref(),
      ),
    })
  }
}
/// Turns a config folder that declares workspace members into a full
/// `Workspace` and then finishes discovery via
/// `handle_workspace_with_members`.
fn handle_workspace_folder_with_members<
  TSys: FsRead + FsMetadata + FsReadDir,
>(
  sys: &TSys,
  root_config_folder: ConfigFolder,
  first_config_folder_url: Option<&Url>,
  opts: &WorkspaceDiscoverOptions<'_>,
  mut found_config_folders: HashMap<Url, ConfigFolder>,
  load_config_folder: &impl Fn(
    &Path,
  ) -> Result<Option<ConfigFolder>, ConfigReadError>,
) -> Result<ConfigFileDiscovery, WorkspaceDiscoverError> {
  // resolve the vendor folder up front since the config folder is
  // consumed by the workspace resolution below
  let vendor_dir = resolve_vendor_dir(
    root_config_folder.deno_json().map(|d| d.as_ref()),
    opts.maybe_vendor_override.as_ref(),
  );
  // expand the root config folder into a root + members view
  let raw_workspace = resolve_workspace_for_config_folder(
    sys,
    root_config_folder,
    vendor_dir,
    &mut found_config_folders,
    load_config_folder,
  )?;
  // load any linked folders declared by the root deno.json
  let link_folders = resolve_link_config_folders(
    sys,
    &raw_workspace.root,
    load_config_folder,
  )?;
  let workspace = new_rc(Workspace::new(
    raw_workspace.root,
    raw_workspace.members,
    link_folders,
    raw_workspace.vendor_dir,
  ));
  if let Some(cache) = opts.workspace_cache {
    cache.set(workspace.root_dir_path(), workspace.clone());
  }
  // defer to the shared handler that validates membership and names
  handle_workspace_with_members(
    sys,
    workspace,
    first_config_folder_url,
    found_config_folders,
    opts,
    load_config_folder,
  )
}
/// Finishes workspace discovery once a root workspace has been resolved:
/// validates that the start config belongs to the workspace and that no two
/// deno.json files declare the same package name.
fn handle_workspace_with_members<TSys: FsRead + FsMetadata + FsReadDir>(
  sys: &TSys,
  root_workspace: WorkspaceRc,
  first_config_folder_url: Option<&Url>,
  mut found_config_folders: HashMap<Url, ConfigFolder>,
  opts: &WorkspaceDiscoverOptions,
  load_config_folder: impl Fn(
    &Path,
  ) -> Result<Option<ConfigFolder>, ConfigReadError>,
) -> Result<ConfigFileDiscovery, WorkspaceDiscoverError> {
  // whether the root declares a deno.json "workspace" field (as opposed
  // to, e.g., an npm "workspaces" field in package.json)
  let is_root_deno_json_workspace = root_workspace
    .root_deno_json()
    .map(|d| d.json.workspace.is_some())
    .unwrap_or(false);
  // if the root was an npm workspace that doesn't have the start config
  // as a member then only resolve the start config
  if !is_root_deno_json_workspace
    && let Some(first_config_folder) = &first_config_folder_url
    && !root_workspace
      .config_folders
      .contains_key(*first_config_folder)
    && let Some(config_folder) =
      found_config_folders.remove(first_config_folder)
  {
    let maybe_vendor_dir = resolve_vendor_dir(
      config_folder.deno_json().map(|d| d.as_ref()),
      opts.maybe_vendor_override.as_ref(),
    );
    let links =
      resolve_link_config_folders(sys, &config_folder, load_config_folder)?;
    // the start config becomes its own single-folder workspace
    let workspace = new_rc(Workspace::new(
      config_folder,
      Default::default(),
      links,
      maybe_vendor_dir,
    ));
    if let Some(cache) = opts.workspace_cache {
      cache.set(workspace.root_dir_path(), workspace.clone());
    }
    return Ok(ConfigFileDiscovery::Workspace { workspace });
  }
  if is_root_deno_json_workspace {
    // a deno.json workspace requires every config file found while
    // traversing up to the root to actually be one of its members
    for (key, config_folder) in &found_config_folders {
      if !root_workspace.config_folders.contains_key(key) {
        return Err(
          WorkspaceDiscoverErrorKind::ConfigNotWorkspaceMember {
            workspace_url: (**root_workspace.root_dir_url()).clone(),
            config_url: config_folder_config_specifier(config_folder)
              .into_owned(),
          }
          .into(),
        );
      }
    }
  }
  // ensure no duplicate names in deno configuration files
  let mut seen_names: HashMap<&str, &Url> =
    HashMap::with_capacity(root_workspace.config_folders.len() + 1);
  for deno_json in root_workspace.deno_jsons() {
    if let Some(name) = deno_json.json.name.as_deref() {
      if let Some(other_member_url) = seen_names.get(name) {
        return Err(
          ResolveWorkspaceMemberErrorKind::DuplicatePackageName {
            name: name.to_string(),
            deno_json_url: deno_json.specifier.clone(),
            other_deno_json_url: (*other_member_url).clone(),
          }
          .into_box()
          .into(),
        );
      } else {
        seen_names.insert(name, &deno_json.specifier);
      }
    }
  }
  Ok(ConfigFileDiscovery::Workspace {
    workspace: root_workspace,
  })
}
/// Intermediate result of resolving a workspace: the root config folder,
/// its member config folders keyed by directory URL, and the vendor
/// directory that was resolved for the root (if any).
struct RawResolvedWorkspace {
  root: ConfigFolder,
  members: BTreeMap<UrlRc, ConfigFolder>,
  vendor_dir: Option<PathBuf>,
}
/// Resolves the workspace rooted at `root_config_folder`, expanding the
/// `workspace` members of a root deno.json and/or the `workspaces` entries
/// of a root package.json into concrete member config folders.
///
/// `found_config_folders` acts as an in-memory cache of config folders
/// already loaded while traversing up from the start directory; members
/// found there are removed instead of being re-read from disk.
fn resolve_workspace_for_config_folder<
  TSys: FsRead + FsMetadata + FsReadDir,
>(
  sys: &TSys,
  root_config_folder: ConfigFolder,
  maybe_vendor_dir: Option<PathBuf>,
  found_config_folders: &mut HashMap<Url, ConfigFolder>,
  load_config_folder: impl Fn(
    &Path,
  ) -> Result<Option<ConfigFolder>, ConfigReadError>,
) -> Result<RawResolvedWorkspace, WorkspaceDiscoverError> {
  let mut final_members = BTreeMap::new();
  let root_config_file_directory_url = root_config_folder.folder_url();
  // resolves a non-glob member entry to a directory URL relative to the
  // root config directory
  let resolve_member_url =
    |raw_member: &str| -> Result<Url, ResolveWorkspaceMemberError> {
      let member = ensure_trailing_slash(raw_member);
      let member_dir_url = root_config_file_directory_url
        .join(&member)
        .map_err(|err| {
          ResolveWorkspaceMemberErrorKind::InvalidMember {
            base: root_config_folder.folder_url(),
            member: raw_member.to_owned(),
            source: err,
          }
          .into_box()
        })?;
      Ok(member_dir_url)
    };
  // members must live inside the workspace root directory; URL prefix
  // comparison works here because both URLs end with a trailing slash
  let validate_member_url_is_descendant =
    |member_dir_url: &Url| -> Result<(), ResolveWorkspaceMemberError> {
      if !member_dir_url
        .as_str()
        .starts_with(root_config_file_directory_url.as_str())
      {
        return Err(
          ResolveWorkspaceMemberErrorKind::NonDescendant {
            workspace_url: root_config_file_directory_url.clone(),
            member_url: member_dir_url.clone(),
          }
          .into_box(),
        );
      }
      Ok(())
    };
  let mut find_member_config_folder =
    |member_dir_url: &Url| -> Result<_, ResolveWorkspaceMemberError> {
      // try to find the config folder in memory from the configs we already
      // found on the file system
      if let Some(config_folder) = found_config_folders.remove(member_dir_url) {
        return Ok(config_folder);
      }
      let maybe_config_folder =
        load_config_folder(&url_to_file_path(member_dir_url)?)?;
      maybe_config_folder.ok_or_else(|| {
        // it's fine this doesn't use all the possible config file names
        // as this is only used to enhance the error message
        if member_dir_url.as_str().ends_with("/deno.json/")
          || member_dir_url.as_str().ends_with("/deno.jsonc/")
          || member_dir_url.as_str().ends_with("/package.json/")
        {
          ResolveWorkspaceMemberErrorKind::NotFoundMaybeSpecifiedFile {
            dir_url: member_dir_url.clone(),
          }
          .into_box()
        } else {
          ResolveWorkspaceMemberErrorKind::NotFound {
            dir_url: member_dir_url.clone(),
          }
          .into_box()
        }
      })
    };
  // expands glob member entries by searching the file system for matching
  // config files (e.g. "packages/*"), skipping git/node_modules/vendor dirs
  let collect_member_config_folders =
    |kind: &'static str,
     pattern_members: Vec<&String>,
     dir_path: &Path,
     config_file_names: &'static [&'static str]|
     -> Result<Vec<PathBuf>, WorkspaceDiscoverErrorKind> {
      let patterns = pattern_members
        .iter()
        .flat_map(|raw_member| {
          config_file_names.iter().map(|config_file_name| {
            PathOrPattern::from_relative(
              dir_path,
              &format!(
                "{}{}",
                ensure_trailing_slash(raw_member),
                config_file_name
              ),
            )
            .map_err(|err| {
              ResolveWorkspaceMemberErrorKind::MemberToPattern {
                kind,
                base: root_config_file_directory_url.clone(),
                member: raw_member.to_string(),
                source: err,
              }
              .into_box()
            })
          })
        })
        .collect::<Result<Vec<_>, _>>()?;
      let paths = if patterns.is_empty() {
        Vec::new()
      } else {
        FileCollector::new(|_| true)
          .ignore_git_folder()
          .ignore_node_modules()
          .set_vendor_folder(maybe_vendor_dir.clone())
          .collect_file_patterns(
            sys,
            &FilePatterns {
              base: dir_path.to_path_buf(),
              include: Some(PathOrPatternSet::new(patterns)),
              exclude: PathOrPatternSet::new(Vec::new()),
            },
          )
      };
      Ok(paths)
    };
  if let Some(deno_json) = root_config_folder.deno_json()
    && let Some(workspace_config) = deno_json.to_workspace_config()?
  {
    // split entries into glob patterns (including negations) and plain paths
    let (pattern_members, path_members): (Vec<_>, Vec<_>) = workspace_config
      .members
      .iter()
      .partition(|member| is_glob_pattern(member) || member.starts_with('!'));
    // Deno workspaces can discover wildcard members that use either `deno.json`, `deno.jsonc` or `package.json`.
    // But it only works for Deno workspaces, npm workspaces don't discover `deno.json(c)` files, otherwise
    // we'd be incompatible with npm workspaces if we discovered more files.
    let deno_json_paths = collect_member_config_folders(
      "Deno",
      pattern_members,
      &deno_json.dir_path(),
      &["deno.json", "deno.jsonc", "package.json"],
    )?;
    // IndexSet keeps a deterministic order and dedupes members discovered
    // both explicitly and via glob
    let mut member_dir_urls =
      IndexSet::with_capacity(path_members.len() + deno_json_paths.len());
    for path_member in path_members {
      let member_dir_url = resolve_member_url(path_member)?;
      member_dir_urls.insert((path_member.clone(), member_dir_url));
    }
    for deno_json_path in deno_json_paths {
      let member_dir_url =
        url_from_directory_path(deno_json_path.parent().unwrap()).unwrap();
      member_dir_urls.insert((
        deno_json_path
          .parent()
          .unwrap()
          .to_string_lossy()
          .into_owned(),
        member_dir_url,
      ));
    }
    for (raw_member, member_dir_url) in member_dir_urls {
      // a workspace listing itself as a member is an error for deno.json
      if member_dir_url == root_config_file_directory_url {
        return Err(
          ResolveWorkspaceMemberErrorKind::InvalidSelfReference {
            member: raw_member.to_string(),
          }
          .into_box()
          .into(),
        );
      }
      validate_member_url_is_descendant(&member_dir_url)?;
      let member_config_folder = find_member_config_folder(&member_dir_url)?;
      let previous_member = final_members
        .insert(new_rc(member_dir_url.clone()), member_config_folder);
      if previous_member.is_some() {
        return Err(
          ResolveWorkspaceMemberErrorKind::Duplicate {
            member: raw_member.to_string(),
          }
          .into_box()
          .into(),
        );
      }
    }
  }
  if let Some(pkg_json) = root_config_folder.pkg_json()
    && let Some(members) = &pkg_json.workspaces
  {
    let (pattern_members, path_members): (Vec<_>, Vec<_>) = members
      .iter()
      .partition(|member| is_glob_pattern(member) || member.starts_with('!'));
    // npm workspaces can discover wildcard members `package.json` files, but not `deno.json(c)` files, otherwise
    // we'd be incompatible with npm workspaces if we discovered more files than just `package.json`.
    let pkg_json_paths = collect_member_config_folders(
      "npm",
      pattern_members,
      pkg_json.dir_path(),
      &["package.json"],
    )?;
    let mut member_dir_urls =
      IndexSet::with_capacity(path_members.len() + pkg_json_paths.len());
    for path_member in path_members {
      let member_dir_url = resolve_member_url(path_member)?;
      member_dir_urls.insert(member_dir_url);
    }
    for pkg_json_path in pkg_json_paths {
      let member_dir_url =
        url_from_directory_path(pkg_json_path.parent().unwrap())?;
      member_dir_urls.insert(member_dir_url);
    }
    for member_dir_url in member_dir_urls {
      if member_dir_url == root_config_file_directory_url {
        continue; // ignore self references
      }
      validate_member_url_is_descendant(&member_dir_url)?;
      let member_config_folder =
        match find_member_config_folder(&member_dir_url) {
          Ok(config_folder) => config_folder,
          Err(err) => {
            return Err(
              match err.into_kind() {
                ResolveWorkspaceMemberErrorKind::NotFound { dir_url } => {
                  // enhance the error to say we didn't find a package.json
                  ResolveWorkspaceMemberErrorKind::NotFoundPackageJson {
                    dir_url,
                  }
                  .into_box()
                }
                err => err.into_box(),
              }
              .into(),
            );
          }
        };
      // an npm workspace member must have a package.json even if a
      // deno.json was found in its folder
      if member_config_folder.pkg_json().is_none() {
        return Err(
          ResolveWorkspaceMemberErrorKind::NotFoundPackageJson {
            dir_url: member_dir_url,
          }
          .into_box()
          .into(),
        );
      }
      // don't surface errors about duplicate members for
      // package.json workspace members
      final_members.insert(new_rc(member_dir_url), member_config_folder);
    }
  }
  Ok(RawResolvedWorkspace {
    root: root_config_folder,
    members: final_members,
    vendor_dir: maybe_vendor_dir,
  })
}
/// Resolves the config folders referenced by the root deno.json's "links"
/// entries. Returns an empty map when the root has no deno.json or no link
/// configuration.
fn resolve_link_config_folders<TSys: FsRead + FsMetadata + FsReadDir>(
  sys: &TSys,
  root_config_folder: &ConfigFolder,
  load_config_folder: impl Fn(
    &Path,
  ) -> Result<Option<ConfigFolder>, ConfigReadError>,
) -> Result<BTreeMap<UrlRc, ConfigFolder>, WorkspaceDiscoverError> {
  let Some(workspace_deno_json) = root_config_folder.deno_json() else {
    return Ok(Default::default());
  };
  let Some(link_members) = workspace_deno_json.to_link_config()? else {
    return Ok(Default::default());
  };
  let root_config_file_directory_url = root_config_folder.folder_url();
  let resolve_link_dir_url =
    |raw_link: &str| -> Result<Url, WorkspaceDiscoverError> {
      let link = ensure_trailing_slash(raw_link);
      // support someone specifying an absolute path
      // (heuristic: a leading slash on non-Windows, or any backslash on
      // Windows, is treated as a file path rather than a relative link)
      if (!cfg!(windows) && link.starts_with('/')
        || cfg!(windows) && link.chars().any(|c| c == '\\'))
        && let Ok(value) =
          deno_path_util::url_from_file_path(Path::new(link.as_ref()))
      {
        return Ok(value);
      }
      let link_dir_url =
        root_config_file_directory_url.join(&link).map_err(|err| {
          WorkspaceDiscoverErrorKind::ResolveLink {
            base: root_config_file_directory_url.clone(),
            link: raw_link.to_owned(),
            source: err.into(),
          }
        })?;
      Ok(link_dir_url)
    };
  let mut final_config_folders = BTreeMap::new();
  for raw_member in &link_members {
    let link_dir_url = resolve_link_dir_url(raw_member)?;
    let link_configs = resolve_link_member_config_folders(
      sys,
      &link_dir_url,
      &load_config_folder,
    )
    .map_err(|err| WorkspaceDiscoverErrorKind::ResolveLink {
      base: root_config_file_directory_url.clone(),
      link: raw_member.to_string(),
      source: err,
    })?;
    // a link must never point back at this workspace's own root
    for link_config_url in link_configs.keys() {
      if *link_config_url.as_ref() == root_config_file_directory_url {
        return Err(WorkspaceDiscoverError(
          WorkspaceDiscoverErrorKind::ResolveLink {
            base: root_config_file_directory_url.clone(),
            link: raw_member.to_string(),
            source: ResolveWorkspaceLinkErrorKind::WorkspaceMemberNotAllowed
              .into_box(),
          }
          .into(),
        ));
      }
    }
    final_config_folders.extend(link_configs);
  }
  Ok(final_config_folders)
}
fn resolve_link_member_config_folders<TSys: FsRead + FsMetadata + FsReadDir>(
sys: &TSys,
link_dir_url: &Url,
load_config_folder: impl Fn(
&Path,
) -> Result<Option<ConfigFolder>, ConfigReadError>,
) -> Result<BTreeMap<UrlRc, ConfigFolder>, ResolveWorkspaceLinkError> {
let link_dir_path = url_to_file_path(link_dir_url)?;
let maybe_config_folder = load_config_folder(&link_dir_path)?;
let Some(config_folder) = maybe_config_folder else {
return Err(
ResolveWorkspaceLinkErrorKind::NotFound {
dir_url: link_dir_url.clone(),
}
.into_box(),
);
};
if config_folder.has_workspace_members() {
let maybe_vendor_dir =
resolve_vendor_dir(config_folder.deno_json().map(|d| d.as_ref()), None);
let mut raw_workspace = resolve_workspace_for_config_folder(
sys,
config_folder,
maybe_vendor_dir,
&mut HashMap::new(),
&load_config_folder,
)
.map_err(|err| ResolveWorkspaceLinkErrorKind::Workspace(Box::new(err)))?;
raw_workspace
.members
.insert(new_rc(raw_workspace.root.folder_url()), raw_workspace.root);
Ok(raw_workspace.members)
} else {
// attempt to find the root workspace directory
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | true |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/config/workspace/mod.rs | libs/config/workspace/mod.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::collections::BTreeMap;
use std::collections::HashSet;
use std::collections::VecDeque;
use std::path::Path;
use std::path::PathBuf;
use std::sync::OnceLock;
use boxed_error::Boxed;
use deno_error::JsError;
use deno_maybe_sync::new_rc;
use deno_package_json::PackageJson;
use deno_package_json::PackageJsonDepValue;
use deno_package_json::PackageJsonDepWorkspaceReq;
use deno_package_json::PackageJsonLoadError;
use deno_package_json::PackageJsonRc;
use deno_path_util::url_from_directory_path;
use deno_path_util::url_parent;
use deno_path_util::url_to_file_path;
use deno_semver::RangeSetOrTag;
use deno_semver::Version;
use deno_semver::VersionReq;
use deno_semver::jsr::JsrDepPackageReq;
use deno_semver::package::PackageKind;
use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq;
use discovery::ConfigFileDiscovery;
use discovery::ConfigFolder;
use discovery::DenoOrPkgJson;
use discovery::discover_workspace_config_files;
use indexmap::IndexMap;
use indexmap::IndexSet;
use sys_traits::FsMetadata;
use sys_traits::FsRead;
use sys_traits::FsReadDir;
use thiserror::Error;
use url::Url;
use crate::UrlToFilePathError;
use crate::deno_json;
use crate::deno_json::AllowScriptsConfig;
use crate::deno_json::BenchConfig;
use crate::deno_json::CompileConfig;
use crate::deno_json::CompilerOptions;
use crate::deno_json::ConfigFile;
use crate::deno_json::ConfigFileError;
use crate::deno_json::ConfigFileRc;
use crate::deno_json::ConfigFileReadError;
use crate::deno_json::DeployConfig;
use crate::deno_json::FmtConfig;
use crate::deno_json::FmtOptionsConfig;
use crate::deno_json::LinkConfigParseError;
use crate::deno_json::LintConfig;
use crate::deno_json::LintRulesConfig;
use crate::deno_json::MinimumDependencyAgeConfig;
use crate::deno_json::NodeModulesDirMode;
use crate::deno_json::NodeModulesDirParseError;
use crate::deno_json::PermissionsConfig;
use crate::deno_json::PermissionsObjectWithBase;
use crate::deno_json::PublishConfig;
pub use crate::deno_json::TaskDefinition;
use crate::deno_json::TestConfig;
use crate::deno_json::ToInvalidConfigError;
use crate::deno_json::ToLockConfigError;
use crate::deno_json::WorkspaceConfigParseError;
use crate::glob::FilePatterns;
use crate::glob::PathOrPattern;
use crate::glob::PathOrPatternParseError;
use crate::glob::PathOrPatternSet;
mod discovery;
// Aliases over the crate's configurable Rc/Arc wrapper so the same code
// compiles for both single-threaded and thread-safe configurations.
#[allow(clippy::disallowed_types)]
type UrlRc = deno_maybe_sync::MaybeArc<Url>;
#[allow(clippy::disallowed_types)]
pub type WorkspaceRc = deno_maybe_sync::MaybeArc<Workspace>;
#[allow(clippy::disallowed_types)]
pub type WorkspaceDirectoryRc = deno_maybe_sync::MaybeArc<WorkspaceDirectory>;
/// A JSR package in the workspace as seen by the resolver.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ResolverWorkspaceJsrPackage {
  pub base: Url,
  pub name: String,
  pub version: Option<Version>,
  pub exports: IndexMap<String, String>,
  pub is_link: bool,
}
impl ResolverWorkspaceJsrPackage {
  /// Whether this package satisfies `req`. A package without a version
  /// matches any version requirement for its name.
  pub fn matches_req(&self, req: &PackageReq) -> bool {
    if self.name != req.name {
      return false;
    }
    match &self.version {
      Some(version) => req.version_req.matches(version),
      None => true,
    }
  }
}
#[derive(Debug, Clone)]
pub struct JsrPackageConfig {
  /// The package name.
  pub name: String,
  // the workspace directory the package's deno.json lives in
  pub member_dir: WorkspaceDirectoryRc,
  pub config_file: ConfigFileRc,
  // license resolved from the config file, if declared
  pub license: Option<String>,
  pub should_publish: bool,
}
/// An npm package in the workspace, backed by a package.json.
#[derive(Debug, Clone)]
pub struct NpmPackageConfig {
  pub nv: PackageNv,
  pub workspace_dir: WorkspaceDirectoryRc,
  pub pkg_json: PackageJsonRc,
}
impl NpmPackageConfig {
  /// Whether this package satisfies the given package requirement.
  pub fn matches_req(&self, req: &PackageReq) -> bool {
    self.matches_name_and_version_req(&req.name, &req.version_req)
  }
  /// Whether this package has the given name and satisfies the version
  /// requirement. The special `workspace` tag always matches.
  pub fn matches_name_and_version_req(
    &self,
    name: &str,
    version_req: &VersionReq,
  ) -> bool {
    name == self.nv.name
      && match version_req.inner() {
        RangeSetOrTag::RangeSet(set) => set.satisfies(&self.nv.version),
        RangeSetOrTag::Tag(tag) => tag == "workspace",
      }
  }
}
/// Workspace-level lint configuration.
#[derive(Clone, Debug, Default, Hash, PartialEq)]
pub struct WorkspaceLintConfig {
  // the lint reporter kind (e.g. from `lint.report`)
  pub report: Option<String>,
}
/// Non-fatal problems found while analyzing workspace configuration files.
#[derive(Debug, Clone, Error, JsError, PartialEq, Eq)]
#[class(type)]
pub enum WorkspaceDiagnosticKind {
  #[error(
    "\"{0}\" field can only be specified in the workspace root deno.json file."
  )]
  RootOnlyOption(&'static str),
  #[error(
    "\"{0}\" field can only be specified in a workspace member deno.json file and not the workspace root file."
  )]
  MemberOnlyOption(&'static str),
  #[error("\"workspaces\" field was ignored. Use \"workspace\" instead.")]
  InvalidWorkspacesOption,
  #[error("\"exports\" field should be specified when specifying a \"name\".")]
  MissingExports,
  #[error(
    "\"importMap\" field is ignored when \"imports\" or \"scopes\" are specified in the config file."
  )]
  ImportMapReferencingImportMap,
  #[error(
    "\"imports\" and \"scopes\" field is ignored when \"importMap\" is specified in the root config file."
  )]
  MemberImportsScopesIgnored,
  #[error(
    "`\"nodeModulesDir\": {previous}` is deprecated in Deno 2.0. Use `\"nodeModulesDir\": \"{suggestion}\"` instead."
  )]
  DeprecatedNodeModulesDirOption {
    previous: bool,
    suggestion: NodeModulesDirMode,
  },
  #[error("\"patch\" property was renamed to \"links\".")]
  DeprecatedPatch,
  #[error(
    "Invalid workspace member name \"{name}\". Ensure the name is in the format '@scope/name'."
  )]
  InvalidMemberName { name: String },
  #[error(
    "\"minimumDependencyAge.exclude\" entry \"{entry}\" missing jsr: or npm: prefix."
  )]
  MinimumDependencyAgeExcludeMissingPrefix { entry: String },
}
/// A diagnostic paired with the URL of the config file it was found in.
#[derive(Debug, Error, JsError, Clone, PartialEq, Eq)]
#[class(inherit)]
#[error("{}\n    at {}", .kind, .config_url)]
pub struct WorkspaceDiagnostic {
  #[inherit]
  pub kind: WorkspaceDiagnosticKind,
  pub config_url: Url,
}
/// Boxed error wrapper for link resolution failures (keeps `Result` small).
#[derive(Debug, JsError, Boxed)]
pub struct ResolveWorkspaceLinkError(pub Box<ResolveWorkspaceLinkErrorKind>);
#[derive(Debug, Error, JsError)]
pub enum ResolveWorkspaceLinkErrorKind {
  #[class(inherit)]
  #[error(transparent)]
  ConfigRead(#[from] ConfigReadError),
  #[class(type)]
  #[error("Could not find link member in '{}'.", .dir_url)]
  NotFound { dir_url: Url },
  #[class(type)]
  #[error("Workspace member cannot be specified as a link.")]
  WorkspaceMemberNotAllowed,
  #[class(inherit)]
  #[error(transparent)]
  InvalidLink(#[from] url::ParseError),
  #[class(inherit)]
  #[error(transparent)]
  UrlToFilePath(#[from] deno_path_util::UrlToFilePathError),
  #[class(inherit)]
  #[error(transparent)]
  Workspace(Box<WorkspaceDiscoverError>),
}
/// Failure to read either kind of configuration file.
#[derive(Debug, Error, JsError)]
pub enum ConfigReadError {
  #[class(inherit)]
  #[error(transparent)]
  DenoJsonRead(#[from] ConfigFileReadError),
  #[class(inherit)]
  #[error(transparent)]
  PackageJsonRead(#[from] PackageJsonLoadError),
}
/// Boxed error wrapper for workspace member resolution failures.
#[derive(Debug, JsError, Boxed)]
#[class(type)]
pub struct ResolveWorkspaceMemberError(
  pub Box<ResolveWorkspaceMemberErrorKind>,
);
#[derive(Debug, Error, JsError)]
#[class(type)]
pub enum ResolveWorkspaceMemberErrorKind {
  #[class(inherit)]
  #[error(transparent)]
  ConfigRead(#[from] ConfigReadError),
  #[error("Could not find config file for workspace member in '{}'.", .dir_url)]
  NotFound { dir_url: Url },
  #[error("Could not find package.json for workspace member in '{}'.", .dir_url)]
  NotFoundPackageJson { dir_url: Url },
  // raised when the member entry looks like it points at a config file
  // rather than a directory
  #[error("Could not find config file for workspace member in '{}'. Ensure you specify the directory and not the configuration file in the workspace member.", .dir_url)]
  NotFoundMaybeSpecifiedFile { dir_url: Url },
  #[error(
    "Workspace member must be nested in a directory under the workspace.\n  Member: {member_url}\n  Workspace: {workspace_url}"
  )]
  NonDescendant { workspace_url: Url, member_url: Url },
  #[error("Cannot specify a workspace member twice ('{}').", .member)]
  Duplicate { member: String },
  #[error(
    "The '{name}' package ('{deno_json_url}') cannot have the same name as the package at '{other_deno_json_url}'."
  )]
  DuplicatePackageName {
    name: String,
    deno_json_url: Url,
    other_deno_json_url: Url,
  },
  #[error("Remove the reference to the current config file (\"{}\") in \"workspaces\".", .member)]
  InvalidSelfReference { member: String },
  #[class(inherit)]
  #[error("Invalid workspace member '{}' for config '{}'.", member, base)]
  InvalidMember {
    base: Url,
    member: String,
    #[source]
    #[inherit]
    source: url::ParseError,
  },
  #[class(inherit)]
  #[error(
    "Failed converting {kind} workspace member '{}' to pattern for config '{}'.",
    member,
    base
  )]
  MemberToPattern {
    kind: &'static str,
    base: Url,
    member: String,
    // this error has the text that failed
    #[source]
    #[inherit]
    source: PathOrPatternParseError,
  },
  #[error(transparent)]
  #[class(inherit)]
  UrlToFilePath(#[from] deno_path_util::UrlToFilePathError),
}
/// Boxed error wrapper for workspace discovery failures.
#[derive(Debug, JsError, Boxed)]
#[class(inherit)]
pub struct WorkspaceDiscoverError(pub Box<WorkspaceDiscoverErrorKind>);
/// Errors resolving the directory discovery should start from.
#[derive(Debug, Error, JsError)]
#[class(type)]
pub enum FailedResolvingStartDirectoryError {
  #[error("No paths provided.")]
  NoPathsProvided,
  #[error("Could not resolve path: '{}'.", .0.display())]
  CouldNotResolvePath(PathBuf),
  #[error("Provided config file path ('{}') had no parent directory.", .0.display())]
  PathHasNoParentDirectory(PathBuf),
}
#[derive(Debug, Error, JsError)]
pub enum WorkspaceDiscoverErrorKind {
  #[class(inherit)]
  #[error("Failed resolving start directory.")]
  FailedResolvingStartDirectory(#[source] FailedResolvingStartDirectoryError),
  #[class(inherit)]
  #[error(transparent)]
  ConfigRead(#[from] ConfigReadError),
  #[class(inherit)]
  #[error(transparent)]
  PackageJsonRead(#[from] PackageJsonLoadError),
  #[class(inherit)]
  #[error(transparent)]
  LinkConfigParse(#[from] LinkConfigParseError),
  #[class(inherit)]
  #[error(transparent)]
  WorkspaceConfigParse(#[from] WorkspaceConfigParseError),
  #[class(inherit)]
  #[error(transparent)]
  ResolveMember(#[from] ResolveWorkspaceMemberError),
  #[class(inherit)]
  #[error("Failed loading link '{}' in config '{}'.", link, base)]
  ResolveLink {
    link: String,
    base: Url,
    #[source]
    #[inherit]
    source: ResolveWorkspaceLinkError,
  },
  #[class(type)]
  #[error(
    "Command resolved to multiple config files. Ensure all specified paths are within the same workspace.\n  First: {base_workspace_url}\n  Second: {other_workspace_url}"
  )]
  MultipleWorkspaces {
    base_workspace_url: Url,
    other_workspace_url: Url,
  },
  #[class(inherit)]
  #[error(transparent)]
  UrlToFilePath(#[from] UrlToFilePathError),
  #[class(inherit)]
  #[error(transparent)]
  PathToUrl(#[from] deno_path_util::PathToUrlError),
  #[class(type)]
  #[error(
    "Config file must be a member of the workspace.\n  Config: {config_url}\n  Workspace: {workspace_url}"
  )]
  ConfigNotWorkspaceMember { workspace_url: Url, config_url: Url },
}
/// Where workspace discovery should begin: from one or more directory
/// paths, or from an explicitly specified config file.
#[derive(Debug, Clone, Copy)]
pub enum WorkspaceDiscoverStart<'a> {
  Paths(&'a [PathBuf]),
  ConfigFile(&'a Path),
}
/// Whether vendoring is enabled for the workspace.
#[derive(Debug, Clone, Copy)]
pub enum VendorEnablement<'a> {
  Disable,
  Enable {
    /// The cwd, which will be used when no configuration file is
    /// resolved in order to discover the vendor folder.
    cwd: &'a Path,
  },
}
/// Cache of fully discovered workspaces keyed by root directory path.
pub trait WorkspaceCache {
  fn get(&self, dir_path: &Path) -> Option<WorkspaceRc>;
  fn set(&self, dir_path: PathBuf, workspace: WorkspaceRc);
}
/// Options controlling workspace discovery.
#[derive(Default, Clone)]
pub struct WorkspaceDiscoverOptions<'a> {
  /// A cache for deno.json files. This is mostly only useful in the LSP where
  /// workspace discovery may occur multiple times.
  pub deno_json_cache: Option<&'a dyn crate::deno_json::DenoJsonCache>,
  pub pkg_json_cache: Option<&'a dyn deno_package_json::PackageJsonCache>,
  /// A cache for workspaces. This is mostly only useful in the LSP where
  /// workspace discovery may occur multiple times.
  pub workspace_cache: Option<&'a dyn WorkspaceCache>,
  // extra config file names to recognize besides the built-in ones
  pub additional_config_file_names: &'a [&'a str],
  // whether package.json files participate in discovery
  pub discover_pkg_json: bool,
  // overrides vendor-dir resolution when set
  pub maybe_vendor_override: Option<VendorEnablement<'a>>,
}
/// Options for constructing an empty workspace directory (no config files).
#[derive(Clone)]
pub struct WorkspaceDirectoryEmptyOptions<'a> {
  pub root_dir: UrlRc,
  pub use_vendor_dir: VendorEnablement<'a>,
}
/// Configuration files found in a specific folder.
#[derive(Debug, Default, Clone)]
pub struct FolderConfigs {
  pub deno_json: Option<ConfigFileRc>,
  pub pkg_json: Option<PackageJsonRc>,
}
impl FolderConfigs {
  /// Splits a discovered `ConfigFolder` into its optional deno.json and
  /// package.json parts.
  fn from_config_folder(config_folder: ConfigFolder) -> Self {
    let (deno_json, pkg_json) = match config_folder {
      ConfigFolder::Single(DenoOrPkgJson::Deno(deno_json)) => {
        (Some(deno_json), None)
      }
      ConfigFolder::Single(DenoOrPkgJson::PkgJson(pkg_json)) => {
        (None, Some(pkg_json))
      }
      ConfigFolder::Both {
        deno_json,
        pkg_json,
      } => (Some(deno_json), Some(pkg_json)),
    };
    FolderConfigs {
      deno_json,
      pkg_json,
    }
  }
}
/// Error for an invalid `lint.report` value.
#[derive(Debug, Error, JsError)]
#[class(type)]
#[error("lint.report must be a string")]
pub struct LintConfigError;
/// Lazily computed values cached on a `Workspace`.
#[derive(Debug, Default)]
struct WorkspaceCachedValues {
  // memoized WorkspaceDirectory per member directory URL
  dirs: deno_maybe_sync::MaybeDashMap<UrlRc, WorkspaceDirectoryRc>,
}
/// A discovered workspace: a root folder plus member and linked config
/// folders, all keyed by directory URL.
#[derive(Debug)]
pub struct Workspace {
  root_dir_url: UrlRc,
  // root folder first, then members (insertion order preserved)
  config_folders: IndexMap<UrlRc, FolderConfigs>,
  links: BTreeMap<UrlRc, FolderConfigs>,
  pub(crate) vendor_dir: Option<PathBuf>,
  cached: WorkspaceCachedValues,
}
impl Workspace {
pub(crate) fn new(
root: ConfigFolder,
members: BTreeMap<UrlRc, ConfigFolder>,
link: BTreeMap<UrlRc, ConfigFolder>,
vendor_dir: Option<PathBuf>,
) -> Self {
let root_dir_url = new_rc(root.folder_url());
let mut config_folders = IndexMap::with_capacity(members.len() + 1);
config_folders.insert(
root_dir_url.clone(),
FolderConfigs::from_config_folder(root),
);
config_folders.extend(members.into_iter().map(
|(folder_url, config_folder)| {
(folder_url, FolderConfigs::from_config_folder(config_folder))
},
));
Workspace {
root_dir_url,
config_folders,
links: link
.into_iter()
.map(|(url, folder)| (url, FolderConfigs::from_config_folder(folder)))
.collect(),
vendor_dir,
cached: Default::default(),
}
}
  /// The directory URL of the workspace root.
  pub fn root_dir_url(&self) -> &UrlRc {
    &self.root_dir_url
  }
  /// The workspace root as a `WorkspaceDirectory`.
  pub fn root_dir(self: &WorkspaceRc) -> WorkspaceDirectoryRc {
    self.resolve_member_dir(&self.root_dir_url)
  }
  /// The workspace root directory as a file system path.
  ///
  /// Panics if the root URL cannot be converted to a file path.
  pub fn root_dir_path(&self) -> PathBuf {
    url_to_file_path(&self.root_dir_url).unwrap()
  }
  /// Configuration files found directly in the workspace root folder.
  pub fn root_folder_configs(&self) -> &FolderConfigs {
    self.config_folders.get(&self.root_dir_url).unwrap()
  }
  /// The deno.json in the workspace root folder, if any.
  pub fn root_deno_json(&self) -> Option<&ConfigFileRc> {
    self.root_folder_configs().deno_json.as_ref()
  }
  /// The package.json in the workspace root folder, if any.
  pub fn root_pkg_json(&self) -> Option<&PackageJsonRc> {
    self.root_folder_configs().pkg_json.as_ref()
  }
  /// All config folders in the workspace keyed by directory URL, with the
  /// root folder first.
  pub fn config_folders(&self) -> &IndexMap<UrlRc, FolderConfigs> {
    &self.config_folders
  }
  /// Gets the folders sorted by whether they have a dependency on each other.
  ///
  /// Builds a dependency graph between workspace folders (via jsr/npm
  /// package names and `file:` paths) and topologically sorts it with
  /// Kahn's algorithm; folders involved in cycles are appended at the end
  /// in their original insertion order.
  pub fn config_folders_sorted_by_dependencies(
    &self,
  ) -> IndexMap<&UrlRc, &FolderConfigs> {
    // a package name plus a version if one could be parsed
    struct PackageNameMaybeVersion<'a> {
      name: &'a str,
      version: Option<Version>,
    }
    // a dependency is either a package requirement or a file path
    enum Dep {
      Req(JsrDepPackageReq),
      Path(Url),
    }
    impl Dep {
      pub fn matches_pkg(
        &self,
        package_kind: PackageKind,
        pkg: &PackageNameMaybeVersion,
        folder_url: &Url,
      ) -> bool {
        match self {
          Dep::Req(req) => {
            req.kind == package_kind
              && req.req.name == pkg.name
              && pkg
                .version
                .as_ref()
                .map(|v| {
                  // just match if it's a tag
                  req.req.version_req.tag().is_some()
                    || req.req.version_req.matches(v)
                })
                .unwrap_or(true)
          }
          Dep::Path(url) => {
            // compare directory urls ignoring trailing slashes
            folder_url.as_str().trim_end_matches('/')
              == url.as_str().trim_end_matches('/')
          }
        }
      }
    }
    // a folder together with the package info and deps extracted from
    // its deno.json / package.json
    struct Folder<'a> {
      index: usize,
      dir_url: &'a UrlRc,
      folder: &'a FolderConfigs,
      npm_nv: Option<PackageNameMaybeVersion<'a>>,
      jsr_nv: Option<PackageNameMaybeVersion<'a>>,
      deps: Vec<Dep>,
    }
    impl<'a> Folder<'a> {
      pub fn depends_on(&self, other: &Folder<'a>) -> bool {
        if let Some(other_nv) = &other.npm_nv
          && self.has_matching_dep(PackageKind::Npm, other_nv, other.dir_url)
        {
          return true;
        }
        if let Some(other_nv) = &other.jsr_nv
          && self.has_matching_dep(PackageKind::Jsr, other_nv, other.dir_url)
        {
          return true;
        }
        false
      }
      fn has_matching_dep(
        &self,
        pkg_kind: PackageKind,
        pkg: &PackageNameMaybeVersion,
        folder_url: &Url,
      ) -> bool {
        self
          .deps
          .iter()
          .any(|dep| dep.matches_pkg(pkg_kind, pkg, folder_url))
      }
    }
    // collect package names/versions and declared deps for every folder
    let mut folders = Vec::with_capacity(self.config_folders.len());
    for (index, (dir_url, folder)) in self.config_folders.iter().enumerate() {
      folders.push(Folder {
        index,
        folder,
        dir_url,
        jsr_nv: folder.deno_json.as_ref().and_then(|deno_json| {
          deno_json
            .json
            .name
            .as_ref()
            .map(|name| PackageNameMaybeVersion {
              name,
              version: deno_json
                .json
                .version
                .as_ref()
                .and_then(|v| Version::parse_standard(v).ok()),
            })
        }),
        npm_nv: folder.pkg_json.as_ref().and_then(|pkg_json| {
          pkg_json.name.as_ref().map(|name| PackageNameMaybeVersion {
            name,
            version: pkg_json
              .version
              .as_ref()
              .and_then(|v| Version::parse_from_npm(v).ok()),
          })
        }),
        deps: folder
          .deno_json
          .as_ref()
          .map(|d| d.dependencies().into_iter().map(Dep::Req))
          .into_iter()
          .flatten()
          .chain(
            folder
              .pkg_json
              .as_ref()
              .map(|d| {
                let deps = d.resolve_local_package_json_deps();
                deps
                  .dependencies
                  .iter()
                  .chain(deps.dev_dependencies.iter())
                  .filter_map(|(k, v)| match v.as_ref().ok()? {
                    PackageJsonDepValue::File(path) => {
                      dir_url.join(path).ok().map(Dep::Path)
                    }
                    PackageJsonDepValue::Req(package_req) => {
                      Some(Dep::Req(JsrDepPackageReq {
                        kind: PackageKind::Npm,
                        req: package_req.clone(),
                      }))
                    }
                    PackageJsonDepValue::Workspace(workspace_req) => {
                      Some(Dep::Req(JsrDepPackageReq {
                        kind: PackageKind::Npm,
                        req: PackageReq {
                          name: k.clone(),
                          version_req: match workspace_req {
                            PackageJsonDepWorkspaceReq::VersionReq(
                              version_req,
                            ) => version_req.clone(),
                            PackageJsonDepWorkspaceReq::Tilde
                            | PackageJsonDepWorkspaceReq::Caret => {
                              VersionReq::parse_from_npm("*").unwrap()
                            }
                          },
                        },
                      }))
                    }
                    PackageJsonDepValue::JsrReq(req) => {
                      Some(Dep::Req(JsrDepPackageReq {
                        kind: PackageKind::Npm,
                        req: req.clone(),
                      }))
                    }
                  })
              })
              .into_iter()
              .flatten(),
          )
          .collect(),
      })
    }
    // build adjacency + in-degree
    let n = folders.len();
    let mut adj: Vec<Vec<usize>> = vec![Vec::new(); n];
    let mut indeg = vec![0_u32; n];
    for i in 0..n {
      for j in 0..n {
        if i != j && folders[i].depends_on(&folders[j]) {
          adj[j].push(i);
          indeg[i] += 1;
        }
      }
    }
    // kahn's algorithm
    let mut queue: VecDeque<usize> = indeg
      .iter()
      .enumerate()
      .filter(|&(_, &d)| d == 0)
      .map(|(i, _)| i)
      .collect();
    // preserve original insertion order for deterministic output
    queue.make_contiguous().sort_by_key(|&i| folders[i].index);
    let mut output = Vec::<usize>::with_capacity(n);
    while let Some(i) = queue.pop_front() {
      output.push(i);
      for &j in &adj[i] {
        indeg[j] -= 1;
        if indeg[j] == 0 {
          queue.push_back(j);
        }
      }
    }
    // handle possible cycles
    if output.len() < n {
      // collect the still-cyclic nodes
      let mut cyclic: Vec<usize> = (0..n).filter(|&i| indeg[i] > 0).collect();
      // stable, deterministic: lowest original index first
      cyclic.sort_by_key(|&i| folders[i].index);
      output.extend(cyclic);
    }
    output
      .into_iter()
      .map(|i| (folders[i].dir_url, folders[i].folder))
      .collect()
  }
pub fn deno_jsons(&self) -> impl Iterator<Item = &ConfigFileRc> {
self
.config_folders
.values()
.filter_map(|f| f.deno_json.as_ref())
}
pub fn package_jsons(&self) -> impl Iterator<Item = &PackageJsonRc> {
self
.config_folders
.values()
.filter_map(|f| f.pkg_json.as_ref())
}
#[allow(clippy::needless_lifetimes)] // clippy issue
pub fn jsr_packages<'a>(
self: &'a WorkspaceRc,
) -> impl Iterator<Item = JsrPackageConfig> + 'a {
self.deno_jsons().filter_map(|c| {
if !c.is_package() {
return None;
}
Some(JsrPackageConfig {
member_dir: self.resolve_member_dir(&c.specifier),
name: c.json.name.clone()?,
config_file: c.clone(),
license: c.to_license(),
should_publish: c.should_publish(),
})
})
}
pub fn npm_packages(self: &WorkspaceRc) -> Vec<NpmPackageConfig> {
self
.package_jsons()
.filter_map(|c| self.package_json_to_npm_package_config(c))
.collect()
}
fn package_json_to_npm_package_config(
self: &WorkspaceRc,
pkg_json: &PackageJsonRc,
) -> Option<NpmPackageConfig> {
Some(NpmPackageConfig {
workspace_dir: self.resolve_member_dir(&pkg_json.specifier()),
nv: PackageNv {
name: deno_semver::StackString::from(pkg_json.name.as_ref()?.as_str()),
version: {
let version = pkg_json.version.as_ref()?;
deno_semver::Version::parse_from_npm(version).ok()?
},
},
pkg_json: pkg_json.clone(),
})
}
  /// The folders linked into this workspace (`self.links`).
  pub fn link_folders(&self) -> &BTreeMap<UrlRc, FolderConfigs> {
    &self.links
  }
  /// deno.json files of the linked folders.
  pub fn link_deno_jsons(&self) -> impl Iterator<Item = &ConfigFileRc> {
    self.links.values().filter_map(|f| f.deno_json.as_ref())
  }
  /// package.json files of the linked folders.
  pub fn link_pkg_jsons(&self) -> impl Iterator<Item = &PackageJsonRc> {
    self.links.values().filter_map(|f| f.pkg_json.as_ref())
  }
pub fn resolver_deno_jsons(&self) -> impl Iterator<Item = &ConfigFileRc> {
self
.deno_jsons()
.chain(self.links.values().filter_map(|f| f.deno_json.as_ref()))
}
pub fn resolver_pkg_jsons(
&self,
) -> impl Iterator<Item = (&UrlRc, &PackageJsonRc)> {
self
.config_folders
.iter()
.filter_map(|(k, v)| Some((k, v.pkg_json.as_ref()?)))
}
  /// Iterates over the jsr packages the resolver should know about:
  /// workspace members first, then linked folders (flagged `is_link`).
  /// Entries without a name or with an invalid exports config are
  /// skipped; an unparseable version becomes `None`.
  pub fn resolver_jsr_pkgs(
    &self,
  ) -> impl Iterator<Item = ResolverWorkspaceJsrPackage> + '_ {
    self
      .config_folders
      .iter()
      .filter_map(|(dir_url, f)| Some((dir_url, f.deno_json.as_ref()?, false)))
      .chain(self.links.iter().filter_map(|(dir_url, f)| {
        Some((dir_url, f.deno_json.as_ref()?, true))
      }))
      .filter_map(|(dir_url, config_file, is_link)| {
        // a package requires a name
        let name = config_file.json.name.as_ref()?;
        let version = config_file
          .json
          .version
          .as_ref()
          .and_then(|v| Version::parse_standard(v).ok());
        let exports_config = config_file.to_exports_config().ok()?;
        Some(ResolverWorkspaceJsrPackage {
          is_link,
          base: dir_url.as_ref().clone(),
          name: name.to_string(),
          version,
          exports: exports_config.into_map(),
        })
      })
  }
pub fn resolve_member_dirs(
self: &WorkspaceRc,
) -> impl Iterator<Item = WorkspaceDirectoryRc> {
self
.config_folders()
.keys()
.map(|url| self.resolve_member_dir(url))
}
  /// Resolves a workspace directory, which can be used for deriving
  /// configuration specific to a member.
  ///
  /// Results are cached per folder url; a specifier that resolves to the
  /// root folder (or to no folder) yields the root workspace directory.
  pub fn resolve_member_dir(
    self: &WorkspaceRc,
    specifier: &Url,
  ) -> WorkspaceDirectoryRc {
    // the root folder is handled by the fallback below, so exclude it here
    let maybe_folder = self
      .resolve_folder(specifier)
      .filter(|(member_url, _)| **member_url != self.root_dir_url);
    let folder_url = maybe_folder
      .map(|(folder_url, _)| folder_url.clone())
      .unwrap_or_else(|| self.root_dir_url.clone());
    if let Some(dir) = self.cached.dirs.get(&folder_url).map(|d| d.clone()) {
      dir
    } else {
      let workspace_dir = match maybe_folder {
        Some((member_url, folder)) => {
          // a member's configs fall back to the nearest ancestor folder's
          // config, and finally to the root folder's config
          let maybe_deno_json = folder
            .deno_json
            .as_ref()
            .map(|c| (member_url, c))
            .or_else(|| {
              let parent = parent_specifier_str(member_url.as_str())?;
              self.resolve_deno_json_from_str(parent)
            })
            .or_else(|| {
              let root = self.config_folders.get(&self.root_dir_url).unwrap();
              root.deno_json.as_ref().map(|c| (&self.root_dir_url, c))
            });
          let maybe_pkg_json = folder
            .pkg_json
            .as_ref()
            .map(|pkg_json| (member_url, pkg_json))
            .or_else(|| {
              let parent = parent_specifier_str(member_url.as_str())?;
              self.resolve_pkg_json_from_str(parent)
            })
            .or_else(|| {
              let root = self.config_folders.get(&self.root_dir_url).unwrap();
              root.pkg_json.as_ref().map(|c| (&self.root_dir_url, c))
            });
          let maybe_root_folder = self.config_folders.get(&self.root_dir_url);
          WorkspaceDirectory {
            dir_url: member_url.clone(),
            pkg_json: WorkspaceDirConfig {
              root: maybe_root_folder.and_then(|dir| dir.pkg_json.clone()),
              // a config that resolved to the root is stored as the root
              // config only, not duplicated as the member config
              member: maybe_pkg_json.and_then(|(member_url, pkg_json)| {
                if *member_url == self.root_dir_url {
                  None
                } else {
                  Some(pkg_json.clone())
                }
              }),
            },
            deno_json: WorkspaceDirConfig {
              root: maybe_root_folder.and_then(|dir| dir.deno_json.clone()),
              member: maybe_deno_json.and_then(|(member_url, deno_json)| {
                if *member_url == self.root_dir_url {
                  None
                } else {
                  Some(deno_json.clone())
                }
              }),
            },
            workspace: self.clone(),
            cached: Default::default(),
          }
        }
        None => WorkspaceDirectory::create_from_root_folder(self.clone()),
      };
      let workspace_dir = new_rc(workspace_dir);
      self.cached.dirs.insert(folder_url, workspace_dir.clone());
      workspace_dir
    }
  }
  /// Resolves the deno.json applying to the specifier by searching its
  /// folder and then the ancestor folders.
  pub fn resolve_deno_json(
    &self,
    specifier: &Url,
  ) -> Option<(&UrlRc, &ConfigFileRc)> {
    self.resolve_deno_json_from_str(specifier.as_str())
  }
fn resolve_deno_json_from_str(
&self,
specifier: &str,
) -> Option<(&UrlRc, &ConfigFileRc)> {
let mut specifier = specifier;
if !specifier.ends_with('/') {
specifier = parent_specifier_str(specifier)?;
}
loop {
let (folder_url, folder) = self.resolve_folder_str(specifier)?;
if let Some(config) = folder.deno_json.as_ref() {
return Some((folder_url, config));
}
specifier = parent_specifier_str(folder_url.as_str())?;
}
}
fn resolve_pkg_json_from_str(
&self,
specifier: &str,
) -> Option<(&UrlRc, &PackageJsonRc)> {
let mut specifier = specifier;
if !specifier.ends_with('/') {
specifier = parent_specifier_str(specifier)?;
}
loop {
let (folder_url, folder) = self.resolve_folder_str(specifier)?;
if let Some(pkg_json) = folder.pkg_json.as_ref() {
return Some((folder_url, pkg_json));
}
specifier = parent_specifier_str(folder_url.as_str())?;
}
}
  /// Resolves the most specific config folder containing the specifier.
  pub fn resolve_folder(
    &self,
    specifier: &Url,
  ) -> Option<(&UrlRc, &FolderConfigs)> {
    self.resolve_folder_str(specifier.as_str())
  }
fn resolve_folder_str(
&self,
specifier: &str,
) -> Option<(&UrlRc, &FolderConfigs)> {
let mut best_match: Option<(&UrlRc, &FolderConfigs)> = None;
for (dir_url, config) in &self.config_folders {
if specifier.starts_with(dir_url.as_str())
&& (best_match.is_none()
|| dir_url.as_str().len() > best_match.unwrap().0.as_str().len())
{
best_match = Some((dir_url, config));
}
}
best_match
}
pub fn diagnostics(&self) -> Vec<WorkspaceDiagnostic> {
fn check_member_diagnostics(
member_config: &ConfigFile,
root_config: Option<&ConfigFile>,
diagnostics: &mut Vec<WorkspaceDiagnostic>,
) {
if member_config.json.import_map.is_some() {
diagnostics.push(WorkspaceDiagnostic {
config_url: member_config.specifier.clone(),
kind: WorkspaceDiagnosticKind::RootOnlyOption("importMap"),
});
} else if member_config.is_an_import_map()
&& root_config
.map(|c| {
c.json.import_map.is_some()
&& c.json.imports.is_none()
&& c.json.scopes.is_none()
})
.unwrap_or(false)
{
diagnostics.push(WorkspaceDiagnostic {
config_url: member_config.specifier.clone(),
kind: WorkspaceDiagnosticKind::MemberImportsScopesIgnored,
});
}
if member_config.json.lock.is_some() {
diagnostics.push(WorkspaceDiagnostic {
config_url: member_config.specifier.clone(),
kind: WorkspaceDiagnosticKind::RootOnlyOption("lock"),
});
}
if member_config.json.minimum_dependency_age.is_some() {
diagnostics.push(WorkspaceDiagnostic {
config_url: member_config.specifier.clone(),
kind: WorkspaceDiagnosticKind::RootOnlyOption("minimumDependencyAge"),
});
}
if member_config.json.node_modules_dir.is_some() {
diagnostics.push(WorkspaceDiagnostic {
config_url: member_config.specifier.clone(),
kind: WorkspaceDiagnosticKind::RootOnlyOption("nodeModulesDir"),
});
}
if member_config.json.links.is_some() {
diagnostics.push(WorkspaceDiagnostic {
config_url: member_config.specifier.clone(),
kind: WorkspaceDiagnosticKind::RootOnlyOption("links"),
});
}
if member_config.json.scopes.is_some() {
diagnostics.push(WorkspaceDiagnostic {
config_url: member_config.specifier.clone(),
kind: WorkspaceDiagnosticKind::RootOnlyOption("scopes"),
});
}
if !member_config.json.unstable.is_empty() {
diagnostics.push(WorkspaceDiagnostic {
config_url: member_config.specifier.clone(),
kind: WorkspaceDiagnosticKind::RootOnlyOption("unstable"),
});
}
if member_config.json.vendor.is_some() {
diagnostics.push(WorkspaceDiagnostic {
config_url: member_config.specifier.clone(),
kind: WorkspaceDiagnosticKind::RootOnlyOption("vendor"),
});
}
if member_config.json.workspace.is_some() {
diagnostics.push(WorkspaceDiagnostic {
config_url: member_config.specifier.clone(),
kind: WorkspaceDiagnosticKind::RootOnlyOption("workspace"),
});
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | true |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/config/glob/gitignore.rs | libs/config/glob/gitignore.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::collections::HashMap;
use std::path::Path;
use std::path::PathBuf;
use std::rc::Rc;
use sys_traits::FsMetadata;
use sys_traits::FsRead;
/// Resolved gitignore for a directory.
pub struct DirGitIgnores {
  // .gitignore found in this directory, if any
  current: Option<Rc<ignore::gitignore::Gitignore>>,
  // resolved gitignores of the ancestor directories, if any
  parent: Option<Rc<DirGitIgnores>>,
}
impl DirGitIgnores {
  /// Returns whether `path` should be considered gitignored.
  ///
  /// Ancestors are consulted first so the nearest `.gitignore` has the
  /// final say: an entry here can ignore or whitelist over an ancestor.
  pub fn is_ignored(&self, path: &Path, is_dir: bool) -> bool {
    // decision inherited from ancestor directories (false at the root)
    let inherited = self
      .parent
      .as_ref()
      .map(|parent| parent.is_ignored(path, is_dir))
      .unwrap_or(false);
    // this directory's .gitignore may override the inherited decision
    match self.current.as_ref().map(|c| c.matched(path, is_dir)) {
      Some(ignore::Match::Ignore(_)) => true,
      Some(ignore::Match::Whitelist(_)) => false,
      Some(ignore::Match::None) | None => inherited,
    }
  }
}
/// Resolves gitignores in a directory tree taking into account
/// ancestor gitignores that may be found in a directory.
pub struct GitIgnoreTree<'a, Sys: FsRead + FsMetadata> {
  sys: &'a Sys,
  // memoized resolutions keyed by directory path
  ignores: HashMap<PathBuf, Option<Rc<DirGitIgnores>>>,
  // paths forced to be un-ignored regardless of gitignore contents
  include_paths: Vec<PathBuf>,
}
impl<'a, Sys: FsRead + FsMetadata> GitIgnoreTree<'a, Sys> {
  pub fn new(
    sys: &'a Sys,
    // paths that should override what's in the gitignore
    include_paths: Vec<PathBuf>,
  ) -> Self {
    Self {
      sys,
      ignores: Default::default(),
      include_paths,
    }
  }
  /// Resolves the gitignore applying to the contents of `dir_path`.
  pub fn get_resolved_git_ignore_for_dir(
    &mut self,
    dir_path: &Path,
  ) -> Option<Rc<DirGitIgnores>> {
    // for directories, provide itself in order to tell
    // if it should stop searching for gitignores because
    // maybe this dir_path is a .git directory
    let parent = dir_path.parent()?;
    self.get_resolved_git_ignore_inner(parent, Some(dir_path))
  }
  /// Resolves the gitignore applying to `file_path`.
  pub fn get_resolved_git_ignore_for_file(
    &mut self,
    file_path: &Path,
  ) -> Option<Rc<DirGitIgnores>> {
    let dir_path = file_path.parent()?;
    self.get_resolved_git_ignore_inner(dir_path, None)
  }
  // memoized lookup into `self.ignores`
  fn get_resolved_git_ignore_inner(
    &mut self,
    dir_path: &Path,
    maybe_parent: Option<&Path>,
  ) -> Option<Rc<DirGitIgnores>> {
    let maybe_resolved = self.ignores.get(dir_path).cloned();
    if let Some(resolved) = maybe_resolved {
      resolved
    } else {
      let resolved = self.resolve_gitignore_in_dir(dir_path, maybe_parent);
      self.ignores.insert(dir_path.to_owned(), resolved.clone());
      resolved
    }
  }
  fn resolve_gitignore_in_dir(
    &mut self,
    dir_path: &Path,
    maybe_parent: Option<&Path>,
  ) -> Option<Rc<DirGitIgnores>> {
    if let Some(parent) = maybe_parent {
      // stop searching if the parent dir had a .git directory in it
      if self.sys.fs_exists_no_err(parent.join(".git")) {
        return None;
      }
    }
    // recursively resolve (and memoize) the ancestor gitignores first
    let parent = dir_path.parent().and_then(|parent| {
      self.get_resolved_git_ignore_inner(parent, Some(dir_path))
    });
    let current = self
      .sys
      .fs_read_to_string_lossy(dir_path.join(".gitignore"))
      .ok()
      .and_then(|text| {
        let mut builder = ignore::gitignore::GitignoreBuilder::new(dir_path);
        for line in text.lines() {
          builder.add_line(None, line).ok()?;
        }
        // override the gitignore contents to include these paths
        for path in &self.include_paths {
          if let Ok(suffix) = path.strip_prefix(dir_path) {
            let suffix = suffix.to_string_lossy().replace('\\', "/");
            let _ignore = builder.add_line(None, &format!("!/{}", suffix));
            if !suffix.ends_with('/') {
              let _ignore = builder.add_line(None, &format!("!/{}/", suffix));
            }
          }
        }
        let gitignore = builder.build().ok()?;
        Some(Rc::new(gitignore))
      });
    if parent.is_none() && current.is_none() {
      None
    } else {
      Some(Rc::new(DirGitIgnores { current, parent }))
    }
  }
}
#[cfg(test)]
mod test {
  use sys_traits::FsCreateDirAll;
  use sys_traits::FsWrite;
  use sys_traits::impls::InMemorySys;
  use super::*;
  #[test]
  fn git_ignore_tree() {
    // set up an in-memory fs with nested .gitignore files
    let sys = InMemorySys::default();
    sys.fs_create_dir_all("/sub_dir/sub_dir").unwrap();
    sys.fs_write("/.gitignore", "file.txt").unwrap();
    sys.fs_write("/sub_dir/.gitignore", "data.txt").unwrap();
    sys
      .fs_write("/sub_dir/sub_dir/.gitignore", "!file.txt\nignore.txt")
      .unwrap();
    let mut ignore_tree = GitIgnoreTree::new(&sys, Vec::new());
    // asserts whether the file at `path` is considered gitignored
    let mut run_test = |path: &str, expected: bool| {
      let path = PathBuf::from(path);
      let gitignore =
        ignore_tree.get_resolved_git_ignore_for_file(&path).unwrap();
      assert_eq!(
        gitignore.is_ignored(&path, /* is_dir */ false),
        expected,
        "Path: {}",
        path.display()
      );
    };
    run_test("/file.txt", true);
    run_test("/other.txt", false);
    run_test("/data.txt", false);
    run_test("/sub_dir/file.txt", true);
    run_test("/sub_dir/other.txt", false);
    run_test("/sub_dir/data.txt", true);
    run_test("/sub_dir/sub_dir/file.txt", false); // unignored up here
    run_test("/sub_dir/sub_dir/sub_dir/file.txt", false);
    run_test("/sub_dir/sub_dir/sub_dir/ignore.txt", true);
    run_test("/sub_dir/sub_dir/ignore.txt", true);
    run_test("/sub_dir/ignore.txt", false);
    run_test("/ignore.txt", false);
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/config/glob/mod.rs | libs/config/glob/mod.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::path::Path;
use std::path::PathBuf;
use deno_error::JsError;
use deno_path_util::normalize_path;
use deno_path_util::url_to_file_path;
use indexmap::IndexMap;
use thiserror::Error;
use url::Url;
use crate::UrlToFilePathError;
mod collector;
mod gitignore;
pub use collector::FileCollector;
pub use collector::WalkEntry;
/// Result of matching a path/specifier against a `FilePatterns`.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum FilePatternsMatch {
  /// File passes as matching, but further exclude matching (ex. .gitignore)
  /// may be necessary.
  Passed,
  /// File passes matching and further exclude matching (ex. .gitignore)
  /// should NOT be done.
  PassedOptedOutExclude,
  /// File was excluded.
  Excluded,
}
/// Kind of file system entry being matched.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum PathKind {
  File,
  Directory,
}
#[derive(Clone, Debug, Eq, Hash, PartialEq)]
pub struct FilePatterns {
  /// Default traversal base used when calling `split_by_base()` without
  /// any `include` patterns.
  pub base: PathBuf,
  /// Entries a path must match to be included; `None` means everything
  /// passes the include check.
  pub include: Option<PathOrPatternSet>,
  /// Entries that exclude a path (may contain negations).
  pub exclude: PathOrPatternSet,
}
impl FilePatterns {
pub fn new_with_base(base: PathBuf) -> Self {
Self {
base,
include: Default::default(),
exclude: Default::default(),
}
}
pub fn with_new_base(self, new_base: PathBuf) -> Self {
Self {
base: new_base,
..self
}
}
  /// Whether the specifier is not excluded by these patterns.
  pub fn matches_specifier(&self, specifier: &Url) -> bool {
    self.matches_specifier_detail(specifier) != FilePatternsMatch::Excluded
  }
  /// Matches a specifier, converting file urls to paths. Non-file urls
  /// (and unconvertible file urls) always pass and opt out of further
  /// exclude matching.
  pub fn matches_specifier_detail(&self, specifier: &Url) -> FilePatternsMatch {
    if specifier.scheme() != "file" {
      // can't do .gitignore on a non-file specifier
      return FilePatternsMatch::PassedOptedOutExclude;
    }
    let path = match url_to_file_path(specifier) {
      Ok(path) => path,
      Err(_) => return FilePatternsMatch::PassedOptedOutExclude,
    };
    self.matches_path_detail(&path, PathKind::File) // use file matching behavior
  }
  /// Whether the path is not excluded by these patterns.
  pub fn matches_path(&self, path: &Path, path_kind: PathKind) -> bool {
    self.matches_path_detail(path, path_kind) != FilePatternsMatch::Excluded
  }
  /// Matches a path against the include then the exclude entries.
  pub fn matches_path_detail(
    &self,
    path: &Path,
    path_kind: PathKind,
  ) -> FilePatternsMatch {
    // if there's an include list, only include files that match it
    // the include list is a closed set
    if let Some(include) = &self.include {
      match path_kind {
        PathKind::File => {
          if include.matches_path_detail(path) != PathOrPatternsMatch::Matched {
            return FilePatternsMatch::Excluded;
          }
        }
        PathKind::Directory => {
          // for now ignore the include list unless there's a negated
          // glob for the directory
          // (iterated in reverse because later entries take priority)
          for p in include.0.iter().rev() {
            match p.matches_path(path) {
              PathGlobMatch::Matched => {
                break;
              }
              PathGlobMatch::MatchedNegated => {
                return FilePatternsMatch::Excluded;
              }
              PathGlobMatch::NotMatched => {
                // keep going
              }
            }
          }
        }
      }
    }
    // the exclude list is an open set and we skip files not in the exclude list
    match self.exclude.matches_path_detail(path) {
      PathOrPatternsMatch::Matched => FilePatternsMatch::Excluded,
      PathOrPatternsMatch::NotMatched => FilePatternsMatch::Passed,
      PathOrPatternsMatch::Excluded => FilePatternsMatch::PassedOptedOutExclude,
    }
  }
  /// Creates a collection of `FilePatterns` where the containing patterns
  /// are only the ones applicable to the base.
  ///
  /// The order these are returned in is the order that the directory traversal
  /// should occur in.
  pub fn split_by_base(&self) -> Vec<Self> {
    let negated_excludes = self
      .exclude
      .0
      .iter()
      .filter(|e| e.is_negated())
      .collect::<Vec<_>>();
    let include = match &self.include {
      Some(include) => Cow::Borrowed(include),
      None => {
        if negated_excludes.is_empty() {
          // nothing to split by
          return vec![self.clone()];
        } else {
          // treat the base directory as the single include entry
          Cow::Owned(PathOrPatternSet::new(vec![PathOrPattern::Path(
            self.base.clone(),
          )]))
        }
      }
    };
    // partition the includes: plain paths, plain patterns, and negated
    // entries (which become excludes)
    let mut include_paths = Vec::with_capacity(include.0.len());
    let mut include_patterns = Vec::with_capacity(include.0.len());
    let mut exclude_patterns =
      Vec::with_capacity(include.0.len() + self.exclude.0.len());
    for path_or_pattern in &include.0 {
      match path_or_pattern {
        PathOrPattern::Path(path) => include_paths.push(path),
        PathOrPattern::NegatedPath(path) => {
          exclude_patterns.push(PathOrPattern::Path(path.clone()));
        }
        PathOrPattern::Pattern(pattern) => {
          if pattern.is_negated() {
            exclude_patterns.push(PathOrPattern::Pattern(pattern.as_negated()));
          } else {
            include_patterns.push(pattern.clone());
          }
        }
        PathOrPattern::RemoteUrl(_) => {}
      }
    }
    // group the include patterns by their literal (glob-free) base path
    let capacity = include_patterns.len() + negated_excludes.len();
    let mut include_patterns_by_base_path = include_patterns.into_iter().fold(
      IndexMap::with_capacity(capacity),
      |mut map: IndexMap<_, Vec<_>>, p| {
        map.entry(p.base_path()).or_default().push(p);
        map
      },
    );
    for p in &negated_excludes {
      if let Some(base_path) = p.base_path()
        && !include_patterns_by_base_path.contains_key(&base_path)
      {
        let has_any_base_parent = include_patterns_by_base_path
          .keys()
          .any(|k| base_path.starts_with(k))
          || include_paths.iter().any(|p| base_path.starts_with(p));
        // don't include an orphaned negated pattern
        if has_any_base_parent {
          include_patterns_by_base_path.insert(base_path, Vec::new());
        }
      }
    }
    let exclude_by_base_path = exclude_patterns
      .iter()
      .chain(self.exclude.0.iter())
      .filter_map(|s| Some((s.base_path()?, s)))
      .collect::<Vec<_>>();
    // selects the excludes relevant for a given base directory
    let get_applicable_excludes = |base_path: &PathBuf| -> Vec<PathOrPattern> {
      exclude_by_base_path
        .iter()
        .filter_map(|(exclude_base_path, exclude)| {
          match exclude {
            PathOrPattern::RemoteUrl(_) => None,
            PathOrPattern::Path(exclude_path)
            | PathOrPattern::NegatedPath(exclude_path) => {
              // include paths that's are sub paths or an ancestor path
              if base_path.starts_with(exclude_path)
                || exclude_path.starts_with(base_path)
              {
                Some((*exclude).clone())
              } else {
                None
              }
            }
            PathOrPattern::Pattern(_) => {
              // include globs that's are sub paths or an ancestor path
              if exclude_base_path.starts_with(base_path)
                || base_path.starts_with(exclude_base_path)
              {
                Some((*exclude).clone())
              } else {
                None
              }
            }
          }
        })
        .collect::<Vec<_>>()
    };
    let mut result = Vec::with_capacity(
      include_paths.len() + include_patterns_by_base_path.len(),
    );
    // one FilePatterns entry per literal include path
    for path in include_paths {
      let applicable_excludes = get_applicable_excludes(path);
      result.push(Self {
        base: path.clone(),
        include: if self.include.is_none() {
          None
        } else {
          Some(PathOrPatternSet::new(vec![PathOrPattern::Path(
            path.clone(),
          )]))
        },
        exclude: PathOrPatternSet::new(applicable_excludes),
      });
    }
    // todo(dsherret): This could be further optimized by not including
    // patterns that will only ever match another base.
    for base_path in include_patterns_by_base_path.keys() {
      let applicable_excludes = get_applicable_excludes(base_path);
      let mut applicable_includes = Vec::new();
      // get all patterns that apply to the current or ancestor directories
      for path in base_path.ancestors() {
        if let Some(patterns) = include_patterns_by_base_path.get(path) {
          applicable_includes.extend(
            patterns
              .iter()
              .map(|p| PathOrPattern::Pattern((*p).clone())),
          );
        }
      }
      result.push(Self {
        base: base_path.clone(),
        include: if self.include.is_none()
          || applicable_includes.is_empty()
            && self
              .include
              .as_ref()
              .map(|i| !i.0.is_empty())
              .unwrap_or(false)
        {
          None
        } else {
          Some(PathOrPatternSet::new(applicable_includes))
        },
        exclude: PathOrPatternSet::new(applicable_excludes),
      });
    }
    // Sort by the longest base path first. This ensures that we visit opted into
    // nested directories first before visiting the parent directory. The directory
    // traverser will handle not going into directories it's already been in.
    result.sort_by(|a, b| {
      // try looking at the parents first so that files in the same
      // folder are kept in the same order that they're provided
      let (a, b) =
        if let (Some(a), Some(b)) = (a.base.parent(), b.base.parent()) {
          (a, b)
        } else {
          (a.base.as_path(), b.base.as_path())
        };
      b.as_os_str().len().cmp(&a.as_os_str().len())
    });
    result
  }
}
/// Result of matching a path against a `PathOrPatternSet`.
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub enum PathOrPatternsMatch {
  Matched,
  NotMatched,
  /// Matched a negated entry (explicitly excluded).
  Excluded,
}
/// Error building an exclude `PathOrPatternSet`.
#[derive(Debug, Error, JsError)]
pub enum FromExcludeRelativePathOrPatternsError {
  #[class(type)]
  #[error(
    "The negation of '{negated_entry}' is never reached due to the higher priority '{entry}' exclude. Move '{negated_entry}' after '{entry}'."
  )]
  HigherPriorityExclude {
    negated_entry: String,
    entry: String,
  },
  #[class(inherit)]
  #[error("{0}")]
  PathOrPatternParse(#[from] PathOrPatternParseError),
}
/// An ordered list of paths and patterns where later entries take
/// priority over earlier ones when matching.
#[derive(Clone, Default, Debug, Hash, Eq, PartialEq)]
pub struct PathOrPatternSet(Vec<PathOrPattern>);
impl PathOrPatternSet {
  /// Creates a set from already-parsed elements.
  pub fn new(elements: Vec<PathOrPattern>) -> Self {
    Self(elements)
  }
pub fn from_absolute_paths(
paths: &[String],
) -> Result<Self, PathOrPatternParseError> {
Ok(Self(
paths
.iter()
.map(|p| PathOrPattern::new(p))
.collect::<Result<Vec<_>, _>>()?,
))
}
/// Builds the set of path and patterns for an "include" list.
pub fn from_include_relative_path_or_patterns(
base: &Path,
entries: &[String],
) -> Result<Self, PathOrPatternParseError> {
Ok(Self(
entries
.iter()
.map(|p| PathOrPattern::from_relative(base, p))
.collect::<Result<Vec<_>, _>>()?,
))
}
  /// Builds the set and ensures no negations are overruled by
  /// higher priority entries.
  pub fn from_exclude_relative_path_or_patterns(
    base: &Path,
    entries: &[String],
  ) -> Result<Self, FromExcludeRelativePathOrPatternsError> {
    // error when someone does something like:
    // exclude: ["!./a/b", "./a"] as it should be the opposite
    fn validate_entry(
      found_negated_paths: &Vec<(&str, PathBuf)>,
      entry: &str,
      entry_path: &Path,
    ) -> Result<(), FromExcludeRelativePathOrPatternsError> {
      for (negated_entry, negated_path) in found_negated_paths {
        // a later plain exclude covering an earlier negation makes that
        // negation unreachable (later entries take priority)
        if negated_path.starts_with(entry_path) {
          return Err(
            FromExcludeRelativePathOrPatternsError::HigherPriorityExclude {
              negated_entry: negated_entry.to_string(),
              entry: entry.to_string(),
            },
          );
        }
      }
      Ok(())
    }
    let mut found_negated_paths: Vec<(&str, PathBuf)> =
      Vec::with_capacity(entries.len());
    let mut result = Vec::with_capacity(entries.len());
    for entry in entries {
      let p = PathOrPattern::from_relative(base, entry)?;
      match &p {
        PathOrPattern::Path(p) => {
          validate_entry(&found_negated_paths, entry, p)?;
        }
        PathOrPattern::NegatedPath(p) => {
          found_negated_paths.push((entry.as_str(), p.clone()));
        }
        PathOrPattern::RemoteUrl(_) => {
          // ignore
        }
        PathOrPattern::Pattern(p) => {
          if p.is_negated() {
            let base_path = p.base_path();
            found_negated_paths.push((entry.as_str(), base_path));
          }
        }
      }
      result.push(p);
    }
    Ok(Self(result))
  }
  /// Borrows the underlying elements.
  pub fn inner(&self) -> &Vec<PathOrPattern> {
    &self.0
  }
  /// Mutably borrows the underlying elements.
  pub fn inner_mut(&mut self) -> &mut Vec<PathOrPattern> {
    &mut self.0
  }
  /// Consumes the set, returning its elements.
  pub fn into_path_or_patterns(self) -> Vec<PathOrPattern> {
    self.0
  }
  /// Whether the path positively matches this set.
  pub fn matches_path(&self, path: &Path) -> bool {
    self.matches_path_detail(path) == PathOrPatternsMatch::Matched
  }
  /// Matches the path against the set. Entries are checked in reverse
  /// so that later entries (including negations) take priority.
  pub fn matches_path_detail(&self, path: &Path) -> PathOrPatternsMatch {
    for p in self.0.iter().rev() {
      match p.matches_path(path) {
        PathGlobMatch::Matched => return PathOrPatternsMatch::Matched,
        PathGlobMatch::MatchedNegated => return PathOrPatternsMatch::Excluded,
        PathGlobMatch::NotMatched => {
          // ignore
        }
      }
    }
    PathOrPatternsMatch::NotMatched
  }
pub fn base_paths(&self) -> Vec<PathBuf> {
let mut result = Vec::with_capacity(self.0.len());
for element in &self.0 {
match element {
PathOrPattern::Path(path) | PathOrPattern::NegatedPath(path) => {
result.push(path.to_path_buf());
}
PathOrPattern::RemoteUrl(_) => {
// ignore
}
PathOrPattern::Pattern(pattern) => {
result.push(pattern.base_path());
}
}
}
result
}
  /// Appends an entry (later entries have higher match priority).
  pub fn push(&mut self, item: PathOrPattern) {
    self.0.push(item);
  }
  /// Appends multiple entries.
  pub fn append(&mut self, items: impl Iterator<Item = PathOrPattern>) {
    self.0.extend(items)
  }
}
/// Error parsing a string as a url.
#[derive(Debug, Error, JsError, Clone)]
#[class(inherit)]
#[error("Invalid URL '{}'", url)]
pub struct UrlParseError {
  url: String,
  #[source]
  #[inherit]
  source: url::ParseError,
}
/// Error parsing a string into a `PathOrPattern`.
#[derive(Debug, Error, JsError)]
pub enum PathOrPatternParseError {
  #[class(inherit)]
  #[error(transparent)]
  UrlParse(#[from] UrlParseError),
  #[class(inherit)]
  #[error(transparent)]
  UrlToFilePathError(#[from] UrlToFilePathError),
  #[class(inherit)]
  #[error(transparent)]
  GlobParse(#[from] GlobPatternParseError),
}
/// A single matchable entry: a literal path, a negated (`!`) path, a
/// remote url, or a glob pattern.
#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub enum PathOrPattern {
  Path(PathBuf),
  NegatedPath(PathBuf),
  RemoteUrl(Url),
  Pattern(GlobPattern),
}
impl PathOrPattern {
  /// Parses an absolute path, glob pattern, or url string.
  pub fn new(path: &str) -> Result<Self, PathOrPatternParseError> {
    if has_url_prefix(path) {
      let url = Url::parse(path).map_err(|err| UrlParseError {
        url: path.to_string(),
        source: err,
      })?;
      // file urls become local paths; other schemes stay remote urls
      if url.scheme() == "file" {
        let path = url_to_file_path(&url)?;
        return Ok(Self::Path(path));
      } else {
        return Ok(Self::RemoteUrl(url));
      }
    }
    GlobPattern::new_if_pattern(path)
      .map(|maybe_pattern| {
        maybe_pattern
          .map(PathOrPattern::Pattern)
          .unwrap_or_else(|| {
            // no glob chars: treat as a normalized literal path
            PathOrPattern::Path(
              normalize_path(Cow::Borrowed(Path::new(path))).into_owned(),
            )
          })
      })
      .map_err(|err| err.into())
  }
  /// Parses an entry relative to `base` (globs, urls, and `!`-prefixed
  /// negated paths are all supported).
  pub fn from_relative(
    base: &Path,
    p: &str,
  ) -> Result<PathOrPattern, PathOrPatternParseError> {
    if is_glob_pattern(p) {
      GlobPattern::from_relative(base, p)
        .map(PathOrPattern::Pattern)
        .map_err(|err| err.into())
    } else if has_url_prefix(p) {
      PathOrPattern::new(p)
    } else if let Some(path) = p.strip_prefix('!') {
      Ok(PathOrPattern::NegatedPath(
        normalize_path(Cow::Owned(base.join(path))).into_owned(),
      ))
    } else {
      Ok(PathOrPattern::Path(
        normalize_path(Cow::Owned(base.join(p))).into_owned(),
      ))
    }
  }
pub fn matches_path(&self, path: &Path) -> PathGlobMatch {
match self {
PathOrPattern::Path(p) => {
if path.starts_with(p) {
PathGlobMatch::Matched
} else {
PathGlobMatch::NotMatched
}
}
PathOrPattern::NegatedPath(p) => {
if path.starts_with(p) {
PathGlobMatch::MatchedNegated
} else {
PathGlobMatch::NotMatched
}
}
PathOrPattern::RemoteUrl(_) => PathGlobMatch::NotMatched,
PathOrPattern::Pattern(p) => p.matches_path(path),
}
}
  /// Returns the base path of the pattern if it's not a remote url pattern
  /// (paths return themselves; globs their literal leading components).
  pub fn base_path(&self) -> Option<PathBuf> {
    match self {
      PathOrPattern::Path(p) | PathOrPattern::NegatedPath(p) => Some(p.clone()),
      PathOrPattern::RemoteUrl(_) => None,
      PathOrPattern::Pattern(p) => Some(p.base_path()),
    }
  }
  /// If this is a negated path or negated pattern.
  pub fn is_negated(&self) -> bool {
    match self {
      PathOrPattern::Path(_) => false,
      PathOrPattern::NegatedPath(_) => true,
      PathOrPattern::RemoteUrl(_) => false,
      PathOrPattern::Pattern(p) => p.is_negated(),
    }
  }
}
/// Result of matching a path against a single path or glob entry.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum PathGlobMatch {
  Matched,
  /// Matched a negated entry.
  MatchedNegated,
  NotMatched,
}
/// Error parsing a string as a glob pattern.
#[derive(Debug, Error, JsError)]
#[class(type)]
#[error("Failed to expand glob: \"{pattern}\"")]
pub struct GlobPatternParseError {
  pattern: String,
  #[source]
  source: glob::PatternError,
}
/// A parsed glob pattern, possibly negated (originally `!`-prefixed).
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct GlobPattern {
  is_negated: bool,
  pattern: glob::Pattern,
}
impl GlobPattern {
pub fn new_if_pattern(
pattern: &str,
) -> Result<Option<Self>, GlobPatternParseError> {
if !is_glob_pattern(pattern) {
return Ok(None);
}
Self::new(pattern).map(Some)
}
  /// Parses a glob pattern; a leading `!` marks it as negated.
  pub fn new(pattern: &str) -> Result<Self, GlobPatternParseError> {
    let (is_negated, pattern) = match pattern.strip_prefix('!') {
      Some(pattern) => (true, pattern),
      None => (false, pattern),
    };
    // brackets are escaped (unsupported) and backslashes normalized to
    // forward slashes so windows-style paths work
    let pattern = escape_brackets(pattern).replace('\\', "/");
    let pattern =
      glob::Pattern::new(&pattern).map_err(|source| GlobPatternParseError {
        pattern: pattern.to_string(),
        source,
      })?;
    Ok(Self {
      is_negated,
      pattern,
    })
  }
  /// Parses a pattern relative to `base`, preserving a leading `!`.
  pub fn from_relative(
    base: &Path,
    p: &str,
  ) -> Result<Self, GlobPatternParseError> {
    let (is_negated, p) = match p.strip_prefix('!') {
      Some(p) => (true, p),
      None => (false, p),
    };
    let base_str = base.to_string_lossy().replace('\\', "/");
    // normalize "./pattern" and strip a trailing slash
    let p = p.strip_prefix("./").unwrap_or(p);
    let p = p.strip_suffix('/').unwrap_or(p);
    let pattern = capacity_builder::StringBuilder::<String>::build(|builder| {
      if is_negated {
        builder.append('!');
      }
      builder.append(&base_str);
      if !base_str.ends_with('/') {
        builder.append('/');
      }
      builder.append(p);
    })
    .unwrap();
    GlobPattern::new(&pattern)
  }
  /// The pattern text, with the `!` prefix re-added when negated.
  pub fn as_str(&self) -> Cow<'_, str> {
    if self.is_negated {
      Cow::Owned(format!("!{}", self.pattern.as_str()))
    } else {
      Cow::Borrowed(self.pattern.as_str())
    }
  }
  /// Matches the path, reporting negated matches separately.
  pub fn matches_path(&self, path: &Path) -> PathGlobMatch {
    if self.pattern.matches_path_with(path, match_options()) {
      if self.is_negated {
        PathGlobMatch::MatchedNegated
      } else {
        PathGlobMatch::Matched
      }
    } else {
      PathGlobMatch::NotMatched
    }
  }
  /// The literal (glob-char free) leading components of the pattern,
  /// joined with the platform's path separator.
  pub fn base_path(&self) -> PathBuf {
    let base_path = self
      .pattern
      .as_str()
      .split('/')
      .take_while(|c| !has_glob_chars(c))
      .collect::<Vec<_>>()
      .join(std::path::MAIN_SEPARATOR_STR);
    PathBuf::from(base_path)
  }
  /// If this pattern was prefixed with `!`.
  pub fn is_negated(&self) -> bool {
    self.is_negated
  }
  // flips the negation flag (used when splitting include/exclude sets)
  fn as_negated(&self) -> GlobPattern {
    Self {
      is_negated: !self.is_negated,
      pattern: self.pattern.clone(),
    }
  }
}
/// Whether the text should be treated as a glob pattern: it contains
/// glob characters and does not look like a url.
pub fn is_glob_pattern(path: &str) -> bool {
  if has_url_prefix(path) {
    return false;
  }
  has_glob_chars(path)
}
/// Whether the text starts with a scheme this module treats as a url.
fn has_url_prefix(pattern: &str) -> bool {
  const URL_PREFIXES: [&str; 5] =
    ["http://", "https://", "file://", "npm:", "jsr:"];
  URL_PREFIXES
    .iter()
    .any(|prefix| pattern.starts_with(prefix))
}
/// Whether the text contains any supported glob characters.
fn has_glob_chars(pattern: &str) -> bool {
  // we don't support [ and ]
  // (both chars are ASCII, so a byte scan is equivalent to a char scan)
  pattern.bytes().any(|b| b == b'*' || b == b'?')
}
/// Escapes `[` and `]` so they match literally in a glob.
fn escape_brackets(pattern: &str) -> String {
  // Escape brackets - we currently don't support them, because with introduction
  // of glob expansion paths like "pages/[id].ts" would suddenly start giving
  // wrong results. We might want to revisit that in the future.
  //
  // A single pass is required here: chaining
  // `.replace('[', "[[]").replace(']', "[]]")` re-processes the `]`
  // inserted by the first replacement, turning `[` into `[[[]]` (a
  // character class matching `[` followed by a literal `]`), which
  // breaks matching of paths that contain `[`.
  let mut escaped = String::with_capacity(pattern.len());
  for c in pattern.chars() {
    match c {
      '[' => escaped.push_str("[[]"),
      ']' => escaped.push_str("[]]"),
      _ => escaped.push(c),
    }
  }
  escaped
}
/// Glob matching options shared by all pattern matching in this module.
fn match_options() -> glob::MatchOptions {
  // Matches what `deno_task_shell` does
  glob::MatchOptions {
    // false because it should work the same way on case insensitive file systems
    case_sensitive: false,
    // true because it copies what sh does
    require_literal_separator: true,
    // true because it copies what sh does—these files are considered "hidden"
    require_literal_leading_dot: true,
  }
}
#[cfg(test)]
mod test {
use std::error::Error;
use deno_path_util::url_from_directory_path;
use pretty_assertions::assert_eq;
use tempfile::TempDir;
use super::*;
// For easier comparisons in tests.
#[derive(Debug, PartialEq, Eq)]
struct ComparableFilePatterns {
base: String,
include: Option<Vec<String>>,
exclude: Vec<String>,
}
impl ComparableFilePatterns {
pub fn new(root: &Path, file_patterns: &FilePatterns) -> Self {
fn path_to_string(root: &Path, path: &Path) -> String {
path
.strip_prefix(root)
.unwrap()
.to_string_lossy()
.replace('\\', "/")
}
fn path_or_pattern_to_string(
root: &Path,
p: &PathOrPattern,
) -> Option<String> {
match p {
PathOrPattern::RemoteUrl(_) => None,
PathOrPattern::Path(p) => Some(path_to_string(root, p)),
PathOrPattern::NegatedPath(p) => {
Some(format!("!{}", path_to_string(root, p)))
}
PathOrPattern::Pattern(p) => {
let was_negated = p.is_negated();
let p = if was_negated {
p.as_negated()
} else {
p.clone()
};
let text = p
.as_str()
.strip_prefix(&format!(
"{}/",
root.to_string_lossy().replace('\\', "/")
))
.unwrap_or_else(|| panic!("pattern: {:?}, root: {:?}", p, root))
.to_string();
Some(if was_negated {
format!("!{}", text)
} else {
text
})
}
}
}
Self {
base: path_to_string(root, &file_patterns.base),
include: file_patterns.include.as_ref().map(|p| {
p.0
.iter()
.filter_map(|p| path_or_pattern_to_string(root, p))
.collect()
}),
exclude: file_patterns
.exclude
.0
.iter()
.filter_map(|p| path_or_pattern_to_string(root, p))
.collect(),
}
}
pub fn from_split(
root: &Path,
patterns_by_base: &[FilePatterns],
) -> Vec<ComparableFilePatterns> {
patterns_by_base
.iter()
.map(|file_patterns| ComparableFilePatterns::new(root, file_patterns))
.collect()
}
}
#[test]
fn file_patterns_split_by_base_dir() {
let temp_dir = TempDir::new().unwrap();
let patterns = FilePatterns {
base: temp_dir.path().to_path_buf(),
include: Some(PathOrPatternSet::new(vec![
PathOrPattern::Pattern(
GlobPattern::new(&format!(
"{}/inner/**/*.ts",
temp_dir.path().to_string_lossy().replace('\\', "/")
))
.unwrap(),
),
PathOrPattern::Pattern(
GlobPattern::new(&format!(
"{}/inner/sub/deeper/**/*.js",
temp_dir.path().to_string_lossy().replace('\\', "/")
))
.unwrap(),
),
PathOrPattern::Pattern(
GlobPattern::new(&format!(
"{}/other/**/*.js",
temp_dir.path().to_string_lossy().replace('\\', "/")
))
.unwrap(),
),
PathOrPattern::from_relative(temp_dir.path(), "!./other/**/*.ts")
.unwrap(),
PathOrPattern::from_relative(temp_dir.path(), "sub/file.ts").unwrap(),
])),
exclude: PathOrPatternSet::new(vec![
PathOrPattern::Pattern(
GlobPattern::new(&format!(
"{}/inner/other/**/*.ts",
temp_dir.path().to_string_lossy().replace('\\', "/")
))
.unwrap(),
),
PathOrPattern::Path(
temp_dir
.path()
.join("inner/sub/deeper/file.js")
.to_path_buf(),
),
]),
};
let split = ComparableFilePatterns::from_split(
temp_dir.path(),
&patterns.split_by_base(),
);
assert_eq!(
split,
vec![
ComparableFilePatterns {
base: "inner/sub/deeper".to_string(),
include: Some(vec![
"inner/sub/deeper/**/*.js".to_string(),
"inner/**/*.ts".to_string(),
]),
exclude: vec!["inner/sub/deeper/file.js".to_string()],
},
ComparableFilePatterns {
base: "sub/file.ts".to_string(),
include: Some(vec!["sub/file.ts".to_string()]),
exclude: vec![],
},
ComparableFilePatterns {
base: "inner".to_string(),
include: Some(vec!["inner/**/*.ts".to_string()]),
exclude: vec![
"inner/other/**/*.ts".to_string(),
"inner/sub/deeper/file.js".to_string(),
],
},
ComparableFilePatterns {
base: "other".to_string(),
include: Some(vec!["other/**/*.js".to_string()]),
exclude: vec!["other/**/*.ts".to_string()],
}
]
);
}
#[test]
fn file_patterns_split_by_base_dir_unexcluded() {
let temp_dir = TempDir::new().unwrap();
let patterns = FilePatterns {
base: temp_dir.path().to_path_buf(),
include: None,
exclude: PathOrPatternSet::new(vec![
PathOrPattern::from_relative(temp_dir.path(), "./ignored").unwrap(),
PathOrPattern::from_relative(temp_dir.path(), "!./ignored/unexcluded")
.unwrap(),
PathOrPattern::from_relative(temp_dir.path(), "!./ignored/test/**")
.unwrap(),
]),
};
let split = ComparableFilePatterns::from_split(
temp_dir.path(),
&patterns.split_by_base(),
);
assert_eq!(
split,
vec![
ComparableFilePatterns {
base: "ignored/unexcluded".to_string(),
include: None,
exclude: vec![
// still keeps the higher level exclude for cases
// where these two are accidentally swapped
"ignored".to_string(),
// keep the glob for the current dir because it
// could be used to override the .gitignore
"!ignored/unexcluded".to_string(),
],
},
ComparableFilePatterns {
base: "ignored/test".to_string(),
include: None,
exclude: vec!["ignored".to_string(), "!ignored/test/**".to_string(),],
},
ComparableFilePatterns {
base: "".to_string(),
include: None,
exclude: vec![
"ignored".to_string(),
"!ignored/unexcluded".to_string(),
"!ignored/test/**".to_string(),
],
},
]
);
}
#[test]
fn file_patterns_split_by_base_dir_unexcluded_with_path_includes() {
let temp_dir = TempDir::new().unwrap();
let patterns = FilePatterns {
base: temp_dir.path().to_path_buf(),
include: Some(PathOrPatternSet::new(vec![
PathOrPattern::from_relative(temp_dir.path(), "./sub").unwrap(),
])),
exclude: PathOrPatternSet::new(vec![
PathOrPattern::from_relative(temp_dir.path(), "./sub/ignored").unwrap(),
PathOrPattern::from_relative(temp_dir.path(), "!./sub/ignored/test/**")
.unwrap(),
PathOrPattern::from_relative(temp_dir.path(), "./orphan").unwrap(),
PathOrPattern::from_relative(temp_dir.path(), "!./orphan/test/**")
.unwrap(),
]),
};
let split = ComparableFilePatterns::from_split(
temp_dir.path(),
&patterns.split_by_base(),
);
assert_eq!(
split,
vec![
ComparableFilePatterns {
base: "sub/ignored/test".to_string(),
include: None,
exclude: vec![
"sub/ignored".to_string(),
"!sub/ignored/test/**".to_string(),
],
},
ComparableFilePatterns {
base: "sub".to_string(),
include: Some(vec!["sub".to_string()]),
exclude: vec![
"sub/ignored".to_string(),
"!sub/ignored/test/**".to_string(),
],
},
]
);
}
#[test]
fn file_patterns_split_by_base_dir_unexcluded_with_glob_includes() {
let temp_dir = TempDir::new().unwrap();
let patterns = FilePatterns {
base: temp_dir.path().to_path_buf(),
include: Some(PathOrPatternSet::new(vec![
PathOrPattern::from_relative(temp_dir.path(), "./sub/**").unwrap(),
])),
exclude: PathOrPatternSet::new(vec![
PathOrPattern::from_relative(temp_dir.path(), "./sub/ignored").unwrap(),
PathOrPattern::from_relative(temp_dir.path(), "!./sub/ignored/test/**")
.unwrap(),
PathOrPattern::from_relative(temp_dir.path(), "!./orphan/test/**")
.unwrap(),
PathOrPattern::from_relative(temp_dir.path(), "!orphan/other").unwrap(),
]),
};
let split = ComparableFilePatterns::from_split(
temp_dir.path(),
&patterns.split_by_base(),
);
assert_eq!(
split,
vec![
ComparableFilePatterns {
base: "sub/ignored/test".to_string(),
include: Some(vec!["sub/**".to_string()]),
exclude: vec![
"sub/ignored".to_string(),
"!sub/ignored/test/**".to_string()
],
},
ComparableFilePatterns {
base: "sub".to_string(),
include: Some(vec!["sub/**".to_string()]),
exclude: vec![
"sub/ignored".to_string(),
"!sub/ignored/test/**".to_string(),
],
}
]
);
}
#[test]
fn file_patterns_split_by_base_dir_opposite_exclude() {
let temp_dir = TempDir::new().unwrap();
let patterns = FilePatterns {
base: temp_dir.path().to_path_buf(),
include: None,
// this will actually error before it gets here in integration,
// but it's best to ensure it's handled anyway
exclude: PathOrPatternSet::new(vec![
// this won't be unexcluded because it's lower priority than the entry below
PathOrPattern::from_relative(temp_dir.path(), "!./sub/ignored/test/")
.unwrap(),
// this is higher priority
PathOrPattern::from_relative(temp_dir.path(), "./sub/ignored").unwrap(),
]),
};
let split = ComparableFilePatterns::from_split(
temp_dir.path(),
&patterns.split_by_base(),
);
assert_eq!(
split,
vec![
ComparableFilePatterns {
base: "sub/ignored/test".to_string(),
include: None,
exclude: vec![
"!sub/ignored/test".to_string(),
"sub/ignored".to_string(),
],
},
ComparableFilePatterns {
base: "".to_string(),
include: None,
exclude: vec![
"!sub/ignored/test".to_string(),
"sub/ignored".to_string(),
],
},
]
);
}
#[test]
fn file_patterns_split_by_base_dir_exclude_unexcluded_and_glob() {
let temp_dir = TempDir::new().unwrap();
let patterns = FilePatterns {
base: temp_dir.path().to_path_buf(),
include: None,
exclude: PathOrPatternSet::new(vec![
PathOrPattern::from_relative(temp_dir.path(), "./sub/ignored").unwrap(),
PathOrPattern::from_relative(temp_dir.path(), "!./sub/ignored/test/")
.unwrap(),
PathOrPattern::from_relative(temp_dir.path(), "./sub/ignored/**/*.ts")
.unwrap(),
]),
};
let split = ComparableFilePatterns::from_split(
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | true |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/config/glob/collector.rs | libs/config/glob/collector.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::collections::HashSet;
use std::collections::VecDeque;
use std::path::Path;
use std::path::PathBuf;
use deno_path_util::normalize_path;
use sys_traits::FsDirEntry;
use sys_traits::FsMetadata;
use sys_traits::FsMetadataValue;
use sys_traits::FsRead;
use sys_traits::FsReadDir;
use super::FilePatterns;
use crate::glob::FilePatternsMatch;
use crate::glob::PathKind;
use crate::glob::PathOrPattern;
use crate::glob::gitignore::DirGitIgnores;
use crate::glob::gitignore::GitIgnoreTree;
/// A single file-system entry handed to the [`FileCollector`] filter
/// while walking.
#[derive(Debug, Clone)]
pub struct WalkEntry<'a> {
  /// Path of the entry being visited.
  pub path: &'a Path,
  /// Metadata of the entry (used to distinguish files from directories).
  pub metadata: &'a dyn FsMetadataValue,
  /// The per-base patterns this entry was matched against.
  pub patterns: &'a FilePatterns,
}
/// Collects file paths that satisfy the given predicate, by recursively walking `files`.
/// If the walker visits a path that is listed in `ignore`, it skips descending into the directory.
pub struct FileCollector<TFilter: Fn(WalkEntry) -> bool> {
  // predicate deciding whether a matched file is kept in the result
  file_filter: TFilter,
  // when true, `.git` directories are skipped (unless explicitly specified)
  ignore_git_folder: bool,
  // when true, `node_modules` directories are skipped (unless explicitly specified)
  ignore_node_modules: bool,
  // a specific vendor directory to skip descending into, if any
  vendor_folder: Option<PathBuf>,
  // when true, `.gitignore` files are honored during the walk
  use_gitignore: bool,
}
impl<TFilter: Fn(WalkEntry) -> bool> FileCollector<TFilter> {
  /// Creates a collector whose results are filtered by `file_filter`.
  /// All ignore options default to off.
  pub fn new(file_filter: TFilter) -> Self {
    Self {
      file_filter,
      ignore_git_folder: false,
      ignore_node_modules: false,
      vendor_folder: None,
      use_gitignore: false,
    }
  }
  /// Skip descending into `node_modules` directories.
  pub fn ignore_node_modules(mut self) -> Self {
    self.ignore_node_modules = true;
    self
  }
  /// Skip descending into the given vendor directory, if provided.
  pub fn set_vendor_folder(mut self, vendor_folder: Option<PathBuf>) -> Self {
    self.vendor_folder = vendor_folder;
    self
  }
  /// Skip descending into `.git` directories.
  pub fn ignore_git_folder(mut self) -> Self {
    self.ignore_git_folder = true;
    self
  }
  /// Honor `.gitignore` files found during the walk.
  pub fn use_gitignore(mut self) -> Self {
    self.use_gitignore = true;
    self
  }
  /// Walks the file system from each base of `file_patterns` and returns
  /// the files that match the patterns, pass the file filter, and (when
  /// gitignore support is enabled) are not gitignored.
  pub fn collect_file_patterns<TSys: FsRead + FsMetadata + FsReadDir>(
    &self,
    sys: &TSys,
    file_patterns: &FilePatterns,
  ) -> Vec<PathBuf> {
    // returns true when the path passes both the file patterns and
    // (for plain "Passed" matches) the resolved gitignore
    fn is_pattern_matched(
      maybe_git_ignore: Option<&DirGitIgnores>,
      path: &Path,
      is_dir: bool,
      file_patterns: &FilePatterns,
    ) -> bool {
      let path_kind = match is_dir {
        true => PathKind::Directory,
        false => PathKind::File,
      };
      match file_patterns.matches_path_detail(path, path_kind) {
        FilePatternsMatch::Passed => {
          // check gitignore
          let is_gitignored = maybe_git_ignore
            .as_ref()
            .map(|git_ignore| git_ignore.is_ignored(path, is_dir))
            .unwrap_or(false);
          !is_gitignored
        }
        // an explicit un-exclude overrides the gitignore
        FilePatternsMatch::PassedOptedOutExclude => true,
        FilePatternsMatch::Excluded => false,
      }
    }
    let mut maybe_git_ignores = if self.use_gitignore {
      // Override explicitly specified include paths in the
      // .gitignore file. This does not apply to globs because
      // that is way too complicated to reason about.
      let include_paths = file_patterns
        .include
        .as_ref()
        .map(|include| {
          include
            .inner()
            .iter()
            .filter_map(|path_or_pattern| {
              if let PathOrPattern::Path(p) = path_or_pattern {
                Some(p.clone())
              } else {
                None
              }
            })
            .collect::<Vec<_>>()
        })
        .unwrap_or_default();
      Some(GitIgnoreTree::new(sys, include_paths))
    } else {
      None
    };
    let mut target_files = Vec::new();
    let mut visited_paths: HashSet<PathBuf> = HashSet::default();
    // walk each base directory produced by splitting the patterns
    let file_patterns_by_base = file_patterns.split_by_base();
    for file_patterns in file_patterns_by_base {
      let specified_path = normalize_path(Cow::Borrowed(&file_patterns.base));
      let mut pending_dirs = VecDeque::new();
      // processes one entry: queues matching directories for traversal
      // and pushes matching files into `target_files`
      let mut handle_entry =
        |path: PathBuf,
         metadata: &dyn FsMetadataValue,
         pending_dirs: &mut VecDeque<PathBuf>| {
          let maybe_gitignore =
            maybe_git_ignores.as_mut().and_then(|git_ignores| {
              if metadata.file_type().is_dir() {
                git_ignores.get_resolved_git_ignore_for_dir(&path)
              } else {
                git_ignores.get_resolved_git_ignore_for_file(&path)
              }
            });
          if !is_pattern_matched(
            maybe_gitignore.as_deref(),
            &path,
            metadata.file_type().is_dir(),
            &file_patterns,
          ) {
            // ignore
          } else if metadata.file_type().is_dir() {
            // allow the user to opt out of ignoring by explicitly specifying the dir
            let opt_out_ignore = specified_path == path;
            let should_ignore_dir =
              !opt_out_ignore && self.is_ignored_dir(&path);
            if !should_ignore_dir && visited_paths.insert(path.clone()) {
              pending_dirs.push_back(path);
            }
          } else if (self.file_filter)(WalkEntry {
            path: &path,
            metadata,
            patterns: &file_patterns,
          }) && visited_paths.insert(path.clone())
          {
            target_files.push(path);
          }
        };
      // seed the walk with the base path itself (file or directory)
      if let Ok(metadata) = sys.fs_metadata(&specified_path) {
        handle_entry(
          specified_path.to_path_buf(),
          &metadata,
          &mut pending_dirs,
        );
      }
      // use an iterator in order to minimize the number of file system operations
      while let Some(next_dir) = pending_dirs.pop_front() {
        // unreadable directories/entries are silently skipped (best effort)
        let Ok(entries) = sys.fs_read_dir(&next_dir) else {
          continue;
        };
        for entry in entries {
          let Ok(entry) = entry else {
            continue;
          };
          let Ok(metadata) = entry.metadata() else {
            continue;
          };
          handle_entry(entry.path().into_owned(), &metadata, &mut pending_dirs)
        }
      }
    }
    target_files
  }
  /// Whether `path` is a directory the collector was configured to skip
  /// (`node_modules`, `.git`, or the configured vendor folder).
  fn is_ignored_dir(&self, path: &Path) -> bool {
    path
      .file_name()
      .map(|dir_name| {
        // case-insensitive comparison of the final path component
        let dir_name = dir_name.to_string_lossy().to_lowercase();
        match dir_name.as_str() {
          "node_modules" => self.ignore_node_modules,
          ".git" => self.ignore_git_folder,
          _ => false,
        }
      })
      .unwrap_or(false)
      || self.is_vendor_folder(path)
  }
  /// Whether `path` is exactly the configured vendor folder.
  fn is_vendor_folder(&self, path: &Path) -> bool {
    self
      .vendor_folder
      .as_ref()
      .map(|vendor_folder| path == *vendor_folder)
      .unwrap_or(false)
  }
}
#[cfg(test)]
mod test {
  use std::path::PathBuf;
  use sys_traits::impls::RealSys;
  use tempfile::TempDir;
  use super::*;
  use crate::glob::FilePatterns;
  use crate::glob::PathOrPattern;
  use crate::glob::PathOrPatternSet;
  #[allow(clippy::disallowed_methods)] // allow fs methods
  #[test]
  fn test_collect_files() {
    // creates `dir_path` and writes an empty file for each name in `files`
    fn create_files(dir_path: &PathBuf, files: &[&str]) {
      std::fs::create_dir_all(dir_path).unwrap();
      for f in files {
        std::fs::write(dir_path.join(f), "").unwrap();
      }
    }
    // layout under the temp dir:
    // dir.ts
    // ├── a.ts
    // ├── b.js
    // ├── child
    // │   ├── .git
    // │   │   └── git.js
    // │   ├── node_modules
    // │   │   └── node_modules.js
    // │   ├── vendor
    // │   │   └── vendor.js
    // │   ├── e.mjs
    // │   ├── f.mjsx
    // │   ├── .foo.TS
    // │   └── README.md
    // ├── c.tsx
    // ├── d.jsx
    // └── ignore
    //     ├── g.d.ts
    //     └── .gitignore
    let t = TempDir::new().unwrap();
    let root_dir_path = t.path().join("dir.ts");
    let root_dir_files = ["a.ts", "b.js", "c.tsx", "d.jsx"];
    create_files(&root_dir_path, &root_dir_files);
    let child_dir_path = root_dir_path.join("child");
    let child_dir_files = ["e.mjs", "f.mjsx", ".foo.TS", "README.md"];
    create_files(&child_dir_path, &child_dir_files);
    std::fs::create_dir_all(t.path().join("dir.ts/child/node_modules"))
      .unwrap();
    std::fs::write(
      t.path().join("dir.ts/child/node_modules/node_modules.js"),
      "",
    )
    .unwrap();
    std::fs::create_dir_all(t.path().join("dir.ts/child/.git")).unwrap();
    std::fs::write(t.path().join("dir.ts/child/.git/git.js"), "").unwrap();
    std::fs::create_dir_all(t.path().join("dir.ts/child/vendor")).unwrap();
    std::fs::write(t.path().join("dir.ts/child/vendor/vendor.js"), "").unwrap();
    let ignore_dir_path = root_dir_path.join("ignore");
    let ignore_dir_files = ["g.d.ts", ".gitignore"];
    create_files(&ignore_dir_path, &ignore_dir_files);
    // 1) default collection: everything except the excluded `ignore`
    //    dir and dotfiles (filtered by the predicate below)
    let file_patterns = FilePatterns {
      base: root_dir_path.to_path_buf(),
      include: None,
      exclude: PathOrPatternSet::new(vec![PathOrPattern::Path(
        ignore_dir_path.to_path_buf(),
      )]),
    };
    let file_collector = FileCollector::new(|e| {
      // exclude dotfiles
      e.path
        .file_name()
        .and_then(|f| f.to_str())
        .map(|f| !f.starts_with('.'))
        .unwrap_or(false)
    });
    let result = file_collector.collect_file_patterns(&RealSys, &file_patterns);
    let expected = [
      "README.md",
      "a.ts",
      "b.js",
      "c.tsx",
      "d.jsx",
      "e.mjs",
      "f.mjsx",
      "git.js",
      "node_modules.js",
      "vendor.js",
    ];
    let mut file_names = result
      .into_iter()
      .map(|r| r.file_name().unwrap().to_string_lossy().into_owned())
      .collect::<Vec<_>>();
    file_names.sort();
    assert_eq!(file_names, expected);
    // test ignoring the .git and node_modules folder
    let file_collector = file_collector
      .ignore_git_folder()
      .ignore_node_modules()
      .set_vendor_folder(Some(child_dir_path.join("vendor").to_path_buf()));
    let result = file_collector.collect_file_patterns(&RealSys, &file_patterns);
    let expected = [
      "README.md",
      "a.ts",
      "b.js",
      "c.tsx",
      "d.jsx",
      "e.mjs",
      "f.mjsx",
    ];
    let mut file_names = result
      .into_iter()
      .map(|r| r.file_name().unwrap().to_string_lossy().into_owned())
      .collect::<Vec<_>>();
    file_names.sort();
    assert_eq!(file_names, expected);
    // test opting out of ignoring by specifying the dir
    let file_patterns = FilePatterns {
      base: root_dir_path.to_path_buf(),
      include: Some(PathOrPatternSet::new(vec![
        PathOrPattern::Path(root_dir_path.to_path_buf()),
        PathOrPattern::Path(
          root_dir_path.to_path_buf().join("child/node_modules/"),
        ),
      ])),
      exclude: PathOrPatternSet::new(vec![PathOrPattern::Path(
        ignore_dir_path.to_path_buf(),
      )]),
    };
    let result = file_collector.collect_file_patterns(&RealSys, &file_patterns);
    let expected = [
      "README.md",
      "a.ts",
      "b.js",
      "c.tsx",
      "d.jsx",
      "e.mjs",
      "f.mjsx",
      "node_modules.js",
    ];
    let mut file_names = result
      .into_iter()
      .map(|r| r.file_name().unwrap().to_string_lossy().into_owned())
      .collect::<Vec<_>>();
    file_names.sort();
    assert_eq!(file_names, expected);
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/npm_cache/fs_util.rs | libs/npm_cache/fs_util.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::io::ErrorKind;
use std::path::Path;
use std::path::PathBuf;
use std::time::Duration;
use sys_traits::FsCreateDirAll;
use sys_traits::FsDirEntry;
use sys_traits::FsHardLink;
use sys_traits::FsReadDir;
use sys_traits::FsRemoveFile;
use sys_traits::ThreadSleep;
#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum HardLinkDirRecursiveError {
#[class(inherit)]
#[error(transparent)]
Io(#[from] std::io::Error),
#[class(inherit)]
#[error("Creating {path}")]
Creating {
path: PathBuf,
#[source]
#[inherit]
source: std::io::Error,
},
#[class(inherit)]
#[error("Creating {path}")]
Reading {
path: PathBuf,
#[source]
#[inherit]
source: std::io::Error,
},
#[class(inherit)]
#[error("Dir {from} to {to}")]
Dir {
from: PathBuf,
to: PathBuf,
#[source]
#[inherit]
source: Box<Self>,
},
#[class(inherit)]
#[error(transparent)]
HardLinkFile(#[from] HardLinkFileError),
}
/// File-system capabilities required by [`hard_link_dir_recursive`].
#[sys_traits::auto_impl]
pub trait HardLinkDirRecursiveSys:
  HardLinkFileSys + FsCreateDirAll + FsReadDir
{
}
/// Hardlinks the files in one directory to another directory.
///
/// Directories are recursed into; regular files are hard linked;
/// everything else (e.g. symlinks) is intentionally skipped.
///
/// Note: Does not handle symlinks.
pub fn hard_link_dir_recursive<TSys: HardLinkDirRecursiveSys>(
  sys: &TSys,
  from: &Path,
  to: &Path,
) -> Result<(), HardLinkDirRecursiveError> {
  sys
    .fs_create_dir_all(to)
    .map_err(|source| HardLinkDirRecursiveError::Creating {
      path: to.to_path_buf(),
      source,
    })?;
  let entries = sys
    .fs_read_dir(from)
    .map_err(|source| HardLinkDirRecursiveError::Reading {
      path: from.to_path_buf(),
      source,
    })?;
  for entry in entries {
    let entry = entry?;
    let entry_type = entry.file_type()?;
    let src = from.join(entry.file_name());
    let dest = to.join(entry.file_name());
    if entry_type.is_dir() {
      // recurse, wrapping any nested failure with the dir pair for context
      hard_link_dir_recursive(sys, &src, &dest).map_err(|source| {
        HardLinkDirRecursiveError::Dir {
          from: src.to_path_buf(),
          to: dest.to_path_buf(),
          source: Box::new(source),
        }
      })?;
    } else if entry_type.is_file() {
      hard_link_file(sys, &src, &dest)?;
    }
    // other entry kinds (symlinks, etc.) are skipped
  }
  Ok(())
}
/// Errors that can occur while hard linking a single file.
#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum HardLinkFileError {
  /// Failed removing an already-existing destination file before relinking.
  #[class(inherit)]
  #[error("Removing file to hard link {from} to {to}")]
  RemoveFileToHardLink {
    from: PathBuf,
    to: PathBuf,
    #[source]
    #[inherit]
    source: std::io::Error,
  },
  /// The hard link operation itself failed.
  #[class(inherit)]
  #[error("Hard linking {from} to {to}")]
  HardLinking {
    from: PathBuf,
    to: PathBuf,
    #[source]
    #[inherit]
    source: std::io::Error,
  },
}
/// File-system capabilities required by [`hard_link_file`].
#[sys_traits::auto_impl]
pub trait HardLinkFileSys: FsHardLink + FsRemoveFile + ThreadSleep {}
/// Hardlinks a file from one location to another.
///
/// Best-effort under contention: when another process/thread races this
/// one creating or removing `to`, this function backs off briefly and
/// treats the destination as handled rather than failing.
pub fn hard_link_file<TSys: HardLinkFileSys>(
  sys: &TSys,
  from: &Path,
  to: &Path,
) -> Result<(), HardLinkFileError> {
  // note: chance for race conditions here between attempting to create,
  // then removing, then attempting to create. There doesn't seem to be
  // a way to hard link with overwriting in Rust, but maybe there is some
  // way with platform specific code. The workaround here is to handle
  // scenarios where something else might create or remove files.
  if let Err(err) = sys.fs_hard_link(from, to) {
    if err.kind() == ErrorKind::AlreadyExists {
      // destination already exists — remove it and relink
      if let Err(err) = sys.fs_remove_file(to) {
        if err.kind() == ErrorKind::NotFound {
          // Assume another process/thread created this hard link to the file we are wanting
          // to remove then sleep a little bit to let the other process/thread move ahead
          // faster to reduce contention.
          sys.thread_sleep(Duration::from_millis(10));
        } else {
          return Err(HardLinkFileError::RemoveFileToHardLink {
            from: from.to_path_buf(),
            to: to.to_path_buf(),
            source: err,
          });
        }
      }
      // Always attempt to recreate the hardlink. In contention scenarios, the other process
      // might have been killed or exited after removing the file, but before creating the hardlink
      if let Err(err) = sys.fs_hard_link(from, to) {
        // Assume another process/thread created this hard link to the file we are wanting
        // to now create then sleep a little bit to let the other process/thread move ahead
        // faster to reduce contention.
        if err.kind() == ErrorKind::AlreadyExists {
          sys.thread_sleep(Duration::from_millis(10));
        } else {
          return Err(HardLinkFileError::HardLinking {
            from: from.to_path_buf(),
            to: to.to_path_buf(),
            source: err,
          });
        }
      }
    } else {
      // any other error (permissions, missing source, ...) is fatal
      return Err(HardLinkFileError::HardLinking {
        from: from.to_path_buf(),
        to: to.to_path_buf(),
        source: err,
      });
    }
  }
  Ok(())
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/npm_cache/lib.rs | libs/npm_cache/lib.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::collections::HashSet;
use std::io::ErrorKind;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use deno_cache_dir::file_fetcher::CacheSetting;
use deno_cache_dir::npm::NpmCacheDir;
use deno_error::JsErrorBox;
use deno_npm::NpmPackageCacheFolderId;
use deno_npm::npm_rc::ResolvedNpmRc;
use deno_path_util::fs::atomic_write_file_with_retries;
use deno_semver::StackString;
use deno_semver::Version;
use deno_semver::package::PackageNv;
use parking_lot::Mutex;
use sys_traits::FsCanonicalize;
use sys_traits::FsCreateDirAll;
use sys_traits::FsHardLink;
use sys_traits::FsMetadata;
use sys_traits::FsOpen;
use sys_traits::FsRead;
use sys_traits::FsReadDir;
use sys_traits::FsRemoveDirAll;
use sys_traits::FsRemoveFile;
use sys_traits::FsRename;
use sys_traits::SystemRandom;
use sys_traits::ThreadSleep;
use url::Url;
mod fs_util;
mod registry_info;
mod remote;
mod rt;
mod tarball;
mod tarball_extract;
pub use fs_util::HardLinkDirRecursiveError;
pub use fs_util::HardLinkFileError;
pub use fs_util::hard_link_dir_recursive;
pub use fs_util::hard_link_file;
pub use registry_info::RegistryInfoProvider;
pub use registry_info::SerializedCachedPackageInfo;
pub use registry_info::get_package_url;
pub use remote::maybe_auth_header_value_for_npm_registry;
pub use tarball::EnsurePackageError;
pub use tarball::TarballCache;
pub use tarball::TarballCacheReporter;
use self::rt::spawn_blocking;
/// Error raised when downloading from an npm registry fails.
#[derive(Debug, deno_error::JsError)]
#[class(generic)]
pub struct DownloadError {
  /// HTTP status code of the failed response, if one was received.
  pub status_code: Option<u16>,
  /// The underlying error.
  pub error: JsErrorBox,
}
// Delegate `source`/`Display` to the wrapped error so this acts as a
// transparent wrapper in error chains.
impl std::error::Error for DownloadError {
  fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
    self.error.source()
  }
}
impl std::fmt::Display for DownloadError {
  fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
    self.error.fmt(f)
  }
}
/// Result of fetching a resource from an npm registry.
pub enum NpmCacheHttpClientResponse {
  /// The resource does not exist on the registry.
  NotFound,
  /// The cached copy is still valid (e.g. the provided etag matched) —
  /// no body was returned.
  NotModified,
  /// Downloaded body bytes plus caching metadata.
  Bytes(NpmCacheHttpClientBytesResponse),
}
/// Body and caching metadata of a successful registry response.
pub struct NpmCacheHttpClientBytesResponse {
  /// Raw response body.
  pub bytes: Vec<u8>,
  /// Etag to use for future conditional requests, when the server sent one.
  pub etag: Option<String>,
}
/// HTTP client abstraction used by the npm cache to download registry
/// metadata and tarballs.
#[async_trait::async_trait(?Send)]
pub trait NpmCacheHttpClient: std::fmt::Debug + Send + Sync + 'static {
  /// Downloads `url`, retrying on failure; usable from any tokio runtime.
  ///
  /// NOTE(review): `maybe_auth` appears to be an auth header value and
  /// `maybe_etag` to enable conditional requests (see
  /// `NpmCacheHttpClientResponse::NotModified`) — confirm exact header
  /// semantics against implementors.
  async fn download_with_retries_on_any_tokio_runtime(
    &self,
    url: Url,
    maybe_auth: Option<String>,
    maybe_etag: Option<String>,
  ) -> Result<NpmCacheHttpClientResponse, DownloadError>;
}
/// Indicates how cached source files should be handled.
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum NpmCacheSetting {
  /// Only the cached files should be used. Any files not in the cache will
  /// error. This is the equivalent of `--cached-only` in the CLI.
  Only,
  /// No cached source files should be used, and all files should be reloaded.
  /// This is the equivalent of `--reload` in the CLI.
  ReloadAll,
  /// Only some cached resources should be used. This is the equivalent of
  /// `--reload=npm:chalk`.
  ///
  /// The stored values are bare package names (the `npm:` prefix already
  /// stripped — see `from_cache_setting`).
  ReloadSome { npm_package_names: Vec<String> },
  /// The cached source files should be used for local modules. This is the
  /// default behavior of the CLI.
  Use,
}
impl NpmCacheSetting {
  /// Converts a general file-fetcher cache setting into the npm-specific
  /// one, keeping only `npm:`-prefixed entries of a partial reload list.
  pub fn from_cache_setting(cache_setting: &CacheSetting) -> NpmCacheSetting {
    match cache_setting {
      CacheSetting::Only => NpmCacheSetting::Only,
      CacheSetting::ReloadAll => NpmCacheSetting::ReloadAll,
      CacheSetting::ReloadSome(values) => {
        // a bare "npm:" entry means "reload every npm package"
        let reload_everything = values.iter().any(|v| v == "npm:");
        if reload_everything {
          NpmCacheSetting::ReloadAll
        } else {
          let npm_package_names: Vec<String> = values
            .iter()
            .filter_map(|v| v.strip_prefix("npm:"))
            .map(String::from)
            .collect();
          NpmCacheSetting::ReloadSome { npm_package_names }
        }
      }
      CacheSetting::RespectHeaders => panic!("not supported"),
      CacheSetting::Use => NpmCacheSetting::Use,
    }
  }
  /// Whether the cached copy of `package_name` may be used (i.e. the
  /// package is not scheduled for reload).
  pub fn should_use_for_npm_package(&self, package_name: &str) -> bool {
    match self {
      NpmCacheSetting::ReloadAll => false,
      NpmCacheSetting::ReloadSome { npm_package_names } => {
        npm_package_names.iter().all(|n| n != package_name)
      }
      _ => true,
    }
  }
}
/// File-system and platform capabilities required by [`NpmCache`].
#[sys_traits::auto_impl]
pub trait NpmCacheSys:
  FsCanonicalize
  + FsCreateDirAll
  + FsHardLink
  + FsMetadata
  + FsOpen
  + FsRead
  + FsReadDir
  + FsRemoveDirAll
  + FsRemoveFile
  + FsRename
  + ThreadSleep
  + SystemRandom
  + Send
  + Sync
  + Clone
  + std::fmt::Debug
  + 'static
{
}
/// Stores a single copy of npm packages in a cache.
#[derive(Debug)]
pub struct NpmCache<TSys: NpmCacheSys> {
  // on-disk layout of the global npm cache directory
  cache_dir: Arc<NpmCacheDir>,
  sys: TSys,
  // how aggressively the cache may be reused (see `NpmCacheSetting`)
  cache_setting: NpmCacheSetting,
  // resolved .npmrc, used to look up the registry url for a package name
  npmrc: Arc<ResolvedNpmRc>,
  // packages already reloaded this run; see `should_use_cache_for_package`
  previously_reloaded_packages: Mutex<HashSet<PackageNv>>,
}
impl<TSys: NpmCacheSys> NpmCache<TSys> {
  /// Creates a cache over `cache_dir` using the provided system,
  /// cache setting, and resolved .npmrc.
  pub fn new(
    cache_dir: Arc<NpmCacheDir>,
    sys: TSys,
    cache_setting: NpmCacheSetting,
    npmrc: Arc<ResolvedNpmRc>,
  ) -> Self {
    Self {
      cache_dir,
      sys,
      cache_setting,
      npmrc,
      previously_reloaded_packages: Default::default(),
    }
  }
  /// The configured cache setting.
  pub fn cache_setting(&self) -> &NpmCacheSetting {
    &self.cache_setting
  }
  /// Root directory of the cache on disk.
  pub fn root_dir_path(&self) -> &Path {
    self.cache_dir.root_dir()
  }
  /// Root directory of the cache as a file URL.
  pub fn root_dir_url(&self) -> &Url {
    self.cache_dir.root_dir_url()
  }
  /// Checks if the cache should be used for the provided name and version.
  /// NOTE: Subsequent calls for the same package will always return `true`
  /// to ensure a package is only downloaded once per run of the CLI. This
  /// prevents downloads from re-occurring when someone has `--reload` and
  /// and imports a dynamic import that imports the same package again for example.
  pub fn should_use_cache_for_package(&self, package: &PackageNv) -> bool {
    self.cache_setting.should_use_for_npm_package(&package.name)
      || !self
        .previously_reloaded_packages
        .lock()
        .insert(package.clone())
  }
  /// Ensures a copy of the package exists in the global cache.
  ///
  /// This assumes that the original package folder being hard linked
  /// from exists before this is called.
  pub fn ensure_copy_package(
    &self,
    folder_id: &NpmPackageCacheFolderId,
  ) -> Result<(), WithFolderSyncLockError> {
    let registry_url = self.npmrc.get_registry_url(&folder_id.nv.name);
    // copy index 0 is the original folder, never a copy
    assert_ne!(folder_id.copy_index, 0);
    let package_folder = self.cache_dir.package_folder_for_id(
      &folder_id.nv.name,
      &folder_id.nv.version.to_string(),
      folder_id.copy_index,
      registry_url,
    );
    // fast path: the copy already exists and finished initializing
    if self.sys.fs_exists_no_err(&package_folder)
      // if this file exists, then the package didn't successfully initialize
      // the first time, or another process is currently extracting the zip file
      && !self.sys.fs_exists_no_err(package_folder.join(NPM_PACKAGE_SYNC_LOCK_FILENAME))
      && self.cache_setting.should_use_for_npm_package(&folder_id.nv.name)
    {
      return Ok(());
    }
    let original_package_folder = self.cache_dir.package_folder_for_id(
      &folder_id.nv.name,
      &folder_id.nv.version.to_string(),
      0, // original copy index
      registry_url,
    );
    // it seems Windows does an "AccessDenied" error when moving a
    // directory with hard links, so that's why this solution is done
    with_folder_sync_lock(&self.sys, &folder_id.nv, &package_folder, || {
      hard_link_dir_recursive(
        &self.sys,
        &original_package_folder,
        &package_folder,
      )
      .map_err(JsErrorBox::from_err)
    })?;
    Ok(())
  }
  /// Resolves the on-disk folder for a specific cached copy of a package.
  pub fn package_folder_for_id(&self, id: &NpmPackageCacheFolderId) -> PathBuf {
    let registry_url = self.npmrc.get_registry_url(&id.nv.name);
    self.cache_dir.package_folder_for_id(
      &id.nv.name,
      &id.nv.version.to_string(),
      id.copy_index,
      registry_url,
    )
  }
  /// Resolves the on-disk folder of the original (copy index 0) cached
  /// package, using the registry configured for its name.
  pub fn package_folder_for_nv(&self, package: &PackageNv) -> PathBuf {
    let registry_url = self.npmrc.get_registry_url(&package.name);
    self.package_folder_for_nv_and_url(package, registry_url)
  }
  /// Like `package_folder_for_nv`, but with an explicit registry url.
  pub fn package_folder_for_nv_and_url(
    &self,
    package: &PackageNv,
    registry_url: &Url,
  ) -> PathBuf {
    self.cache_dir.package_folder_for_id(
      &package.name,
      &package.version.to_string(),
      0, // original copy_index
      registry_url,
    )
  }
  /// Folder holding everything cached for a package name (all versions
  /// plus registry metadata).
  pub fn package_name_folder(&self, name: &str) -> PathBuf {
    let registry_url = self.npmrc.get_registry_url(name);
    self.cache_dir.package_name_folder(name, registry_url)
  }
  /// Maps a file specifier inside the cache back to the cached package
  /// copy it belongs to, if any.
  pub fn resolve_package_folder_id_from_specifier(
    &self,
    specifier: &Url,
  ) -> Option<NpmPackageCacheFolderId> {
    self
      .cache_dir
      .resolve_package_folder_id_from_specifier(specifier)
      .and_then(|cache_id| {
        Some(NpmPackageCacheFolderId {
          nv: PackageNv {
            name: StackString::from_string(cache_id.name),
            // an unparsable version means this isn't a valid cache entry
            version: Version::parse_from_npm(&cache_id.version).ok()?,
          },
          copy_index: cache_id.copy_index,
        })
      })
  }
  /// Reads the cached registry info for `name`, returning `Ok(None)`
  /// when it hasn't been cached yet.
  pub async fn load_package_info(
    &self,
    name: &str,
  ) -> Result<Option<SerializedCachedPackageInfo>, serde_json::Error> {
    let file_cache_path = self.get_registry_package_info_file_cache_path(name);
    let file_bytes = match self.sys.fs_read(&file_cache_path) {
      Ok(file_text) => file_text,
      Err(err) if err.kind() == ErrorKind::NotFound => return Ok(None),
      Err(err) => return Err(serde_json::Error::io(err)),
    };
    // deserialize on a blocking thread; registry documents may be large
    spawn_blocking(move || serde_json::from_slice(&file_bytes))
      .await
      .unwrap()
  }
  /// Atomically writes the registry info cache file for `name`.
  pub fn save_package_info(
    &self,
    name: &str,
    package_info: &SerializedCachedPackageInfo,
  ) -> Result<(), JsErrorBox> {
    let file_cache_path = self.get_registry_package_info_file_cache_path(name);
    let file_text =
      serde_json::to_string(&package_info).map_err(JsErrorBox::from_err)?;
    atomic_write_file_with_retries(
      &self.sys,
      &file_cache_path,
      file_text.as_bytes(),
      0o644,
    )
    .map_err(JsErrorBox::from_err)?;
    Ok(())
  }
  /// Path of the cached registry metadata file for `name`.
  fn get_registry_package_info_file_cache_path(&self, name: &str) -> PathBuf {
    let name_folder_path = self.package_name_folder(name);
    name_folder_path.join("registry.json")
  }
}
const NPM_PACKAGE_SYNC_LOCK_FILENAME: &str = ".deno_sync_lock";
/// Errors that can occur inside `with_folder_sync_lock` while setting
/// up an npm package's cache folder.
#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum WithFolderSyncLockError {
  /// Creating the output folder itself failed.
  #[class(inherit)]
  #[error("Error creating '{path}'")]
  CreateDir {
    path: PathBuf,
    #[source]
    #[inherit]
    source: std::io::Error,
  },
  /// Creating the `.deno_sync_lock` marker file failed.
  #[class(inherit)]
  #[error(
    "Error creating package sync lock file at '{path}'. Maybe try manually deleting this folder."
  )]
  CreateLockFile {
    path: PathBuf,
    #[source]
    #[inherit]
    source: std::io::Error,
  },
  /// The caller-provided setup action failed.
  #[class(inherit)]
  #[error(transparent)]
  Action(#[from] JsErrorBox),
  /// Setup failed and the subsequent best-effort removal of the folder
  /// failed as well, leaving the cache folder in a bad state.
  #[class(generic)]
  #[error(
    "Failed setting up package cache directory for {package}, then failed cleaning it up.\n\nOriginal error:\n\n{error}\n\nRemove error:\n\n{remove_error}\n\nPlease manually delete this folder or you will run into issues using this package in the future:\n\n{output_folder}"
  )]
  SetUpPackageCacheDir {
    package: Box<PackageNv>,
    error: Box<WithFolderSyncLockError>,
    remove_error: std::io::Error,
    output_folder: PathBuf,
  },
}
/// Runs `action` to populate `output_folder` under a crude sync-lock
/// protocol: a `.deno_sync_lock` marker file is created before `action`
/// runs and removed only after it succeeds, so a partially populated
/// folder is never mistaken for a valid package. On any failure the
/// folder is removed (best effort) to leave the cache clean.
fn with_folder_sync_lock(
  sys: &(impl FsCreateDirAll + FsOpen + FsRemoveDirAll + FsRemoveFile),
  package: &PackageNv,
  output_folder: &Path,
  action: impl FnOnce() -> Result<(), JsErrorBox>,
) -> Result<(), WithFolderSyncLockError> {
  // performs the create-dir / take-lock / run-action / drop-lock steps;
  // cleanup on failure is handled by the caller below
  fn inner(
    sys: &(impl FsCreateDirAll + FsOpen + FsRemoveFile),
    output_folder: &Path,
    action: impl FnOnce() -> Result<(), JsErrorBox>,
  ) -> Result<(), WithFolderSyncLockError> {
    sys.fs_create_dir_all(output_folder).map_err(|source| {
      WithFolderSyncLockError::CreateDir {
        path: output_folder.to_path_buf(),
        source,
      }
    })?;
    // This sync lock file is a way to ensure that partially created
    // npm package directories aren't considered valid. This could maybe
    // be a bit smarter in the future to not bother extracting here
    // if another process has taken the lock in the past X seconds and
    // wait for the other process to finish (it could try to create the
    // file with `create_new(true)` then if it exists, check the metadata
    // then wait until the other process finishes with a timeout), but
    // for now this is good enough.
    let sync_lock_path = output_folder.join(NPM_PACKAGE_SYNC_LOCK_FILENAME);
    let mut open_options = sys_traits::OpenOptions::new();
    open_options.write = true;
    open_options.create = true;
    open_options.truncate = false;
    match sys.fs_open(&sync_lock_path, &open_options) {
      Ok(_) => {
        action()?;
        // extraction succeeded, so only now delete this file
        let _ignore = sys.fs_remove_file(&sync_lock_path);
        Ok(())
      }
      // fix: report the lock file's own path here — the error message
      // states the path is the lock file's, but previously the parent
      // folder path was reported instead
      Err(err) => Err(WithFolderSyncLockError::CreateLockFile {
        path: sync_lock_path,
        source: err,
      }),
    }
  }
  match inner(sys, output_folder, action) {
    Ok(()) => Ok(()),
    Err(err) => {
      // best-effort cleanup; only report the removal failure when the
      // folder actually still exists
      if let Err(remove_err) = sys.fs_remove_dir_all(output_folder)
        && remove_err.kind() != std::io::ErrorKind::NotFound
      {
        return Err(WithFolderSyncLockError::SetUpPackageCacheDir {
          package: Box::new(package.clone()),
          error: Box::new(err),
          remove_error: remove_err,
          output_folder: output_folder.to_path_buf(),
        });
      }
      Err(err)
    }
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/npm_cache/remote.rs | libs/npm_cache/remote.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use base64::Engine;
use base64::prelude::BASE64_STANDARD;
use deno_npm::npm_rc::RegistryConfig;
/// Errors computing an `Authorization` header from `.npmrc` registry
/// configuration.
#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum AuthHeaderForNpmRegistryError {
  /// Only one of username/password was supplied; basic auth needs both.
  #[class(type)]
  #[error("Both the username and password must be provided for basic auth")]
  Both,
  /// npm stores the basic-auth password base64-encoded; decoding it failed.
  #[class(type)]
  #[error("The password in npmrc is an invalid base64 string: {0}")]
  Base64(base64::DecodeError),
}
// TODO(bartlomieju): support more auth methods besides token and basic auth
/// Computes the `Authorization` header value (if any) for a registry,
/// mirroring the npm client's precedence: bearer token, then a
/// pre-computed basic-auth value, then username + base64 password.
pub fn maybe_auth_header_value_for_npm_registry(
  registry_config: &RegistryConfig,
) -> Result<Option<String>, AuthHeaderForNpmRegistryError> {
  // a bearer token takes precedence over everything else
  if let Some(token) = registry_config.auth_token.as_ref() {
    return Ok(Some(format!("Bearer {}", token)));
  }
  // next, an already-encoded basic auth value
  if let Some(auth) = registry_config.auth.as_ref() {
    return Ok(Some(format!("Basic {}", auth)));
  }
  match (
    registry_config.username.as_ref(),
    registry_config.password.as_ref(),
  ) {
    (Some(username), Some(password)) => {
      // The npm client does some double encoding when generating the
      // bearer token value, see
      // https://github.com/npm/cli/blob/780afc50e3a345feb1871a28e33fa48235bc3bd5/workspaces/config/lib/index.js#L846-L851
      let pw_base64 = BASE64_STANDARD
        .decode(password)
        .map_err(AuthHeaderForNpmRegistryError::Base64)?;
      let bearer = BASE64_STANDARD.encode(format!(
        "{}:{}",
        username,
        String::from_utf8_lossy(&pw_base64)
      ));
      Ok(Some(format!("Basic {}", bearer)))
    }
    (None, None) => Ok(None),
    // exactly one of the two was provided
    _ => Err(AuthHeaderForNpmRegistryError::Both),
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/npm_cache/registry_info.rs | libs/npm_cache/registry_info.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::collections::HashMap;
use std::collections::HashSet;
use std::sync::Arc;
use async_trait::async_trait;
use deno_error::JsErrorBox;
use deno_npm::npm_rc::ResolvedNpmRc;
use deno_npm::registry::NpmPackageInfo;
use deno_npm::registry::NpmRegistryApi;
use deno_npm::registry::NpmRegistryPackageInfoLoadError;
use deno_unsync::sync::AtomicFlag;
use futures::FutureExt;
use futures::future::LocalBoxFuture;
use parking_lot::Mutex;
use serde::Deserialize;
use serde::Serialize;
use url::Url;
use crate::NpmCache;
use crate::NpmCacheHttpClient;
use crate::NpmCacheHttpClientResponse;
use crate::NpmCacheSetting;
use crate::NpmCacheSys;
use crate::remote::maybe_auth_header_value_for_npm_registry;
use crate::rt::MultiRuntimeAsyncValueCreator;
use crate::rt::spawn_blocking;
/// Outcome of one packument load attempt; `Arc`ed so the same error can
/// be handed to every concurrent awaiter.
type LoadResult = Result<FutureResult, Arc<JsErrorBox>>;
/// Non-`Send` future that performs a packument load.
type LoadFuture = LocalBoxFuture<'static, LoadResult>;
/// Packument as persisted in the file system cache: the registry's
/// package info plus a Deno-specific etag used for revalidation.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct SerializedCachedPackageInfo {
  /// The package info exactly as the registry returned it.
  #[serde(flatten)]
  pub info: NpmPackageInfo,
  /// Custom property that includes the etag.
  #[serde(
    default,
    skip_serializing_if = "Option::is_none",
    rename = "_deno.etag"
  )]
  pub etag: Option<String>,
}
/// Outcome of a packument load, used to decide how the result is
/// memoized in the memory cache.
#[derive(Debug, Clone)]
enum FutureResult {
  /// The registry reported the package as not found.
  PackageNotExists,
  /// The packument was loaded and persisted to the fs cache.
  SavedFsCache(Arc<NpmPackageInfo>),
  /// The packument was loaded, but persisting it to the fs cache failed.
  ErroredFsCache(Arc<NpmPackageInfo>),
}
/// State of one package's entry in the in-memory packument cache.
#[derive(Debug, Clone)]
enum MemoryCacheItem {
  /// The cache item hasn't loaded yet. Concurrent requests await the
  /// same creator so only one load happens per package.
  Pending(Arc<MultiRuntimeAsyncValueCreator<LoadResult>>),
  /// The item has loaded in the past and was stored in the file system cache.
  /// There is no reason to request this package from the npm registry again
  /// for the duration of execution.
  FsCached(Arc<NpmPackageInfo>),
  /// An item is memory cached when it fails saving to the file system cache
  /// or the package does not exist.
  MemoryCached(Result<Option<Arc<NpmPackageInfo>>, Arc<JsErrorBox>>),
}
/// In-memory packument cache keyed by package name.
#[derive(Debug, Default)]
struct MemoryCache {
  // incremented on every clear so in-flight loads can detect that their
  // entry was invalidated before they publish a result (see try_insert)
  clear_id: usize,
  items: HashMap<String, MemoryCacheItem>,
}
impl MemoryCache {
  /// Invalidates the cache, keeping only entries that live purely in
  /// memory — those could not be written to the file system cache, so
  /// dropping them would force a pointless re-download.
  #[inline(always)]
  pub fn clear(&mut self) {
    self.clear_id += 1;
    self
      .items
      .retain(|_, item| matches!(item, MemoryCacheItem::MemoryCached(Ok(_))));
  }

  /// Invalidates the cache and drops every entry.
  #[inline(always)]
  pub fn clear_all(&mut self) {
    self.clear_id += 1;
    self.items.clear();
  }

  /// Looks up the current state for a package name.
  #[inline(always)]
  pub fn get(&self, key: &str) -> Option<&MemoryCacheItem> {
    self.items.get(key)
  }

  /// Unconditionally stores `value` under `key`.
  #[inline(always)]
  pub fn insert(&mut self, key: String, value: MemoryCacheItem) {
    self.items.insert(key, value);
  }

  /// Replaces the entry for `key`, but only when the cache has not been
  /// cleared since `clear_id` was observed. Returns whether the value
  /// was stored.
  #[inline(always)]
  pub fn try_insert(
    &mut self,
    clear_id: usize,
    key: &str,
    value: MemoryCacheItem,
  ) -> bool {
    if clear_id != self.clear_id {
      return false;
    }
    // if the clear_id is the same then the item should exist
    debug_assert!(self.items.contains_key(key));
    if let Some(slot) = self.items.get_mut(key) {
      *slot = value;
    }
    true
  }
}
/// Error loading a packument, annotated with the registry URL and
/// package name for user-facing context.
#[derive(Debug, thiserror::Error, deno_error::JsError)]
#[class(inherit)]
#[error("Failed loading {url} for package \"{name}\"")]
pub struct LoadPackageInfoError {
  url: Url,
  name: String,
  #[inherit]
  #[source]
  inner: LoadPackageInfoInnerError,
}
/// Inner packument load error; `Arc`ed because the same failure may be
/// handed to multiple concurrent awaiters.
#[derive(Debug, thiserror::Error, deno_error::JsError)]
#[class(inherit)]
#[error("{0}")]
pub struct LoadPackageInfoInnerError(pub Arc<JsErrorBox>);
/// Shared state behind `RegistryInfoProvider`.
#[derive(Debug)]
struct RegistryInfoProviderInner<
  THttpClient: NpmCacheHttpClient,
  TSys: NpmCacheSys,
> {
  cache: Arc<NpmCache<TSys>>,
  http_client: Arc<THttpClient>,
  npmrc: Arc<ResolvedNpmRc>,
  // raised once a force reload has been requested; consulted when
  // deciding whether the fs cache may still be used
  force_reload_flag: AtomicFlag,
  memory_cache: Mutex<MemoryCache>,
  // names already fetched during this execution; a force reload does
  // not need to re-download these
  previously_loaded_packages: Mutex<HashSet<String>>,
}
impl<THttpClient: NpmCacheHttpClient, TSys: NpmCacheSys>
  RegistryInfoProviderInner<THttpClient, TSys>
{
  /// Loads the package info for `name`, attaching the registry URL and
  /// package name as context to any failure.
  async fn maybe_package_info(
    self: &Arc<Self>,
    name: &str,
  ) -> Result<Option<Arc<NpmPackageInfo>>, LoadPackageInfoError> {
    self
      .load_package_info(name)
      .await
      .map_err(|err| LoadPackageInfoError {
        url: get_package_url(&self.npmrc, name),
        name: name.to_string(),
        inner: err,
      })
  }

  /// Core lookup: consults the memory cache, otherwise joins (or
  /// starts) the single in-flight load for this package and publishes
  /// the outcome back into the memory cache.
  async fn load_package_info(
    self: &Arc<Self>,
    name: &str,
  ) -> Result<Option<Arc<NpmPackageInfo>>, LoadPackageInfoInnerError> {
    let (value_creator, clear_id) = {
      let mut mem_cache = self.memory_cache.lock();
      let cache_item = if let Some(cache_item) = mem_cache.get(name) {
        cache_item.clone()
      } else {
        // no entry yet: register a pending creator so concurrent
        // callers share one load instead of racing
        let value_creator = MultiRuntimeAsyncValueCreator::new({
          let downloader = self.clone();
          let name = name.to_string();
          Box::new(move || downloader.create_load_future(&name))
        });
        let cache_item = MemoryCacheItem::Pending(Arc::new(value_creator));
        mem_cache.insert(name.to_string(), cache_item.clone());
        cache_item
      };
      match cache_item {
        MemoryCacheItem::FsCached(info) => return Ok(Some(info)),
        MemoryCacheItem::MemoryCached(maybe_info) => {
          return maybe_info.map_err(LoadPackageInfoInnerError);
        }
        MemoryCacheItem::Pending(value_creator) => {
          // remember clear_id so a cache clear during the await below
          // prevents publishing a stale result (see try_insert)
          (value_creator, mem_cache.clear_id)
        }
      }
    };
    match value_creator.get().await {
      Ok(FutureResult::SavedFsCache(info)) => {
        // return back the future and mark this package as having
        // been saved in the cache for next time it's requested
        self.memory_cache.lock().try_insert(
          clear_id,
          name,
          MemoryCacheItem::FsCached(info.clone()),
        );
        Ok(Some(info))
      }
      Ok(FutureResult::ErroredFsCache(info)) => {
        // since saving to the fs cache failed, keep the package information in memory
        self.memory_cache.lock().try_insert(
          clear_id,
          name,
          MemoryCacheItem::MemoryCached(Ok(Some(info.clone()))),
        );
        Ok(Some(info))
      }
      Ok(FutureResult::PackageNotExists) => {
        self.memory_cache.lock().try_insert(
          clear_id,
          name,
          MemoryCacheItem::MemoryCached(Ok(None)),
        );
        Ok(None)
      }
      Err(err) => {
        let return_err = err.clone();
        self.memory_cache.lock().try_insert(
          clear_id,
          name,
          MemoryCacheItem::MemoryCached(Err(err)),
        );
        Err(LoadPackageInfoInnerError(return_err))
      }
    }
  }

  /// Builds the future that actually fetches a packument: first tries
  /// the file system cache (respecting the cache setting and force
  /// reload flag), then downloads from the registry with etag-based
  /// revalidation, persisting the result to the fs cache.
  fn create_load_future(self: &Arc<Self>, name: &str) -> LoadFuture {
    let downloader = self.clone();
    let package_url = get_package_url(&self.npmrc, name);
    let registry_config = self.npmrc.get_registry_config(name);
    let maybe_auth_header_value =
      match maybe_auth_header_value_for_npm_registry(registry_config) {
        Ok(maybe_auth_header_value) => maybe_auth_header_value,
        Err(err) => {
          return std::future::ready(Err(Arc::new(JsErrorBox::from_err(err))))
            .boxed_local();
        }
      };
    let name = name.to_string();
    async move {
      let maybe_file_cached = if (downloader.cache.cache_setting().should_use_for_npm_package(&name) && !downloader.force_reload_flag.is_raised())
        // if this has been previously reloaded, then try loading from the file system cache
        || downloader.previously_loaded_packages.lock().contains(&name)
      {
        // attempt to load from the file cache
        match downloader.cache.load_package_info(&name).await.map_err(JsErrorBox::from_err)? { Some(cached_info) => {
          return Ok(FutureResult::SavedFsCache(Arc::new(cached_info.info)));
        } _ => {
          None
        }}
      } else {
        // reloading: still read the cached copy (ignoring errors) so
        // its etag can be used for a conditional request below
        downloader.cache.load_package_info(&name).await.ok().flatten()
      };
      if *downloader.cache.cache_setting() == NpmCacheSetting::Only {
        return Err(JsErrorBox::new(
          "NotCached",
          format!(
            "npm package not found in cache: \"{name}\", --cached-only is specified."
          )
        ));
      }
      downloader.previously_loaded_packages.lock().insert(name.to_string());
      let (maybe_etag, maybe_cached_info) = match maybe_file_cached {
        Some(cached_info) => (cached_info.etag, Some(cached_info.info)),
        None => (None, None)
      };
      let response = downloader
        .http_client
        .download_with_retries_on_any_tokio_runtime(
          package_url,
          maybe_auth_header_value,
          maybe_etag,
        )
        .await.map_err(JsErrorBox::from_err)?;
      match response {
        NpmCacheHttpClientResponse::NotModified => {
          log::debug!("Respected etag for packument '{0}'", name); // used in the tests
          // NotModified only happens when an etag (and therefore a
          // cached packument) was sent, so this unwrap cannot fail
          Ok(FutureResult::SavedFsCache(Arc::new(maybe_cached_info.unwrap())))
        },
        NpmCacheHttpClientResponse::NotFound => Ok(FutureResult::PackageNotExists),
        NpmCacheHttpClientResponse::Bytes(response) => {
          // deserialize + persist on a blocking thread; packuments can be large
          let future_result = spawn_blocking(
            move || -> Result<FutureResult, JsErrorBox> {
              let mut package_info: SerializedCachedPackageInfo = serde_json::from_slice(&response.bytes).map_err(JsErrorBox::from_err)?;
              package_info.etag = response.etag;
              match downloader.cache.save_package_info(&name, &package_info) {
                Ok(()) => {
                  Ok(FutureResult::SavedFsCache(Arc::new(package_info.info)))
                }
                Err(err) => {
                  // a failed save is non-fatal; the info is kept in memory
                  log::debug!(
                    "Error saving package {} to cache: {:#}",
                    name,
                    err
                  );
                  Ok(FutureResult::ErroredFsCache(Arc::new(package_info.info)))
                }
              }
            },
          )
          .await
          .map_err(JsErrorBox::from_err)??;
          Ok(future_result)
        },
      }
    }
    .map(|r| r.map_err(Arc::new))
    .boxed_local()
  }

  /// Raises the force-reload flag (once) and clears the memory cache.
  /// Returns whether a reload was actually initiated.
  fn mark_force_reload(&self) -> bool {
    // never force reload the registry information if reloading
    // is disabled or if we're already reloading
    if matches!(
      self.cache.cache_setting(),
      NpmCacheSetting::Only | NpmCacheSetting::ReloadAll
    ) {
      return false;
    }
    if self.force_reload_flag.raise() {
      self.memory_cache.lock().clear_all();
      true
    } else {
      false
    }
  }
}
/// Downloads packuments from the npm registry.
///
/// This is shared amongst all the workers.
// Newtype over an `Arc`'d inner so clones share one memory cache and
// one set of in-flight downloads.
#[derive(Debug)]
pub struct RegistryInfoProvider<
  THttpClient: NpmCacheHttpClient,
  TSys: NpmCacheSys,
>(Arc<RegistryInfoProviderInner<THttpClient, TSys>>);
impl<THttpClient: NpmCacheHttpClient, TSys: NpmCacheSys>
  RegistryInfoProvider<THttpClient, TSys>
{
  /// Creates a provider backed by the given file system cache, HTTP
  /// client and resolved npmrc configuration.
  pub fn new(
    cache: Arc<NpmCache<TSys>>,
    http_client: Arc<THttpClient>,
    npmrc: Arc<ResolvedNpmRc>,
  ) -> Self {
    let inner = RegistryInfoProviderInner {
      cache,
      http_client,
      npmrc,
      force_reload_flag: AtomicFlag::lowered(),
      memory_cache: Default::default(),
      previously_loaded_packages: Default::default(),
    };
    Self(Arc::new(inner))
  }

  /// Clears the internal memory cache.
  pub fn clear_memory_cache(&self) {
    self.0.memory_cache.lock().clear();
  }

  /// Loads the package info for `name`; `Ok(None)` means the package
  /// does not exist in the registry.
  pub async fn maybe_package_info(
    &self,
    name: &str,
  ) -> Result<Option<Arc<NpmPackageInfo>>, LoadPackageInfoError> {
    self.0.maybe_package_info(name).await
  }
}
#[async_trait(?Send)]
impl<THttpClient: NpmCacheHttpClient, TSys: NpmCacheSys> NpmRegistryApi
  for RegistryInfoProvider<THttpClient, TSys>
{
  /// Resolves package info, translating "not found" and load failures
  /// into the registry API's error type.
  async fn package_info(
    &self,
    name: &str,
  ) -> Result<Arc<NpmPackageInfo>, NpmRegistryPackageInfoLoadError> {
    let result = self.maybe_package_info(name).await;
    match result {
      Ok(Some(info)) => Ok(info),
      Ok(None) => Err(NpmRegistryPackageInfoLoadError::PackageNotExists {
        package_name: name.to_string(),
      }),
      Err(err) => {
        let load_err = Arc::new(JsErrorBox::from_err(err));
        Err(NpmRegistryPackageInfoLoadError::LoadError(load_err))
      }
    }
  }

  fn mark_force_reload(&self) -> bool {
    self.0.mark_force_reload()
  }
}
// todo(#27198): make this private and only use RegistryInfoProvider in the rest of
// the code
/// Builds the packument URL for `name` on its configured registry.
pub fn get_package_url(npmrc: &ResolvedNpmRc, name: &str) -> Url {
  // The '/' character in scoped package names "@scope/name" must be
  // encoded for older third party registries, while newer registries
  // and npm itself accept both forms:
  // - encoded: https://registry.npmjs.org/@rollup%2fplugin-json
  // - non-encoded: https://registry.npmjs.org/@rollup/plugin-json
  // To support as many third party registries as possible we always
  // encode the '/' character. The full character set used in npm
  // package names is: !, ', (, ), *, -, ., /, [0-9], @, [A-Za-z], _, ~
  const ASCII_SET: percent_encoding::AsciiSet =
    percent_encoding::NON_ALPHANUMERIC
      .remove(b'!')
      .remove(b'\'')
      .remove(b'(')
      .remove(b')')
      .remove(b'*')
      .remove(b'-')
      .remove(b'.')
      .remove(b'@')
      .remove(b'_')
      .remove(b'~');
  let encoded_name = percent_encoding::utf8_percent_encode(name, &ASCII_SET);
  // npm lower-cases the percent encoding of '/', so match that here
  let path = encoded_name.to_string().replace("%2F", "%2f");
  npmrc.get_registry_url(name).join(&path).unwrap()
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/npm_cache/tarball.rs | libs/npm_cache/tarball.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::collections::HashMap;
use std::sync::Arc;
use deno_error::JsErrorBox;
use deno_npm::npm_rc::ResolvedNpmRc;
use deno_npm::registry::NpmPackageVersionDistInfo;
use deno_semver::package::PackageNv;
use futures::FutureExt;
use futures::future::LocalBoxFuture;
use parking_lot::Mutex;
use url::Url;
use crate::NpmCache;
use crate::NpmCacheHttpClient;
use crate::NpmCacheHttpClientResponse;
use crate::NpmCacheSetting;
use crate::NpmCacheSys;
use crate::remote::maybe_auth_header_value_for_npm_registry;
use crate::rt::MultiRuntimeAsyncValueCreator;
use crate::rt::spawn_blocking;
use crate::tarball_extract::TarballExtractionMode;
use crate::tarball_extract::verify_and_extract_tarball;
/// Outcome of one tarball setup attempt; `Arc`ed so the same error can
/// be handed to every concurrent awaiter.
type LoadResult = Result<(), Arc<JsErrorBox>>;
/// Non-`Send` future performing the tarball download + extraction.
type LoadFuture = LocalBoxFuture<'static, LoadResult>;
/// State of one package's tarball setup in the in-memory cache.
#[derive(Debug, Clone)]
enum MemoryCacheItem {
  /// The cache item hasn't finished yet.
  Pending(Arc<MultiRuntimeAsyncValueCreator<LoadResult>>),
  /// The result errored; kept so later requests for the same package
  /// fail fast with the same cause.
  Errored(Arc<JsErrorBox>),
  /// This package has already been cached.
  Cached,
}
/// Coordinates caching of tarballs being loaded from
/// the npm registry.
///
/// This is shared amongst all the workers.
#[derive(Debug)]
pub struct TarballCache<THttpClient: NpmCacheHttpClient, TSys: NpmCacheSys> {
  cache: Arc<NpmCache<TSys>>,
  http_client: Arc<THttpClient>,
  sys: TSys,
  npmrc: Arc<ResolvedNpmRc>,
  // per-package setup state so each tarball is downloaded and
  // extracted at most once per execution
  memory_cache: Mutex<HashMap<PackageNv, MemoryCacheItem>>,
  // optional observer for download lifecycle events
  reporter: Option<Arc<dyn TarballCacheReporter>>,
}
/// Error caching a single npm package's tarball; wraps the underlying
/// cause with the package's name and version.
#[derive(Debug, thiserror::Error, deno_error::JsError)]
#[class(generic)]
#[error("Failed caching npm package '{package_nv}'")]
pub struct EnsurePackageError {
  package_nv: Box<PackageNv>,
  #[source]
  source: Arc<JsErrorBox>,
}
/// Observer for tarball download lifecycle events (e.g. progress
/// reporting). All methods default to no-ops.
pub trait TarballCacheReporter: std::fmt::Debug + Send + Sync {
  fn download_started(&self, _nv: &PackageNv) {}
  fn downloaded(&self, _nv: &PackageNv) {}
  fn reused_cache(&self, _nv: &PackageNv) {}
}
impl<THttpClient: NpmCacheHttpClient, TSys: NpmCacheSys>
  TarballCache<THttpClient, TSys>
{
  /// Creates a tarball cache; `reporter` is optional and receives
  /// download lifecycle notifications.
  pub fn new(
    cache: Arc<NpmCache<TSys>>,
    http_client: Arc<THttpClient>,
    sys: TSys,
    npmrc: Arc<ResolvedNpmRc>,
    reporter: Option<Arc<dyn TarballCacheReporter>>,
  ) -> Self {
    Self {
      cache,
      http_client,
      sys,
      npmrc,
      memory_cache: Default::default(),
      reporter,
    }
  }

  /// Ensures the tarball for `package_nv` has been downloaded, verified
  /// and extracted into the cache, wrapping any failure with the
  /// package's name and version.
  pub async fn ensure_package(
    self: &Arc<Self>,
    package_nv: &PackageNv,
    dist: &NpmPackageVersionDistInfo,
  ) -> Result<(), EnsurePackageError> {
    self
      .ensure_package_inner(package_nv, dist)
      .await
      .map_err(|source| EnsurePackageError {
        package_nv: Box::new(package_nv.clone()),
        source,
      })
  }

  /// Deduplicates concurrent setup requests per package: the first
  /// caller starts the setup future, everyone else awaits the same
  /// result, and the final outcome is memoized for later callers.
  async fn ensure_package_inner(
    self: &Arc<Self>,
    package_nv: &PackageNv,
    dist: &NpmPackageVersionDistInfo,
  ) -> Result<(), Arc<JsErrorBox>> {
    let cache_item = {
      let mut mem_cache = self.memory_cache.lock();
      if let Some(cache_item) = mem_cache.get(package_nv) {
        cache_item.clone()
      } else {
        // no entry yet: register a pending creator so concurrent
        // callers share one download instead of racing
        let value_creator = MultiRuntimeAsyncValueCreator::new({
          let tarball_cache = self.clone();
          let package_nv = package_nv.clone();
          let dist = dist.clone();
          Box::new(move || {
            tarball_cache.create_setup_future(package_nv.clone(), dist.clone())
          })
        });
        let cache_item = MemoryCacheItem::Pending(Arc::new(value_creator));
        mem_cache.insert(package_nv.clone(), cache_item.clone());
        cache_item
      }
    };
    match cache_item {
      MemoryCacheItem::Cached => Ok(()),
      MemoryCacheItem::Errored(err) => Err(err),
      MemoryCacheItem::Pending(creator) => {
        let result = creator.get().await;
        match result {
          Ok(_) => {
            // memoize success so later callers return immediately
            *self.memory_cache.lock().get_mut(package_nv).unwrap() =
              MemoryCacheItem::Cached;
            Ok(())
          }
          Err(err) => {
            // memoize the failure so later callers fail fast
            *self.memory_cache.lock().get_mut(package_nv).unwrap() =
              MemoryCacheItem::Errored(err.clone());
            Err(err)
          }
        }
      }
    }
  }

  /// Builds the future that performs the actual work: reuse the fs
  /// cache when allowed, otherwise download the tarball (with auth
  /// resolved for the tarball's own URL), then verify and extract it
  /// on a blocking thread.
  fn create_setup_future(
    self: &Arc<Self>,
    package_nv: PackageNv,
    dist: NpmPackageVersionDistInfo,
  ) -> LoadFuture {
    let tarball_cache = self.clone();
    let sys = self.sys.clone();
    let reporter = self.reporter.clone();
    async move {
      let registry_url = tarball_cache.npmrc.get_registry_url(&package_nv.name);
      let package_folder =
        tarball_cache.cache.package_folder_for_nv_and_url(&package_nv, registry_url);
      let should_use_cache = tarball_cache.cache.should_use_cache_for_package(&package_nv);
      let package_folder_exists = tarball_cache.sys.fs_exists_no_err(&package_folder);
      if should_use_cache && package_folder_exists {
        if let Some(reporter) = reporter {
          reporter.reused_cache(&package_nv);
        }
        return Ok(());
      } else if tarball_cache.cache.cache_setting() == &NpmCacheSetting::Only {
        return Err(JsErrorBox::new(
          "NotCached",
          format!(
            "npm package not found in cache: \"{}\", --cached-only is specified.",
            &package_nv.name
          )
        )
        );
      }
      if dist.tarball.is_empty() {
        return Err(JsErrorBox::generic("Tarball URL was empty."));
      }
      // IMPORTANT: npm registries may specify tarball URLs at different URLS than the
      // registry, so we MUST get the auth for the tarball URL and not the registry URL.
      let tarball_uri = Url::parse(&dist.tarball).map_err(JsErrorBox::from_err)?;
      let maybe_registry_config =
        tarball_cache.npmrc.tarball_config(&tarball_uri);
      let maybe_auth_header = maybe_registry_config.and_then(|c| maybe_auth_header_value_for_npm_registry(c).ok()?);
      if let Some(reporter) = &reporter {
        reporter.download_started(&package_nv);
      }
      let result = tarball_cache.http_client
        .download_with_retries_on_any_tokio_runtime(tarball_uri, maybe_auth_header, None)
        .await;
      if let Some(reporter) = &reporter {
        reporter.downloaded(&package_nv);
      }
      let maybe_bytes = match result {
        Ok(response) => match response {
          NpmCacheHttpClientResponse::NotModified => unreachable!(), // no e-tag
          NpmCacheHttpClientResponse::NotFound => None,
          NpmCacheHttpClientResponse::Bytes(r) => Some(r.bytes),
        },
        Err(err) => {
          // a 401 with no tarball-specific auth but auth configured for
          // the package's registry scope gets a dedicated hint message
          if err.status_code == Some(401)
            && maybe_registry_config.is_none()
            && tarball_cache.npmrc.get_registry_config(&package_nv.name).auth_token.is_some()
          {
            return Err(JsErrorBox::generic(format!(
              concat!(
                "No auth for tarball URI, but present for scoped registry.\n\n",
                "Tarball URI: {}\n",
                "Scope URI: {}\n\n",
                "More info here: https://github.com/npm/cli/wiki/%22No-auth-for-URI,-but-auth-present-for-scoped-registry%22"
              ),
              dist.tarball,
              registry_url,
            )));
          }
          return Err(JsErrorBox::from_err(err))
        },
      };
      match maybe_bytes {
        Some(bytes) => {
          let extraction_mode = if should_use_cache || !package_folder_exists {
            TarballExtractionMode::SiblingTempDir
          } else {
            // The user ran with `--reload`, so overwrite the package instead of
            // deleting it since the package might get corrupted if a user kills
            // their deno process while it's deleting a package directory
            //
            // We can't rename this folder and delete it because the folder
            // may be in use by another process or may now contain hardlinks,
            // which will cause windows to throw an "AccessDenied" error when
            // renaming. So we settle for overwriting.
            TarballExtractionMode::Overwrite
          };
          let dist = dist.clone();
          let package_nv = package_nv.clone();
          // verification + extraction are CPU/fs heavy, so run off-thread
          spawn_blocking(move || verify_and_extract_tarball(
            &sys,
            &package_nv,
            &bytes,
            &dist,
            &package_folder,
            extraction_mode,
          ))
          .await.map_err(JsErrorBox::from_err)?.map_err(JsErrorBox::from_err)
        }
        None => {
          Err(JsErrorBox::generic(format!("Could not find npm package tarball at: {}", dist.tarball)))
        }
      }
    }
    .map(|r| r.map_err(Arc::new))
    .boxed_local()
  }
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/npm_cache/rt.rs | libs/npm_cache/rt.rs | // Copyright 2018-2025 the Deno authors. MIT license.
#[cfg(not(target_arch = "wasm32"))]
use deno_unsync::JoinHandle;
// On wasm there are no threads, so a "join handle" is just an
// already-completed future holding the result.
#[cfg(target_arch = "wasm32")]
pub type JoinHandle<T> =
  std::future::Ready<Result<T, std::convert::Infallible>>;
/// Runs `f` off the async executor: on native targets it is handed to
/// `deno_unsync::spawn_blocking` (a tokio blocking thread); on wasm,
/// which is single threaded, it runs synchronously and the returned
/// handle is immediately ready.
pub fn spawn_blocking<
  F: (FnOnce() -> R) + Send + 'static,
  R: Send + 'static,
>(
  f: F,
) -> JoinHandle<R> {
  #[cfg(target_arch = "wasm32")]
  {
    let result = f();
    std::future::ready(Ok(result))
  }
  #[cfg(not(target_arch = "wasm32"))]
  {
    deno_unsync::spawn_blocking(f)
  }
}
#[cfg(not(target_arch = "wasm32"))]
pub use deno_unsync::sync::MultiRuntimeAsyncValueCreator;
#[cfg(target_arch = "wasm32")]
mod wasm {
  use futures::future::LocalBoxFuture;

  type CreateFutureFn<TResult> =
    Box<dyn Fn() -> LocalBoxFuture<'static, TResult> + Send + Sync>;

  /// Single-threaded stand-in for `deno_unsync`'s
  /// `MultiRuntimeAsyncValueCreator`: wasm has no other runtimes to
  /// coordinate with, so `get` simply creates and awaits a fresh
  /// future on every call.
  pub struct MultiRuntimeAsyncValueCreator<TResult: Send + Clone + 'static> {
    create_future: CreateFutureFn<TResult>,
  }

  impl<TResult: Send + Clone + 'static> std::fmt::Debug
    for MultiRuntimeAsyncValueCreator<TResult>
  {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
      f.debug_struct("MultiRuntimeAsyncValueCreator").finish()
    }
  }

  impl<TResult: Send + Clone + 'static> MultiRuntimeAsyncValueCreator<TResult> {
    pub fn new(create_future: CreateFutureFn<TResult>) -> Self {
      Self { create_future }
    }

    pub async fn get(&self) -> TResult {
      (self.create_future)().await
    }
  }
}
#[cfg(target_arch = "wasm32")]
pub use wasm::MultiRuntimeAsyncValueCreator;
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/npm_cache/tarball_extract.rs | libs/npm_cache/tarball_extract.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::collections::HashSet;
use std::io::ErrorKind;
use std::path::Path;
use std::path::PathBuf;
use base64::Engine;
use base64::prelude::BASE64_STANDARD;
use deno_npm::registry::NpmPackageVersionDistInfo;
use deno_npm::registry::NpmPackageVersionDistInfoIntegrity;
use deno_semver::package::PackageNv;
use flate2::read::GzDecoder;
use sha2::Digest;
use sys_traits::FsCanonicalize;
use sys_traits::FsCreateDirAll;
use sys_traits::FsFileSetPermissions;
use sys_traits::FsMetadata;
use sys_traits::FsOpen;
use sys_traits::FsRemoveDirAll;
use sys_traits::FsRemoveFile;
use sys_traits::FsRename;
use sys_traits::OpenOptions;
use sys_traits::SystemRandom;
use sys_traits::ThreadSleep;
use tar::Archive;
use tar::EntryType;
/// How `extract_tarball` writes into the destination directory.
#[derive(Debug, Copy, Clone)]
pub enum TarballExtractionMode {
  /// Overwrites the destination directory without deleting any files.
  Overwrite,
  /// Creates and writes to a sibling temporary directory. When done, moves
  /// it to the final destination.
  ///
  /// This is more robust than `Overwrite` as it better handles multiple
  /// processes writing to the directory at the same time.
  SiblingTempDir,
}
/// Errors from `verify_and_extract_tarball`.
#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum VerifyAndExtractTarballError {
  /// The tarball bytes did not match the registry-provided integrity.
  #[class(inherit)]
  #[error(transparent)]
  TarballIntegrity(#[from] TarballIntegrityError),
  /// Extraction of the (already verified) tarball failed.
  #[class(inherit)]
  #[error(transparent)]
  ExtractTarball(#[from] ExtractTarballError),
  /// Moving the sibling temp dir into its final location failed.
  #[class(inherit)]
  #[error("Failed moving extracted tarball to final destination")]
  MoveFailed(std::io::Error),
}
/// Verifies `data` against the dist-info integrity and, on success,
/// extracts it into `output_folder` using the requested mode.
pub fn verify_and_extract_tarball(
  sys: &(
    impl FsCanonicalize
    + FsCreateDirAll
    + FsMetadata
    + FsOpen
    + FsRename
    + FsRemoveDirAll
    + FsRemoveFile
    + SystemRandom
    + ThreadSleep
  ),
  package_nv: &PackageNv,
  data: &[u8],
  dist_info: &NpmPackageVersionDistInfo,
  output_folder: &Path,
  extraction_mode: TarballExtractionMode,
) -> Result<(), VerifyAndExtractTarballError> {
  // always check the checksum before touching the file system
  verify_tarball_integrity(package_nv, data, &dist_info.integrity())?;
  match extraction_mode {
    TarballExtractionMode::Overwrite => {
      extract_tarball(sys, data, output_folder).map_err(Into::into)
    }
    TarballExtractionMode::SiblingTempDir => {
      // extract elsewhere, then move into place so other processes
      // never observe a partially extracted folder
      let temp_dir = deno_path_util::get_atomic_path(sys, output_folder);
      extract_tarball(sys, data, &temp_dir)?;
      rename_with_retries(sys, &temp_dir, output_folder)
        .map_err(VerifyAndExtractTarballError::MoveFailed)
    }
  }
}
/// Moves `temp_dir` into place at `output_folder`, retrying a handful
/// of times to tolerate concurrent processes racing on the same
/// destination. If another process already put the folder in place,
/// the temp dir is discarded and this counts as success.
fn rename_with_retries(
  sys: &(impl ThreadSleep + FsMetadata + FsRemoveDirAll + FsRename),
  temp_dir: &Path,
  output_folder: &Path,
) -> Result<(), std::io::Error> {
  // Windows reports "Access is denied" rather than AlreadyExists, so an
  // existing destination folder is also treated as "already exists"
  fn already_exists(
    sys: &impl FsMetadata,
    err: &std::io::Error,
    output_folder: &Path,
  ) -> bool {
    err.kind() == ErrorKind::AlreadyExists
      || sys.fs_exists_no_err(output_folder)
  }
  let mut attempts = 0;
  // renaming might be flaky if a lot of processes are trying
  // to do this, so retry a few times
  loop {
    let err = match sys.fs_rename(temp_dir, output_folder) {
      Ok(_) => return Ok(()),
      Err(err) => err,
    };
    if already_exists(sys, &err, output_folder) {
      // another process copied here first; ours is redundant, clean up
      let _ = sys.fs_remove_dir_all(temp_dir);
      return Ok(());
    }
    attempts += 1;
    if attempts > 5 {
      // too many retries: clean up and surface the last error
      let _ = sys.fs_remove_dir_all(temp_dir);
      return Err(err);
    }
    // back off briefly before retrying; contention here should be rare
    let sleep_ms = std::cmp::min(100, 20 * attempts);
    sys.thread_sleep(std::time::Duration::from_millis(sleep_ms));
  }
}
/// Errors from `verify_tarball_integrity`.
#[derive(Debug, thiserror::Error, deno_error::JsError)]
#[class(generic)]
pub enum TarballIntegrityError {
  /// The SRI integrity used a hash algorithm other than sha512/sha1.
  #[error("Not implemented hash function for {package}: {hash_kind}")]
  NotImplementedHashFunction {
    package: Box<PackageNv>,
    hash_kind: String,
  },
  /// The registry supplied an integrity value in an unrecognized format.
  #[error("Not implemented integrity kind for {package}: {integrity}")]
  NotImplementedIntegrityKind {
    package: Box<PackageNv>,
    integrity: String,
  },
  /// The downloaded bytes did not hash to the expected checksum.
  #[error(
    "Tarball checksum did not match what was provided by npm registry for {package}.\n\nExpected: {expected}\nActual: {actual}"
  )]
  MismatchedChecksum {
    package: Box<PackageNv>,
    expected: String,
    actual: String,
  },
}
/// Checks the downloaded tarball bytes against the integrity metadata
/// the registry provided. Supports sha512/sha1 SRI integrity values and
/// legacy hex-encoded sha1 checksums; a `None` integrity passes without
/// any verification.
fn verify_tarball_integrity(
  package: &PackageNv,
  data: &[u8],
  npm_integrity: &NpmPackageVersionDistInfoIntegrity,
) -> Result<(), TarballIntegrityError> {
  let (actual, expected) = match npm_integrity {
    NpmPackageVersionDistInfoIntegrity::Integrity {
      algorithm,
      base64_hash,
    } => {
      let actual = match *algorithm {
        "sha512" => BASE64_STANDARD.encode(sha2::Sha512::digest(data)),
        "sha1" => BASE64_STANDARD.encode(sha1::Sha1::digest(data)),
        hash_kind => {
          return Err(TarballIntegrityError::NotImplementedHashFunction {
            package: Box::new(package.clone()),
            hash_kind: hash_kind.to_string(),
          });
        }
      };
      (actual, base64_hash)
    }
    NpmPackageVersionDistInfoIntegrity::LegacySha1Hex(hex) => {
      // legacy integrity values are hex encoded rather than base64
      let digest = sha1::Sha1::digest(data);
      (faster_hex::hex_string(digest.as_ref()), hex)
    }
    NpmPackageVersionDistInfoIntegrity::UnknownIntegrity(integrity) => {
      return Err(TarballIntegrityError::NotImplementedIntegrityKind {
        package: Box::new(package.clone()),
        integrity: integrity.to_string(),
      });
    }
    // nothing to verify when no integrity information was supplied
    NpmPackageVersionDistInfoIntegrity::None => return Ok(()),
  };
  if actual == *expected {
    Ok(())
  } else {
    Err(TarballIntegrityError::MismatchedChecksum {
      package: Box::new(package.clone()),
      expected: expected.to_string(),
      actual,
    })
  }
}
/// The kind of file system operation that failed; rendered as a verb in
/// `IoWithPathError` messages.
#[derive(Debug, Copy, Clone)]
pub enum IoErrorOperation {
  Creating,
  Canonicalizing,
  Opening,
  Writing,
}
impl std::fmt::Display for IoErrorOperation {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
IoErrorOperation::Creating => write!(f, "creating"),
IoErrorOperation::Canonicalizing => write!(f, "canonicalizing"),
IoErrorOperation::Opening => write!(f, "opening"),
IoErrorOperation::Writing => write!(f, "writing"),
}
}
}
#[derive(Debug, thiserror::Error, deno_error::JsError)]
#[class(generic)]
#[error("Failed {} '{}'", operation, path.display())]
pub struct IoWithPathError {
pub path: PathBuf,
pub operation: IoErrorOperation,
#[source]
pub source: std::io::Error,
}
#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum ExtractTarballError {
#[class(inherit)]
#[error(transparent)]
Io(#[from] std::io::Error),
#[class(inherit)]
#[error(transparent)]
IoWithPath(#[from] IoWithPathError),
#[class(generic)]
#[error(
"Extracted directory '{0}' of npm tarball was not in output directory."
)]
NotInOutputDirectory(PathBuf),
}
fn extract_tarball(
sys: &(impl FsCanonicalize + FsCreateDirAll + FsOpen + FsRemoveFile),
data: &[u8],
output_folder: &Path,
) -> Result<(), ExtractTarballError> {
sys
.fs_create_dir_all(output_folder)
.map_err(|source| IoWithPathError {
path: output_folder.to_path_buf(),
operation: IoErrorOperation::Creating,
source,
})?;
let output_folder =
sys
.fs_canonicalize(output_folder)
.map_err(|source| IoWithPathError {
path: output_folder.to_path_buf(),
operation: IoErrorOperation::Canonicalizing,
source,
})?;
let tar = GzDecoder::new(data);
let mut archive = Archive::new(tar);
archive.set_overwrite(true);
archive.set_preserve_permissions(true);
let mut created_dirs = HashSet::new();
for entry in archive.entries()? {
let mut entry = entry?;
let path = entry.path()?;
let entry_type = entry.header().entry_type();
// Some package tarballs contain "pax_global_header", these entries
// should be skipped.
if entry_type == EntryType::XGlobalHeader {
continue;
}
// skip the first component which will be either "package" or the name of the package
let relative_path = path.components().skip(1).collect::<PathBuf>();
let absolute_path = output_folder.join(relative_path);
let dir_path = if entry_type == EntryType::Directory {
absolute_path.as_path()
} else {
absolute_path.parent().unwrap()
};
if created_dirs.insert(dir_path.to_path_buf()) {
sys
.fs_create_dir_all(dir_path)
.map_err(|source| IoWithPathError {
path: output_folder.to_path_buf(),
operation: IoErrorOperation::Creating,
source,
})?;
let canonicalized_dir =
sys
.fs_canonicalize(dir_path)
.map_err(|source| IoWithPathError {
path: output_folder.to_path_buf(),
operation: IoErrorOperation::Canonicalizing,
source,
})?;
if !canonicalized_dir.starts_with(&output_folder) {
return Err(ExtractTarballError::NotInOutputDirectory(
canonicalized_dir.to_path_buf(),
));
}
}
let entry_type = entry.header().entry_type();
match entry_type {
EntryType::Regular => {
let open_options = OpenOptions::new_write();
let mut f =
sys
.fs_open(&absolute_path, &open_options)
.map_err(|source| IoWithPathError {
path: absolute_path.to_path_buf(),
operation: IoErrorOperation::Opening,
source,
})?;
std::io::copy(&mut entry, &mut f).map_err(|source| {
IoWithPathError {
path: absolute_path,
operation: IoErrorOperation::Writing,
source,
}
})?;
if !sys_traits::impls::is_windows() {
let mode = entry.header().mode()?;
f.fs_file_set_permissions(mode)?;
}
}
EntryType::Symlink | EntryType::Link => {
// At the moment, npm doesn't seem to support uploading hardlinks or
// symlinks to the npm registry. If ever adding symlink or hardlink
// support, we will need to validate that the hardlink and symlink
// target are within the package directory.
log::warn!(
"Ignoring npm tarball entry type {:?} for '{}'",
entry_type,
absolute_path.display()
)
}
_ => {
// ignore
}
}
}
Ok(())
}
#[cfg(test)]
mod test {
use deno_semver::Version;
use sys_traits::FsMetadata;
use sys_traits::FsWrite;
use tempfile::TempDir;
use super::*;
#[test]
pub fn test_verify_tarball() {
let package = PackageNv {
name: "package".into(),
version: Version::parse_from_npm("1.0.0").unwrap(),
};
let actual_checksum = "z4PhNX7vuL3xVChQ1m2AB9Yg5AULVxXcg/SpIdNs6c5H0NE8XYXysP+DGNKHfuwvY7kxvUdBeoGlODJ6+SfaPg==";
assert_eq!(
verify_tarball_integrity(
&package,
&Vec::new(),
&NpmPackageVersionDistInfoIntegrity::UnknownIntegrity("test")
)
.unwrap_err()
.to_string(),
"Not implemented integrity kind for package@1.0.0: test",
);
assert_eq!(
verify_tarball_integrity(
&package,
&Vec::new(),
&NpmPackageVersionDistInfoIntegrity::Integrity {
algorithm: "notimplemented",
base64_hash: "test"
}
)
.unwrap_err()
.to_string(),
"Not implemented hash function for package@1.0.0: notimplemented",
);
assert_eq!(
verify_tarball_integrity(
&package,
&Vec::new(),
&NpmPackageVersionDistInfoIntegrity::Integrity {
algorithm: "sha1",
base64_hash: "test"
}
)
.unwrap_err()
.to_string(),
concat!(
"Tarball checksum did not match what was provided by npm ",
"registry for package@1.0.0.\n\nExpected: test\nActual: 2jmj7l5rSw0yVb/vlWAYkK/YBwk=",
),
);
assert_eq!(
verify_tarball_integrity(
&package,
&Vec::new(),
&NpmPackageVersionDistInfoIntegrity::Integrity {
algorithm: "sha512",
base64_hash: "test"
}
)
.unwrap_err()
.to_string(),
format!(
"Tarball checksum did not match what was provided by npm registry for package@1.0.0.\n\nExpected: test\nActual: {actual_checksum}"
),
);
assert!(
verify_tarball_integrity(
&package,
&Vec::new(),
&NpmPackageVersionDistInfoIntegrity::Integrity {
algorithm: "sha512",
base64_hash: actual_checksum,
},
)
.is_ok()
);
let actual_hex = "da39a3ee5e6b4b0d3255bfef95601890afd80709";
assert_eq!(
verify_tarball_integrity(
&package,
&Vec::new(),
&NpmPackageVersionDistInfoIntegrity::LegacySha1Hex("test"),
)
.unwrap_err()
.to_string(),
format!(
"Tarball checksum did not match what was provided by npm registry for package@1.0.0.\n\nExpected: test\nActual: {actual_hex}"
),
);
assert!(
verify_tarball_integrity(
&package,
&Vec::new(),
&NpmPackageVersionDistInfoIntegrity::LegacySha1Hex(actual_hex),
)
.is_ok()
);
}
#[test]
fn rename_with_retries_succeeds_exists() {
let temp_dir = TempDir::new().unwrap();
let folder_1 = temp_dir.path().join("folder_1");
let folder_2 = temp_dir.path().join("folder_2");
let sys = sys_traits::impls::RealSys;
sys.fs_create_dir_all(&folder_1).unwrap();
sys.fs_write(folder_1.join("a.txt"), "test").unwrap();
sys.fs_create_dir_all(&folder_2).unwrap();
// this will not end up in the output as rename_with_retries assumes
// the folders ending up at the destination are the same
sys.fs_write(folder_2.join("b.txt"), "test2").unwrap();
let dest_folder = temp_dir.path().join("dest_folder");
rename_with_retries(&sys, folder_1.as_path(), &dest_folder).unwrap();
rename_with_retries(&sys, folder_2.as_path(), &dest_folder).unwrap();
assert!(sys.fs_exists_no_err(dest_folder.join("a.txt")));
assert!(!sys.fs_exists_no_err(dest_folder.join("b.txt")));
}
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/crypto/lib.rs | libs/crypto/lib.rs | // Copyright 2018-2025 the Deno authors. MIT license.
#![deny(clippy::print_stderr)]
#![deny(clippy::print_stdout)]
#![deny(clippy::unused_async)]
#![deny(clippy::unnecessary_wraps)]
mod ffi;
pub mod spki;
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/crypto/ffi.rs | libs/crypto/ffi.rs | // Copyright 2018-2025 the Deno authors. MIT license.
pub struct PKey(pub *mut aws_lc_sys::EVP_PKEY);
impl PKey {
pub fn from_ptr(ptr: *mut aws_lc_sys::EVP_PKEY) -> Option<Self> {
if ptr.is_null() { None } else { Some(Self(ptr)) }
}
pub fn as_ptr(&self) -> *mut aws_lc_sys::EVP_PKEY {
self.0
}
}
impl Drop for PKey {
fn drop(&mut self) {
// SAFETY: We need to free the underlying EVP_PKEY when the PKey wrapper is dropped.
// The null check ensures we don't try to free a null pointer.
unsafe {
if self.0.is_null() {
return;
}
aws_lc_sys::EVP_PKEY_free(self.0);
}
}
}
impl std::ops::Deref for PKey {
type Target = *mut aws_lc_sys::EVP_PKEY;
fn deref(&self) -> &Self::Target {
&self.0
}
}
pub struct Bio(pub *mut aws_lc_sys::BIO);
impl Drop for Bio {
fn drop(&mut self) {
// SAFETY: We need to free the underlying BIO when the Bio wrapper is dropped.
// The null check ensures we don't try to free a null pointer.
unsafe {
if self.0.is_null() {
return;
}
aws_lc_sys::BIO_free(self.0);
}
}
}
impl Bio {
pub fn new_memory() -> Result<Self, &'static str> {
// SAFETY: Creating a new memory BIO requires FFI calls to the OpenSSL API.
// We check for null pointer returns to ensure safety.
unsafe {
let bio = aws_lc_sys::BIO_new(aws_lc_sys::BIO_s_mem());
if bio.is_null() {
return Err("Failed to create memory BIO");
}
Ok(Bio(bio))
}
}
pub fn get_contents(&self) -> Result<Vec<u8>, &'static str> {
// SAFETY: Retrieving content from a BIO requires FFI calls and raw pointer manipulation.
// We verify the pointer is not null and create a slice with the correct length.
// The data is copied into a Vec to ensure memory safety after this function returns.
unsafe {
let mut len = 0;
let mut content_ptr = std::ptr::null();
aws_lc_sys::BIO_mem_contents(self.0, &mut content_ptr, &mut len);
if content_ptr.is_null() || len == 0 {
return Err("No content in BIO");
}
let data = std::slice::from_raw_parts(content_ptr, len);
Ok(data.to_vec())
}
}
pub fn as_ptr(&self) -> *mut aws_lc_sys::BIO {
self.0
}
}
impl std::ops::Deref for Bio {
type Target = *mut aws_lc_sys::BIO;
fn deref(&self) -> &Self::Target {
&self.0
}
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
denoland/deno | https://github.com/denoland/deno/blob/7222e85d435b977de1ab810db067b86f29e6444f/libs/crypto/spki.rs | libs/crypto/spki.rs | // Copyright 2018-2025 the Deno authors. MIT license.
use std::ptr::NonNull;
use crate::ffi::Bio;
use crate::ffi::PKey;
#[derive(Debug)]
pub struct NetscapeSpki(*mut aws_lc_sys::NETSCAPE_SPKI);
impl NetscapeSpki {
/// Decodes a base64-encoded SPKI certificate.
fn from_base64(data: &[u8]) -> Result<Self, &'static str> {
// Trim trailing characters for compatibility with OpenSSL.
let end = data
.iter()
.rposition(|&b| !b" \n\r\t".contains(&b))
.map_or(0, |i| i + 1);
if end == 0 {
return Err("Invalid SPKI data: no base64 content found");
}
// SAFETY: Cast data pointer to convert base64 to NETSCAPE_SPKI
unsafe {
let spki = aws_lc_sys::NETSCAPE_SPKI_b64_decode(
data.as_ptr() as *const _,
end as isize,
);
if spki.is_null() {
return Err("Failed to decode base64 SPKI data");
}
Ok(NetscapeSpki(spki))
}
}
fn verify(&self, pkey: &PKey) -> bool {
// SAFETY: Use public key to verify SPKI certificate
unsafe {
let result = aws_lc_sys::NETSCAPE_SPKI_verify(self.0, pkey.as_ptr());
result > 0
}
}
fn spkac(&self) -> Result<&aws_lc_sys::NETSCAPE_SPKAC, &'static str> {
// SAFETY: Access spkac field via raw pointer with null checks
unsafe {
if self.0.is_null() || (*self.0).spkac.is_null() {
return Err("Invalid SPKAC structure");
}
Ok(&*(*self.0).spkac)
}
}
fn get_public_key(&self) -> Result<PKey, &'static str> {
// SAFETY: Extract public key, null checked by PKey::from_ptr
unsafe {
let pkey = aws_lc_sys::NETSCAPE_SPKI_get_pubkey(self.0);
PKey::from_ptr(pkey).ok_or("Failed to extract public key")
}
}
fn get_challenge(&self) -> Result<Vec<u8>, &'static str> {
// SAFETY: Extract challenge with null checks and BufferGuard for cleanup
unsafe {
let spkac = self.spkac()?;
let challenge = spkac.challenge;
if challenge.is_null() {
return Err("No challenge found in SPKI certificate");
}
let mut buf = std::ptr::null_mut();
let buf_len = aws_lc_sys::ASN1_STRING_to_UTF8(&mut buf, challenge);
if buf_len <= 0 || buf.is_null() {
return Err("Failed to extract challenge string");
}
let _guard = BufferGuard(NonNull::new(buf).unwrap());
let challenge_slice =
std::slice::from_raw_parts(buf as *const u8, buf_len as usize);
Ok(challenge_slice.to_vec())
}
}
pub fn as_ptr(&self) -> *mut aws_lc_sys::NETSCAPE_SPKI {
self.0
}
}
impl Drop for NetscapeSpki {
fn drop(&mut self) {
// SAFETY: Free NETSCAPE_SPKI with null check
unsafe {
if !self.0.is_null() {
aws_lc_sys::NETSCAPE_SPKI_free(self.0);
}
}
}
}
// RAII guard for automatically freeing ASN1 string buffers
struct BufferGuard(NonNull<u8>);
impl Drop for BufferGuard {
fn drop(&mut self) {
// SAFETY: Free ASN1_STRING buffer (NonNull guarantees non-null)
unsafe {
aws_lc_sys::OPENSSL_free(self.0.as_ptr() as *mut std::ffi::c_void);
}
}
}
/// Validates the SPKAC data structure.
///
/// Returns true if the signature in the SPKAC data is valid.
pub fn verify_spkac(data: &[u8]) -> bool {
let spki = match NetscapeSpki::from_base64(data) {
Ok(spki) => spki,
Err(_) => return false,
};
let pkey = match extract_public_key_from_spkac(&spki) {
Ok(pkey) => pkey,
Err(_) => return false,
};
spki.verify(&pkey)
}
/// Extracts the public key from the SPKAC structure.
fn extract_public_key_from_spkac(
spki: &NetscapeSpki,
) -> Result<PKey, &'static str> {
// SAFETY: Extract public key with null checks and proper ownership
unsafe {
let spkac = spki.spkac()?;
let pubkey = spkac.pubkey;
if pubkey.is_null() {
return Err("No public key in SPKAC structure");
}
let pkey = aws_lc_sys::X509_PUBKEY_get(pubkey);
PKey::from_ptr(pkey).ok_or("Failed to extract public key from X509_PUBKEY")
}
}
/// Exports the public key from the SPKAC data in PEM format.
pub fn export_public_key(data: &[u8]) -> Option<Vec<u8>> {
let spki = NetscapeSpki::from_base64(data).ok()?;
let pkey = spki.get_public_key().ok()?;
let bio = Bio::new_memory().ok()?;
// SAFETY: Write public key to BIO in PEM format, check result
unsafe {
let result = aws_lc_sys::PEM_write_bio_PUBKEY(bio.as_ptr(), pkey.as_ptr());
if result <= 0 {
return None;
}
}
bio.get_contents().ok()
}
/// Exports the challenge string from the SPKAC data.
pub fn export_challenge(data: &[u8]) -> Option<Vec<u8>> {
let spki = NetscapeSpki::from_base64(data).ok()?;
spki.get_challenge().ok()
}
#[cfg(test)]
mod tests {
use crate::spki::verify_spkac;
#[test]
fn test_md_spkac() {
// md4 and md5 based signatures are not supported.
// https://github.com/aws/aws-lc/commit/7e28b9ee89d85fbc80b69bc0eeb0070de81ac563
let spkac_data = br#"MIICUzCCATswggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC33FiIiiexwLe/P8DZx5HsqFlmUO7/lvJ7necJVNwqdZ3ax5jpQB0p6uxfqeOvzcN3k5V7UFb/Am+nkSNZMAZhsWzCU2Z4Pjh50QYz3f0Hour7/yIGStOLyYY3hgLK2K8TbhgjQPhdkw9+QtKlpvbL8fLgONAoGrVOFnRQGcr70iFffsm79mgZhKVMgYiHPJqJgGHvCtkGg9zMgS7p63+Q3ZWedtFS2RhMX3uCBy/mH6EOlRCNBbRmA4xxNzyf5GQaki3T+Iz9tOMjdPP+CwV2LqEdylmBuik8vrfTb3qIHLKKBAI8lXN26wWtA3kN4L7NP+cbKlCRlqctvhmylLH1AgMBAAEWE3RoaXMtaXMtYS1jaGFsbGVuZ2UwDQYJKoZIhvcNAQEEBQADggEBAIozmeW1kfDfAVwRQKileZGLRGCD7AjdHLYEe16xTBPve8Af1bDOyuWsAm4qQLYA4FAFROiKeGqxCtIErEvm87/09tCfF1My/1Uj+INjAk39DK9J9alLlTsrwSgd1lb3YlXY7TyitCmh7iXLo4pVhA2chNA3njiMq3CUpSvGbpzrESL2dv97lv590gUD988wkTDVyYsf0T8+X0Kww3AgPWGji+2f2i5/jTfD/s1lK1nqi7ZxFm0pGZoy1MJ51SCEy7Y82ajroI+5786nC02mo9ak7samca4YDZOoxN4d3tax4B/HDF5dqJSm1/31xYLDTfujCM5FkSjRc4m6hnriEkc="#;
assert!(!verify_spkac(spkac_data));
}
#[test]
fn test_spkac_verify() {
let spkac = b"MIICUzCCATswggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCXzfKgGnkkOF7+VwMzGpiWy5nna/VGJOfPBsCVg5WooJHN9nAFyqLxoV0WyhwvIdHhIgcTX2L4BHRa+4B0zb4stRHK02ZknJvionK4kBfa+k7Q4DzasW3ulLCTXPLVBKzW9QSzE4Wult17BX6uSUy3Bpr/Nuk6B4Ja3JnFpdSYmJbWP55kRONFBZYPCXr7T8k6hzEHcevFE/PUi6IU+LKiwyGH5KXAUzRbMtqbZLn/rEAmEBxmv/z/+shAwiRE8s9RqBi+pVdwqWdw6ibNkbM7G3j4CMyfAk7EOpGf5loRIrVWB4XrVYWb2EQ6sd9LfiQ9GwqlFYw006MUo6nxoEtNAgMBAAEWE3RoaXMtaXMtYS1jaGFsbGVuZ2UwDQYJKoZIhvcNAQELBQADggEBAHUw1UoZjG7TCb/JhFo5p8XIFeizGEwYoqttBoVTQ+MeCfnNoLLeAyId0atb2jPnYsI25Z/PHHV1N9t0L/NelY3rZC/Z00Wx8IGeslnGXXbqwnp36Umb0r2VmxTr8z1QaToGyOQXp4Xor9qbQFoANIivyVUYsuqJ1FnDJCC/jBPo4IWiQbTst331v2fiVdV+/XUh9AIjcm4085b65HjFwLxDeWhbgAZ+UfhqBbTVA1K8uUqS8e3gbeaNstZvnclxZ3PlHSk8v1RdIG4e5ThTOwPH5u/7KKeafn9SwgY/Q8KqaVfHHCv1IeVlijamjnyFhWc35kGlBUNgLOnWAOE3GsM=";
assert!(verify_spkac(spkac));
}
#[test]
fn test_spkac_empty() {
let empty_spkac = b"";
assert!(!verify_spkac(empty_spkac));
}
}
| rust | MIT | 7222e85d435b977de1ab810db067b86f29e6444f | 2026-01-04T15:31:58.521149Z | false |
tokio-rs/tokio | https://github.com/tokio-rs/tokio/blob/41d1877689f8669902b003a6affce60bdfeb3025/tokio-macros/src/select.rs | tokio-macros/src/select.rs | use proc_macro::{TokenStream, TokenTree};
use proc_macro2::Span;
use quote::quote;
use syn::{parse::Parser, Ident};
pub(crate) fn declare_output_enum(input: TokenStream) -> TokenStream {
// passed in is: `(_ _ _)` with one `_` per branch
let branches = match input.into_iter().next() {
Some(TokenTree::Group(group)) => group.stream().into_iter().count(),
_ => panic!("unexpected macro input"),
};
let variants = (0..branches)
.map(|num| Ident::new(&format!("_{num}"), Span::call_site()))
.collect::<Vec<_>>();
// Use a bitfield to track which futures completed
let mask = Ident::new(
if branches <= 8 {
"u8"
} else if branches <= 16 {
"u16"
} else if branches <= 32 {
"u32"
} else if branches <= 64 {
"u64"
} else {
panic!("up to 64 branches supported");
},
Span::call_site(),
);
TokenStream::from(quote! {
pub(super) enum Out<#( #variants ),*> {
#( #variants(#variants), )*
// Include a `Disabled` variant signifying that all select branches
// failed to resolve.
Disabled,
}
pub(super) type Mask = #mask;
})
}
pub(crate) fn clean_pattern_macro(input: TokenStream) -> TokenStream {
// If this isn't a pattern, we return the token stream as-is. The select!
// macro is using it in a location requiring a pattern, so an error will be
// emitted there.
let mut input: syn::Pat = match syn::Pat::parse_single.parse(input.clone()) {
Ok(it) => it,
Err(_) => return input,
};
clean_pattern(&mut input);
quote::ToTokens::into_token_stream(input).into()
}
// Removes any occurrences of ref or mut in the provided pattern.
fn clean_pattern(pat: &mut syn::Pat) {
match pat {
syn::Pat::Lit(_literal) => {}
syn::Pat::Macro(_macro) => {}
syn::Pat::Path(_path) => {}
syn::Pat::Range(_range) => {}
syn::Pat::Rest(_rest) => {}
syn::Pat::Verbatim(_tokens) => {}
syn::Pat::Wild(_underscore) => {}
syn::Pat::Ident(ident) => {
ident.by_ref = None;
ident.mutability = None;
if let Some((_at, pat)) = &mut ident.subpat {
clean_pattern(&mut *pat);
}
}
syn::Pat::Or(or) => {
for case in &mut or.cases {
clean_pattern(case);
}
}
syn::Pat::Slice(slice) => {
for elem in &mut slice.elems {
clean_pattern(elem);
}
}
syn::Pat::Struct(struct_pat) => {
for field in &mut struct_pat.fields {
clean_pattern(&mut field.pat);
}
}
syn::Pat::Tuple(tuple) => {
for elem in &mut tuple.elems {
clean_pattern(elem);
}
}
syn::Pat::TupleStruct(tuple) => {
for elem in &mut tuple.elems {
clean_pattern(elem);
}
}
syn::Pat::Reference(reference) => {
reference.mutability = None;
clean_pattern(&mut reference.pat);
}
syn::Pat::Type(type_pat) => {
clean_pattern(&mut type_pat.pat);
}
_ => {}
}
}
| rust | MIT | 41d1877689f8669902b003a6affce60bdfeb3025 | 2026-01-04T15:33:40.250594Z | false |
tokio-rs/tokio | https://github.com/tokio-rs/tokio/blob/41d1877689f8669902b003a6affce60bdfeb3025/tokio-macros/src/lib.rs | tokio-macros/src/lib.rs | #![allow(clippy::needless_doctest_main)]
#![warn(
missing_debug_implementations,
missing_docs,
rust_2018_idioms,
unreachable_pub
)]
#![doc(test(
no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))
))]
//! Macros for use with Tokio
mod entry;
mod select;
use proc_macro::TokenStream;
/// Marks async function to be executed by the selected runtime. This macro
/// helps set up a `Runtime` without requiring the user to use
/// [Runtime](../tokio/runtime/struct.Runtime.html) or
/// [Builder](../tokio/runtime/struct.Builder.html) directly.
///
/// Note: This macro is designed to be simplistic and targets applications that
/// do not require a complex setup. If the provided functionality is not
/// sufficient, you may be interested in using
/// [Builder](../tokio/runtime/struct.Builder.html), which provides a more
/// powerful interface.
///
/// Note: This macro can be used on any function and not just the `main`
/// function. Using it on a non-main function makes the function behave as if it
/// was synchronous by starting a new runtime each time it is called. If the
/// function is called often, it is preferable to create the runtime using the
/// runtime builder so the runtime can be reused across calls.
///
/// # Non-worker async function
///
/// Note that the async function marked with this macro does not run as a
/// worker. The expectation is that other tasks are spawned by the function here.
/// Awaiting on other futures from the function provided here will not
/// perform as fast as those spawned as workers.
///
/// # Runtime flavors
///
/// The macro can be configured with a `flavor` parameter to select
/// different runtime configurations.
///
/// ## Multi-threaded
///
/// To use the multi-threaded runtime, the macro can be configured using
///
/// ```
/// #[tokio::main(flavor = "multi_thread", worker_threads = 10)]
/// # async fn main() {}
/// ```
///
/// The `worker_threads` option configures the number of worker threads, and
/// defaults to the number of cpus on the system. This is the default flavor.
///
/// Note: The multi-threaded runtime requires the `rt-multi-thread` feature
/// flag.
///
/// ## Current-thread
///
/// To use the single-threaded runtime known as the `current_thread` runtime,
/// the macro can be configured using
///
/// ```rust
/// #[tokio::main(flavor = "current_thread")]
/// # async fn main() {}
/// ```
///
/// ## Local
///
/// [Unstable API][unstable] only.
///
/// To use the [local runtime], the macro can be configured using
///
/// ```rust
/// # #[cfg(tokio_unstable)]
/// #[tokio::main(flavor = "local")]
/// # async fn main() {}
/// # #[cfg(not(tokio_unstable))]
/// # fn main() {}
/// ```
///
/// # Function arguments
///
/// Arguments are allowed for any functions, aside from `main` which is special.
///
/// # Usage
///
/// ## Using the multi-threaded runtime
///
/// ```rust
/// #[tokio::main]
/// async fn main() {
/// println!("Hello world");
/// }
/// ```
///
/// Equivalent code not using `#[tokio::main]`
///
/// ```rust
/// fn main() {
/// tokio::runtime::Builder::new_multi_thread()
/// .enable_all()
/// .build()
/// .unwrap()
/// .block_on(async {
/// println!("Hello world");
/// })
/// }
/// ```
///
/// ## Using the current-thread runtime
///
/// The basic scheduler is single-threaded.
///
/// ```rust
/// #[tokio::main(flavor = "current_thread")]
/// async fn main() {
/// println!("Hello world");
/// }
/// ```
///
/// Equivalent code not using `#[tokio::main]`
///
/// ```rust
/// fn main() {
/// tokio::runtime::Builder::new_current_thread()
/// .enable_all()
/// .build()
/// .unwrap()
/// .block_on(async {
/// println!("Hello world");
/// })
/// }
/// ```
///
/// ## Using the local runtime
///
/// Available in the [unstable API][unstable] only.
///
/// The [local runtime] is similar to the current-thread runtime but
/// supports [`task::spawn_local`](../tokio/task/fn.spawn_local.html).
///
/// ```rust
/// # #[cfg(tokio_unstable)]
/// #[tokio::main(flavor = "local")]
/// async fn main() {
/// println!("Hello world");
/// }
/// # #[cfg(not(tokio_unstable))]
/// # fn main() {}
/// ```
///
/// Equivalent code not using `#[tokio::main]`
///
/// ```rust
/// # #[cfg(tokio_unstable)]
/// fn main() {
/// tokio::runtime::Builder::new_current_thread()
/// .enable_all()
/// .build_local(tokio::runtime::LocalOptions::default())
/// .unwrap()
/// .block_on(async {
/// println!("Hello world");
/// })
/// }
/// # #[cfg(not(tokio_unstable))]
/// # fn main() {}
/// ```
///
///
/// ## Set number of worker threads
///
/// ```rust
/// #[tokio::main(worker_threads = 2)]
/// async fn main() {
/// println!("Hello world");
/// }
/// ```
///
/// Equivalent code not using `#[tokio::main]`
///
/// ```rust
/// fn main() {
/// tokio::runtime::Builder::new_multi_thread()
/// .worker_threads(2)
/// .enable_all()
/// .build()
/// .unwrap()
/// .block_on(async {
/// println!("Hello world");
/// })
/// }
/// ```
///
/// ## Configure the runtime to start with time paused
///
/// ```rust
/// #[tokio::main(flavor = "current_thread", start_paused = true)]
/// async fn main() {
/// println!("Hello world");
/// }
/// ```
///
/// Equivalent code not using `#[tokio::main]`
///
/// ```rust
/// fn main() {
/// tokio::runtime::Builder::new_current_thread()
/// .enable_all()
/// .start_paused(true)
/// .build()
/// .unwrap()
/// .block_on(async {
/// println!("Hello world");
/// })
/// }
/// ```
///
/// Note that `start_paused` requires the `test-util` feature to be enabled.
///
/// ## Rename package
///
/// ```rust
/// use tokio as tokio1;
///
/// #[tokio1::main(crate = "tokio1")]
/// async fn main() {
/// println!("Hello world");
/// }
/// ```
///
/// Equivalent code not using `#[tokio::main]`
///
/// ```rust
/// use tokio as tokio1;
///
/// fn main() {
/// tokio1::runtime::Builder::new_multi_thread()
/// .enable_all()
/// .build()
/// .unwrap()
/// .block_on(async {
/// println!("Hello world");
/// })
/// }
/// ```
///
/// ## Configure unhandled panic behavior
///
/// Available options are `shutdown_runtime` and `ignore`. For more details, see
/// [`Builder::unhandled_panic`].
///
/// This option is only compatible with the `current_thread` runtime.
///
/// ```no_run
/// #[cfg(tokio_unstable)]
/// #[tokio::main(flavor = "current_thread", unhandled_panic = "shutdown_runtime")]
/// async fn main() {
/// let _ = tokio::spawn(async {
/// panic!("This panic will shutdown the runtime.");
/// }).await;
/// }
/// # #[cfg(not(tokio_unstable))]
/// # fn main() { }
/// ```
///
/// Equivalent code not using `#[tokio::main]`
///
/// ```no_run
/// #[cfg(tokio_unstable)]
/// fn main() {
/// tokio::runtime::Builder::new_current_thread()
/// .enable_all()
/// .unhandled_panic(tokio::runtime::UnhandledPanic::ShutdownRuntime)
/// .build()
/// .unwrap()
/// .block_on(async {
/// let _ = tokio::spawn(async {
/// panic!("This panic will shutdown the runtime.");
/// }).await;
/// })
/// }
/// # #[cfg(not(tokio_unstable))]
/// # fn main() { }
/// ```
///
/// **Note**: This option depends on Tokio's [unstable API][unstable]. See [the
/// documentation on unstable features][unstable] for details on how to enable
/// Tokio's unstable features.
///
/// [`Builder::unhandled_panic`]: ../tokio/runtime/struct.Builder.html#method.unhandled_panic
/// [unstable]: ../tokio/index.html#unstable-features
/// [local runtime]: ../tokio/runtime/struct.LocalRuntime.html
#[proc_macro_attribute]
pub fn main(args: TokenStream, item: TokenStream) -> TokenStream {
entry::main(args.into(), item.into(), true).into()
}
/// Marks async function to be executed by selected runtime. This macro helps set up a `Runtime`
/// without requiring the user to use [Runtime](../tokio/runtime/struct.Runtime.html) or
/// [Builder](../tokio/runtime/struct.Builder.html) directly.
///
/// ## Function arguments:
///
/// Arguments are allowed for any functions aside from `main` which is special
///
/// ## Usage
///
/// ### Using default
///
/// ```rust
/// #[tokio::main(flavor = "current_thread")]
/// async fn main() {
/// println!("Hello world");
/// }
/// ```
///
/// Equivalent code not using `#[tokio::main]`
///
/// ```rust
/// fn main() {
/// tokio::runtime::Builder::new_current_thread()
/// .enable_all()
/// .build()
/// .unwrap()
/// .block_on(async {
/// println!("Hello world");
/// })
/// }
/// ```
///
/// ### Rename package
///
/// ```rust
/// use tokio as tokio1;
///
/// #[tokio1::main(crate = "tokio1")]
/// async fn main() {
/// println!("Hello world");
/// }
/// ```
///
/// Equivalent code not using `#[tokio::main]`
///
/// ```rust
/// use tokio as tokio1;
///
/// fn main() {
/// tokio1::runtime::Builder::new_multi_thread()
/// .enable_all()
/// .build()
/// .unwrap()
/// .block_on(async {
/// println!("Hello world");
/// })
/// }
/// ```
#[proc_macro_attribute]
pub fn main_rt(args: TokenStream, item: TokenStream) -> TokenStream {
entry::main(args.into(), item.into(), false).into()
}
/// Marks async function to be executed by runtime, suitable to test environment.
/// This macro helps set up a `Runtime` without requiring the user to use
/// [Runtime](../tokio/runtime/struct.Runtime.html) or
/// [Builder](../tokio/runtime/struct.Builder.html) directly.
///
/// Note: This macro is designed to be simplistic and targets applications that
/// do not require a complex setup. If the provided functionality is not
/// sufficient, you may be interested in using
/// [Builder](../tokio/runtime/struct.Builder.html), which provides a more
/// powerful interface.
///
/// # Multi-threaded runtime
///
/// To use the multi-threaded runtime, the macro can be configured using
///
/// ```no_run
/// #[tokio::test(flavor = "multi_thread", worker_threads = 1)]
/// async fn my_test() {
/// assert!(true);
/// }
/// ```
///
/// The `worker_threads` option configures the number of worker threads, and
/// defaults to the number of cpus on the system.
///
/// Note: The multi-threaded runtime requires the `rt-multi-thread` feature
/// flag.
///
/// # Current thread runtime
///
/// The default test runtime is single-threaded. Each test gets a
/// separate current-thread runtime.
///
/// ```no_run
/// #[tokio::test]
/// async fn my_test() {
/// assert!(true);
/// }
/// ```
///
/// ## Usage
///
/// ### Using the multi-thread runtime
///
/// ```no_run
/// #[tokio::test(flavor = "multi_thread")]
/// async fn my_test() {
/// assert!(true);
/// }
/// ```
///
/// Equivalent code not using `#[tokio::test]`
///
/// ```no_run
/// #[test]
/// fn my_test() {
/// tokio::runtime::Builder::new_multi_thread()
/// .enable_all()
/// .build()
/// .unwrap()
/// .block_on(async {
/// assert!(true);
/// })
/// }
/// ```
///
/// ### Using current thread runtime
///
/// ```no_run
/// #[tokio::test]
/// async fn my_test() {
/// assert!(true);
/// }
/// ```
///
/// Equivalent code not using `#[tokio::test]`
///
/// ```no_run
/// #[test]
/// fn my_test() {
/// tokio::runtime::Builder::new_current_thread()
/// .enable_all()
/// .build()
/// .unwrap()
/// .block_on(async {
/// assert!(true);
/// })
/// }
/// ```
///
/// ### Set number of worker threads
///
/// ```no_run
/// #[tokio::test(flavor = "multi_thread", worker_threads = 2)]
/// async fn my_test() {
/// assert!(true);
/// }
/// ```
///
/// Equivalent code not using `#[tokio::test]`
///
/// ```no_run
/// #[test]
/// fn my_test() {
/// tokio::runtime::Builder::new_multi_thread()
/// .worker_threads(2)
/// .enable_all()
/// .build()
/// .unwrap()
/// .block_on(async {
/// assert!(true);
/// })
/// }
/// ```
///
/// ### Configure the runtime to start with time paused
///
/// ```no_run
/// #[tokio::test(start_paused = true)]
/// async fn my_test() {
/// assert!(true);
/// }
/// ```
///
/// Equivalent code not using `#[tokio::test]`
///
/// ```no_run
/// #[test]
/// fn my_test() {
/// tokio::runtime::Builder::new_current_thread()
/// .enable_all()
/// .start_paused(true)
/// .build()
/// .unwrap()
/// .block_on(async {
/// assert!(true);
/// })
/// }
/// ```
///
/// Note that `start_paused` requires the `test-util` feature to be enabled.
///
/// ### Rename package
///
/// ```rust
/// use tokio as tokio1;
///
/// #[tokio1::test(crate = "tokio1")]
/// async fn my_test() {
/// println!("Hello world");
/// }
/// ```
///
/// ### Configure unhandled panic behavior
///
/// Available options are `shutdown_runtime` and `ignore`. For more details, see
/// [`Builder::unhandled_panic`].
///
/// This option is only compatible with the `current_thread` runtime.
///
/// ```no_run
/// #[cfg(tokio_unstable)]
/// #[tokio::test(flavor = "current_thread", unhandled_panic = "shutdown_runtime")]
/// async fn my_test() {
/// let _ = tokio::spawn(async {
/// panic!("This panic will shutdown the runtime.");
/// }).await;
/// }
///
/// # fn main() { }
/// ```
///
/// Equivalent code not using `#[tokio::test]`
///
/// ```no_run
/// #[cfg(tokio_unstable)]
/// #[test]
/// fn my_test() {
/// tokio::runtime::Builder::new_current_thread()
/// .enable_all()
/// .unhandled_panic(UnhandledPanic::ShutdownRuntime)
/// .build()
/// .unwrap()
/// .block_on(async {
/// let _ = tokio::spawn(async {
/// panic!("This panic will shutdown the runtime.");
/// }).await;
/// })
/// }
///
/// # fn main() { }
/// ```
///
/// **Note**: This option depends on Tokio's [unstable API][unstable]. See [the
/// documentation on unstable features][unstable] for details on how to enable
/// Tokio's unstable features.
///
/// [`Builder::unhandled_panic`]: ../tokio/runtime/struct.Builder.html#method.unhandled_panic
/// [unstable]: ../tokio/index.html#unstable-features
#[proc_macro_attribute]
pub fn test(args: TokenStream, item: TokenStream) -> TokenStream {
    // `true`: this variant is compiled when `rt-multi-thread` is enabled,
    // so `flavor = "multi_thread"` is permitted.
    entry::test(args.into(), item.into(), true).into()
}
/// Marks async function to be executed by runtime, suitable to test environment
///
/// ## Usage
///
/// ```no_run
/// #[tokio::test]
/// async fn my_test() {
/// assert!(true);
/// }
/// ```
#[proc_macro_attribute]
pub fn test_rt(args: TokenStream, item: TokenStream) -> TokenStream {
    // `false`: compiled when only the `rt` feature is enabled, so the
    // `flavor = "multi_thread"` option is rejected with an error.
    entry::test(args.into(), item.into(), false).into()
}
/// Always fails with the error message below.
/// ```text
/// The #[tokio::main] macro requires rt or rt-multi-thread.
/// ```
#[proc_macro_attribute]
pub fn main_fail(_args: TokenStream, _item: TokenStream) -> TokenStream {
    // Stand-in for `#[tokio::main]` when neither `rt` nor `rt-multi-thread`
    // is enabled: expands to a compile error instead of a runtime shim.
    syn::Error::new(
        proc_macro2::Span::call_site(),
        "The #[tokio::main] macro requires rt or rt-multi-thread.",
    )
    .to_compile_error()
    .into()
}
/// Always fails with the error message below.
/// ```text
/// The #[tokio::test] macro requires rt or rt-multi-thread.
/// ```
#[proc_macro_attribute]
pub fn test_fail(_args: TokenStream, _item: TokenStream) -> TokenStream {
    // Stand-in for `#[tokio::test]` when no runtime feature is enabled:
    // expands to a compile error instead of a runtime shim.
    syn::Error::new(
        proc_macro2::Span::call_site(),
        "The #[tokio::test] macro requires rt or rt-multi-thread.",
    )
    .to_compile_error()
    .into()
}
/// Implementation detail of the `select!` macro. This macro is **not** intended
/// to be used as part of the public API and is permitted to change.
#[proc_macro]
#[doc(hidden)]
pub fn select_priv_declare_output_enum(input: TokenStream) -> TokenStream {
    // Thin forwarder into the `select` module's implementation.
    select::declare_output_enum(input)
}
/// Implementation detail of the `select!` macro. This macro is **not** intended
/// to be used as part of the public API and is permitted to change.
#[proc_macro]
#[doc(hidden)]
pub fn select_priv_clean_pattern(input: TokenStream) -> TokenStream {
    // Thin forwarder into the `select` module's implementation.
    select::clean_pattern_macro(input)
}
| rust | MIT | 41d1877689f8669902b003a6affce60bdfeb3025 | 2026-01-04T15:33:40.250594Z | false |
tokio-rs/tokio | https://github.com/tokio-rs/tokio/blob/41d1877689f8669902b003a6affce60bdfeb3025/tokio-macros/src/entry.rs | tokio-macros/src/entry.rs | use proc_macro2::{Span, TokenStream, TokenTree};
use quote::{quote, quote_spanned, ToTokens};
use syn::parse::{Parse, ParseStream, Parser};
use syn::{braced, Attribute, Ident, Path, Signature, Visibility};
// syn::AttributeArgs does not implement syn::Parse
type AttributeArgs = syn::punctuated::Punctuated<syn::Meta, syn::Token![,]>;
#[derive(Clone, Copy, PartialEq)]
enum RuntimeFlavor {
    /// `flavor = "current_thread"`.
    CurrentThread,
    /// `flavor = "multi_thread"` (needs the `rt-multi-thread` feature).
    Threaded,
    /// `flavor = "local"` (needs `tokio_unstable`; see `parse_knobs`).
    Local,
}

impl RuntimeFlavor {
    /// Parse the `flavor = "..."` attribute value. Legacy flavor names are
    /// recognized and rejected with a rename hint.
    fn from_str(s: &str) -> Result<RuntimeFlavor, String> {
        match s {
            "current_thread" => Ok(RuntimeFlavor::CurrentThread),
            "multi_thread" => Ok(RuntimeFlavor::Threaded),
            "local" => Ok(RuntimeFlavor::Local),
            "single_thread" => Err("The single threaded runtime flavor is called `current_thread`.".to_string()),
            "basic_scheduler" => Err("The `basic_scheduler` runtime flavor has been renamed to `current_thread`.".to_string()),
            "threaded_scheduler" => Err("The `threaded_scheduler` runtime flavor has been renamed to `multi_thread`.".to_string()),
            _ => Err(format!("No such runtime flavor `{s}`. The runtime flavors are `current_thread`, `local`, and `multi_thread`.")),
        }
    }
}
/// Behavior requested via `unhandled_panic = "..."` for task panics.
#[derive(Clone, Copy, PartialEq)]
enum UnhandledPanic {
    Ignore,
    ShutdownRuntime,
}

impl UnhandledPanic {
    /// Parse the attribute's string form.
    fn from_str(s: &str) -> Result<UnhandledPanic, String> {
        match s {
            "ignore" => Ok(UnhandledPanic::Ignore),
            "shutdown_runtime" => Ok(UnhandledPanic::ShutdownRuntime),
            _ => Err(format!("No such unhandled panic behavior `{s}`. The unhandled panic behaviors are `ignore` and `shutdown_runtime`.")),
        }
    }

    /// Render as a `runtime::UnhandledPanic` expression, using `crate_path`
    /// to name the tokio crate (honors `crate = "..."` renames).
    fn into_tokens(self, crate_path: &TokenStream) -> TokenStream {
        match self {
            UnhandledPanic::Ignore => quote! { #crate_path::runtime::UnhandledPanic::Ignore },
            UnhandledPanic::ShutdownRuntime => {
                quote! { #crate_path::runtime::UnhandledPanic::ShutdownRuntime }
            }
        }
    }
}
/// Validated macro configuration, produced by `Configuration::build`.
struct FinalConfig {
    flavor: RuntimeFlavor,
    /// Only `Some` for the `multi_thread` flavor (see `build`).
    worker_threads: Option<usize>,
    /// Only `Some` for `current_thread`/`local` flavors.
    start_paused: Option<bool>,
    /// Path used to refer to the tokio crate in generated code.
    crate_name: Option<Path>,
    /// Only `Some` for `current_thread`/`local` flavors.
    unhandled_panic: Option<UnhandledPanic>,
}
/// Config used in case of the attribute not being able to build a valid config.
/// Expansion still proceeds with this so IDE features (completion etc.) keep
/// working on the item; the real error is appended separately.
const DEFAULT_ERROR_CONFIG: FinalConfig = FinalConfig {
    flavor: RuntimeFlavor::CurrentThread,
    worker_threads: None,
    start_paused: None,
    crate_name: None,
    unhandled_panic: None,
};
/// Mutable accumulator for attribute arguments; `build` validates the
/// combination and produces a `FinalConfig`.
struct Configuration {
    /// Whether the `rt-multi-thread` feature is compiled in.
    rt_multi_thread_available: bool,
    /// Flavor used when none is given: current_thread for tests,
    /// multi_thread for main (see `new`).
    default_flavor: RuntimeFlavor,
    flavor: Option<RuntimeFlavor>,
    /// Value plus the argument's span, kept for targeted error reporting.
    worker_threads: Option<(usize, Span)>,
    start_paused: Option<(bool, Span)>,
    is_test: bool,
    crate_name: Option<Path>,
    unhandled_panic: Option<(UnhandledPanic, Span)>,
}
impl Configuration {
    /// Empty configuration; the default flavor depends on whether we are
    /// expanding `#[tokio::test]` or `#[tokio::main]`.
    fn new(is_test: bool, rt_multi_thread: bool) -> Self {
        Configuration {
            rt_multi_thread_available: rt_multi_thread,
            default_flavor: match is_test {
                true => RuntimeFlavor::CurrentThread,
                false => RuntimeFlavor::Threaded,
            },
            flavor: None,
            worker_threads: None,
            start_paused: None,
            is_test,
            crate_name: None,
            unhandled_panic: None,
        }
    }

    /// Record `flavor = "..."`; duplicates and unknown names are errors.
    fn set_flavor(&mut self, runtime: syn::Lit, span: Span) -> Result<(), syn::Error> {
        if self.flavor.is_some() {
            return Err(syn::Error::new(span, "`flavor` set multiple times."));
        }

        let runtime_str = parse_string(runtime, span, "flavor")?;
        let runtime =
            RuntimeFlavor::from_str(&runtime_str).map_err(|err| syn::Error::new(span, err))?;
        self.flavor = Some(runtime);
        Ok(())
    }

    /// Record `worker_threads = N`; must be a positive integer literal.
    fn set_worker_threads(
        &mut self,
        worker_threads: syn::Lit,
        span: Span,
    ) -> Result<(), syn::Error> {
        if self.worker_threads.is_some() {
            return Err(syn::Error::new(
                span,
                "`worker_threads` set multiple times.",
            ));
        }

        let worker_threads = parse_int(worker_threads, span, "worker_threads")?;
        if worker_threads == 0 {
            return Err(syn::Error::new(span, "`worker_threads` may not be 0."));
        }
        self.worker_threads = Some((worker_threads, span));
        Ok(())
    }

    /// Record `start_paused = <bool>`; flavor compatibility is checked later
    /// in `build`.
    fn set_start_paused(&mut self, start_paused: syn::Lit, span: Span) -> Result<(), syn::Error> {
        if self.start_paused.is_some() {
            return Err(syn::Error::new(span, "`start_paused` set multiple times."));
        }

        let start_paused = parse_bool(start_paused, span, "start_paused")?;
        self.start_paused = Some((start_paused, span));
        Ok(())
    }

    /// Record `crate = "path"`, the path used to refer to tokio in generated
    /// code (supports renamed dependencies).
    fn set_crate_name(&mut self, name: syn::Lit, span: Span) -> Result<(), syn::Error> {
        if self.crate_name.is_some() {
            return Err(syn::Error::new(span, "`crate` set multiple times."));
        }
        let name_path = parse_path(name, span, "crate")?;
        self.crate_name = Some(name_path);
        Ok(())
    }

    /// Record `unhandled_panic = "..."`; flavor compatibility is checked
    /// later in `build`.
    fn set_unhandled_panic(
        &mut self,
        unhandled_panic: syn::Lit,
        span: Span,
    ) -> Result<(), syn::Error> {
        if self.unhandled_panic.is_some() {
            return Err(syn::Error::new(
                span,
                "`unhandled_panic` set multiple times.",
            ));
        }

        let unhandled_panic = parse_string(unhandled_panic, span, "unhandled_panic")?;
        let unhandled_panic =
            UnhandledPanic::from_str(&unhandled_panic).map_err(|err| syn::Error::new(span, err))?;
        self.unhandled_panic = Some((unhandled_panic, span));
        Ok(())
    }

    /// Attribute name used in diagnostics.
    fn macro_name(&self) -> &'static str {
        if self.is_test {
            "tokio::test"
        } else {
            "tokio::main"
        }
    }

    /// Validate flavor/option combinations and produce the final config.
    fn build(&self) -> Result<FinalConfig, syn::Error> {
        use RuntimeFlavor as F;

        let flavor = self.flavor.unwrap_or(self.default_flavor);

        // `worker_threads` is only meaningful on the multi-threaded runtime,
        // which in turn requires the `rt-multi-thread` feature.
        let worker_threads = match (flavor, self.worker_threads) {
            (F::CurrentThread | F::Local, Some((_, worker_threads_span))) => {
                let msg = format!(
                    "The `worker_threads` option requires the `multi_thread` runtime flavor. Use `#[{}(flavor = \"multi_thread\")]`",
                    self.macro_name(),
                );
                return Err(syn::Error::new(worker_threads_span, msg));
            }
            (F::CurrentThread | F::Local, None) => None,
            (F::Threaded, worker_threads) if self.rt_multi_thread_available => {
                worker_threads.map(|(val, _span)| val)
            }
            (F::Threaded, _) => {
                // Distinguish "defaulted to multi_thread" from "explicitly
                // requested multi_thread" for a clearer error.
                let msg = if self.flavor.is_none() {
                    "The default runtime flavor is `multi_thread`, but the `rt-multi-thread` feature is disabled."
                } else {
                    "The runtime flavor `multi_thread` requires the `rt-multi-thread` feature."
                };
                return Err(syn::Error::new(Span::call_site(), msg));
            }
        };

        // `start_paused` requires a current-thread (or local) runtime.
        let start_paused = match (flavor, self.start_paused) {
            (F::Threaded, Some((_, start_paused_span))) => {
                let msg = format!(
                    "The `start_paused` option requires the `current_thread` runtime flavor. Use `#[{}(flavor = \"current_thread\")]`",
                    self.macro_name(),
                );
                return Err(syn::Error::new(start_paused_span, msg));
            }
            (F::CurrentThread | F::Local, Some((start_paused, _))) => Some(start_paused),
            (_, None) => None,
        };

        // Same restriction for `unhandled_panic`.
        let unhandled_panic = match (flavor, self.unhandled_panic) {
            (F::Threaded, Some((_, unhandled_panic_span))) => {
                let msg = format!(
                    "The `unhandled_panic` option requires the `current_thread` runtime flavor. Use `#[{}(flavor = \"current_thread\")]`",
                    self.macro_name(),
                );
                return Err(syn::Error::new(unhandled_panic_span, msg));
            }
            (F::CurrentThread | F::Local, Some((unhandled_panic, _))) => Some(unhandled_panic),
            (_, None) => None,
        };

        Ok(FinalConfig {
            crate_name: self.crate_name.clone(),
            flavor,
            worker_threads,
            start_paused,
            unhandled_panic,
        })
    }
}
fn parse_int(int: syn::Lit, span: Span, field: &str) -> Result<usize, syn::Error> {
match int {
syn::Lit::Int(lit) => match lit.base10_parse::<usize>() {
Ok(value) => Ok(value),
Err(e) => Err(syn::Error::new(
span,
format!("Failed to parse value of `{field}` as integer: {e}"),
)),
},
_ => Err(syn::Error::new(
span,
format!("Failed to parse value of `{field}` as integer."),
)),
}
}
fn parse_string(int: syn::Lit, span: Span, field: &str) -> Result<String, syn::Error> {
match int {
syn::Lit::Str(s) => Ok(s.value()),
syn::Lit::Verbatim(s) => Ok(s.to_string()),
_ => Err(syn::Error::new(
span,
format!("Failed to parse value of `{field}` as string."),
)),
}
}
fn parse_path(lit: syn::Lit, span: Span, field: &str) -> Result<Path, syn::Error> {
match lit {
syn::Lit::Str(s) => {
let err = syn::Error::new(
span,
format!(
"Failed to parse value of `{}` as path: \"{}\"",
field,
s.value()
),
);
s.parse::<syn::Path>().map_err(|_| err.clone())
}
_ => Err(syn::Error::new(
span,
format!("Failed to parse value of `{field}` as path."),
)),
}
}
fn parse_bool(bool: syn::Lit, span: Span, field: &str) -> Result<bool, syn::Error> {
match bool {
syn::Lit::Bool(b) => Ok(b.value),
_ => Err(syn::Error::new(
span,
format!("Failed to parse value of `{field}` as bool."),
)),
}
}
/// Walk the attribute arguments and fold them into a validated `FinalConfig`.
/// Rejects non-`async` functions up front so the error points at `fn`.
fn build_config(
    input: &ItemFn,
    args: AttributeArgs,
    is_test: bool,
    rt_multi_thread: bool,
) -> Result<FinalConfig, syn::Error> {
    if input.sig.asyncness.is_none() {
        let msg = "the `async` keyword is missing from the function declaration";
        return Err(syn::Error::new_spanned(input.sig.fn_token, msg));
    }

    let mut config = Configuration::new(is_test, rt_multi_thread);
    let macro_name = config.macro_name();

    for arg in args {
        match arg {
            // `name = value` arguments — the only supported shape.
            syn::Meta::NameValue(namevalue) => {
                let ident = namevalue
                    .path
                    .get_ident()
                    .ok_or_else(|| {
                        syn::Error::new_spanned(&namevalue, "Must have specified ident")
                    })?
                    .to_string()
                    .to_lowercase();
                let lit = match &namevalue.value {
                    syn::Expr::Lit(syn::ExprLit { lit, .. }) => lit,
                    expr => return Err(syn::Error::new_spanned(expr, "Must be a literal")),
                };
                match ident.as_str() {
                    "worker_threads" => {
                        config.set_worker_threads(lit.clone(), syn::spanned::Spanned::span(lit))?;
                    }
                    "flavor" => {
                        config.set_flavor(lit.clone(), syn::spanned::Spanned::span(lit))?;
                    }
                    "start_paused" => {
                        config.set_start_paused(lit.clone(), syn::spanned::Spanned::span(lit))?;
                    }
                    "core_threads" => {
                        let msg = "Attribute `core_threads` is renamed to `worker_threads`";
                        return Err(syn::Error::new_spanned(namevalue, msg));
                    }
                    "crate" => {
                        config.set_crate_name(lit.clone(), syn::spanned::Spanned::span(lit))?;
                    }
                    "unhandled_panic" => {
                        config
                            .set_unhandled_panic(lit.clone(), syn::spanned::Spanned::span(lit))?;
                    }
                    name => {
                        let msg = format!(
                            "Unknown attribute {name} is specified; expected one of: `flavor`, `worker_threads`, `start_paused`, `crate`, `unhandled_panic`",
                        );
                        return Err(syn::Error::new_spanned(namevalue, msg));
                    }
                }
            }
            // Bare path arguments are never valid; produce a targeted hint
            // for known option names and historical flavor names.
            syn::Meta::Path(path) => {
                let name = path
                    .get_ident()
                    .ok_or_else(|| syn::Error::new_spanned(&path, "Must have specified ident"))?
                    .to_string()
                    .to_lowercase();
                let msg = match name.as_str() {
                    "threaded_scheduler" | "multi_thread" => {
                        format!(
                            "Set the runtime flavor with #[{macro_name}(flavor = \"multi_thread\")]."
                        )
                    }
                    "basic_scheduler" | "current_thread" | "single_threaded" => {
                        format!(
                            "Set the runtime flavor with #[{macro_name}(flavor = \"current_thread\")]."
                        )
                    }
                    "flavor" | "worker_threads" | "start_paused" | "crate" | "unhandled_panic" => {
                        format!("The `{name}` attribute requires an argument.")
                    }
                    name => {
                        format!("Unknown attribute {name} is specified; expected one of: `flavor`, `worker_threads`, `start_paused`, `crate`, `unhandled_panic`.")
                    }
                };
                return Err(syn::Error::new_spanned(path, msg));
            }
            other => {
                return Err(syn::Error::new_spanned(
                    other,
                    "Unknown attribute inside the macro",
                ));
            }
        }
    }

    config.build()
}
/// Generate the final non-async function: the original body becomes an async
/// block driven by `block_on` on a runtime built according to `config`.
fn parse_knobs(mut input: ItemFn, is_test: bool, config: FinalConfig) -> TokenStream {
    input.sig.asyncness = None;

    // If type mismatch occurs, the current rustc points to the last statement.
    let (last_stmt_start_span, last_stmt_end_span) = {
        let mut last_stmt = input.stmts.last().cloned().unwrap_or_default().into_iter();

        // `Span` on stable Rust has a limitation that only points to the first
        // token, not the whole tokens. We can work around this limitation by
        // using the first/last span of the tokens like
        // `syn::Error::new_spanned` does.
        let start = last_stmt.next().map_or_else(Span::call_site, |t| t.span());
        let end = last_stmt.last().map_or(start, |t| t.span());
        (start, end)
    };

    // Tokens used to name the tokio crate; honors `crate = "..."` renames.
    let crate_path = config
        .crate_name
        .map(ToTokens::into_token_stream)
        .unwrap_or_else(|| Ident::new("tokio", last_stmt_start_span).into_token_stream());

    let mut rt = match config.flavor {
        RuntimeFlavor::CurrentThread | RuntimeFlavor::Local => {
            quote_spanned! {last_stmt_start_span=>
                #crate_path::runtime::Builder::new_current_thread()
            }
        }
        RuntimeFlavor::Threaded => quote_spanned! {last_stmt_start_span=>
            #crate_path::runtime::Builder::new_multi_thread()
        },
    };

    // Paired cfg conditions and compile_error! messages the generated code
    // must satisfy (currently only used by the `local` flavor).
    let mut checks = vec![];
    let mut errors = vec![];
    let build = if let RuntimeFlavor::Local = config.flavor {
        checks.push(quote! { tokio_unstable });
        errors.push("The local runtime flavor is only available when `tokio_unstable` is set.");
        quote_spanned! {last_stmt_start_span=> build_local(Default::default())}
    } else {
        quote_spanned! {last_stmt_start_span=> build()}
    };

    if let Some(v) = config.worker_threads {
        rt = quote_spanned! {last_stmt_start_span=> #rt.worker_threads(#v) };
    }
    if let Some(v) = config.start_paused {
        rt = quote_spanned! {last_stmt_start_span=> #rt.start_paused(#v) };
    }
    if let Some(v) = config.unhandled_panic {
        let unhandled_panic = v.into_tokens(&crate_path);
        rt = quote_spanned! {last_stmt_start_span=> #rt.unhandled_panic(#unhandled_panic) };
    }

    // The test macro adds the real `#[test]` attribute; `main` adds nothing.
    let generated_attrs = if is_test {
        quote! {
            #[::core::prelude::v1::test]
        }
    } else {
        quote! {}
    };

    let do_checks: TokenStream = checks
        .iter()
        .zip(&errors)
        .map(|(check, error)| {
            quote! {
                #[cfg(not(#check))]
                compile_error!(#error);
            }
        })
        .collect();

    let body_ident = quote! { body };
    // This explicit `return` is intentional. See tokio-rs/tokio#4636
    let last_block = quote_spanned! {last_stmt_end_span=>
        #do_checks
        #[cfg(all(#(#checks),*))]
        #[allow(clippy::expect_used, clippy::diverging_sub_expression, clippy::needless_return, clippy::unwrap_in_result)]
        {
            return #rt
                .enable_all()
                .#build
                .expect("Failed building the Runtime")
                .block_on(#body_ident);
        }
        #[cfg(not(all(#(#checks),*)))]
        {
            panic!("fell through checks")
        }
    };

    let body = input.body();

    // For test functions pin the body to the stack and use `Pin<&mut dyn
    // Future>` to reduce the amount of `Runtime::block_on` (and related
    // functions) copies we generate during compilation due to the generic
    // parameter `F` (the future to block on). This could have an impact on
    // performance, but because it's only for testing it's unlikely to be very
    // large.
    //
    // We don't do this for the main function as it should only be used once so
    // there will be no benefit.
    let body = if is_test {
        let output_type = match &input.sig.output {
            // For functions with no return value syn doesn't print anything,
            // but that doesn't work as `Output` for our boxed `Future`, so
            // default to `()` (the same type as the function output).
            syn::ReturnType::Default => quote! { () },
            syn::ReturnType::Type(_, ret_type) => quote! { #ret_type },
        };
        quote! {
            let body = async #body;
            #crate_path::pin!(body);
            let body: ::core::pin::Pin<&mut dyn ::core::future::Future<Output = #output_type>> = body;
        }
    } else {
        quote! {
            let body = async #body;
        }
    };

    input.into_tokens(generated_attrs, body, last_block)
}
/// Append the `compile_error!` expansion of `error` to `tokens`, so both the
/// (best-effort) item and the diagnostic are emitted together.
fn token_stream_with_error(mut tokens: TokenStream, error: syn::Error) -> TokenStream {
    let compile_error = error.into_compile_error();
    tokens.extend(compile_error);
    tokens
}
/// Expansion entry point for `#[tokio::main]`.
pub(crate) fn main(args: TokenStream, item: TokenStream, rt_multi_thread: bool) -> TokenStream {
    // If any of the steps for this macro fail, we still want to expand to an item that is as close
    // to the expected output as possible. This helps out IDEs such that completions and other
    // related features keep working.
    let input: ItemFn = match syn::parse2(item.clone()) {
        Ok(it) => it,
        Err(e) => return token_stream_with_error(item, e),
    };
    let config = if input.sig.ident == "main" && !input.sig.inputs.is_empty() {
        let msg = "the main function cannot accept arguments";
        Err(syn::Error::new_spanned(&input.sig.ident, msg))
    } else {
        AttributeArgs::parse_terminated
            .parse2(args)
            .and_then(|args| build_config(&input, args, false, rt_multi_thread))
    };

    match config {
        Ok(config) => parse_knobs(input, false, config),
        // On error, expand with the fallback config and attach the error so
        // the item stays well-formed for IDEs while the diagnostic is shown.
        Err(e) => token_stream_with_error(parse_knobs(input, false, DEFAULT_ERROR_CONFIG), e),
    }
}
// Check whether given attribute is a test attribute of forms:
// * `#[test]`
// * `#[core::prelude::*::test]` or `#[::core::prelude::*::test]`
// * `#[std::prelude::*::test]` or `#[::std::prelude::*::test]`
// Check whether given attribute is a test attribute of forms:
// * `#[test]`
// * `#[core::prelude::*::test]` or `#[::core::prelude::*::test]`
// * `#[std::prelude::*::test]` or `#[::std::prelude::*::test]`
fn is_test_attribute(attr: &Attribute) -> bool {
    let syn::Meta::Path(path) = &attr.meta else {
        return false;
    };

    // Bare `#[test]` with no leading `::` and no generic arguments.
    if path.leading_colon.is_none()
        && path.segments.len() == 1
        && path.segments[0].arguments.is_none()
        && path.segments[0].ident == "test"
    {
        return true;
    }

    // Prelude forms; `*` matches any prelude edition segment (e.g. `v1`).
    const CANDIDATES: [[&str; 4]; 2] = [
        ["core", "prelude", "*", "test"],
        ["std", "prelude", "*", "test"],
    ];
    if path.segments.len() != 4 {
        return false;
    }
    CANDIDATES.iter().any(|candidate| {
        path.segments
            .iter()
            .zip(candidate.iter().copied())
            .all(|(segment, expected)| {
                segment.arguments.is_none() && (expected == "*" || segment.ident == expected)
            })
    })
}
/// Expansion entry point for `#[tokio::test]`.
pub(crate) fn test(args: TokenStream, item: TokenStream, rt_multi_thread: bool) -> TokenStream {
    // If any of the steps for this macro fail, we still want to expand to an item that is as close
    // to the expected output as possible. This helps out IDEs such that completions and other
    // related features keep working.
    let input: ItemFn = match syn::parse2(item.clone()) {
        Ok(it) => it,
        Err(e) => return token_stream_with_error(item, e),
    };
    // Reject a pre-existing `#[test]`/prelude test attribute: the expansion
    // adds its own and two harnesses on one fn would conflict.
    let config = if let Some(attr) = input.attrs().find(|attr| is_test_attribute(attr)) {
        let msg = "second test attribute is supplied, consider removing or changing the order of your test attributes";
        Err(syn::Error::new_spanned(attr, msg))
    } else {
        AttributeArgs::parse_terminated
            .parse2(args)
            .and_then(|args| build_config(&input, args, true, rt_multi_thread))
    };

    match config {
        Ok(config) => parse_knobs(input, true, config),
        // Keep the item expandable for IDEs and attach the error alongside.
        Err(e) => token_stream_with_error(parse_knobs(input, true, DEFAULT_ERROR_CONFIG), e),
    }
}
/// A minimally-parsed function item: attributes, visibility and signature are
/// full syn nodes, but the body is kept as raw token streams (see the `Parse`
/// impl) to avoid deep statement parsing.
struct ItemFn {
    outer_attrs: Vec<Attribute>,
    vis: Visibility,
    sig: Signature,
    brace_token: syn::token::Brace,
    inner_attrs: Vec<Attribute>,
    /// Raw body statements; each includes its terminating `;` if present.
    stmts: Vec<proc_macro2::TokenStream>,
}
impl ItemFn {
    /// Access all attributes of the function item.
    fn attrs(&self) -> impl Iterator<Item = &Attribute> {
        self.outer_attrs.iter().chain(self.inner_attrs.iter())
    }

    /// Get the body of the function item in a manner so that it can be
    /// conveniently used with the `quote!` macro.
    fn body(&self) -> Body<'_> {
        Body {
            brace_token: self.brace_token,
            stmts: &self.stmts,
        }
    }

    /// Convert our local function item into a token stream.
    ///
    /// `generated_attrs` are attributes the macro adds (e.g. `#[test]`),
    /// `body` is the prologue that binds the async block, and `last_block`
    /// is the runtime-driving tail appended after it.
    fn into_tokens(
        self,
        generated_attrs: proc_macro2::TokenStream,
        body: proc_macro2::TokenStream,
        last_block: proc_macro2::TokenStream,
    ) -> TokenStream {
        let mut tokens = proc_macro2::TokenStream::new();
        // Outer attributes are simply streamed as-is.
        for attr in self.outer_attrs {
            attr.to_tokens(&mut tokens);
        }

        // Inner attributes require extra care, since they're not supported on
        // blocks (which is what we're expanded into) we instead lift them
        // outside of the function. This matches the behavior of `syn`.
        for mut attr in self.inner_attrs {
            attr.style = syn::AttrStyle::Outer;
            attr.to_tokens(&mut tokens);
        }

        // Add generated macros at the end, so macros processed later are aware of them.
        generated_attrs.to_tokens(&mut tokens);

        self.vis.to_tokens(&mut tokens);
        self.sig.to_tokens(&mut tokens);

        self.brace_token.surround(&mut tokens, |tokens| {
            body.to_tokens(tokens);
            last_block.to_tokens(tokens);
        });

        tokens
    }
}
impl Parse for ItemFn {
    #[inline]
    fn parse(input: ParseStream<'_>) -> syn::Result<Self> {
        // This parse implementation has been largely lifted from `syn`, with
        // the exception of:
        // * We don't have access to the plumbing necessary to parse inner
        //   attributes in-place.
        // * We do our own statements parsing to avoid recursively parsing
        //   entire statements and only look for the parts we're interested in.

        let outer_attrs = input.call(Attribute::parse_outer)?;
        let vis: Visibility = input.parse()?;
        let sig: Signature = input.parse()?;

        let content;
        let brace_token = braced!(content in input);
        let inner_attrs = Attribute::parse_inner(&content)?;

        let mut buf = proc_macro2::TokenStream::new();
        let mut stmts = Vec::new();

        while !content.is_empty() {
            // A top-level `;` ends the current statement: flush the buffer
            // (semicolon included) as one raw statement.
            if let Some(semi) = content.parse::<Option<syn::Token![;]>>()? {
                semi.to_tokens(&mut buf);
                stmts.push(buf);
                buf = proc_macro2::TokenStream::new();
                continue;
            }

            // Parse a single token tree and extend our current buffer with it.
            // This avoids parsing the entire content of the sub-tree.
            buf.extend([content.parse::<TokenTree>()?]);
        }

        // Trailing tokens without a `;` (e.g. a tail expression) form the
        // final statement.
        if !buf.is_empty() {
            stmts.push(buf);
        }

        Ok(Self {
            outer_attrs,
            vis,
            sig,
            brace_token,
            inner_attrs,
            stmts,
        })
    }
}
/// The function body, re-emittable inside the original brace pair.
struct Body<'a> {
    brace_token: syn::token::Brace,
    // Statements, with terminating `;`.
    stmts: &'a [TokenStream],
}
impl ToTokens for Body<'_> {
    fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
        // Re-wrap the raw statements in the original brace pair so the
        // emitted block keeps the spans of the user's braces.
        self.brace_token.surround(tokens, |tokens| {
            for stmt in self.stmts {
                stmt.to_tokens(tokens);
            }
        });
    }
}
| rust | MIT | 41d1877689f8669902b003a6affce60bdfeb3025 | 2026-01-04T15:33:40.250594Z | false |
tokio-rs/tokio | https://github.com/tokio-rs/tokio/blob/41d1877689f8669902b003a6affce60bdfeb3025/tests-integration/src/lib.rs | tests-integration/src/lib.rs | #[cfg(feature = "full")]
// Compile-checks the code samples embedded in the repository README.
doc_comment::doc_comment!(include_str!("../../README.md"));
| rust | MIT | 41d1877689f8669902b003a6affce60bdfeb3025 | 2026-01-04T15:33:40.250594Z | false |
tokio-rs/tokio | https://github.com/tokio-rs/tokio/blob/41d1877689f8669902b003a6affce60bdfeb3025/tests-integration/src/bin/test-process-signal.rs | tests-integration/src/bin/test-process-signal.rs | // https://github.com/tokio-rs/tokio/issues/3550
fn main() {
    // Regression check for tokio-rs/tokio#3550: repeatedly building and
    // dropping a current-thread runtime (with signal/IO drivers enabled via
    // `enable_all`) must succeed every time.
    for _ in 0..1000 {
        let rt = tokio::runtime::Builder::new_current_thread()
            .enable_all()
            .build()
            .unwrap();
        drop(rt);
    }
}
| rust | MIT | 41d1877689f8669902b003a6affce60bdfeb3025 | 2026-01-04T15:33:40.250594Z | false |
tokio-rs/tokio | https://github.com/tokio-rs/tokio/blob/41d1877689f8669902b003a6affce60bdfeb3025/tests-integration/src/bin/test-mem.rs | tests-integration/src/bin/test-mem.rs | use std::future::poll_fn;
fn main() {
    // Build an I/O-enabled runtime, leave a listener parked in a spawned
    // accept loop, then drop the runtime: shutdown must reclaim the
    // registered I/O resource. (Binary name suggests this is a leak check —
    // confirm against the test harness that drives it.)
    let rt = tokio::runtime::Builder::new_multi_thread()
        .worker_threads(1)
        .enable_io()
        .build()
        .unwrap();

    rt.block_on(async {
        let listener = tokio::net::TcpListener::bind("0.0.0.0:0").await.unwrap();
        tokio::spawn(async move {
            loop {
                poll_fn(|cx| listener.poll_accept(cx)).await.unwrap();
            }
        });
    });

    // Give the spawned accept loop a moment to run before shutting down.
    std::thread::sleep(std::time::Duration::from_millis(50));

    drop(rt);
}
| rust | MIT | 41d1877689f8669902b003a6affce60bdfeb3025 | 2026-01-04T15:33:40.250594Z | false |
tokio-rs/tokio | https://github.com/tokio-rs/tokio/blob/41d1877689f8669902b003a6affce60bdfeb3025/tests-integration/src/bin/test-cat.rs | tests-integration/src/bin/test-cat.rs | //! A cat-like utility that can be used as a subprocess to test I/O
//! stream communication.
use std::io;
use std::io::Write;
/// Echo stdin to stdout line by line until EOF, then flush.
fn main() {
    let input = io::stdin();
    let mut output = io::stdout();
    let mut buf = String::new();

    loop {
        buf.clear();
        // `read_line` reports the number of bytes appended; zero means EOF
        // (an actual empty line still contains its `\n`).
        let bytes_read = input.read_line(&mut buf).unwrap();
        if bytes_read == 0 {
            break;
        }
        output.write_all(buf.as_bytes()).unwrap();
    }

    output.flush().unwrap();
}
| rust | MIT | 41d1877689f8669902b003a6affce60bdfeb3025 | 2026-01-04T15:33:40.250594Z | false |
tokio-rs/tokio | https://github.com/tokio-rs/tokio/blob/41d1877689f8669902b003a6affce60bdfeb3025/tests-integration/tests/macros_main.rs | tests-integration/tests/macros_main.rs | #![cfg(all(feature = "macros", feature = "rt-multi-thread"))]
// `#[tokio::main]` works on a non-`main`, value-returning function.
#[tokio::main]
async fn basic_main() -> usize {
    1
}
// `#[tokio::main]` works on a generic function.
#[tokio::main]
async fn generic_fun<T: Default>() -> T {
    T::default()
}
// Tasks can be spawned from inside a `#[tokio::main]` body.
#[tokio::main]
async fn spawning() -> usize {
    let join = tokio::spawn(async { 1 });
    join.await.unwrap()
}
// The unstable `local` flavor supports `spawn_local` directly.
#[cfg(tokio_unstable)]
#[tokio::main(flavor = "local")]
async fn local_main() -> usize {
    let join = tokio::task::spawn_local(async { 1 });
    join.await.unwrap()
}
#[test]
fn main_with_spawn() {
    // The macro-generated fn is callable like any synchronous function.
    assert_eq!(1, spawning());
}
#[test]
fn shell() {
    // Each `#[tokio::main]` fn above is driven synchronously from here.
    assert_eq!(1, basic_main());
    assert_eq!(bool::default(), generic_fun::<bool>());
    #[cfg(tokio_unstable)]
    assert_eq!(1, local_main());
}
| rust | MIT | 41d1877689f8669902b003a6affce60bdfeb3025 | 2026-01-04T15:33:40.250594Z | false |
tokio-rs/tokio | https://github.com/tokio-rs/tokio/blob/41d1877689f8669902b003a6affce60bdfeb3025/tests-integration/tests/macros_select.rs | tests-integration/tests/macros_select.rs | #![cfg(feature = "macros")]
use futures::channel::oneshot;
use futures::executor::block_on;
use std::thread;
#[cfg_attr(
    not(feature = "rt-multi-thread"),
    ignore = "WASI: std::thread::spawn not supported"
)]
#[test]
fn join_with_select() {
    // `tokio::select!` must work when driven by a non-tokio executor
    // (`futures::executor::block_on`).
    block_on(async {
        let (tx1, mut rx1) = oneshot::channel::<i32>();
        let (tx2, mut rx2) = oneshot::channel::<i32>();

        thread::spawn(move || {
            tx1.send(123).unwrap();
            tx2.send(456).unwrap();
        });

        let mut a = None;
        let mut b = None;

        // Loop until both channels delivered; the `if` guards disable a
        // branch once its value has arrived.
        while a.is_none() || b.is_none() {
            tokio::select! {
                v1 = (&mut rx1), if a.is_none() => a = Some(v1.unwrap()),
                v2 = (&mut rx2), if b.is_none() => b = Some(v2.unwrap()),
            }
        }

        let (a, b) = (a.unwrap(), b.unwrap());
        assert_eq!(a, 123);
        assert_eq!(b, 456);
    });
}
| rust | MIT | 41d1877689f8669902b003a6affce60bdfeb3025 | 2026-01-04T15:33:40.250594Z | false |
tokio-rs/tokio | https://github.com/tokio-rs/tokio/blob/41d1877689f8669902b003a6affce60bdfeb3025/tests-integration/tests/macros_pin.rs | tests-integration/tests/macros_pin.rs | use futures::executor::block_on;
async fn my_async_fn() {}

#[test]
fn pin() {
    // `tokio::pin!` must work without a tokio runtime: the pinned future is
    // driven by the `futures` executor.
    block_on(async {
        let future = my_async_fn();
        tokio::pin!(future);
        (&mut future).await
    });
}
| rust | MIT | 41d1877689f8669902b003a6affce60bdfeb3025 | 2026-01-04T15:33:40.250594Z | false |
tokio-rs/tokio | https://github.com/tokio-rs/tokio/blob/41d1877689f8669902b003a6affce60bdfeb3025/tests-integration/tests/rt_yield.rs | tests-integration/tests/rt_yield.rs | use tokio::sync::oneshot;
use tokio::task;
/// Spawn a task that yields several times before completing a oneshot, then
/// assert the message arrives and the task joins cleanly.
async fn spawn_send() {
    let (tx, rx) = oneshot::channel();

    let task = tokio::spawn(async {
        for _ in 0..10 {
            task::yield_now().await;
        }

        tx.send("done").unwrap();
    });

    assert_eq!("done", rx.await.unwrap());
    task.await.unwrap();
}
// The same scenario driven through each entry style: #[tokio::main],
// #[tokio::test], and a hand-built runtime.
#[tokio::main(flavor = "current_thread")]
async fn entry_point() {
    spawn_send().await;
}

#[tokio::test]
async fn test_macro() {
    spawn_send().await;
}

#[test]
fn main_macro() {
    entry_point();
}

#[test]
fn manual_rt() {
    let rt = tokio::runtime::Builder::new_current_thread()
        .build()
        .unwrap();

    rt.block_on(async { spawn_send().await });
}
| rust | MIT | 41d1877689f8669902b003a6affce60bdfeb3025 | 2026-01-04T15:33:40.250594Z | false |
tokio-rs/tokio | https://github.com/tokio-rs/tokio/blob/41d1877689f8669902b003a6affce60bdfeb3025/tests-integration/tests/process_stdio.rs | tests-integration/tests/process_stdio.rs | #![warn(rust_2018_idioms)]
#![cfg(all(feature = "full", not(target_os = "wasi"), not(miri)))]
use tokio::io::{AsyncBufReadExt, AsyncReadExt, AsyncWriteExt, BufReader};
use tokio::join;
use tokio::process::{Child, Command};
use tokio_test::assert_ok;
use futures::future::{self, FutureExt};
use std::env;
use std::io;
use std::process::{ExitStatus, Stdio};
use std::task::ready;
/// Command for the bundled `test-cat` helper binary with piped stdin/stdout.
fn cat() -> Command {
    let mut cmd = Command::new(env!("CARGO_BIN_EXE_test-cat"));
    cmd.stdin(Stdio::piped()).stdout(Stdio::piped());
    cmd
}
/// Feed `n` numbered lines into the child's stdin while concurrently reading
/// them back from its stdout; returns the child's exit status.
async fn feed_cat(mut cat: Child, n: usize) -> io::Result<ExitStatus> {
    let mut stdin = cat.stdin.take().unwrap();
    let stdout = cat.stdout.take().unwrap();

    // Produce n lines on the child's stdin; dropping the handle closes the
    // pipe so the child sees EOF.
    let write = async {
        for i in 0..n {
            let bytes = format!("line {i}\n").into_bytes();
            stdin.write_all(&bytes).await.unwrap();
        }

        drop(stdin);
    };

    let read = async {
        let mut reader = BufReader::new(stdout).lines();
        let mut num_lines = 0;

        // Read back `n` lines, checking each against what was written; an
        // empty read before `n` lines means the pipe broke early.
        loop {
            let data = reader
                .next_line()
                .await
                .unwrap_or_else(|_| Some(String::new()))
                .expect("failed to read line");

            let num_read = data.len();
            let done = num_lines >= n;
            match (done, num_read) {
                (false, 0) => panic!("broken pipe"),
                (true, n) if n != 0 => panic!("extraneous data"),
                _ => {
                    let expected = format!("line {num_lines}");
                    assert_eq!(expected, data);
                }
            };

            num_lines += 1;
            if num_lines >= n {
                break;
            }
        }
    };

    // Compose reading and writing concurrently.
    future::join3(write, read, cat.wait())
        .map(|(_, _, status)| status)
        .await
}
/// Check for the following properties when feeding stdin and
/// consuming stdout of a cat-like process:
///
/// - A number of lines that amounts to a number of bytes exceeding a
/// typical OS buffer size can be fed to the child without
/// deadlock. This tests that we also consume the stdout
/// concurrently; otherwise this would deadlock.
///
/// - We read the same lines from the child that we fed it.
///
/// - The child does produce EOF on stdout after the last line.
#[tokio::test]
async fn feed_a_lot() {
    // 10_000 lines comfortably exceeds typical OS pipe buffer sizes, so this
    // deadlocks unless reading and writing really are concurrent.
    let child = cat().spawn().unwrap();
    let status = feed_cat(child, 10000).await.unwrap();
    assert_eq!(status.code(), Some(0));
}
#[tokio::test]
async fn wait_with_output_captures() {
    // `wait_with_output` must return everything the child echoed to stdout.
    let mut child = cat().spawn().unwrap();
    let mut stdin = child.stdin.take().unwrap();

    let write_bytes = b"1234";

    let future = async {
        stdin.write_all(write_bytes).await?;
        // Closing stdin lets the child hit EOF and exit.
        drop(stdin);
        let out = child.wait_with_output();
        out.await
    };

    let output = future.await.unwrap();

    assert!(output.status.success());
    assert_eq!(output.stdout, write_bytes);
    assert_eq!(output.stderr.len(), 0);
}
#[tokio::test]
async fn status_closes_any_pipes() {
    // Cat will open a pipe between the parent and child.
    // If `status_async` doesn't ensure the handles are closed,
    // we would end up blocking forever (and time out). `status` awaits exit
    // without capturing output, so the pipes must be dropped internally.
    let child = cat().status();

    assert_ok!(child.await);
}
/// `try_wait` reports `None` while running, and stays usable (fused) after
/// the child exits; `id()` becomes `None` post-exit.
#[tokio::test]
async fn try_wait() {
    let mut child = cat().spawn().unwrap();

    let id = child.id().expect("missing id");
    assert!(id > 0);

    // Still running: no exit status yet.
    assert_eq!(None, assert_ok!(child.try_wait()));

    // Drop the child's stdio handles so it can terminate
    drop(child.stdin.take());
    drop(child.stderr.take());
    drop(child.stdout.take());

    assert_ok!(child.wait().await);

    // test that the `.try_wait()` method is fused just like the stdlib
    assert!(assert_ok!(child.try_wait()).unwrap().success());

    // Can't get id after process has exited
    assert_eq!(child.id(), None);
}
#[tokio::test]
async fn pipe_from_one_command_to_another() {
    let mut head = cat().spawn().expect("first cmd");
    let mut tail = cat().spawn().expect("third cmd");

    // The middle process reads from the head's stdout...
    let middle_stdin: Stdio = head
        .stdout
        .take()
        .expect("first.stdout")
        .try_into()
        .expect("first.stdout into Stdio");

    // ...and writes into the tail's stdin.
    let middle_stdout: Stdio = tail
        .stdin
        .take()
        .expect("third.stdin")
        .try_into()
        .expect("third.stdin into Stdio");

    let mut middle = cat()
        .stdin(middle_stdin)
        .stdout(middle_stdout)
        .spawn()
        .expect("first cmd");

    let msg = "hello world! please pipe this message through";

    // Feed the message into the head of the pipeline.
    let mut stdin = head.stdin.take().expect("first.stdin");
    let write = async move { stdin.write_all(msg.as_bytes()).await };

    // Collect whatever comes out of the tail.
    let mut stdout = tail.stdout.take().expect("third.stdout");
    let read = async move {
        let mut data = String::new();
        stdout.read_to_string(&mut data).await.map(|_| data)
    };

    // Drive the I/O and all three children concurrently so no pipe fills up.
    let (read, write, head_status, middle_status, tail_status) =
        join!(read, write, head.wait(), middle.wait(), tail.wait());

    assert_eq!(msg, read.expect("read result"));
    write.expect("write result");
    assert!(head_status.expect("first status").success());
    assert!(middle_status.expect("second status").success());
    assert!(tail_status.expect("third status").success());
}
#[tokio::test]
async fn vectored_writes() {
    use bytes::{Buf, Bytes};
    use std::{io::IoSlice, pin::Pin};
    use tokio::io::AsyncWrite;

    let mut cat = cat().spawn().unwrap();
    let mut stdin = cat.stdin.take().unwrap();
    // Record up front whether this stdin handle claims vectored-write
    // support; the assertion at the end is keyed off this flag.
    let are_writes_vectored = stdin.is_write_vectored();
    let mut stdout = cat.stdout.take().unwrap();

    let write = async {
        // Two non-contiguous chunks: a truly vectored write can submit both
        // slices in a single call, a non-vectored one cannot.
        let mut input = Bytes::from_static(b"hello\n").chain(Bytes::from_static(b"world!\n"));
        let mut writes_completed = 0;
        std::future::poll_fn(|cx| loop {
            let mut slices = [IoSlice::new(&[]); 2];
            let vectored = input.chunks_vectored(&mut slices);
            // No chunks left: the whole payload has been written.
            if vectored == 0 {
                return std::task::Poll::Ready(std::io::Result::Ok(()));
            }
            let n = ready!(Pin::new(&mut stdin).poll_write_vectored(cx, &slices))?;
            writes_completed += 1;
            // Discard however many bytes this write accepted.
            input.advance(n);
        })
        .await?;
        // Close stdin so the child observes EOF and can exit.
        drop(stdin);
        std::io::Result::Ok(writes_completed)
    };

    let read = async {
        // 6 + 7 = exact length of "hello\n" plus "world!\n".
        let mut buffer = Vec::with_capacity(6 + 7);
        stdout.read_to_end(&mut buffer).await?;
        std::io::Result::Ok(buffer)
    };

    // Drive writer, reader and child exit concurrently to avoid pipe deadlock.
    let (write, read, status) = future::join3(write, read, cat.wait()).await;

    assert!(status.unwrap().success());
    let writes_completed = write.unwrap();
    // on unix our small payload should always fit in whatever default sized pipe with a single
    // syscall. if multiple are used, then the forwarding does not work, or we are on a platform
    // for which the `std` does not support vectored writes.
    assert_eq!(writes_completed == 1, are_writes_vectored);

    assert_eq!(&read.unwrap(), b"hello\nworld!\n");
}
| rust | MIT | 41d1877689f8669902b003a6affce60bdfeb3025 | 2026-01-04T15:33:40.250594Z | false |
tokio-rs/tokio | https://github.com/tokio-rs/tokio/blob/41d1877689f8669902b003a6affce60bdfeb3025/tokio-stream/src/pending.rs | tokio-stream/src/pending.rs | use crate::Stream;
use core::marker::PhantomData;
use core::pin::Pin;
use core::task::{Context, Poll};
/// Stream for the [`pending`](fn@pending) function.
#[derive(Debug)]
#[must_use = "streams do nothing unless polled"]
pub struct Pending<T>(PhantomData<T>);

impl<T> Unpin for Pending<T> {}

// SAFETY: `Pending<T>` stores no `T` (only `PhantomData<T>`), so it never
// reads or writes a `T` value. It is therefore safe to send and share across
// threads regardless of whether `T` itself is `Send`/`Sync`.
unsafe impl<T> Send for Pending<T> {}
unsafe impl<T> Sync for Pending<T> {}

/// Creates a stream that is never ready
///
/// The returned stream is never ready. Attempting to call
/// [`next()`](crate::StreamExt::next) will never complete. Use
/// [`stream::empty()`](super::empty()) to obtain a stream that is
/// immediately empty but returns no values.
///
/// # Examples
///
/// Basic usage:
///
/// ```no_run
/// use tokio_stream::{self as stream, StreamExt};
///
/// #[tokio::main]
/// async fn main() {
///     let mut never = stream::pending::<i32>();
///
///     // This will never complete
///     never.next().await;
///
///     unreachable!();
/// }
/// ```
pub const fn pending<T>() -> Pending<T> {
    Pending(PhantomData)
}
impl<T> Stream for Pending<T> {
    type Item = T;

    // Never yields an item: always reports `Pending`. The context is ignored,
    // so no waker is registered and this stream never wakes the task itself.
    fn poll_next(self: Pin<&mut Self>, _: &mut Context<'_>) -> Poll<Option<T>> {
        Poll::Pending
    }

    // Lower bound 0 and no upper bound: the stream never terminates.
    fn size_hint(&self) -> (usize, Option<usize>) {
        (0, None)
    }
}
| rust | MIT | 41d1877689f8669902b003a6affce60bdfeb3025 | 2026-01-04T15:33:40.250594Z | false |
tokio-rs/tokio | https://github.com/tokio-rs/tokio/blob/41d1877689f8669902b003a6affce60bdfeb3025/tokio-stream/src/wrappers.rs | tokio-stream/src/wrappers.rs | //! Wrappers for Tokio types that implement `Stream`.
/// Error types for the wrappers.
pub mod errors {
    cfg_sync! {
        pub use crate::wrappers::broadcast::BroadcastStreamRecvError;
    }
}

// Channel receivers are always available.
mod mpsc_bounded;
pub use mpsc_bounded::ReceiverStream;

mod mpsc_unbounded;
pub use mpsc_unbounded::UnboundedReceiverStream;

// Wrappers gated on the `sync` feature.
cfg_sync! {
    mod broadcast;
    pub use broadcast::BroadcastStream;

    mod watch;
    pub use watch::WatchStream;
}

// Signal streams: Unix signals on Unix, console events on Windows.
// (`docsrs` is included in the Windows gate so those items build for docs.)
cfg_signal! {
    #[cfg(all(unix, not(loom)))]
    mod signal_unix;
    #[cfg(all(unix, not(loom)))]
    pub use signal_unix::SignalStream;

    #[cfg(any(windows, docsrs))]
    mod signal_windows;
    #[cfg(any(windows, docsrs))]
    pub use signal_windows::{CtrlCStream, CtrlBreakStream};
}

cfg_time! {
    mod interval;
    pub use interval::IntervalStream;
}

// Network listener wrappers; excluded from loom builds.
cfg_net! {
    #[cfg(not(loom))]
    mod tcp_listener;
    #[cfg(not(loom))]
    pub use tcp_listener::TcpListenerStream;

    #[cfg(all(unix, not(loom)))]
    mod unix_listener;
    #[cfg(all(unix, not(loom)))]
    pub use unix_listener::UnixListenerStream;
}

cfg_io_util! {
    mod split;
    pub use split::SplitStream;

    mod lines;
    pub use lines::LinesStream;
}

cfg_fs! {
    #[cfg(not(loom))]
    mod read_dir;
    #[cfg(not(loom))]
    pub use read_dir::ReadDirStream;
}
| rust | MIT | 41d1877689f8669902b003a6affce60bdfeb3025 | 2026-01-04T15:33:40.250594Z | false |
tokio-rs/tokio | https://github.com/tokio-rs/tokio/blob/41d1877689f8669902b003a6affce60bdfeb3025/tokio-stream/src/lib.rs | tokio-stream/src/lib.rs | #![allow(
clippy::cognitive_complexity,
clippy::large_enum_variant,
clippy::needless_doctest_main
)]
#![warn(
missing_debug_implementations,
missing_docs,
rust_2018_idioms,
unreachable_pub
)]
#![cfg_attr(docsrs, feature(doc_cfg))]
#![doc(test(
no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))
))]
//! Stream utilities for Tokio.
//!
//! A `Stream` is an asynchronous sequence of values. It can be thought of as
//! an asynchronous version of the standard library's `Iterator` trait.
//!
//! This crate provides helpers to work with them. For examples of usage and a more in-depth
//! description of streams you can also refer to the [streams
//! tutorial](https://tokio.rs/tokio/tutorial/streams) on the tokio website.
//!
//! # Iterating over a Stream
//!
//! Due to similarities with the standard library's `Iterator` trait, some new
//! users may assume that they can use `for in` syntax to iterate over a
//! `Stream`, but this is unfortunately not possible. Instead, you can use a
//! `while let` loop as follows:
//!
//! ```rust
//! use tokio_stream::{self as stream, StreamExt};
//!
//! # #[tokio::main(flavor = "current_thread")]
//! # async fn main() {
//! let mut stream = stream::iter(vec![0, 1, 2]);
//!
//! while let Some(value) = stream.next().await {
//! println!("Got {}", value);
//! }
//! # }
//! ```
//!
//! # Returning a Stream from a function
//!
//! A common way to stream values from a function is to pass in the sender
//! half of a channel and use the receiver as the stream. This requires awaiting
//! both futures to ensure progress is made. Another alternative is the
//! [async-stream] crate, which contains macros that provide a `yield` keyword
//! and allow you to return an `impl Stream`.
//!
//! [async-stream]: https://docs.rs/async-stream
//!
//! # Conversion to and from `AsyncRead`/`AsyncWrite`
//!
//! It is often desirable to convert a `Stream` into an [`AsyncRead`],
//! especially when dealing with plaintext formats streamed over the network.
//! The opposite conversion from an [`AsyncRead`] into a `Stream` is also
//! another commonly required feature. To enable these conversions,
//! [`tokio-util`] provides the [`StreamReader`] and [`ReaderStream`]
//! types when the io feature is enabled.
//!
//! [`tokio-util`]: https://docs.rs/tokio-util/latest/tokio_util/codec/index.html
//! [`tokio::io`]: https://docs.rs/tokio/latest/tokio/io/index.html
//! [`AsyncRead`]: https://docs.rs/tokio/latest/tokio/io/trait.AsyncRead.html
//! [`AsyncWrite`]: https://docs.rs/tokio/latest/tokio/io/trait.AsyncWrite.html
//! [`ReaderStream`]: https://docs.rs/tokio-util/latest/tokio_util/io/struct.ReaderStream.html
//! [`StreamReader`]: https://docs.rs/tokio-util/latest/tokio_util/io/struct.StreamReader.html
#[macro_use]
mod macros;

pub mod wrappers;

mod stream_ext;
pub use stream_ext::{collect::FromStream, StreamExt};

/// Adapters for [`Stream`]s created by methods in [`StreamExt`].
pub mod adapters {
    pub use crate::stream_ext::{
        Chain, Filter, FilterMap, Fuse, Map, MapWhile, Merge, Peekable, Skip, SkipWhile, Take,
        TakeWhile, Then,
    };
    cfg_time! {
        pub use crate::stream_ext::{ChunksTimeout, Timeout, TimeoutRepeating};
    }
}

cfg_time! {
    // Legacy re-export kept for backwards compatibility; the canonical
    // location is the `adapters` module above.
    #[deprecated = "Import those symbols from adapters instead"]
    #[doc(hidden)]
    pub use stream_ext::timeout::Timeout;

    pub use stream_ext::timeout::Elapsed;
}

mod empty;
pub use empty::{empty, Empty};

mod iter;
pub use iter::{iter, Iter};

mod once;
pub use once::{once, Once};

mod pending;
pub use pending::{pending, Pending};

mod stream_map;
pub use stream_map::StreamMap;

mod stream_close;
pub use stream_close::StreamNotifyClose;

// Re-export the `Stream` trait itself (defined in `futures-core`).
#[doc(no_inline)]
pub use futures_core::Stream;
| rust | MIT | 41d1877689f8669902b003a6affce60bdfeb3025 | 2026-01-04T15:33:40.250594Z | false |
tokio-rs/tokio | https://github.com/tokio-rs/tokio/blob/41d1877689f8669902b003a6affce60bdfeb3025/tokio-stream/src/stream_close.rs | tokio-stream/src/stream_close.rs | use crate::Stream;
use pin_project_lite::pin_project;
use std::pin::Pin;
use std::task::{Context, Poll};
pin_project! {
    /// A `Stream` that wraps the values in an `Option`.
    ///
    /// Whenever the wrapped stream yields an item, this stream yields that item
    /// wrapped in `Some`. When the inner stream ends, then this stream first
    /// yields a `None` item, and then this stream will also end.
    ///
    /// # Example
    ///
    /// Using `StreamNotifyClose` to handle closed streams with `StreamMap`.
    ///
    /// ```
    /// use tokio_stream::{StreamExt, StreamMap, StreamNotifyClose};
    ///
    /// # #[tokio::main(flavor = "current_thread")]
    /// # async fn main() {
    /// let mut map = StreamMap::new();
    /// let stream = StreamNotifyClose::new(tokio_stream::iter(vec![0, 1]));
    /// let stream2 = StreamNotifyClose::new(tokio_stream::iter(vec![0, 1]));
    /// map.insert(0, stream);
    /// map.insert(1, stream2);
    /// while let Some((key, val)) = map.next().await {
    ///     match val {
    ///         Some(val) => println!("got {val:?} from stream {key:?}"),
    ///         None => println!("stream {key:?} closed"),
    ///     }
    /// }
    /// # }
    /// ```
    #[must_use = "streams do nothing unless polled"]
    pub struct StreamNotifyClose<S> {
        // `None` doubles as the "inner stream already finished" marker;
        // see `poll_next`, which unsets it after the final item.
        #[pin]
        inner: Option<S>,
    }
}
impl<S> StreamNotifyClose<S> {
/// Create a new `StreamNotifyClose`.
pub fn new(stream: S) -> Self {
Self {
inner: Some(stream),
}
}
/// Get back the inner `Stream`.
///
/// Returns `None` if the stream has reached its end.
pub fn into_inner(self) -> Option<S> {
self.inner
}
}
impl<S> Stream for StreamNotifyClose<S>
where
    S: Stream,
{
    type Item = Option<S::Item>;

    fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
        // We can't invoke poll_next after it ended, so we unset the inner stream as a marker.
        match self
            .as_mut()
            .project()
            .inner
            .as_pin_mut()
            .map(|stream| S::poll_next(stream, cx))
        {
            // Inner stream produced a value: forward it wrapped in `Some`.
            Some(Poll::Ready(Some(item))) => Poll::Ready(Some(Some(item))),
            // Inner stream just finished: drop it and emit the `None`
            // "closed" marker as one final item.
            Some(Poll::Ready(None)) => {
                self.project().inner.set(None);
                Poll::Ready(Some(None))
            }
            Some(Poll::Pending) => Poll::Pending,
            // Inner stream was already unset on a previous poll: we are done.
            None => Poll::Ready(None),
        }
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        if let Some(inner) = &self.inner {
            // We always return +1 because when there's a stream there's at least one more item.
            // (That extra item is the final `None` marker.) Saturating/checked
            // arithmetic keeps the bounds well-defined near `usize::MAX`.
            let (l, u) = inner.size_hint();
            (l.saturating_add(1), u.and_then(|u| u.checked_add(1)))
        } else {
            (0, Some(0))
        }
    }
}
| rust | MIT | 41d1877689f8669902b003a6affce60bdfeb3025 | 2026-01-04T15:33:40.250594Z | false |
tokio-rs/tokio | https://github.com/tokio-rs/tokio/blob/41d1877689f8669902b003a6affce60bdfeb3025/tokio-stream/src/empty.rs | tokio-stream/src/empty.rs | use crate::Stream;
use core::marker::PhantomData;
use core::pin::Pin;
use core::task::{Context, Poll};
/// Stream for the [`empty`](fn@empty) function.
#[derive(Debug)]
#[must_use = "streams do nothing unless polled"]
pub struct Empty<T>(PhantomData<T>);

impl<T> Unpin for Empty<T> {}

// SAFETY: `Empty<T>` stores no `T` (only `PhantomData<T>`), so it never
// reads or writes a `T` value. It is therefore safe to send and share across
// threads regardless of whether `T` itself is `Send`/`Sync`.
unsafe impl<T> Send for Empty<T> {}
unsafe impl<T> Sync for Empty<T> {}

/// Creates a stream that yields nothing.
///
/// The returned stream is immediately ready and returns `None`. Use
/// [`stream::pending()`](super::pending()) to obtain a stream that is never
/// ready.
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// use tokio_stream::{self as stream, StreamExt};
///
/// # #[tokio::main(flavor = "current_thread")]
/// # async fn main() {
/// let mut none = stream::empty::<i32>();
///
/// assert_eq!(None, none.next().await);
/// # }
/// ```
pub const fn empty<T>() -> Empty<T> {
    Empty(PhantomData)
}
impl<T> Stream for Empty<T> {
    type Item = T;

    // Already exhausted: every poll immediately reports end-of-stream.
    fn poll_next(self: Pin<&mut Self>, _: &mut Context<'_>) -> Poll<Option<T>> {
        Poll::Ready(None)
    }

    // Exactly zero items remain; the bound is both lower and upper.
    fn size_hint(&self) -> (usize, Option<usize>) {
        (0, Some(0))
    }
}
| rust | MIT | 41d1877689f8669902b003a6affce60bdfeb3025 | 2026-01-04T15:33:40.250594Z | false |
tokio-rs/tokio | https://github.com/tokio-rs/tokio/blob/41d1877689f8669902b003a6affce60bdfeb3025/tokio-stream/src/stream_ext.rs | tokio-stream/src/stream_ext.rs | use core::future::Future;
use futures_core::Stream;
mod all;
use all::AllFuture;
mod any;
use any::AnyFuture;
mod chain;
pub use chain::Chain;
pub(crate) mod collect;
use collect::{Collect, FromStream};
mod filter;
pub use filter::Filter;
mod filter_map;
pub use filter_map::FilterMap;
mod fold;
use fold::FoldFuture;
mod fuse;
pub use fuse::Fuse;
mod map;
pub use map::Map;
mod map_while;
pub use map_while::MapWhile;
mod merge;
pub use merge::Merge;
mod next;
use next::Next;
mod skip;
pub use skip::Skip;
mod skip_while;
pub use skip_while::SkipWhile;
mod take;
pub use take::Take;
mod take_while;
pub use take_while::TakeWhile;
mod then;
pub use then::Then;
mod try_next;
use try_next::TryNext;
mod peekable;
pub use peekable::Peekable;
cfg_time! {
pub(crate) mod timeout;
pub(crate) mod timeout_repeating;
pub use timeout::Timeout;
pub use timeout_repeating::TimeoutRepeating;
use tokio::time::{Duration, Interval};
mod throttle;
use throttle::{throttle, Throttle};
mod chunks_timeout;
pub use chunks_timeout::ChunksTimeout;
}
/// An extension trait for the [`Stream`] trait that provides a variety of
/// convenient combinator functions.
///
/// Be aware that the `Stream` trait in Tokio is a re-export of the trait found
/// in the [futures] crate, however both Tokio and futures provide separate
/// `StreamExt` utility traits, and some utilities are only available on one of
/// these traits. Click [here][futures-StreamExt] to see the other `StreamExt`
/// trait in the futures crate.
///
/// If you need utilities from both `StreamExt` traits, you should prefer to
/// import one of them, and use the other through the fully qualified call
/// syntax. For example:
/// ```
/// // import one of the traits:
/// use futures::stream::StreamExt;
/// # #[tokio::main(flavor = "current_thread")]
/// # async fn main() {
///
/// let a = tokio_stream::iter(vec![1, 3, 5]);
/// let b = tokio_stream::iter(vec![2, 4, 6]);
///
/// // use the fully qualified call syntax for the other trait:
/// let merged = tokio_stream::StreamExt::merge(a, b);
///
/// // use normal call notation for futures::stream::StreamExt::collect
/// let output: Vec<_> = merged.collect().await;
/// assert_eq!(output, vec![1, 2, 3, 4, 5, 6]);
/// # }
/// ```
///
/// [`Stream`]: crate::Stream
/// [futures]: https://docs.rs/futures
/// [futures-StreamExt]: https://docs.rs/futures/0.3/futures/stream/trait.StreamExt.html
pub trait StreamExt: Stream {
/// Consumes and returns the next value in the stream or `None` if the
/// stream is finished.
///
/// Equivalent to:
///
/// ```ignore
/// async fn next(&mut self) -> Option<Self::Item>;
/// ```
///
/// Note that because `next` doesn't take ownership over the stream,
/// the [`Stream`] type must be [`Unpin`]. If you want to use `next` with a
/// [`!Unpin`](Unpin) stream, you'll first have to pin the stream. This can
/// be done by boxing the stream using [`Box::pin`] or
/// pinning it to the stack using the `pin_mut!` macro from the `pin_utils`
/// crate.
///
/// # Cancel safety
///
/// This method is cancel safe. The returned future only
/// holds onto a reference to the underlying stream,
/// so dropping it will never lose a value.
///
/// # Examples
///
/// ```
/// # #[tokio::main(flavor = "current_thread")]
/// # async fn main() {
/// use tokio_stream::{self as stream, StreamExt};
///
/// let mut stream = stream::iter(1..=3);
///
/// assert_eq!(stream.next().await, Some(1));
/// assert_eq!(stream.next().await, Some(2));
/// assert_eq!(stream.next().await, Some(3));
/// assert_eq!(stream.next().await, None);
/// # }
/// ```
fn next(&mut self) -> Next<'_, Self>
where
Self: Unpin,
{
Next::new(self)
}
/// Consumes and returns the next item in the stream. If an error is
/// encountered before the next item, the error is returned instead.
///
/// Equivalent to:
///
/// ```ignore
/// async fn try_next(&mut self) -> Result<Option<T>, E>;
/// ```
///
/// This is similar to the [`next`](StreamExt::next) combinator,
/// but returns a [`Result<Option<T>, E>`](Result) rather than
/// an [`Option<Result<T, E>>`](Option), making for easy use
/// with the [`?`](std::ops::Try) operator.
///
/// # Cancel safety
///
/// This method is cancel safe. The returned future only
/// holds onto a reference to the underlying stream,
/// so dropping it will never lose a value.
///
/// # Examples
///
/// ```
/// # #[tokio::main(flavor = "current_thread")]
/// # async fn main() {
///
/// use tokio_stream::{self as stream, StreamExt};
///
/// let mut stream = stream::iter(vec![Ok(1), Ok(2), Err("nope")]);
///
/// assert_eq!(stream.try_next().await, Ok(Some(1)));
/// assert_eq!(stream.try_next().await, Ok(Some(2)));
/// assert_eq!(stream.try_next().await, Err("nope"));
/// # }
/// ```
fn try_next<T, E>(&mut self) -> TryNext<'_, Self>
where
Self: Stream<Item = Result<T, E>> + Unpin,
{
TryNext::new(self)
}
/// Maps this stream's items to a different type, returning a new stream of
/// the resulting type.
///
/// The provided closure is executed over all elements of this stream as
/// they are made available. It is executed inline with calls to
/// [`poll_next`](Stream::poll_next).
///
/// Note that this function consumes the stream passed into it and returns a
/// wrapped version of it, similar to the existing `map` methods in the
/// standard library.
///
/// # Examples
///
/// ```
/// # #[tokio::main(flavor = "current_thread")]
/// # async fn main() {
/// use tokio_stream::{self as stream, StreamExt};
///
/// let stream = stream::iter(1..=3);
/// let mut stream = stream.map(|x| x + 3);
///
/// assert_eq!(stream.next().await, Some(4));
/// assert_eq!(stream.next().await, Some(5));
/// assert_eq!(stream.next().await, Some(6));
/// # }
/// ```
fn map<T, F>(self, f: F) -> Map<Self, F>
where
F: FnMut(Self::Item) -> T,
Self: Sized,
{
Map::new(self, f)
}
/// Map this stream's items to a different type for as long as determined by
/// the provided closure. A stream of the target type will be returned,
/// which will yield elements until the closure returns `None`.
///
/// The provided closure is executed over all elements of this stream as
/// they are made available, until it returns `None`. It is executed inline
/// with calls to [`poll_next`](Stream::poll_next). Once `None` is returned,
/// the underlying stream will not be polled again.
///
/// Note that this function consumes the stream passed into it and returns a
/// wrapped version of it, similar to the [`Iterator::map_while`] method in the
/// standard library.
///
/// # Examples
///
/// ```
/// # #[tokio::main(flavor = "current_thread")]
/// # async fn main() {
/// use tokio_stream::{self as stream, StreamExt};
///
/// let stream = stream::iter(1..=10);
/// let mut stream = stream.map_while(|x| {
/// if x < 4 {
/// Some(x + 3)
/// } else {
/// None
/// }
/// });
/// assert_eq!(stream.next().await, Some(4));
/// assert_eq!(stream.next().await, Some(5));
/// assert_eq!(stream.next().await, Some(6));
/// assert_eq!(stream.next().await, None);
/// # }
/// ```
fn map_while<T, F>(self, f: F) -> MapWhile<Self, F>
where
F: FnMut(Self::Item) -> Option<T>,
Self: Sized,
{
MapWhile::new(self, f)
}
/// Maps this stream's items asynchronously to a different type, returning a
/// new stream of the resulting type.
///
/// The provided closure is executed over all elements of this stream as
/// they are made available, and the returned future is executed. Only one
/// future is executed at the time.
///
/// Note that this function consumes the stream passed into it and returns a
/// wrapped version of it, similar to the existing `then` methods in the
/// standard library.
///
/// Be aware that if the future is not `Unpin`, then neither is the `Stream`
/// returned by this method. To handle this, you can use `tokio::pin!` as in
/// the example below or put the stream in a `Box` with `Box::pin(stream)`.
///
/// # Examples
///
/// ```
/// # #[tokio::main(flavor = "current_thread")]
/// # async fn main() {
/// use tokio_stream::{self as stream, StreamExt};
///
/// async fn do_async_work(value: i32) -> i32 {
/// value + 3
/// }
///
/// let stream = stream::iter(1..=3);
/// let stream = stream.then(do_async_work);
///
/// tokio::pin!(stream);
///
/// assert_eq!(stream.next().await, Some(4));
/// assert_eq!(stream.next().await, Some(5));
/// assert_eq!(stream.next().await, Some(6));
/// # }
/// ```
fn then<F, Fut>(self, f: F) -> Then<Self, Fut, F>
where
F: FnMut(Self::Item) -> Fut,
Fut: Future,
Self: Sized,
{
Then::new(self, f)
}
/// Combine two streams into one by interleaving the output of both as it
/// is produced.
///
/// Values are produced from the merged stream in the order they arrive from
/// the two source streams. If both source streams provide values
/// simultaneously, the merge stream alternates between them. This provides
/// some level of fairness. You should not chain calls to `merge`, as this
/// will break the fairness of the merging.
///
/// The merged stream completes once **both** source streams complete. When
/// one source stream completes before the other, the merge stream
/// exclusively polls the remaining stream.
///
/// For merging multiple streams, consider using [`StreamMap`] instead.
///
/// [`StreamMap`]: crate::StreamMap
///
/// # Examples
///
/// ```
/// use tokio_stream::{StreamExt, Stream};
/// use tokio::sync::mpsc;
/// use tokio::time;
///
/// use std::time::Duration;
/// use std::pin::Pin;
///
/// # /*
/// #[tokio::main]
/// # */
/// # #[tokio::main(flavor = "current_thread")]
/// async fn main() {
/// # time::pause();
/// let (tx1, mut rx1) = mpsc::channel::<usize>(10);
/// let (tx2, mut rx2) = mpsc::channel::<usize>(10);
///
/// // Convert the channels to a `Stream`.
/// let rx1 = Box::pin(async_stream::stream! {
/// while let Some(item) = rx1.recv().await {
/// yield item;
/// }
/// }) as Pin<Box<dyn Stream<Item = usize> + Send>>;
///
/// let rx2 = Box::pin(async_stream::stream! {
/// while let Some(item) = rx2.recv().await {
/// yield item;
/// }
/// }) as Pin<Box<dyn Stream<Item = usize> + Send>>;
///
/// let mut rx = rx1.merge(rx2);
///
/// tokio::spawn(async move {
/// // Send some values immediately
/// tx1.send(1).await.unwrap();
/// tx1.send(2).await.unwrap();
///
/// // Let the other task send values
/// time::sleep(Duration::from_millis(20)).await;
///
/// tx1.send(4).await.unwrap();
/// });
///
/// tokio::spawn(async move {
/// // Wait for the first task to send values
/// time::sleep(Duration::from_millis(5)).await;
///
/// tx2.send(3).await.unwrap();
///
/// time::sleep(Duration::from_millis(25)).await;
///
/// // Send the final value
/// tx2.send(5).await.unwrap();
/// });
///
/// assert_eq!(1, rx.next().await.unwrap());
/// assert_eq!(2, rx.next().await.unwrap());
/// assert_eq!(3, rx.next().await.unwrap());
/// assert_eq!(4, rx.next().await.unwrap());
/// assert_eq!(5, rx.next().await.unwrap());
///
/// // The merged stream is consumed
/// assert!(rx.next().await.is_none());
/// }
/// ```
fn merge<U>(self, other: U) -> Merge<Self, U>
where
U: Stream<Item = Self::Item>,
Self: Sized,
{
Merge::new(self, other)
}
/// Filters the values produced by this stream according to the provided
/// predicate.
///
/// As values of this stream are made available, the provided predicate `f`
/// will be run against them. If the predicate
/// resolves to `true`, then the stream will yield the value, but if the
/// predicate resolves to `false`, then the value
/// will be discarded and the next value will be produced.
///
/// Note that this function consumes the stream passed into it and returns a
/// wrapped version of it, similar to [`Iterator::filter`] method in the
/// standard library.
///
/// # Examples
///
/// ```
/// # #[tokio::main(flavor = "current_thread")]
/// # async fn main() {
/// use tokio_stream::{self as stream, StreamExt};
///
/// let stream = stream::iter(1..=8);
/// let mut evens = stream.filter(|x| x % 2 == 0);
///
/// assert_eq!(Some(2), evens.next().await);
/// assert_eq!(Some(4), evens.next().await);
/// assert_eq!(Some(6), evens.next().await);
/// assert_eq!(Some(8), evens.next().await);
/// assert_eq!(None, evens.next().await);
/// # }
/// ```
fn filter<F>(self, f: F) -> Filter<Self, F>
where
F: FnMut(&Self::Item) -> bool,
Self: Sized,
{
Filter::new(self, f)
}
/// Filters the values produced by this stream while simultaneously mapping
/// them to a different type according to the provided closure.
///
/// As values of this stream are made available, the provided function will
/// be run on them. If the predicate `f` resolves to
/// [`Some(item)`](Some) then the stream will yield the value `item`, but if
/// it resolves to [`None`], then the value will be skipped.
///
/// Note that this function consumes the stream passed into it and returns a
/// wrapped version of it, similar to [`Iterator::filter_map`] method in the
/// standard library.
///
/// # Examples
/// ```
/// # #[tokio::main(flavor = "current_thread")]
/// # async fn main() {
/// use tokio_stream::{self as stream, StreamExt};
///
/// let stream = stream::iter(1..=8);
/// let mut evens = stream.filter_map(|x| {
/// if x % 2 == 0 { Some(x + 1) } else { None }
/// });
///
/// assert_eq!(Some(3), evens.next().await);
/// assert_eq!(Some(5), evens.next().await);
/// assert_eq!(Some(7), evens.next().await);
/// assert_eq!(Some(9), evens.next().await);
/// assert_eq!(None, evens.next().await);
/// # }
/// ```
fn filter_map<T, F>(self, f: F) -> FilterMap<Self, F>
where
F: FnMut(Self::Item) -> Option<T>,
Self: Sized,
{
FilterMap::new(self, f)
}
/// Creates a stream which ends after the first `None`.
///
/// After a stream returns `None`, behavior is undefined. Future calls to
/// `poll_next` may or may not return `Some(T)` again or they may panic.
/// `fuse()` adapts a stream, ensuring that after `None` is given, it will
/// return `None` forever.
///
/// # Examples
///
/// ```
/// use tokio_stream::{Stream, StreamExt};
///
/// use std::pin::Pin;
/// use std::task::{Context, Poll};
///
/// // a stream which alternates between Some and None
/// struct Alternate {
/// state: i32,
/// }
///
/// impl Stream for Alternate {
/// type Item = i32;
///
/// fn poll_next(mut self: Pin<&mut Self>, _cx: &mut Context<'_>) -> Poll<Option<i32>> {
/// let val = self.state;
/// self.state = self.state + 1;
///
/// // if it's even, Some(i32), else None
/// if val % 2 == 0 {
/// Poll::Ready(Some(val))
/// } else {
/// Poll::Ready(None)
/// }
/// }
/// }
///
/// # /*
/// #[tokio::main]
/// # */
/// # #[tokio::main(flavor = "current_thread")]
/// async fn main() {
/// let mut stream = Alternate { state: 0 };
///
/// // the stream goes back and forth
/// assert_eq!(stream.next().await, Some(0));
/// assert_eq!(stream.next().await, None);
/// assert_eq!(stream.next().await, Some(2));
/// assert_eq!(stream.next().await, None);
///
/// // however, once it is fused
/// let mut stream = stream.fuse();
///
/// assert_eq!(stream.next().await, Some(4));
/// assert_eq!(stream.next().await, None);
///
/// // it will always return `None` after the first time.
/// assert_eq!(stream.next().await, None);
/// assert_eq!(stream.next().await, None);
/// assert_eq!(stream.next().await, None);
/// }
/// ```
fn fuse(self) -> Fuse<Self>
where
Self: Sized,
{
Fuse::new(self)
}
/// Creates a new stream of at most `n` items of the underlying stream.
///
/// Once `n` items have been yielded from this stream then it will always
/// return that the stream is done.
///
/// # Examples
///
/// ```
/// # #[tokio::main(flavor = "current_thread")]
/// # async fn main() {
/// use tokio_stream::{self as stream, StreamExt};
///
/// let mut stream = stream::iter(1..=10).take(3);
///
/// assert_eq!(Some(1), stream.next().await);
/// assert_eq!(Some(2), stream.next().await);
/// assert_eq!(Some(3), stream.next().await);
/// assert_eq!(None, stream.next().await);
/// # }
/// ```
fn take(self, n: usize) -> Take<Self>
where
Self: Sized,
{
Take::new(self, n)
}
/// Take elements from this stream while the provided predicate
/// resolves to `true`.
///
/// This function, like `Iterator::take_while`, will take elements from the
/// stream until the predicate `f` resolves to `false`. Once one element
/// returns false it will always return that the stream is done.
///
/// # Examples
///
/// ```
/// # #[tokio::main(flavor = "current_thread")]
/// # async fn main() {
/// use tokio_stream::{self as stream, StreamExt};
///
/// let mut stream = stream::iter(1..=10).take_while(|x| *x <= 3);
///
/// assert_eq!(Some(1), stream.next().await);
/// assert_eq!(Some(2), stream.next().await);
/// assert_eq!(Some(3), stream.next().await);
/// assert_eq!(None, stream.next().await);
/// # }
/// ```
fn take_while<F>(self, f: F) -> TakeWhile<Self, F>
where
F: FnMut(&Self::Item) -> bool,
Self: Sized,
{
TakeWhile::new(self, f)
}
/// Creates a new stream that will skip the `n` first items of the
/// underlying stream.
///
/// # Examples
///
/// ```
/// # #[tokio::main(flavor = "current_thread")]
/// # async fn main() {
/// use tokio_stream::{self as stream, StreamExt};
///
/// let mut stream = stream::iter(1..=10).skip(7);
///
/// assert_eq!(Some(8), stream.next().await);
/// assert_eq!(Some(9), stream.next().await);
/// assert_eq!(Some(10), stream.next().await);
/// assert_eq!(None, stream.next().await);
/// # }
/// ```
fn skip(self, n: usize) -> Skip<Self>
where
Self: Sized,
{
Skip::new(self, n)
}
/// Skip elements from the underlying stream while the provided predicate
/// resolves to `true`.
///
/// This function, like [`Iterator::skip_while`], will ignore elements from the
/// stream until the predicate `f` resolves to `false`. Once one element
/// returns false, the rest of the elements will be yielded.
///
/// [`Iterator::skip_while`]: std::iter::Iterator::skip_while()
///
/// # Examples
///
/// ```
/// # #[tokio::main(flavor = "current_thread")]
/// # async fn main() {
/// use tokio_stream::{self as stream, StreamExt};
///
/// let mut stream = stream::iter(vec![1,2,3,4,1]).skip_while(|x| *x < 3);
///
/// assert_eq!(Some(3), stream.next().await);
/// assert_eq!(Some(4), stream.next().await);
/// assert_eq!(Some(1), stream.next().await);
/// assert_eq!(None, stream.next().await);
/// # }
/// ```
fn skip_while<F>(self, f: F) -> SkipWhile<Self, F>
where
    F: FnMut(&Self::Item) -> bool,
    Self: Sized,
{
    SkipWhile::new(self, f)
}
/// Tests if every element of the stream matches a predicate.
///
/// Equivalent to:
///
/// ```ignore
/// async fn all<F>(&mut self, f: F) -> bool;
/// ```
///
/// `all()` takes a closure that returns `true` or `false`. It applies
/// this closure to each element of the stream, and if they all return
/// `true`, then so does `all`. If any of them return `false`, it
/// returns `false`. An empty stream returns `true`.
///
/// `all()` is short-circuiting; in other words, it will stop processing
/// as soon as it finds a `false`, given that no matter what else happens,
/// the result will also be `false`.
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// # #[tokio::main(flavor = "current_thread")]
/// # async fn main() {
/// use tokio_stream::{self as stream, StreamExt};
///
/// let a = [1, 2, 3];
///
/// assert!(stream::iter(&a).all(|&x| x > 0).await);
///
/// assert!(!stream::iter(&a).all(|&x| x > 2).await);
/// # }
/// ```
///
/// Stopping at the first `false`:
///
/// ```
/// # #[tokio::main(flavor = "current_thread")]
/// # async fn main() {
/// use tokio_stream::{self as stream, StreamExt};
///
/// let a = [1, 2, 3];
///
/// let mut iter = stream::iter(&a);
///
/// assert!(!iter.all(|&x| x != 2).await);
///
/// // we can still use `iter`, as there are more elements.
/// assert_eq!(iter.next().await, Some(&3));
/// # }
/// ```
fn all<F>(&mut self, f: F) -> AllFuture<'_, Self, F>
where
    Self: Unpin,
    F: FnMut(Self::Item) -> bool,
{
    AllFuture::new(self, f)
}
/// Tests if any element of the stream matches a predicate.
///
/// Equivalent to:
///
/// ```ignore
/// async fn any<F>(&mut self, f: F) -> bool;
/// ```
///
/// `any()` takes a closure that returns `true` or `false`. It applies
/// this closure to each element of the stream, and if any of them return
/// `true`, then so does `any()`. If they all return `false`, it
/// returns `false`.
///
/// `any()` is short-circuiting; in other words, it will stop processing
/// as soon as it finds a `true`, given that no matter what else happens,
/// the result will also be `true`.
///
/// An empty stream returns `false`.
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// # #[tokio::main(flavor = "current_thread")]
/// # async fn main() {
/// use tokio_stream::{self as stream, StreamExt};
///
/// let a = [1, 2, 3];
///
/// assert!(stream::iter(&a).any(|&x| x > 0).await);
///
/// assert!(!stream::iter(&a).any(|&x| x > 5).await);
/// # }
/// ```
///
/// Stopping at the first `true`:
///
/// ```
/// # #[tokio::main(flavor = "current_thread")]
/// # async fn main() {
/// use tokio_stream::{self as stream, StreamExt};
///
/// let a = [1, 2, 3];
///
/// let mut iter = stream::iter(&a);
///
/// assert!(iter.any(|&x| x != 2).await);
///
/// // we can still use `iter`, as there are more elements.
/// assert_eq!(iter.next().await, Some(&2));
/// # }
/// ```
fn any<F>(&mut self, f: F) -> AnyFuture<'_, Self, F>
where
    Self: Unpin,
    F: FnMut(Self::Item) -> bool,
{
    AnyFuture::new(self, f)
}
/// Combine two streams into one by first returning all values from the
/// first stream then all values from the second stream.
///
/// As long as `self` still has values to emit, no values from `other` are
/// emitted, even if some are ready.
///
/// The second stream must yield the same item type as `self`, as required
/// by the `U: Stream<Item = Self::Item>` bound.
///
/// # Examples
///
/// ```
/// use tokio_stream::{self as stream, StreamExt};
///
/// # #[tokio::main(flavor = "current_thread")]
/// # async fn main() {
/// let one = stream::iter(vec![1, 2, 3]);
/// let two = stream::iter(vec![4, 5, 6]);
///
/// let mut stream = one.chain(two);
///
/// assert_eq!(stream.next().await, Some(1));
/// assert_eq!(stream.next().await, Some(2));
/// assert_eq!(stream.next().await, Some(3));
/// assert_eq!(stream.next().await, Some(4));
/// assert_eq!(stream.next().await, Some(5));
/// assert_eq!(stream.next().await, Some(6));
/// assert_eq!(stream.next().await, None);
/// # }
/// ```
fn chain<U>(self, other: U) -> Chain<Self, U>
where
    U: Stream<Item = Self::Item>,
    Self: Sized,
{
    Chain::new(self, other)
}
/// A combinator that applies a function to every element in a stream
/// producing a single, final value.
///
/// This is the stream counterpart of [`Iterator::fold`].
///
/// [`Iterator::fold`]: std::iter::Iterator::fold()
///
/// Equivalent to:
///
/// ```ignore
/// async fn fold<B, F>(self, init: B, f: F) -> B;
/// ```
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// # #[tokio::main(flavor = "current_thread")]
/// # async fn main() {
/// use tokio_stream::{self as stream, *};
///
/// let s = stream::iter(vec![1u8, 2, 3]);
/// let sum = s.fold(0, |acc, x| acc + x).await;
///
/// assert_eq!(sum, 6);
/// # }
/// ```
fn fold<B, F>(self, init: B, f: F) -> FoldFuture<Self, B, F>
where
    Self: Sized,
    F: FnMut(B, Self::Item) -> B,
{
    FoldFuture::new(self, init, f)
}
/// Drain stream pushing all emitted values into a collection.
///
/// Equivalent to:
///
/// ```ignore
/// async fn collect<T>(self) -> T;
/// ```
///
/// `collect` streams all values, awaiting as needed. Values are pushed into
/// a collection. A number of different target collection types are
/// supported, including [`Vec`], [`String`], and [`Bytes`].
///
/// [`Bytes`]: https://docs.rs/bytes/latest/bytes/struct.Bytes.html
///
/// # `Result`
///
/// `collect()` can also be used with streams of type `Result<T, E>` where
/// `T: FromStream<_>`. In this case, `collect()` will stream as long as
/// values yielded from the stream are `Ok(_)`. If `Err(_)` is encountered,
/// streaming is terminated and `collect()` returns the `Err`.
///
/// # Notes
///
/// `FromStream` is currently a sealed trait. Stabilization is pending
/// enhancements to the Rust language.
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// use tokio_stream::{self as stream, StreamExt};
///
/// # #[tokio::main(flavor = "current_thread")]
/// # async fn main() {
/// let doubled: Vec<i32> =
///     stream::iter(vec![1, 2, 3])
///         .map(|x| x * 2)
///         .collect()
///         .await;
///
/// assert_eq!(vec![2, 4, 6], doubled);
/// # }
/// ```
///
/// Collecting a stream of `Result` values
///
/// ```
/// use tokio_stream::{self as stream, StreamExt};
///
/// # #[tokio::main(flavor = "current_thread")]
/// # async fn main() {
/// // A stream containing only `Ok` values will be collected
/// let values: Result<Vec<i32>, &str> =
///     stream::iter(vec![Ok(1), Ok(2), Ok(3)])
///         .collect()
///         .await;
///
/// assert_eq!(Ok(vec![1, 2, 3]), values);
///
/// // A stream containing `Err` values will return the first error.
/// let results = vec![Ok(1), Err("no"), Ok(2), Ok(3), Err("nein")];
///
/// let values: Result<Vec<i32>, &str> =
///     stream::iter(results)
///         .collect()
///         .await;
///
/// assert_eq!(Err("no"), values);
/// # }
/// ```
fn collect<T>(self) -> Collect<Self, T, T::InternalCollection>
where
    T: FromStream<Self::Item>,
    Self: Sized,
{
    Collect::new(self)
}
/// Applies a per-item timeout to the passed stream.
///
/// `timeout()` takes a `Duration` that represents the maximum amount of
/// time each element of the stream has to complete before timing out.
///
/// If the wrapped stream yields a value before the deadline is reached, the
/// value is returned. Otherwise, an error is returned. The caller may decide
/// to continue consuming the stream and will eventually get the next source
/// stream value once it becomes available. See
/// [`timeout_repeating`](StreamExt::timeout_repeating) for an alternative
/// where the timeouts will repeat.
///
/// # Notes
///
/// This function consumes the stream passed into it and returns a
/// wrapped version of it.
///
/// Polling the returned stream will continue to poll the inner stream even
/// if one or more items time out.
///
/// This method is only available when the `time` feature of this crate is
/// enabled.
///
/// # Examples
///
/// Suppose we have a stream `int_stream` that yields 3 numbers (1, 2, 3):
///
/// ```
/// # #[tokio::main(flavor = "current_thread")]
/// # async fn main() {
/// use tokio_stream::{self as stream, StreamExt};
/// use std::time::Duration;
/// # let int_stream = stream::iter(1..=3);
///
/// let int_stream = int_stream.timeout(Duration::from_secs(1));
/// tokio::pin!(int_stream);
///
/// // When no items time out, we get the 3 elements in succession:
/// assert_eq!(int_stream.try_next().await, Ok(Some(1)));
/// assert_eq!(int_stream.try_next().await, Ok(Some(2)));
/// assert_eq!(int_stream.try_next().await, Ok(Some(3)));
/// assert_eq!(int_stream.try_next().await, Ok(None));
///
/// // If the second item times out, we get an error and continue polling the stream:
/// # let mut int_stream = stream::iter(vec![Ok(1), Err(()), Ok(2), Ok(3)]);
/// assert_eq!(int_stream.try_next().await, Ok(Some(1)));
/// assert!(int_stream.try_next().await.is_err());
/// assert_eq!(int_stream.try_next().await, Ok(Some(2)));
/// assert_eq!(int_stream.try_next().await, Ok(Some(3)));
/// assert_eq!(int_stream.try_next().await, Ok(None));
///
/// // If we want to stop consuming the source stream the first time an
/// // element times out, we can use the `take_while` operator:
/// # let int_stream = stream::iter(vec![Ok(1), Err(()), Ok(2), Ok(3)]);
/// let mut int_stream = int_stream.take_while(Result::is_ok);
///
/// assert_eq!(int_stream.try_next().await, Ok(Some(1)));
/// assert_eq!(int_stream.try_next().await, Ok(None));
/// # }
/// ```
///
/// Once a timeout error is received, no further events will be received
/// unless the wrapped stream yields a value (timeouts do not repeat).
///
/// ```
/// # #[tokio::main(flavor = "current_thread", start_paused = true)]
/// # async fn main() {
/// use tokio_stream::{StreamExt, wrappers::IntervalStream};
/// use std::time::Duration;
/// let interval_stream = IntervalStream::new(tokio::time::interval(Duration::from_millis(100)));
/// let timeout_stream = interval_stream.timeout(Duration::from_millis(10));
/// tokio::pin!(timeout_stream);
///
/// // Only one timeout will be received between values in the source stream.
/// assert!(timeout_stream.try_next().await.is_ok());
/// assert!(timeout_stream.try_next().await.is_err(), "expected one timeout");
/// assert!(timeout_stream.try_next().await.is_ok(), "expected no more timeouts");
/// # }
/// ```
#[cfg(feature = "time")]
#[cfg_attr(docsrs, doc(cfg(feature = "time")))]
fn timeout(self, duration: Duration) -> Timeout<Self>
where
    Self: Sized,
{
    Timeout::new(self, duration)
}
| rust | MIT | 41d1877689f8669902b003a6affce60bdfeb3025 | 2026-01-04T15:33:40.250594Z | true |
tokio-rs/tokio | https://github.com/tokio-rs/tokio/blob/41d1877689f8669902b003a6affce60bdfeb3025/tokio-stream/src/stream_map.rs | tokio-stream/src/stream_map.rs | use crate::Stream;
use std::borrow::Borrow;
use std::future::poll_fn;
use std::hash::Hash;
use std::pin::Pin;
use std::task::{ready, Context, Poll};
/// Combine many streams into one, indexing each source stream with a unique
/// key.
///
/// `StreamMap` is similar to [`StreamExt::merge`] in that it combines source
/// streams into a single merged stream that yields values in the order that
/// they arrive from the source streams. However, `StreamMap` has a lot more
/// flexibility in usage patterns.
///
/// `StreamMap` can:
///
/// * Merge an arbitrary number of streams.
/// * Track which source stream the value was received from.
/// * Handle inserting and removing streams from the set of managed streams at
/// any point during iteration.
///
/// All source streams held by `StreamMap` are indexed using a key. This key is
/// included with the value when a source stream yields a value. The key is also
/// used to remove the stream from the `StreamMap` before the stream has
/// completed streaming.
///
/// # `Unpin`
///
/// Because the `StreamMap` API moves streams during runtime, both streams and
/// keys must be `Unpin`. In order to insert a `!Unpin` stream into a
/// `StreamMap`, use [`pin!`] to pin the stream to the stack or [`Box::pin`] to
/// pin the stream in the heap.
///
/// # Implementation
///
/// `StreamMap` is backed by a `Vec<(K, V)>`. There is no guarantee that this
/// internal implementation detail will persist in future versions, but it is
/// important to know the runtime implications. In general, `StreamMap` works
/// best with a "smallish" number of streams as all entries are scanned on
/// insert, remove, and polling. In cases where a large number of streams need
/// to be merged, it may be advisable to use tasks sending values on a shared
/// [`mpsc`] channel.
///
/// # Notes
///
/// `StreamMap` removes finished streams automatically, without alerting the user.
/// In some scenarios, the caller needs to be notified when a stream closes.
/// To achieve this, wrap each stream in [`StreamNotifyClose`] before inserting
/// it; the wrapper yields `None` as its final item when the underlying stream
/// closes.
///
/// [`StreamExt::merge`]: crate::StreamExt::merge
/// [`mpsc`]: https://docs.rs/tokio/1.0/tokio/sync/mpsc/index.html
/// [`pin!`]: https://docs.rs/tokio/1.0/tokio/macro.pin.html
/// [`Box::pin`]: std::boxed::Box::pin
/// [`StreamNotifyClose`]: crate::StreamNotifyClose
///
/// # Examples
///
/// Merging two streams, then remove them after receiving the first value
///
/// ```
/// use tokio_stream::{StreamExt, StreamMap, Stream};
/// use tokio::sync::mpsc;
/// use std::pin::Pin;
///
/// # #[tokio::main(flavor = "current_thread")]
/// # async fn main() {
/// let (tx1, mut rx1) = mpsc::channel::<usize>(10);
/// let (tx2, mut rx2) = mpsc::channel::<usize>(10);
///
/// // Convert the channels to a `Stream`.
/// let rx1 = Box::pin(async_stream::stream! {
/// while let Some(item) = rx1.recv().await {
/// yield item;
/// }
/// }) as Pin<Box<dyn Stream<Item = usize> + Send>>;
///
/// let rx2 = Box::pin(async_stream::stream! {
/// while let Some(item) = rx2.recv().await {
/// yield item;
/// }
/// }) as Pin<Box<dyn Stream<Item = usize> + Send>>;
///
/// tokio::spawn(async move {
/// tx1.send(1).await.unwrap();
///
/// // This value will never be received. The send may or may not return
/// // `Err` depending on if the remote end closed first or not.
/// let _ = tx1.send(2).await;
/// });
///
/// tokio::spawn(async move {
/// tx2.send(3).await.unwrap();
/// let _ = tx2.send(4).await;
/// });
///
/// let mut map = StreamMap::new();
///
/// // Insert both streams
/// map.insert("one", rx1);
/// map.insert("two", rx2);
///
/// // Read twice
/// for _ in 0..2 {
/// let (key, val) = map.next().await.unwrap();
///
/// if key == "one" {
/// assert_eq!(val, 1);
/// } else {
/// assert_eq!(val, 3);
/// }
///
/// // Remove the stream to prevent reading the next value
/// map.remove(key);
/// }
/// # }
/// ```
///
/// This example models a read-only client to a chat system with channels. The
/// client sends commands to join and leave channels. `StreamMap` is used to
/// manage active channel subscriptions.
///
/// For simplicity, messages are displayed with `println!`, but they could be
/// sent to the client over a socket.
///
/// ```no_run
/// use tokio_stream::{Stream, StreamExt, StreamMap};
///
/// enum Command {
/// Join(String),
/// Leave(String),
/// }
///
/// fn commands() -> impl Stream<Item = Command> {
/// // Streams in user commands by parsing `stdin`.
/// # tokio_stream::pending()
/// }
///
/// // Join a channel, returns a stream of messages received on the channel.
/// fn join(channel: &str) -> impl Stream<Item = String> + Unpin {
/// // left as an exercise to the reader
/// # tokio_stream::pending()
/// }
///
/// #[tokio::main]
/// async fn main() {
/// let mut channels = StreamMap::new();
///
/// // Input commands (join / leave channels).
/// let cmds = commands();
/// tokio::pin!(cmds);
///
/// loop {
/// tokio::select! {
/// Some(cmd) = cmds.next() => {
/// match cmd {
/// Command::Join(chan) => {
/// // Join the channel and add it to the `channels`
/// // stream map
/// let msgs = join(&chan);
/// channels.insert(chan, msgs);
/// }
/// Command::Leave(chan) => {
/// channels.remove(&chan);
/// }
/// }
/// }
/// Some((chan, msg)) = channels.next() => {
/// // Received a message, display it on stdout with the channel
/// // it originated from.
/// println!("{}: {}", chan, msg);
/// }
/// // Both the `commands` stream and the `channels` stream are
/// // complete. There is no more work to do, so leave the loop.
/// else => break,
/// }
/// }
/// }
/// ```
///
/// Using `StreamNotifyClose` to handle closed streams with `StreamMap`.
///
/// ```
/// use tokio_stream::{StreamExt, StreamMap, StreamNotifyClose};
///
/// # #[tokio::main(flavor = "current_thread")]
/// # async fn main() {
/// let mut map = StreamMap::new();
/// let stream = StreamNotifyClose::new(tokio_stream::iter(vec![0, 1]));
/// let stream2 = StreamNotifyClose::new(tokio_stream::iter(vec![0, 1]));
/// map.insert(0, stream);
/// map.insert(1, stream2);
/// while let Some((key, val)) = map.next().await {
/// match val {
/// Some(val) => println!("got {val:?} from stream {key:?}"),
/// None => println!("stream {key:?} closed"),
/// }
/// }
/// # }
/// ```
#[derive(Debug)]
pub struct StreamMap<K, V> {
    /// Streams stored in the map, keyed by `K`.
    ///
    /// Backed by a plain `Vec`, so key lookups are linear scans and removal
    /// uses `swap_remove` (entry order is not preserved).
    entries: Vec<(K, V)>,
}
impl<K, V> StreamMap<K, V> {
    /// An iterator visiting all key-value pairs in arbitrary order.
    ///
    /// The iterator element type is `&'a (K, V)`.
    ///
    /// # Examples
    ///
    /// ```
    /// use tokio_stream::{StreamMap, pending};
    ///
    /// let mut map = StreamMap::new();
    ///
    /// map.insert("a", pending::<i32>());
    /// map.insert("b", pending());
    /// map.insert("c", pending());
    ///
    /// for (key, stream) in map.iter() {
    ///     println!("({}, {:?})", key, stream);
    /// }
    /// ```
    pub fn iter(&self) -> impl Iterator<Item = &(K, V)> {
        self.entries.iter()
    }
    /// An iterator visiting all key-value pairs mutably in arbitrary order.
    ///
    /// The iterator element type is `&'a mut (K, V)`.
    ///
    /// # Examples
    ///
    /// ```
    /// use tokio_stream::{StreamMap, pending};
    ///
    /// let mut map = StreamMap::new();
    ///
    /// map.insert("a", pending::<i32>());
    /// map.insert("b", pending());
    /// map.insert("c", pending());
    ///
    /// for (key, stream) in map.iter_mut() {
    ///     println!("({}, {:?})", key, stream);
    /// }
    /// ```
    pub fn iter_mut(&mut self) -> impl Iterator<Item = &mut (K, V)> {
        self.entries.iter_mut()
    }
    /// Creates an empty `StreamMap`.
    ///
    /// The stream map is initially created with a capacity of `0`, so it will
    /// not allocate until it is first inserted into.
    ///
    /// # Examples
    ///
    /// ```
    /// use tokio_stream::{StreamMap, Pending};
    ///
    /// let map: StreamMap<&str, Pending<()>> = StreamMap::new();
    /// ```
    pub fn new() -> StreamMap<K, V> {
        StreamMap { entries: vec![] }
    }
    /// Creates an empty `StreamMap` with the specified capacity.
    ///
    /// The stream map will be able to hold at least `capacity` elements without
    /// reallocating. If `capacity` is 0, the stream map will not allocate.
    ///
    /// # Examples
    ///
    /// ```
    /// use tokio_stream::{StreamMap, Pending};
    ///
    /// let map: StreamMap<&str, Pending<()>> = StreamMap::with_capacity(10);
    /// ```
    pub fn with_capacity(capacity: usize) -> StreamMap<K, V> {
        StreamMap {
            entries: Vec::with_capacity(capacity),
        }
    }
    /// Returns an iterator visiting all keys in arbitrary order.
    ///
    /// The iterator element type is `&'a K`.
    ///
    /// # Examples
    ///
    /// ```
    /// use tokio_stream::{StreamMap, pending};
    ///
    /// let mut map = StreamMap::new();
    ///
    /// map.insert("a", pending::<i32>());
    /// map.insert("b", pending());
    /// map.insert("c", pending());
    ///
    /// for key in map.keys() {
    ///     println!("{}", key);
    /// }
    /// ```
    pub fn keys(&self) -> impl Iterator<Item = &K> {
        self.iter().map(|(k, _)| k)
    }
    /// An iterator visiting all values in arbitrary order.
    ///
    /// The iterator element type is `&'a V`.
    ///
    /// # Examples
    ///
    /// ```
    /// use tokio_stream::{StreamMap, pending};
    ///
    /// let mut map = StreamMap::new();
    ///
    /// map.insert("a", pending::<i32>());
    /// map.insert("b", pending());
    /// map.insert("c", pending());
    ///
    /// for stream in map.values() {
    ///     println!("{:?}", stream);
    /// }
    /// ```
    pub fn values(&self) -> impl Iterator<Item = &V> {
        self.iter().map(|(_, v)| v)
    }
    /// An iterator visiting all values mutably in arbitrary order.
    ///
    /// The iterator element type is `&'a mut V`.
    ///
    /// # Examples
    ///
    /// ```
    /// use tokio_stream::{StreamMap, pending};
    ///
    /// let mut map = StreamMap::new();
    ///
    /// map.insert("a", pending::<i32>());
    /// map.insert("b", pending());
    /// map.insert("c", pending());
    ///
    /// for stream in map.values_mut() {
    ///     println!("{:?}", stream);
    /// }
    /// ```
    pub fn values_mut(&mut self) -> impl Iterator<Item = &mut V> {
        self.iter_mut().map(|(_, v)| v)
    }
    /// Returns the number of streams the map can hold without reallocating.
    ///
    /// This number is a lower bound; the `StreamMap` might be able to hold
    /// more, but is guaranteed to be able to hold at least this many.
    ///
    /// # Examples
    ///
    /// ```
    /// use tokio_stream::{StreamMap, Pending};
    ///
    /// let map: StreamMap<i32, Pending<()>> = StreamMap::with_capacity(100);
    /// assert!(map.capacity() >= 100);
    /// ```
    pub fn capacity(&self) -> usize {
        self.entries.capacity()
    }
    /// Returns the number of streams in the map.
    ///
    /// # Examples
    ///
    /// ```
    /// use tokio_stream::{StreamMap, pending};
    ///
    /// let mut a = StreamMap::new();
    /// assert_eq!(a.len(), 0);
    /// a.insert(1, pending::<i32>());
    /// assert_eq!(a.len(), 1);
    /// ```
    pub fn len(&self) -> usize {
        self.entries.len()
    }
    /// Returns `true` if the map contains no elements.
    ///
    /// # Examples
    ///
    /// ```
    /// use tokio_stream::{StreamMap, pending};
    ///
    /// let mut a = StreamMap::new();
    /// assert!(a.is_empty());
    /// a.insert(1, pending::<i32>());
    /// assert!(!a.is_empty());
    /// ```
    pub fn is_empty(&self) -> bool {
        self.entries.is_empty()
    }
    /// Clears the map, removing all key-stream pairs. Keeps the allocated
    /// memory for reuse.
    ///
    /// # Examples
    ///
    /// ```
    /// use tokio_stream::{StreamMap, pending};
    ///
    /// let mut a = StreamMap::new();
    /// a.insert(1, pending::<i32>());
    /// a.clear();
    /// assert!(a.is_empty());
    /// ```
    pub fn clear(&mut self) {
        self.entries.clear();
    }
    /// Insert a key-stream pair into the map.
    ///
    /// If the map did not have this key present, `None` is returned.
    ///
    /// If the map did have this key present, the new `stream` replaces the old
    /// one and the old stream is returned.
    ///
    /// # Examples
    ///
    /// ```
    /// use tokio_stream::{StreamMap, pending};
    ///
    /// let mut map = StreamMap::new();
    ///
    /// assert!(map.insert(37, pending::<i32>()).is_none());
    /// assert!(!map.is_empty());
    ///
    /// map.insert(37, pending());
    /// assert!(map.insert(37, pending()).is_some());
    /// ```
    pub fn insert(&mut self, k: K, stream: V) -> Option<V>
    where
        K: Hash + Eq,
    {
        // Evict any existing entry with the same key first so keys stay
        // unique, then append the new pair.
        let ret = self.remove(&k);
        self.entries.push((k, stream));
        ret
    }
    /// Removes a key from the map, returning the stream at the key if the key was previously in the map.
    ///
    /// The key may be any borrowed form of the map's key type, but `Hash` and
    /// `Eq` on the borrowed form must match those for the key type.
    ///
    /// # Examples
    ///
    /// ```
    /// use tokio_stream::{StreamMap, pending};
    ///
    /// let mut map = StreamMap::new();
    /// map.insert(1, pending::<i32>());
    /// assert!(map.remove(&1).is_some());
    /// assert!(map.remove(&1).is_none());
    /// ```
    pub fn remove<Q>(&mut self, k: &Q) -> Option<V>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + ?Sized,
    {
        // Linear scan (the map is Vec-backed), then O(1) removal via
        // `swap_remove`; entry order is not preserved.
        let idx = self.entries.iter().position(|(key, _)| key.borrow() == k)?;
        Some(self.entries.swap_remove(idx).1)
    }
    /// Returns `true` if the map contains a stream for the specified key.
    ///
    /// The key may be any borrowed form of the map's key type, but `Hash` and
    /// `Eq` on the borrowed form must match those for the key type.
    ///
    /// # Examples
    ///
    /// ```
    /// use tokio_stream::{StreamMap, pending};
    ///
    /// let mut map = StreamMap::new();
    /// map.insert(1, pending::<i32>());
    /// assert_eq!(map.contains_key(&1), true);
    /// assert_eq!(map.contains_key(&2), false);
    /// ```
    pub fn contains_key<Q>(&self, k: &Q) -> bool
    where
        K: Borrow<Q>,
        Q: Hash + Eq + ?Sized,
    {
        // Linear scan over the Vec-backed entries.
        self.entries.iter().any(|(key, _)| key.borrow() == k)
    }
}
impl<K, V> StreamMap<K, V>
where
    K: Unpin,
    V: Stream + Unpin,
{
    /// Polls the next value, includes the vec entry index
    ///
    /// Polling starts at a randomly chosen entry so that repeated calls do
    /// not always favor the streams stored at the front of the vec.
    /// Streams that complete are removed on the fly.
    fn poll_next_entry(&mut self, cx: &mut Context<'_>) -> Poll<Option<(usize, V::Item)>> {
        // Random starting cursor. `thread_rng_n(0)` returns 0, so an empty
        // map skips the loop and hits the `is_empty` check below.
        let start = self::rand::thread_rng_n(self.entries.len() as u32) as usize;
        let mut idx = start;
        // The range is evaluated once, so at most `len` polls happen even
        // though `entries` may shrink while iterating.
        for _ in 0..self.entries.len() {
            let (_, stream) = &mut self.entries[idx];
            match Pin::new(stream).poll_next(cx) {
                Poll::Ready(Some(val)) => return Poll::Ready(Some((idx, val))),
                Poll::Ready(None) => {
                    // Remove the entry; `swap_remove` moves the last entry
                    // into position `idx`.
                    self.entries.swap_remove(idx);
                    // Check if this was the last entry, if so the cursor needs
                    // to wrap
                    if idx == self.entries.len() {
                        idx = 0;
                    } else if idx < start && start <= self.entries.len() {
                        // The stream being swapped into the current index has
                        // already been polled, so skip it.
                        idx = idx.wrapping_add(1) % self.entries.len();
                    }
                }
                Poll::Pending => {
                    idx = idx.wrapping_add(1) % self.entries.len();
                }
            }
        }
        // If the map is empty, then the stream is complete.
        if self.entries.is_empty() {
            Poll::Ready(None)
        } else {
            Poll::Pending
        }
    }
}
impl<K, V> Default for StreamMap<K, V> {
fn default() -> Self {
Self::new()
}
}
impl<K, V> StreamMap<K, V>
where
    K: Clone + Unpin,
    V: Stream + Unpin,
{
    /// Receives multiple items on this [`StreamMap`], extending the provided `buffer`.
    ///
    /// This method returns the number of items that is appended to the `buffer`.
    ///
    /// Note that this method does not guarantee that exactly `limit` items
    /// are received. Rather, if at least one item is available, it returns
    /// as many items as it can up to the given limit. This method returns
    /// zero only if the `StreamMap` is empty (or if `limit` is zero).
    ///
    /// # Cancel safety
    ///
    /// This method is cancel safe. If `next_many` is used as the event in a
    /// [`tokio::select!`](tokio::select) statement and some other branch
    /// completes first, it is guaranteed that no items were received on any of
    /// the underlying streams.
    pub async fn next_many(&mut self, buffer: &mut Vec<(K, V::Item)>, limit: usize) -> usize {
        poll_fn(|cx| self.poll_next_many(cx, buffer, limit)).await
    }
    /// Polls to receive multiple items on this `StreamMap`, extending the provided `buffer`.
    ///
    /// This method returns:
    /// * `Poll::Pending` if no items are available but the `StreamMap` is not empty.
    /// * `Poll::Ready(count)` where `count` is the number of items successfully received and
    ///   stored in `buffer`. This can be less than, or equal to, `limit`.
    /// * `Poll::Ready(0)` if `limit` is set to zero or when the `StreamMap` is empty.
    ///
    /// Note that this method does not guarantee that exactly `limit` items
    /// are received. Rather, if at least one item is available, it returns
    /// as many items as it can up to the given limit. This method returns
    /// zero only if the `StreamMap` is empty (or if `limit` is zero).
    pub fn poll_next_many(
        &mut self,
        cx: &mut Context<'_>,
        buffer: &mut Vec<(K, V::Item)>,
        limit: usize,
    ) -> Poll<usize> {
        if limit == 0 || self.entries.is_empty() {
            return Poll::Ready(0);
        }
        let mut added = 0;
        // Start at a random entry so repeated calls do not always drain the
        // streams at the front of the vec first.
        let start = self::rand::thread_rng_n(self.entries.len() as u32) as usize;
        let mut idx = start;
        // Keep sweeping over all entries until either `limit` items have
        // been collected or a full sweep yields no new item.
        while added < limit {
            // Indicates whether at least one stream returned a value when polled or not
            let mut should_loop = false;
            for _ in 0..self.entries.len() {
                let (_, stream) = &mut self.entries[idx];
                match Pin::new(stream).poll_next(cx) {
                    Poll::Ready(Some(val)) => {
                        added += 1;
                        // Clone the key so the buffer owns it independently
                        // of the entry, which stays in the map.
                        let key = self.entries[idx].0.clone();
                        buffer.push((key, val));
                        should_loop = true;
                        idx = idx.wrapping_add(1) % self.entries.len();
                    }
                    Poll::Ready(None) => {
                        // Remove the entry; `swap_remove` moves the last
                        // entry into position `idx`.
                        self.entries.swap_remove(idx);
                        // Check if this was the last entry, if so the cursor needs
                        // to wrap
                        if idx == self.entries.len() {
                            idx = 0;
                        } else if idx < start && start <= self.entries.len() {
                            // The stream being swapped into the current index has
                            // already been polled, so skip it.
                            idx = idx.wrapping_add(1) % self.entries.len();
                        }
                    }
                    Poll::Pending => {
                        idx = idx.wrapping_add(1) % self.entries.len();
                    }
                }
            }
            if !should_loop {
                break;
            }
        }
        if added > 0 {
            Poll::Ready(added)
        } else if self.entries.is_empty() {
            Poll::Ready(0)
        } else {
            Poll::Pending
        }
    }
}
impl<K, V> Stream for StreamMap<K, V>
where
    K: Clone + Unpin,
    V: Stream + Unpin,
{
    type Item = (K, V::Item);
    fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
        // Delegate to `poll_next_entry`, then attach the owning key by
        // cloning it out of the entry that yielded the value.
        if let Some((idx, val)) = ready!(self.poll_next_entry(cx)) {
            let key = self.entries[idx].0.clone();
            Poll::Ready(Some((key, val)))
        } else {
            Poll::Ready(None)
        }
    }
    fn size_hint(&self) -> (usize, Option<usize>) {
        // Sum the hints of all entries: lower bounds add up, and the merged
        // upper bound is known only while every entry reports one.
        let mut ret = (0, Some(0));
        for (_, stream) in &self.entries {
            let hint = stream.size_hint();
            ret.0 += hint.0;
            match (ret.1, hint.1) {
                (Some(a), Some(b)) => ret.1 = Some(a + b),
                (Some(_), None) => ret.1 = None,
                _ => {}
            }
        }
        ret
    }
}
impl<K, V> FromIterator<(K, V)> for StreamMap<K, V>
where
    K: Hash + Eq,
{
    /// Builds a `StreamMap` from key-stream pairs, preallocating from the
    /// iterator's lower size bound and deduplicating keys via `insert`.
    fn from_iter<T: IntoIterator<Item = (K, V)>>(iter: T) -> Self {
        let it = iter.into_iter();
        let mut stream_map = Self::with_capacity(it.size_hint().0);
        for (key, value) in it {
            stream_map.insert(key, value);
        }
        stream_map
    }
}
impl<K, V> Extend<(K, V)> for StreamMap<K, V> {
    fn extend<T>(&mut self, iter: T)
    where
        T: IntoIterator<Item = (K, V)>,
    {
        // Appends directly to the backing vec. Unlike `insert`, this performs
        // no duplicate-key check, so extending with an already-present key
        // leaves both entries in the map.
        self.entries.extend(iter);
    }
}
mod rand {
    use std::cell::Cell;

    mod loom {
        #[cfg(not(loom))]
        pub(crate) mod rand {
            use std::collections::hash_map::RandomState;
            use std::hash::BuildHasher;
            use std::sync::atomic::AtomicU32;
            use std::sync::atomic::Ordering::Relaxed;

            static COUNTER: AtomicU32 = AtomicU32::new(1);

            /// Produces a fresh 64-bit seed by hashing a monotonically
            /// increasing counter with a randomly keyed `RandomState`.
            pub(crate) fn seed() -> u64 {
                RandomState::new().hash_one(COUNTER.fetch_add(1, Relaxed))
            }
        }

        #[cfg(loom)]
        pub(crate) mod rand {
            /// Fixed seed under loom so model-checked runs are deterministic.
            pub(crate) fn seed() -> u64 {
                1
            }
        }
    }

    /// Fast random number generator
    ///
    /// Implements `xorshift64+`: 2 32-bit `xorshift` sequences added together.
    /// Shift triplet `[17,7,16]` was calculated as indicated in Marsaglia's
    /// `Xorshift` paper: <https://www.jstatsoft.org/article/view/v008i14/xorshift.pdf>
    /// This generator passes the SmallCrush suite, part of TestU01 framework:
    /// <http://simul.iro.umontreal.ca/testu01/tu01.html>
    #[derive(Debug)]
    pub(crate) struct FastRand {
        one: Cell<u32>,
        two: Cell<u32>,
    }

    impl FastRand {
        /// Initialize a new, thread-local, fast random number generator.
        pub(crate) fn new(seed: u64) -> FastRand {
            let hi = (seed >> 32) as u32;
            let lo = match seed as u32 {
                // The low word seeds the second sequence and must not be zero.
                0 => 1,
                nonzero => nonzero,
            };
            FastRand {
                one: Cell::new(hi),
                two: Cell::new(lo),
            }
        }

        /// Returns a uniformly distributed value in `0..n`.
        pub(crate) fn fastrand_n(&self, n: u32) -> u32 {
            // Widening multiply + high word: a faster replacement for
            // `fastrand() % n`.
            // See https://lemire.me/blog/2016/06/27/a-fast-alternative-to-the-modulo-reduction/
            let wide = u64::from(self.fastrand()) * u64::from(n);
            (wide >> 32) as u32
        }

        /// Advances both xorshift sequences and returns their sum.
        fn fastrand(&self) -> u32 {
            let (mut x, y) = (self.one.get(), self.two.get());

            x ^= x << 17;
            x = x ^ y ^ (x >> 7) ^ (y >> 16);

            self.one.set(y);
            self.two.set(x);

            y.wrapping_add(x)
        }
    }

    // Used by `StreamMap`
    pub(crate) fn thread_rng_n(n: u32) -> u32 {
        thread_local! {
            static THREAD_RNG: FastRand = FastRand::new(loom::rand::seed());
        }
        THREAD_RNG.with(|rng| rng.fastrand_n(n))
    }
}
| rust | MIT | 41d1877689f8669902b003a6affce60bdfeb3025 | 2026-01-04T15:33:40.250594Z | false |
tokio-rs/tokio | https://github.com/tokio-rs/tokio/blob/41d1877689f8669902b003a6affce60bdfeb3025/tokio-stream/src/macros.rs | tokio-stream/src/macros.rs | macro_rules! cfg_fs {
($($item:item)*) => {
$(
#[cfg(feature = "fs")]
#[cfg_attr(docsrs, doc(cfg(feature = "fs")))]
$item
)*
}
}
// Expands each item only when the "io-util" feature is enabled, and marks it
// as feature-gated in docs built with the `docsrs` cfg.
macro_rules! cfg_io_util {
    ($($item:item)*) => {
        $(
            #[cfg(feature = "io-util")]
            #[cfg_attr(docsrs, doc(cfg(feature = "io-util")))]
            $item
        )*
    }
}
// Expands each item only when the "net" feature is enabled, and marks it
// as feature-gated in docs built with the `docsrs` cfg.
macro_rules! cfg_net {
    ($($item:item)*) => {
        $(
            #[cfg(feature = "net")]
            #[cfg_attr(docsrs, doc(cfg(feature = "net")))]
            $item
        )*
    }
}
// Expands each item only when the "time" feature is enabled, and marks it
// as feature-gated in docs built with the `docsrs` cfg.
macro_rules! cfg_time {
    ($($item:item)*) => {
        $(
            #[cfg(feature = "time")]
            #[cfg_attr(docsrs, doc(cfg(feature = "time")))]
            $item
        )*
    }
}
// Expands each item only when the "sync" feature is enabled, and marks it
// as feature-gated in docs built with the `docsrs` cfg.
macro_rules! cfg_sync {
    ($($item:item)*) => {
        $(
            #[cfg(feature = "sync")]
            #[cfg_attr(docsrs, doc(cfg(feature = "sync")))]
            $item
        )*
    }
}
// Expands each item only when the "signal" feature is enabled, and marks it
// as feature-gated in docs built with the `docsrs` cfg.
macro_rules! cfg_signal {
    ($($item:item)*) => {
        $(
            #[cfg(feature = "signal")]
            #[cfg_attr(docsrs, doc(cfg(feature = "signal")))]
            $item
        )*
    }
}
| rust | MIT | 41d1877689f8669902b003a6affce60bdfeb3025 | 2026-01-04T15:33:40.250594Z | false |
tokio-rs/tokio | https://github.com/tokio-rs/tokio/blob/41d1877689f8669902b003a6affce60bdfeb3025/tokio-stream/src/once.rs | tokio-stream/src/once.rs | use crate::{Iter, Stream};
use core::option;
use core::pin::Pin;
use core::task::{Context, Poll};
/// Stream for the [`once`](fn@once) function.
///
/// Created by [`once`]; yields its single value and then terminates.
#[derive(Debug)]
#[must_use = "streams do nothing unless polled"]
pub struct Once<T> {
    // The single value is stored as an `Option` iterator wrapped in the
    // crate's `Iter` adapter, so `poll_next` can simply delegate to it.
    iter: Iter<option::IntoIter<T>>,
}
// `Once` can always be unpinned: `poll_next` re-pins the plain `iter` field
// with `Pin::new` and never relies on structural pinning.
impl<I> Unpin for Once<I> {}
/// Creates a stream that emits a single value, then terminates.
///
/// The returned stream is immediately ready: the first poll yields `value`
/// and every subsequent poll yields `None`.
///
/// # Examples
///
/// ```
/// use tokio_stream::{self as stream, StreamExt};
///
/// # #[tokio::main(flavor = "current_thread")]
/// # async fn main() {
/// // one is the loneliest number
/// let mut one = stream::once(1);
///
/// assert_eq!(Some(1), one.next().await);
///
/// // just one, that's all we get
/// assert_eq!(None, one.next().await);
/// # }
/// ```
pub fn once<T>(value: T) -> Once<T> {
    // `Option` is an iterator of zero-or-one items, which models "exactly
    // once" when populated with `Some`.
    let iter = crate::iter(Some(value));
    Once { iter }
}
impl<T> Stream for Once<T> {
    type Item = T;

    fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<T>> {
        // `Once` is unconditionally `Unpin`, so unwrapping the pin and
        // re-pinning the inner iterator stream is free.
        let inner = &mut self.get_mut().iter;
        Pin::new(inner).poll_next(cx)
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        self.iter.size_hint()
    }
}
| rust | MIT | 41d1877689f8669902b003a6affce60bdfeb3025 | 2026-01-04T15:33:40.250594Z | false |
tokio-rs/tokio | https://github.com/tokio-rs/tokio/blob/41d1877689f8669902b003a6affce60bdfeb3025/tokio-stream/src/iter.rs | tokio-stream/src/iter.rs | use crate::Stream;
use core::pin::Pin;
use core::task::{Context, Poll};
/// Stream for the [`iter`](fn@iter) function.
#[derive(Debug)]
#[must_use = "streams do nothing unless polled"]
pub struct Iter<I> {
    // Underlying iterator; each poll forwards its next item.
    iter: I,
    // Count of items yielded since the stream last returned `Pending`;
    // used to force a cooperative yield after a burst of ready items.
    yield_amt: usize,
}
// `poll_next` never pins the inner iterator, so `Iter` can be `Unpin` for
// any `I`.
impl<I> Unpin for Iter<I> {}
/// Converts an `Iterator` into a `Stream` which is always ready
/// to yield the next value.
///
/// Iterators in Rust cannot express blocking, so this adapter simply
/// forwards every `iter.next()` result as an immediately ready item.
///
/// ```
/// # async fn dox() {
/// use tokio_stream::{self as stream, StreamExt};
///
/// let mut stream = stream::iter(vec![17, 19]);
///
/// assert_eq!(stream.next().await, Some(17));
/// assert_eq!(stream.next().await, Some(19));
/// assert_eq!(stream.next().await, None);
/// # }
/// ```
pub fn iter<I>(i: I) -> Iter<I::IntoIter>
where
    I: IntoIterator,
{
    let iter = i.into_iter();
    Iter { iter, yield_amt: 0 }
}
impl<I> Stream for Iter<I>
where
    I: Iterator,
{
    type Item = I::Item;

    fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<I::Item>> {
        // TODO: add coop back
        //
        // Yield back to the scheduler after a burst of 32 ready items so a
        // long (or infinite) iterator cannot starve other tasks. The waker
        // is notified immediately so the stream is polled again promptly.
        if self.yield_amt < 32 {
            self.yield_amt += 1;
            Poll::Ready(self.iter.next())
        } else {
            self.yield_amt = 0;
            cx.waker().wake_by_ref();
            Poll::Pending
        }
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        self.iter.size_hint()
    }
}
| rust | MIT | 41d1877689f8669902b003a6affce60bdfeb3025 | 2026-01-04T15:33:40.250594Z | false |
tokio-rs/tokio | https://github.com/tokio-rs/tokio/blob/41d1877689f8669902b003a6affce60bdfeb3025/tokio-stream/src/stream_ext/merge.rs | tokio-stream/src/stream_ext/merge.rs | use crate::stream_ext::Fuse;
use crate::Stream;
use core::pin::Pin;
use core::task::{Context, Poll};
use pin_project_lite::pin_project;
pin_project! {
    /// Stream returned by the [`merge`](super::StreamExt::merge) method.
    ///
    /// Interleaves items from two streams, alternating which one is polled
    /// first so that neither side can starve the other.
    pub struct Merge<T, U> {
        #[pin]
        a: Fuse<T>,
        #[pin]
        b: Fuse<U>,
        // When `true`, poll `a` first; otherwise poll `b` first.
        a_first: bool,
    }
}
impl<T, U> Merge<T, U> {
    /// Builds a `Merge` over two streams, fusing both so that a stream
    /// which has finished is never polled again. `a` is polled first on
    /// the initial call.
    pub(super) fn new(a: T, b: U) -> Merge<T, U>
    where
        T: Stream,
        U: Stream,
    {
        let (a, b) = (Fuse::new(a), Fuse::new(b));
        Merge { a, b, a_first: true }
    }
}
impl<T, U> Stream for Merge<T, U>
where
    T: Stream,
    U: Stream<Item = T::Item>,
{
    type Item = T::Item;

    fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<T::Item>> {
        let this = self.project();

        // Flip the fairness flag on every call so the two streams take
        // turns being polled first.
        let poll_a_first = *this.a_first;
        *this.a_first = !poll_a_first;

        if poll_a_first {
            poll_next(this.a, this.b, cx)
        } else {
            poll_next(this.b, this.a, cx)
        }
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        super::merge_size_hints(self.a.size_hint(), self.b.size_hint())
    }
}
/// Polls `first`, then `second`, returning the first ready item.
///
/// Returns `Ready(None)` only once *both* streams report completion; if
/// either side is still pending, the merged stream stays pending.
fn poll_next<T, U>(
    first: Pin<&mut T>,
    second: Pin<&mut U>,
    cx: &mut Context<'_>,
) -> Poll<Option<T::Item>>
where
    T: Stream,
    U: Stream<Item = T::Item>,
{
    let mut all_done = true;

    if let Poll::Ready(item) = first.poll_next(cx) {
        if item.is_some() {
            return Poll::Ready(item);
        }
        // `Ready(None)`: this side is finished; keep `all_done` true.
    } else {
        all_done = false;
    }

    if let Poll::Ready(item) = second.poll_next(cx) {
        if item.is_some() {
            return Poll::Ready(item);
        }
    } else {
        all_done = false;
    }

    if all_done {
        Poll::Ready(None)
    } else {
        Poll::Pending
    }
}
| rust | MIT | 41d1877689f8669902b003a6affce60bdfeb3025 | 2026-01-04T15:33:40.250594Z | false |
tokio-rs/tokio | https://github.com/tokio-rs/tokio/blob/41d1877689f8669902b003a6affce60bdfeb3025/tokio-stream/src/stream_ext/fold.rs | tokio-stream/src/stream_ext/fold.rs | use crate::Stream;
use core::future::Future;
use core::marker::PhantomPinned;
use core::pin::Pin;
use core::task::{ready, Context, Poll};
use pin_project_lite::pin_project;
pin_project! {
    /// Future returned by the [`fold`](super::StreamExt::fold) method.
    #[derive(Debug)]
    #[must_use = "futures do nothing unless you `.await` or poll them"]
    pub struct FoldFuture<St, B, F> {
        #[pin]
        stream: St,
        // Accumulator; kept in an `Option` so it can be moved out when the
        // closure runs and again when the final value is returned.
        acc: Option<B>,
        // Folding closure: `(acc, item) -> acc`.
        f: F,
        // Make this future `!Unpin` for compatibility with async trait methods.
        #[pin]
        _pin: PhantomPinned,
    }
}
impl<St, B, F> FoldFuture<St, B, F> {
pub(super) fn new(stream: St, init: B, f: F) -> Self {
Self {
stream,
acc: Some(init),
f,
_pin: PhantomPinned,
}
}
}
impl<St, B, F> Future for FoldFuture<St, B, F>
where
    St: Stream,
    F: FnMut(B, St::Item) -> B,
{
    type Output = B;

    fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
        let mut me = self.project();
        // Drain the stream, folding each ready item into the accumulator;
        // `ready!` propagates `Pending` whenever the stream is not ready.
        loop {
            match ready!(me.stream.as_mut().poll_next(cx)) {
                Some(item) => {
                    // Move the accumulator out, apply the closure, put the
                    // new value back.
                    let acc = me.acc.take().unwrap();
                    *me.acc = Some((me.f)(acc, item));
                }
                // Stream exhausted: the accumulator is the final result.
                None => return Poll::Ready(me.acc.take().unwrap()),
            }
        }
    }
}
| rust | MIT | 41d1877689f8669902b003a6affce60bdfeb3025 | 2026-01-04T15:33:40.250594Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.