repo stringlengths 6 65 | file_url stringlengths 81 311 | file_path stringlengths 6 227 | content stringlengths 0 32.8k | language stringclasses 1
value | license stringclasses 7
values | commit_sha stringlengths 40 40 | retrieved_at stringdate 2026-01-04 15:31:58 2026-01-04 20:25:31 | truncated bool 2
classes |
|---|---|---|---|---|---|---|---|---|
rustfs/rustfs | https://github.com/rustfs/rustfs/blob/666c0a9a38636eb6653dff7d9c98ff7122601ce2/crates/policy/src/lib.rs | crates/policy/src/lib.rs | // Copyright 2024 RustFS Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Public crate surface: ARN parsing, credential/auth helpers, error types,
// persisted-format metadata, the policy model, request service types, and
// JWT utilities.
pub mod arn;
pub mod auth;
pub mod error;
pub mod format;
pub mod policy;
pub mod service_type;
pub mod utils;
| rust | Apache-2.0 | 666c0a9a38636eb6653dff7d9c98ff7122601ce2 | 2026-01-04T15:42:12.458416Z | false |
rustfs/rustfs | https://github.com/rustfs/rustfs/blob/666c0a9a38636eb6653dff7d9c98ff7122601ce2/crates/policy/src/policy.rs | crates/policy/src/policy.rs | // Copyright 2024 RustFS Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Submodules that make up the policy document model.
pub mod action;
mod doc;
mod effect;
mod function;
mod id;
pub mod opa;
// Inner `policy` module intentionally shares its parent's name.
#[allow(clippy::module_inception)]
mod policy;
mod principal;
pub mod resource;
pub mod statement;
pub(crate) mod utils;
pub mod variables;

// Re-export the core types at the `policy` module root so callers do not
// need to know the internal module layout.
pub use action::ActionSet;
pub use doc::PolicyDoc;
pub use effect::Effect;
pub use function::Functions;
pub use id::ID;
pub use policy::*;
pub use principal::Principal;
pub use resource::ResourceSet;
pub use statement::Statement;
/// Errors raised while parsing or validating a policy document.
#[derive(thiserror::Error, Debug)]
#[cfg_attr(test, derive(Eq, PartialEq))]
pub enum Error {
    #[error("invalid Version '{0}'")]
    InvalidVersion(String),
    #[error("invalid Effect '{0}'")]
    InvalidEffect(String),
    // A statement must carry at least one of `Action` / `NotAction`.
    #[error("both 'Action' and 'NotAction' are empty")]
    NonAction,
    #[error("'Resource' is empty")]
    NonResource,
    #[error("invalid key name: '{0}'")]
    InvalidKeyName(String),
    #[error("invalid key: '{0}'")]
    InvalidKey(String),
    #[error("invalid action: '{0}'")]
    InvalidAction(String),
    // Fields: (resource type, offending pattern).
    #[error("invalid resource, type: '{0}', pattern: '{1}'")]
    InvalidResource(String, String),
}
| rust | Apache-2.0 | 666c0a9a38636eb6653dff7d9c98ff7122601ce2 | 2026-01-04T15:42:12.458416Z | false |
rustfs/rustfs | https://github.com/rustfs/rustfs/blob/666c0a9a38636eb6653dff7d9c98ff7122601ce2/crates/policy/src/error.rs | crates/policy/src/error.rs | // Copyright 2024 RustFS Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use crate::policy;

/// Crate-wide result alias; every fallible API in this crate returns it.
pub type Result<T> = core::result::Result<T, Error>;

/// Top-level IAM/auth error type for the policy crate.
///
/// Variants fall into a few groups: wrapped errors from other crates
/// (`PolicyError`, `CryptoError`, `JWTError`, `Io`), lookup failures
/// (`NoSuch*`), and validation/state errors for credentials and groups.
#[derive(thiserror::Error, Debug)]
pub enum Error {
    // Policy parsing/validation errors, forwarded verbatim.
    #[error(transparent)]
    PolicyError(#[from] policy::Error),
    // Free-form error message; displayed as-is.
    #[error("{0}")]
    StringError(String),
    #[error("crypto: {0}")]
    CryptoError(#[from] rustfs_crypto::Error),
    // Entity-not-found errors; see the `is_err_no_such_*` predicates below.
    #[error("user '{0}' does not exist")]
    NoSuchUser(String),
    #[error("account '{0}' does not exist")]
    NoSuchAccount(String),
    #[error("service account '{0}' does not exist")]
    NoSuchServiceAccount(String),
    #[error("temp account '{0}' does not exist")]
    NoSuchTempAccount(String),
    #[error("group '{0}' does not exist")]
    NoSuchGroup(String),
    #[error("policy does not exist")]
    NoSuchPolicy,
    #[error("policy in use")]
    PolicyInUse,
    #[error("group not empty")]
    GroupNotEmpty,
    #[error("invalid arguments specified")]
    InvalidArgument,
    #[error("not initialized")]
    IamSysNotInitialized,
    #[error("invalid service type: {0}")]
    InvalidServiceType(String),
    #[error("malformed credential")]
    ErrCredMalformed,
    #[error("CredNotInitialized")]
    CredNotInitialized,
    // Credential validation errors (length bounds, reserved characters).
    #[error("invalid access key length")]
    InvalidAccessKeyLength,
    #[error("invalid secret key length")]
    InvalidSecretKeyLength,
    #[error("access key contains reserved characters =,")]
    ContainsReservedChars,
    #[error("group name contains reserved characters =,")]
    GroupNameContainsReservedChars,
    #[error("jwt err {0}")]
    JWTError(#[from] jsonwebtoken::errors::Error),
    #[error("no access key")]
    NoAccessKey,
    #[error("invalid token")]
    InvalidToken,
    #[error("invalid access_key")]
    InvalidAccessKey,
    #[error("action not allowed")]
    IAMActionNotAllowed,
    #[error("invalid expiration")]
    InvalidExpiration,
    #[error("no secret key with access key")]
    NoSecretKeyWithAccessKey,
    #[error("no access key with secret key")]
    NoAccessKeyWithSecretKey,
    #[error("policy too large")]
    PolicyTooLarge,
    // Catch-all I/O variant; `Error::other` also funnels foreign errors here.
    #[error("io error: {0}")]
    Io(std::io::Error),
    #[error("system already initialized")]
    IamSysAlreadyInitialized,
}
impl Error {
pub fn other<E>(error: E) -> Self
where
E: Into<Box<dyn std::error::Error + Send + Sync>>,
{
Error::Io(std::io::Error::other(error))
}
}
impl From<std::io::Error> for Error {
fn from(e: std::io::Error) -> Self {
Error::Io(e)
}
}
impl From<time::error::ComponentRange> for Error {
    /// Funnels out-of-range date/time component errors through [`Error::other`].
    fn from(e: time::error::ComponentRange) -> Self {
        Self::other(e)
    }
}
impl From<serde_json::Error> for Error {
    /// Funnels JSON (de)serialization failures through [`Error::other`].
    fn from(e: serde_json::Error) -> Self {
        Self::other(e)
    }
}
// impl From<jsonwebtoken::errors::Error> for Error {
// fn from(e: jsonwebtoken::errors::Error) -> Self {
// Error::JWTError(e)
// }
// }
impl From<regex::Error> for Error {
    /// Funnels regex compilation failures through [`Error::other`].
    fn from(e: regex::Error) -> Self {
        Self::other(e)
    }
}
// pub fn is_err_no_such_user(e: &Error) -> bool {
// matches!(e, Error::NoSuchUser(_))
// }
/// Returns true if `err` is [`Error::NoSuchPolicy`].
pub fn is_err_no_such_policy(err: &Error) -> bool {
    matches!(err, Error::NoSuchPolicy)
}

/// Returns true if `err` is [`Error::NoSuchUser`] (any user name).
pub fn is_err_no_such_user(err: &Error) -> bool {
    matches!(err, Error::NoSuchUser(_))
}

/// Returns true if `err` is [`Error::NoSuchAccount`] (any account name).
pub fn is_err_no_such_account(err: &Error) -> bool {
    matches!(err, Error::NoSuchAccount(_))
}

/// Returns true if `err` is [`Error::NoSuchTempAccount`] (any account name).
pub fn is_err_no_such_temp_account(err: &Error) -> bool {
    matches!(err, Error::NoSuchTempAccount(_))
}

/// Returns true if `err` is [`Error::NoSuchGroup`] (any group name).
pub fn is_err_no_such_group(err: &Error) -> bool {
    matches!(err, Error::NoSuchGroup(_))
}

/// Returns true if `err` is [`Error::NoSuchServiceAccount`] (any account name).
pub fn is_err_no_such_service_account(err: &Error) -> bool {
    matches!(err, Error::NoSuchServiceAccount(_))
}
// Tests for error conversions (`From` impls and `Error::other`), the
// `is_err_*` predicates, and the `Display` strings of every variant.
#[cfg(test)]
mod tests {
    use super::*;
    use std::io::{Error as IoError, ErrorKind};

    // From<std::io::Error> must preserve both kind and message.
    #[test]
    fn test_policy_error_from_io_error() {
        let io_error = IoError::new(ErrorKind::PermissionDenied, "permission denied");
        let policy_error: Error = io_error.into();
        match policy_error {
            Error::Io(inner_io) => {
                assert_eq!(inner_io.kind(), ErrorKind::PermissionDenied);
                assert!(inner_io.to_string().contains("permission denied"));
            }
            _ => panic!("Expected Io variant"),
        }
    }

    // Error::other wraps arbitrary errors into Io with ErrorKind::Other.
    #[test]
    fn test_policy_error_other_function() {
        let custom_error = "Custom policy error";
        let policy_error = Error::other(custom_error);
        match policy_error {
            Error::Io(io_error) => {
                assert!(io_error.to_string().contains(custom_error));
                assert_eq!(io_error.kind(), ErrorKind::Other);
            }
            _ => panic!("Expected Io variant"),
        }
    }

    #[test]
    fn test_policy_error_from_crypto_error() {
        // Test conversion from crypto::Error - use an actual variant
        let crypto_error = rustfs_crypto::Error::ErrUnexpectedHeader;
        let policy_error: Error = crypto_error.into();
        match policy_error {
            Error::CryptoError(_) => {
                // Verify the conversion worked
                assert!(policy_error.to_string().contains("crypto"));
            }
            _ => panic!("Expected CryptoError variant"),
        }
    }

    #[test]
    fn test_policy_error_from_jwt_error() {
        use jsonwebtoken::{Algorithm, DecodingKey, Validation, decode};
        use serde::{Deserialize, Serialize};

        #[derive(Debug, Serialize, Deserialize, Clone)]
        struct Claims {
            sub: String,
            exp: usize,
        }

        // Create an invalid JWT to generate a JWT error
        let invalid_token = "invalid.jwt.token";
        let key = DecodingKey::from_secret(b"secret");
        let validation = Validation::new(Algorithm::HS256);
        let jwt_result = decode::<Claims>(invalid_token, &key, &validation);
        assert!(jwt_result.is_err());
        let jwt_error = jwt_result.unwrap_err();
        let policy_error: Error = jwt_error.into();
        match policy_error {
            Error::JWTError(_) => {
                // Verify the conversion worked
                assert!(policy_error.to_string().contains("jwt err"));
            }
            _ => panic!("Expected JWTError variant"),
        }
    }

    // serde_json errors are funneled through Error::other, i.e. Io(Other).
    #[test]
    fn test_policy_error_from_serde_json() {
        // Test conversion from serde_json::Error
        let invalid_json = r#"{"invalid": json}"#;
        let json_error = serde_json::from_str::<serde_json::Value>(invalid_json).unwrap_err();
        let policy_error: Error = json_error.into();
        match policy_error {
            Error::Io(io_error) => {
                assert_eq!(io_error.kind(), ErrorKind::Other);
            }
            _ => panic!("Expected Io variant"),
        }
    }

    // time::ComponentRange errors also go through Error::other.
    #[test]
    fn test_policy_error_from_time_component_range() {
        use time::{Date, Month};

        // Create an invalid date to generate a ComponentRange error
        let time_result = Date::from_calendar_date(2023, Month::January, 32); // Invalid day
        assert!(time_result.is_err());
        let time_error = time_result.unwrap_err();
        let policy_error: Error = time_error.into();
        match policy_error {
            Error::Io(io_error) => {
                assert_eq!(io_error.kind(), ErrorKind::Other);
            }
            _ => panic!("Expected Io variant"),
        }
    }

    // regex compile errors also go through Error::other.
    #[test]
    #[allow(clippy::invalid_regex)]
    fn test_policy_error_from_regex_error() {
        use regex::Regex;

        // Create an invalid regex to generate a regex error (unclosed bracket)
        let regex_result = Regex::new("[");
        assert!(regex_result.is_err());
        let regex_error = regex_result.unwrap_err();
        let policy_error: Error = regex_error.into();
        match policy_error {
            Error::Io(io_error) => {
                assert_eq!(io_error.kind(), ErrorKind::Other);
            }
            _ => panic!("Expected Io variant"),
        }
    }

    // Each is_err_* predicate matches exactly its own variant.
    #[test]
    fn test_helper_functions() {
        // Test helper functions for error type checking
        assert!(is_err_no_such_policy(&Error::NoSuchPolicy));
        assert!(!is_err_no_such_policy(&Error::NoSuchUser("test".to_string())));
        assert!(is_err_no_such_user(&Error::NoSuchUser("test".to_string())));
        assert!(!is_err_no_such_user(&Error::NoSuchAccount("test".to_string())));
        assert!(is_err_no_such_account(&Error::NoSuchAccount("test".to_string())));
        assert!(!is_err_no_such_account(&Error::NoSuchUser("test".to_string())));
        assert!(is_err_no_such_temp_account(&Error::NoSuchTempAccount("test".to_string())));
        assert!(!is_err_no_such_temp_account(&Error::NoSuchAccount("test".to_string())));
        assert!(is_err_no_such_group(&Error::NoSuchGroup("test".to_string())));
        assert!(!is_err_no_such_group(&Error::NoSuchUser("test".to_string())));
        assert!(is_err_no_such_service_account(&Error::NoSuchServiceAccount("test".to_string())));
        assert!(!is_err_no_such_service_account(&Error::NoSuchAccount("test".to_string())));
    }

    // Display output of each variant must match its #[error(...)] template.
    #[test]
    fn test_error_display_format() {
        let test_cases = vec![
            (Error::NoSuchUser("testuser".to_string()), "user 'testuser' does not exist"),
            (Error::NoSuchAccount("testaccount".to_string()), "account 'testaccount' does not exist"),
            (
                Error::NoSuchServiceAccount("service1".to_string()),
                "service account 'service1' does not exist",
            ),
            (Error::NoSuchTempAccount("temp1".to_string()), "temp account 'temp1' does not exist"),
            (Error::NoSuchGroup("group1".to_string()), "group 'group1' does not exist"),
            (Error::NoSuchPolicy, "policy does not exist"),
            (Error::PolicyInUse, "policy in use"),
            (Error::GroupNotEmpty, "group not empty"),
            (Error::InvalidArgument, "invalid arguments specified"),
            (Error::IamSysNotInitialized, "not initialized"),
            (Error::InvalidServiceType("invalid".to_string()), "invalid service type: invalid"),
            (Error::ErrCredMalformed, "malformed credential"),
            (Error::CredNotInitialized, "CredNotInitialized"),
            (Error::InvalidAccessKeyLength, "invalid access key length"),
            (Error::InvalidSecretKeyLength, "invalid secret key length"),
            (Error::ContainsReservedChars, "access key contains reserved characters =,"),
            (Error::GroupNameContainsReservedChars, "group name contains reserved characters =,"),
            (Error::NoAccessKey, "no access key"),
            (Error::InvalidToken, "invalid token"),
            (Error::InvalidAccessKey, "invalid access_key"),
            (Error::IAMActionNotAllowed, "action not allowed"),
            (Error::InvalidExpiration, "invalid expiration"),
            (Error::NoSecretKeyWithAccessKey, "no secret key with access key"),
            (Error::NoAccessKeyWithSecretKey, "no access key with secret key"),
            (Error::PolicyTooLarge, "policy too large"),
        ];
        for (error, expected_message) in test_cases {
            assert_eq!(error.to_string(), expected_message);
        }
    }

    // StringError displays its payload verbatim.
    #[test]
    fn test_string_error_variant() {
        let custom_message = "Custom error message";
        let error = Error::StringError(custom_message.to_string());
        assert_eq!(error.to_string(), custom_message);
    }
}
| rust | Apache-2.0 | 666c0a9a38636eb6653dff7d9c98ff7122601ce2 | 2026-01-04T15:42:12.458416Z | false |
rustfs/rustfs | https://github.com/rustfs/rustfs/blob/666c0a9a38636eb6653dff7d9c98ff7122601ce2/crates/policy/src/utils.rs | crates/policy/src/utils.rs | // Copyright 2024 RustFS Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use jsonwebtoken::{Algorithm, DecodingKey, EncodingKey, Header};
use serde::{Serialize, de::DeserializeOwned};
/// Signs `claims` into an HS512 JWT, using `secret` as the HMAC key.
pub fn generate_jwt<T: Serialize>(claims: &T, secret: &str) -> std::result::Result<String, jsonwebtoken::errors::Error> {
    let key = EncodingKey::from_secret(secret.as_bytes());
    jsonwebtoken::encode(&Header::new(Algorithm::HS512), claims, &key)
}
/// Decodes an HS512 JWT and returns its header and claims.
///
/// NOTE(review): this uses `jsonwebtoken::Validation::new` with its defaults;
/// the library's default validation appears to require an `exp` claim (see the
/// disabled round-trip test in this module) — confirm before relying on
/// tokens without one.
pub fn extract_claims<T: DeserializeOwned + Clone>(
    token: &str,
    secret: &str,
) -> std::result::Result<jsonwebtoken::TokenData<T>, jsonwebtoken::errors::Error> {
    let key = DecodingKey::from_secret(secret.as_bytes());
    let validation = jsonwebtoken::Validation::new(Algorithm::HS512);
    jsonwebtoken::decode::<T>(token, &key, &validation)
}
#[cfg(test)]
mod tests {
    use super::generate_jwt;
    use serde::{Deserialize, Serialize};

    // Minimal serializable claim set for signing tests.
    #[derive(Debug, Serialize, Deserialize, PartialEq)]
    struct Claims {
        sub: String,
        company: String,
    }

    // Signing arbitrary serializable claims yields a non-empty token.
    #[test]
    fn test_generate_jwt() {
        let claims = Claims {
            sub: "user1".to_string(),
            company: "example".to_string(),
        };
        let secret = "my_secret";
        let token = generate_jwt(&claims, secret).unwrap();
        assert!(!token.is_empty());
    }

    // NOTE(review): the round-trip test below is disabled; decoding presumably
    // fails because `Claims` has no `exp` field while `extract_claims` uses
    // jsonwebtoken's default validation — confirm before re-enabling.
    // #[test]
    // fn test_extract_claims() {
    // let claims = Claims {
    // sub: "user1".to_string(),
    // company: "example".to_string(),
    // };
    // let secret = "my_secret";
    // let token = generate_jwt(&claims, secret).unwrap();
    // let decoded_claims = extract_claims::<Claims>(&token, secret).unwrap();
    // assert_eq!(decoded_claims.claims, claims);
    // }
}
| rust | Apache-2.0 | 666c0a9a38636eb6653dff7d9c98ff7122601ce2 | 2026-01-04T15:42:12.458416Z | false |
rustfs/rustfs | https://github.com/rustfs/rustfs/blob/666c0a9a38636eb6653dff7d9c98ff7122601ce2/crates/policy/src/format.rs | crates/policy/src/format.rs | // Copyright 2024 RustFS Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use serde::{Deserialize, Serialize};
/// Version marker for the persisted IAM configuration format.
#[derive(Deserialize, Serialize, Default)]
pub struct Format {
    // Format version number. `Default` yields 0; the commented-out
    // constructor below used 1 as the default version.
    pub version: i32,
}
// impl Format {
// pub const PATH: &str = "config/iam/config/format.json";
// pub const DEFAULT_VERSION: i32 = 1;
// pub fn new() -> Self {
// Self {
// version: Self::DEFAULT_VERSION,
// }
// }
// }
| rust | Apache-2.0 | 666c0a9a38636eb6653dff7d9c98ff7122601ce2 | 2026-01-04T15:42:12.458416Z | false |
rustfs/rustfs | https://github.com/rustfs/rustfs/blob/666c0a9a38636eb6653dff7d9c98ff7122601ce2/crates/policy/src/service_type.rs | crates/policy/src/service_type.rs | // Copyright 2024 RustFS Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use crate::error::Error;
/// Service a credential/request is scoped to, parsed from the lowercase
/// wire strings "s3" / "sts" (see `TryFrom<&str>` below).
#[derive(PartialEq, Eq, Debug)]
pub enum ServiceType {
    S3,
    STS,
}
impl TryFrom<&str> for ServiceType {
type Error = Error;
fn try_from(value: &str) -> Result<Self, Self::Error> {
let service_type = match value {
"s3" => Self::S3,
"sts" => Self::STS,
_ => return Err(Error::InvalidServiceType(value.to_owned())),
};
Ok(service_type)
}
}
| rust | Apache-2.0 | 666c0a9a38636eb6653dff7d9c98ff7122601ce2 | 2026-01-04T15:42:12.458416Z | false |
rustfs/rustfs | https://github.com/rustfs/rustfs/blob/666c0a9a38636eb6653dff7d9c98ff7122601ce2/crates/policy/src/arn.rs | crates/policy/src/arn.rs | // Copyright 2024 RustFS Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use crate::error::{Error, Result};
use regex::Regex;
const ARN_PREFIX_ARN: &str = "arn";
const ARN_PARTITION_RUSTFS: &str = "rustfs";
const ARN_SERVICE_IAM: &str = "iam";
const ARN_RESOURCE_TYPE_ROLE: &str = "role";
/// Parsed Amazon-style resource name of the form
/// `arn:partition:service:region:account-id:resource_type/resource_id`
/// (the account-id segment is always empty in this crate).
#[derive(Debug, PartialEq, Eq, Hash)]
pub struct ARN {
    pub partition: String,
    pub service: String,
    pub region: String,
    pub resource_type: String,
    pub resource_id: String,
}
impl ARN {
    /// Validates that a resource-id segment only uses the allowed character
    /// set `[A-Za-z0-9_/.-]`. Shared by the constructor and the parser so the
    /// rule lives in exactly one place.
    fn validate_resource_id(resource_id: &str) -> Result<()> {
        let valid_resource_id_regex = Regex::new(r"^[A-Za-z0-9_/\.-]+$")?;
        if !valid_resource_id_regex.is_match(resource_id) {
            return Err(Error::other("ARN resource ID invalid"));
        }
        Ok(())
    }

    /// Builds an IAM role ARN (`arn:rustfs:iam:<region>::role/<resource_id>`)
    /// after validating the resource id.
    pub fn new_iam_role_arn(resource_id: &str, server_region: &str) -> Result<Self> {
        Self::validate_resource_id(resource_id)?;
        Ok(ARN {
            partition: ARN_PARTITION_RUSTFS.to_string(),
            service: ARN_SERVICE_IAM.to_string(),
            region: server_region.to_string(),
            resource_type: ARN_RESOURCE_TYPE_ROLE.to_string(),
            resource_id: resource_id.to_string(),
        })
    }

    /// Parses a role ARN of the form `arn:rustfs:iam:<region>::role/<resource_id>`.
    ///
    /// The account-id segment must be empty and only the `role` resource type
    /// is accepted; every other segment is checked against the crate constants.
    pub fn parse(arn_str: &str) -> Result<Self> {
        // Expected layout: arn:partition:service:region:account-id:resource
        let ps: Vec<&str> = arn_str.split(':').collect();
        if ps.len() != 6 || ps[0] != ARN_PREFIX_ARN {
            return Err(Error::other("ARN format invalid"));
        }
        if ps[1] != ARN_PARTITION_RUSTFS {
            return Err(Error::other("ARN partition invalid"));
        }
        if ps[2] != ARN_SERVICE_IAM {
            return Err(Error::other("ARN service invalid"));
        }
        if !ps[4].is_empty() {
            return Err(Error::other("ARN account-id invalid"));
        }
        // resource = "<type>/<id>"; split only on the first '/' so ids may
        // themselves contain slashes.
        let res: Vec<&str> = ps[5].splitn(2, '/').collect();
        if res.len() != 2 {
            return Err(Error::other("ARN resource invalid"));
        }
        if res[0] != ARN_RESOURCE_TYPE_ROLE {
            return Err(Error::other("ARN resource type invalid"));
        }
        Self::validate_resource_id(res[1])?;
        Ok(ARN {
            partition: ARN_PARTITION_RUSTFS.to_string(),
            service: ARN_SERVICE_IAM.to_string(),
            region: ps[3].to_string(),
            resource_type: ARN_RESOURCE_TYPE_ROLE.to_string(),
            resource_id: res[1].to_string(),
        })
    }
}
impl std::fmt::Display for ARN {
    /// Formats the ARN as `arn:partition:service:region::resource_type/resource_id`.
    ///
    /// The account-id field (between region and resource type) is always empty
    /// in this implementation, which is why the format string contains `::` —
    /// embedding it directly removes the old `""` argument and the
    /// `clippy::write_literal` allow that it required.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(
            f,
            "{}:{}:{}:{}::{}/{}",
            ARN_PREFIX_ARN, self.partition, self.service, self.region, self.resource_type, self.resource_id
        )
    }
}
| rust | Apache-2.0 | 666c0a9a38636eb6653dff7d9c98ff7122601ce2 | 2026-01-04T15:42:12.458416Z | false |
rustfs/rustfs | https://github.com/rustfs/rustfs/blob/666c0a9a38636eb6653dff7d9c98ff7122601ce2/crates/policy/src/auth/mod.rs | crates/policy/src/auth/mod.rs | // Copyright 2024 RustFS Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
mod credentials;
pub use credentials::*;
use rustfs_credentials::Credentials;
use serde::{Deserialize, Serialize};
use serde_json::json;
use std::collections::HashMap;
use time::OffsetDateTime;
/// A stored IAM user identity: versioned credentials plus the time of the
/// last update.
#[derive(Serialize, Deserialize, Clone, Debug, Default)]
pub struct UserIdentity {
    // Schema version; the constructors below always set 1.
    pub version: i64,
    pub credentials: Credentials,
    // UTC timestamp of creation/last update; None only via Default.
    pub update_at: Option<OffsetDateTime>,
}
impl UserIdentity {
    /// Create a new UserIdentity
    ///
    /// # Arguments
    /// * `credentials` - Credentials object
    ///
    /// # Returns
    /// * UserIdentity (version 1, `update_at` set to now)
    pub fn new(credentials: Credentials) -> Self {
        UserIdentity {
            version: 1,
            credentials,
            update_at: Some(OffsetDateTime::now_utc()),
        }
    }

    /// Add an SSH public key to user identity for SFTP authentication.
    ///
    /// Appends to any previously stored keys instead of replacing them
    /// (the previous implementation overwrote the whole array, silently
    /// dropping earlier keys). Adding an already-present key is a no-op.
    pub fn add_ssh_public_key(&mut self, public_key: &str) {
        let claims = self.credentials.claims.get_or_insert_with(HashMap::new);
        let entry = claims
            .entry("ssh_public_keys".to_string())
            .or_insert_with(|| json!([]));
        match entry.as_array_mut() {
            Some(keys) => {
                if !keys.iter().any(|k| k.as_str() == Some(public_key)) {
                    keys.push(json!(public_key));
                }
            }
            // Existing value had an unexpected (non-array) shape; reset it.
            None => *entry = json!([public_key]),
        }
    }

    /// Get all SSH public keys for user identity.
    ///
    /// Returns an empty vector when no claims or no key array is present;
    /// non-string array entries are skipped.
    pub fn get_ssh_public_keys(&self) -> Vec<String> {
        self.credentials
            .claims
            .as_ref()
            .and_then(|claims| claims.get("ssh_public_keys"))
            .and_then(|keys| keys.as_array())
            .map(|arr| arr.iter().filter_map(|v| v.as_str()).map(String::from).collect())
            .unwrap_or_default()
    }
}
impl From<Credentials> for UserIdentity {
fn from(value: Credentials) -> Self {
UserIdentity {
version: 1,
credentials: value,
update_at: Some(OffsetDateTime::now_utc()),
}
}
}
| rust | Apache-2.0 | 666c0a9a38636eb6653dff7d9c98ff7122601ce2 | 2026-01-04T15:42:12.458416Z | false |
rustfs/rustfs | https://github.com/rustfs/rustfs/blob/666c0a9a38636eb6653dff7d9c98ff7122601ce2/crates/policy/src/auth/credentials.rs | crates/policy/src/auth/credentials.rs | // Copyright 2024 RustFS Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use crate::error::{Error, Result};
use crate::policy::{Policy, Validator};
use crate::utils;
use rustfs_credentials::Credentials;
use serde::Serialize;
use serde_json::{Value, json};
use std::collections::HashMap;
use std::convert::TryFrom;
use time::OffsetDateTime;
use tracing::warn;
// Key-length bounds enforced by `create_new_credentials_with_metadata`
// (AWS-compatible: access keys 3-20 chars, secret keys 8-40 chars).
const ACCESS_KEY_MIN_LEN: usize = 3;
const ACCESS_KEY_MAX_LEN: usize = 20;
const SECRET_KEY_MIN_LEN: usize = 8;
const SECRET_KEY_MAX_LEN: usize = 40;
// Account status values stored in `Credentials::status`.
pub const ACCOUNT_ON: &str = "on";
pub const ACCOUNT_OFF: &str = "off";
// Characters that may not appear in access keys or group names.
const RESERVED_CHARS: &str = "=,";

/// ContainsReservedChars - returns whether the input string contains reserved characters.
///
/// # Arguments
/// * `s` - input string to check.
///
/// # Returns
/// * `bool` - true if contains reserved characters, false otherwise.
///
pub fn contains_reserved_chars(s: &str) -> bool {
    // Check for ANY reserved character. The previous implementation used
    // `s.contains(RESERVED_CHARS)`, which looks for the literal substring
    // "=," and therefore let inputs like "a=b" or "a,b" slip through.
    s.chars().any(|c| RESERVED_CHARS.contains(c))
}
/// IsAccessKeyValid - validate access key for right length.
///
/// # Arguments
/// * `access_key` - access key to validate.
///
/// # Returns
/// * `bool` - true if valid, false otherwise.
///
/// NOTE(review): only the minimum length is checked here; the maximum
/// (ACCESS_KEY_MAX_LEN) is enforced in `create_new_credentials_with_metadata`.
pub fn is_access_key_valid(access_key: &str) -> bool {
    access_key.len() >= ACCESS_KEY_MIN_LEN
}
/// IsSecretKeyValid - validate secret key for right length.
///
/// # Arguments
/// * `secret_key` - secret key to validate.
///
/// # Returns
/// * `bool` - true if valid, false otherwise.
///
/// NOTE(review): only the minimum length is checked here; the maximum
/// (SECRET_KEY_MAX_LEN) is enforced in `create_new_credentials_with_metadata`.
pub fn is_secret_key_valid(secret_key: &str) -> bool {
    secret_key.len() >= SECRET_KEY_MIN_LEN
}
/// GenerateCredentials - generate a new access key and secret key pair.
///
/// Key lengths (20 / 40) match the maximum bounds enforced elsewhere in this
/// module.
///
/// # Returns
/// * `Ok((String, String))` - access key and secret key pair.
/// * `Err(Error)` - if an error occurs during generation.
///
pub fn generate_credentials() -> Result<(String, String)> {
    Ok((
        rustfs_credentials::gen_access_key(20)?,
        rustfs_credentials::gen_secret_key(40)?,
    ))
}
/// GetNewCredentialsWithMetadata - generate new credentials with metadata claims and token secret.
///
/// Convenience wrapper: generates a fresh key pair and delegates to
/// `create_new_credentials_with_metadata`.
///
/// # Arguments
/// * `claims` - metadata claims to be included in the token.
/// * `token_secret` - secret used to sign the token.
///
/// # Returns
/// * `Ok(Credentials)` - newly generated credentials.
/// * `Err(Error)` - if an error occurs during generation.
///
pub fn get_new_credentials_with_metadata(claims: &HashMap<String, Value>, token_secret: &str) -> Result<Credentials> {
    let (ak, sk) = generate_credentials()?;
    create_new_credentials_with_metadata(&ak, &sk, claims, token_secret)
}
/// CreateNewCredentialsWithMetadata - create new credentials with provided access key, secret key, metadata claims, and token secret.
///
/// # Arguments
/// * `ak` - access key.
/// * `sk` - secret key.
/// * `claims` - metadata claims to be included in the token.
/// * `token_secret` - secret used to sign the token.
///
/// # Returns
/// * `Ok(Credentials)` - newly created credentials.
/// * `Err(Error)` - if an error occurs during creation.
///
pub fn create_new_credentials_with_metadata(
ak: &str,
sk: &str,
claims: &HashMap<String, Value>,
token_secret: &str,
) -> Result<Credentials> {
if ak.len() < ACCESS_KEY_MIN_LEN || ak.len() > ACCESS_KEY_MAX_LEN {
return Err(Error::InvalidAccessKeyLength);
}
if sk.len() < SECRET_KEY_MIN_LEN || sk.len() > SECRET_KEY_MAX_LEN {
return Err(Error::InvalidAccessKeyLength);
}
if token_secret.is_empty() {
return Ok(Credentials {
access_key: ak.to_owned(),
secret_key: sk.to_owned(),
status: ACCOUNT_OFF.to_owned(),
..Default::default()
});
}
let expiration = {
if let Some(v) = claims.get("exp") {
if let Some(expiry) = v.as_i64() {
Some(OffsetDateTime::from_unix_timestamp(expiry)?)
} else {
None
}
} else {
None
}
};
warn!("create_new_credentials_with_metadata expiration {expiration:?}, access_key: {ak}");
let token = utils::generate_jwt(&claims, token_secret)?;
Ok(Credentials {
access_key: ak.to_owned(),
secret_key: sk.to_owned(),
session_token: token,
status: ACCOUNT_ON.to_owned(),
expiration,
..Default::default()
})
}
/// JWTSign - sign the provided claims with the given token secret to generate a JWT token.
///
/// # Arguments
/// * `claims` - claims to be included in the token.
/// * `token_secret` - secret used to sign the token.
///
/// # Returns
/// * `Ok(String)` - generated JWT token.
/// * `Err(Error)` - if an error occurs during signing.
///
pub fn jwt_sign<T: Serialize>(claims: &T, token_secret: &str) -> Result<String> {
let token = utils::generate_jwt(claims, token_secret)?;
Ok(token)
}
/// Builder for service-account style [`Credentials`]; consumed by the
/// `TryFrom<CredentialsBuilder> for Credentials` impl below.
#[derive(Default)]
pub struct CredentialsBuilder {
    // Optional policy embedded into the session token (max 4096 bytes serialized).
    session_policy: Option<Policy>,
    // Key pair; both generated when left empty, error if only one is set.
    access_key: String,
    secret_key: String,
    name: Option<String>,
    description: Option<String>,
    expiration: Option<OffsetDateTime>,
    // Must be true to allow the reserved "site-replicator-0" access key.
    allow_site_replicator_account: bool,
    // Extra JWT claims merged in (reserved keys set by the builder win).
    claims: Option<Value>,
    // Owning user; required, and must differ from the access key.
    parent_user: String,
    groups: Option<Vec<String>>,
}
impl CredentialsBuilder {
    /// Create a new CredentialsBuilder instance.
    ///
    /// # Returns
    /// * `CredentialsBuilder` - a new instance of CredentialsBuilder.
    ///
    pub fn new() -> Self {
        Self::default()
    }

    /// Set the session policy for the credentials.
    ///
    /// # Arguments
    /// * `policy` - an optional Policy to set as the session policy.
    ///
    /// # Returns
    /// * `Self` - the updated CredentialsBuilder instance.
    ///
    pub fn session_policy(mut self, policy: Option<Policy>) -> Self {
        self.session_policy = policy;
        self
    }

    /// Set the access key (generated automatically when left empty).
    pub fn access_key(mut self, access_key: String) -> Self {
        self.access_key = access_key;
        self
    }

    /// Set the secret key (generated automatically when left empty).
    pub fn secret_key(mut self, secret_key: String) -> Self {
        self.secret_key = secret_key;
        self
    }

    /// Set a human-readable name for the credential.
    pub fn name(mut self, name: String) -> Self {
        self.name = Some(name);
        self
    }

    /// Set a free-form description for the credential.
    pub fn description(mut self, description: String) -> Self {
        self.description = Some(description);
        self
    }

    /// Set an optional expiration time for the credential.
    pub fn expiration(mut self, expiration: Option<OffsetDateTime>) -> Self {
        self.expiration = expiration;
        self
    }

    /// Allow use of the reserved "site-replicator-0" access key.
    pub fn allow_site_replicator_account(mut self, allow_site_replicator_account: bool) -> Self {
        self.allow_site_replicator_account = allow_site_replicator_account;
        self
    }

    /// Set extra claims to merge into the session token.
    pub fn claims(mut self, claims: Value) -> Self {
        self.claims = Some(claims);
        self
    }

    /// Set the owning (parent) user; required by `try_build`.
    pub fn parent_user(mut self, parent_user: String) -> Self {
        self.parent_user = parent_user;
        self
    }

    /// Set the groups attached to the credential.
    pub fn groups(mut self, groups: Vec<String>) -> Self {
        self.groups = Some(groups);
        self
    }

    /// Consume the builder and produce `Credentials` (see the `TryFrom` impl).
    pub fn try_build(self) -> Result<Credentials> {
        self.try_into()
    }
}
impl TryFrom<CredentialsBuilder> for Credentials {
type Error = Error;
fn try_from(mut value: CredentialsBuilder) -> std::result::Result<Self, Self::Error> {
if value.parent_user.is_empty() {
return Err(Error::InvalidArgument);
}
if (value.access_key.is_empty() && !value.secret_key.is_empty())
|| (!value.access_key.is_empty() && value.secret_key.is_empty())
{
return Err(Error::other("Either ak or sk is empty"));
}
if value.parent_user == value.access_key.as_str() {
return Err(Error::InvalidArgument);
}
if value.access_key == "site-replicator-0" && !value.allow_site_replicator_account {
return Err(Error::InvalidArgument);
}
let mut claim = json!({
"parent": value.parent_user
});
if let Some(p) = value.session_policy {
p.is_valid()?;
let policy_buf = serde_json::to_vec(&p).map_err(|_| Error::InvalidArgument)?;
if policy_buf.len() > 4096 {
return Err(Error::other("session policy is too large"));
}
claim["sessionPolicy"] = json!(base64_simd::STANDARD.encode_to_string(&policy_buf));
claim[rustfs_credentials::IAM_POLICY_CLAIM_NAME_SA] = json!(rustfs_credentials::EMBEDDED_POLICY_TYPE);
} else {
claim[rustfs_credentials::IAM_POLICY_CLAIM_NAME_SA] = json!(rustfs_credentials::INHERITED_POLICY_TYPE);
}
if let Some(Value::Object(obj)) = value.claims {
for (key, value) in obj {
if claim.get(&key).is_some() {
continue;
}
claim[key] = value;
}
}
if value.access_key.is_empty() {
value.access_key = rustfs_credentials::gen_access_key(20)?;
}
if value.secret_key.is_empty() {
value.secret_key = rustfs_credentials::gen_secret_key(40)?;
}
claim["accessKey"] = json!(&value.access_key);
let mut cred = Credentials {
status: "on".into(),
parent_user: value.parent_user,
groups: value.groups,
name: value.name,
description: value.description,
..Default::default()
};
if !value.secret_key.is_empty() {
let session_token = rustfs_crypto::jwt_encode(value.access_key.as_bytes(), &claim)
.map_err(|_| Error::other("session policy is too large"))?;
cred.session_token = session_token;
// cred.expiration = Some(
// OffsetDateTime::from_unix_timestamp(
// claim
// .get("exp")
// .and_then(|x| x.as_i64())
// .ok_or(Error::StringError("invalid exp".into()))?,
// )
// .map_err(|_| Error::StringError("invalie timestamp".into()))?,
// );
} else {
// cred.expiration =
// Some(OffsetDateTime::from_unix_timestamp(0).map_err(|_| Error::StringError("invalie timestamp".into()))?);
}
cred.expiration = value.expiration;
cred.access_key = value.access_key;
cred.secret_key = value.secret_key;
Ok(cred)
}
}
// #[cfg(test)]
// #[allow(non_snake_case)]
// mod tests {
// use test_case::test_case;
// use time::Date;
// use super::CredentialHeader;
// use super::CredentialHeaderScope;
// use crate::service_type::ServiceType;
// #[test_case(
// "Credential=aaaaaaaaaaaaaaaaaaaa/20241127/us-east-1/s3/aws4_request" =>
// CredentialHeader{
// access_key: "aaaaaaaaaaaaaaaaaaaa".into(),
// scop: CredentialHeaderScope {
// date: Date::from_calendar_date(2024, time::Month::November, 27).unwrap(),
// region: "us-east-1".to_owned(),
// service: ServiceType::S3,
// request: "aws4_request".into(),
// }
// };
// "1")]
// #[test_case(
// "Credential=aaaaaaaaaaa/aaaaaaaaa/20241127/us-east-1/s3/aws4_request" =>
// CredentialHeader{
// access_key: "aaaaaaaaaaa/aaaaaaaaa".into(),
// scop: CredentialHeaderScope {
// date: Date::from_calendar_date(2024, time::Month::November, 27).unwrap(),
// region: "us-east-1".to_owned(),
// service: ServiceType::S3,
// request: "aws4_request".into(),
// }
// };
// "2")]
// #[test_case(
// "Credential=aaaaaaaaaaa/aaaaaaaaa/20241127/us-east-1/sts/aws4_request" =>
// CredentialHeader{
// access_key: "aaaaaaaaaaa/aaaaaaaaa".into(),
// scop: CredentialHeaderScope {
// date: Date::from_calendar_date(2024, time::Month::November, 27).unwrap(),
// region: "us-east-1".to_owned(),
// service: ServiceType::STS,
// request: "aws4_request".into(),
// }
// };
// "3")]
// fn test_CredentialHeader_from_str_successful(input: &str) -> CredentialHeader {
// CredentialHeader::try_from(input).unwrap()
// }
// #[test_case("Credential")]
// #[test_case("Cred=")]
// #[test_case("Credential=abc")]
// #[test_case("Credential=a/20241127/us-east-1/s3/aws4_request")]
// #[test_case("Credential=aa/20241127/us-east-1/s3/aws4_request")]
// #[test_case("Credential=aaaa/20241127/us-east-1/asa/aws4_request")]
// #[test_case("Credential=aaaa/20241127/us-east-1/sts/aws4a_request")]
// fn test_credential_header_from_str_failed(input: &str) {
// if CredentialHeader::try_from(input).is_ok() {
// unreachable!()
// }
// }
// }
| rust | Apache-2.0 | 666c0a9a38636eb6653dff7d9c98ff7122601ce2 | 2026-01-04T15:42:12.458416Z | false |
rustfs/rustfs | https://github.com/rustfs/rustfs/blob/666c0a9a38636eb6653dff7d9c98ff7122601ce2/crates/policy/src/policy/effect.rs | crates/policy/src/policy/effect.rs | // Copyright 2024 RustFS Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use crate::error::{Error, Result};
use serde::{Deserialize, Serialize};
use strum::{EnumString, IntoStaticStr};
use super::Validator;
/// Policy statement effect, serialized as its AWS string form
/// (`"Allow"` / `"Deny"`).
#[derive(Serialize, Clone, Deserialize, EnumString, IntoStaticStr, Default, Debug, PartialEq)]
#[serde(try_from = "&str", into = "&str")]
pub enum Effect {
    /// Grants access; the default effect.
    #[default]
    #[strum(serialize = "Allow")]
    Allow,
    /// Denies access.
    #[strum(serialize = "Deny")]
    Deny,
}
impl Effect {
    /// Applies this effect to a statement-match result: `Allow` passes the
    /// result through unchanged, while `Deny` inverts it.
    pub fn is_allowed(&self, allowed: bool) -> bool {
        match self {
            Self::Allow => allowed,
            Self::Deny => !allowed,
        }
    }
}
impl Validator for Effect {
    type Error = Error;

    /// An `Effect` is restricted to its two variants by construction, so it
    /// is always valid.
    fn is_valid(&self) -> Result<()> {
        Ok(())
    }
}
| rust | Apache-2.0 | 666c0a9a38636eb6653dff7d9c98ff7122601ce2 | 2026-01-04T15:42:12.458416Z | false |
rustfs/rustfs | https://github.com/rustfs/rustfs/blob/666c0a9a38636eb6653dff7d9c98ff7122601ce2/crates/policy/src/policy/variables.rs | crates/policy/src/policy/variables.rs | // Copyright 2024 RustFS Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use async_trait::async_trait;
use moka::future::Cache;
use serde_json::Value;
use std::collections::HashMap;
use std::future::Future;
use std::time::Duration;
use time::OffsetDateTime;
/// Context information for variable resolution
/// Context information for variable resolution
///
/// Request-scoped facts that `${aws:...}` / `${custom:...}` policy variables
/// are resolved against.
#[derive(Debug, Clone, Default)]
pub struct VariableContext {
    /// Whether the request arrived over TLS (feeds `aws:SecureTransport`).
    pub is_https: bool,
    /// Client address for `aws:SourceIp`, if known.
    pub source_ip: Option<String>,
    /// Account identifier for `aws:AccountId`.
    pub account_id: Option<String>,
    /// Region for `aws:Region`.
    pub region: Option<String>,
    /// User name for `aws:username`.
    pub username: Option<String>,
    /// Session/JWT claims (used for `aws:userid` and `aws:PrincipalType`).
    pub claims: Option<HashMap<String, Value>>,
    /// Request condition values keyed by condition name.
    pub conditions: HashMap<String, Vec<String>>,
    /// Values backing `${custom:<key>}` variables.
    pub custom_variables: HashMap<String, String>,
}
impl VariableContext {
    /// Creates an empty context with every field unset.
    pub fn new() -> Self {
        Default::default()
    }
}
/// TTL-bounded cache of resolved variable values, backed by `moka`.
pub struct VariableResolverCache {
    /// Moka cache storing resolved results
    cache: Cache<String, String>,
}

impl VariableResolverCache {
    /// Builds a cache capped at `capacity` entries, each expiring
    /// `ttl_seconds` after insertion.
    pub fn new(capacity: usize, ttl_seconds: u64) -> Self {
        let cache = Cache::builder()
            .max_capacity(capacity as u64)
            .time_to_live(Duration::from_secs(ttl_seconds))
            .build();
        Self { cache }
    }

    /// Looks up a previously cached value.
    pub async fn get(&self, key: &str) -> Option<String> {
        self.cache.get(key).await
    }

    /// Stores a resolved value.
    pub async fn put(&self, key: String, value: String) {
        self.cache.insert(key, value).await;
    }

    /// Drops all cached entries.
    pub async fn clear(&self) {
        self.cache.invalidate_all();
    }
}
/// Cached dynamic AWS variable resolver
///
/// Wraps a [`VariableResolver`] with a TTL cache; dynamic (time-based)
/// variables always bypass the cache.
pub struct CachedAwsVariableResolver {
    /// The underlying context-bound resolver.
    inner: VariableResolver,
    /// Cache for static variable lookups.
    cache: VariableResolverCache,
}

impl CachedAwsVariableResolver {
    /// Builds a caching resolver over the given context.
    pub fn new(context: VariableContext) -> Self {
        Self {
            inner: VariableResolver::new(context),
            cache: VariableResolverCache::new(100, 300), // 100 entries, 5 minutes expiration
        }
    }

    /// True for variables whose value changes between calls (never cached).
    pub fn is_dynamic(&self, variable_name: &str) -> bool {
        self.inner.is_dynamic(variable_name)
    }
}
#[async_trait]
impl PolicyVariableResolver for CachedAwsVariableResolver {
    /// Resolves a variable, serving static variables from the cache.
    /// Dynamic variables (see `is_dynamic`) are always resolved fresh.
    async fn resolve(&self, variable_name: &str) -> Option<String> {
        if self.is_dynamic(variable_name) {
            return self.inner.resolve(variable_name).await;
        }
        if let Some(cached) = self.cache.get(variable_name).await {
            return Some(cached);
        }
        // Cache miss: resolve once and remember the result.
        let value = self.inner.resolve(variable_name).await?;
        self.cache.put(variable_name.to_string(), value.clone()).await;
        Some(value)
    }

    /// Multi-valued resolution is delegated to the inner resolver uncached.
    async fn resolve_multiple(&self, variable_name: &str) -> Option<Vec<String>> {
        self.inner.resolve_multiple(variable_name).await
    }

    fn is_dynamic(&self, variable_name: &str) -> bool {
        self.inner.is_dynamic(variable_name)
    }
}
/// Policy variable resolver trait
///
/// Implementations map `${...}` variable names to concrete values.
#[async_trait]
pub trait PolicyVariableResolver: Sync {
    /// Resolves a variable to a single value, or `None` if it is unknown.
    async fn resolve(&self, variable_name: &str) -> Option<String>;
    /// Resolves a variable to all of its values; the default wraps
    /// `resolve` in a one-element list.
    async fn resolve_multiple(&self, variable_name: &str) -> Option<Vec<String>> {
        self.resolve(variable_name).await.map(|s| vec![s])
    }
    /// True when the variable's value may change between calls and must
    /// therefore not be cached.
    fn is_dynamic(&self, variable_name: &str) -> bool;
}
/// AWS variable resolver
///
/// Resolves `aws:*` and `custom:*` variables from a [`VariableContext`].
pub struct VariableResolver {
    context: VariableContext,
}
impl VariableResolver {
    /// Creates a resolver bound to the given request context.
    pub fn new(context: VariableContext) -> Self {
        Self { context }
    }

    /// Reads a claim and normalizes it into a list of strings:
    /// scalar string/number/bool claims become single-element lists, arrays
    /// are flattened element-wise (non-scalar items are skipped).
    fn get_claim_as_strings(&self, claim_name: &str) -> Option<Vec<String>> {
        self.context
            .claims
            .as_ref()
            .and_then(|claims| claims.get(claim_name))
            .and_then(|value| match value {
                Value::String(s) => Some(vec![s.clone()]),
                Value::Array(arr) => Some(
                    arr.iter()
                        .filter_map(|item| match item {
                            Value::String(s) => Some(s.clone()),
                            Value::Number(n) => Some(n.to_string()),
                            Value::Bool(b) => Some(b.to_string()),
                            _ => None,
                        })
                        .collect(),
                ),
                Value::Number(n) => Some(vec![n.to_string()]),
                Value::Bool(b) => Some(vec![b.to_string()]),
                _ => None,
            })
    }

    /// `aws:username` comes straight from the context.
    fn resolve_username(&self) -> Option<String> {
        self.context.username.clone()
    }

    /// `aws:userid` from the `sub` claim, falling back to `parent`.
    fn resolve_userid(&self) -> Option<String> {
        // Take the FIRST value of a multi-valued claim. The previous code
        // used `vec.pop()` (the last value) despite documenting "first";
        // for the common single-valued string claim the two are identical.
        self.get_claim_as_strings("sub")
            .or_else(|| self.get_claim_as_strings("parent"))
            .and_then(|values| values.into_iter().next())
    }

    /// Classifies the principal from the claims present:
    /// `roleArn` => AssumedRole, `parent` + `sa-policy` => ServiceAccount,
    /// otherwise a plain User.
    fn resolve_principal_type(&self) -> String {
        if let Some(claims) = &self.context.claims {
            if claims.contains_key("roleArn") {
                return "AssumedRole".to_string();
            }
            if claims.contains_key("parent") && claims.contains_key("sa-policy") {
                return "ServiceAccount".to_string();
            }
        }
        "User".to_string()
    }

    /// `aws:SecureTransport`: "true"/"false" from the HTTPS flag.
    fn resolve_secure_transport(&self) -> String {
        if self.context.is_https { "true" } else { "false" }.to_string()
    }

    /// `aws:CurrentTime`: current UTC time in RFC 3339 form.
    fn resolve_current_time(&self) -> String {
        let now = OffsetDateTime::now_utc();
        now.format(&time::format_description::well_known::Rfc3339)
            .unwrap_or_else(|_| now.to_string())
    }

    /// `aws:EpochTime`: current Unix timestamp in seconds.
    fn resolve_epoch_time(&self) -> String {
        OffsetDateTime::now_utc().unix_timestamp().to_string()
    }

    fn resolve_account_id(&self) -> Option<String> {
        self.context.account_id.clone()
    }

    fn resolve_region(&self) -> Option<String> {
        self.context.region.clone()
    }

    fn resolve_source_ip(&self) -> Option<String> {
        self.context.source_ip.clone()
    }

    /// `custom:<key>` variables are looked up in `custom_variables`.
    fn resolve_custom_variable(&self, variable_name: &str) -> Option<String> {
        let custom_key = variable_name.strip_prefix("custom:")?;
        self.context.custom_variables.get(custom_key).cloned()
    }
}
#[async_trait]
impl PolicyVariableResolver for VariableResolver {
    /// Resolves a single policy variable to at most one value.
    async fn resolve(&self, variable_name: &str) -> Option<String> {
        match variable_name {
            "aws:username" => self.resolve_username(),
            "aws:userid" => self.resolve_userid(),
            "aws:PrincipalType" => Some(self.resolve_principal_type()),
            "aws:SecureTransport" => Some(self.resolve_secure_transport()),
            "aws:CurrentTime" => Some(self.resolve_current_time()),
            "aws:EpochTime" => Some(self.resolve_epoch_time()),
            "aws:AccountId" => self.resolve_account_id(),
            "aws:Region" => self.resolve_region(),
            "aws:SourceIp" => self.resolve_source_ip(),
            // Beyond the fixed aws:* set, only custom:* names are recognized.
            name if name.starts_with("custom:") => self.resolve_custom_variable(name),
            _ => None,
        }
    }

    /// Like `resolve`, but preserves every value of multi-valued claims.
    async fn resolve_multiple(&self, variable_name: &str) -> Option<Vec<String>> {
        match variable_name {
            "aws:username" => self.resolve_username().map(|s| vec![s]),
            "aws:userid" => self
                .get_claim_as_strings("sub")
                .or_else(|| self.get_claim_as_strings("parent")),
            _ => self.resolve(variable_name).await.map(|s| vec![s]),
        }
    }

    /// Time-based variables must never be cached.
    fn is_dynamic(&self, variable_name: &str) -> bool {
        variable_name == "aws:CurrentTime" || variable_name == "aws:EpochTime"
    }
}
/// Resolves all `${...}` variables in `pattern`, iterating to a fixed point.
///
/// Each pass may fan one pattern out into several concrete strings
/// (multi-valued claims). Iteration stops when a pass changes nothing or
/// after 10 rounds, guarding against runaway/self-referential expansion.
/// Duplicates are removed while preserving first-seen order.
pub async fn resolve_aws_variables(pattern: &str, resolver: &dyn PolicyVariableResolver) -> Vec<String> {
    let mut results = vec![pattern.to_string()];
    let mut changed = true;
    let max_iterations = 10; // Prevent infinite loops
    let mut iteration = 0;
    while changed && iteration < max_iterations {
        changed = false;
        iteration += 1;
        let mut new_results = Vec::new();
        for result in &results {
            let resolved = resolve_single_pass(result, resolver).await;
            // A pass "changed" something if it fanned out or rewrote the string.
            if resolved.len() > 1 || (resolved.len() == 1 && &resolved[0] != result) {
                changed = true;
            }
            new_results.extend(resolved);
        }
        // Remove duplicates while preserving order
        results.clear();
        let mut seen = std::collections::HashSet::new();
        for result in new_results {
            if seen.insert(result.clone()) {
                results.push(result);
            }
        }
    }
    results
}
// Need to box the future to avoid infinite size due to recursion:
// `resolve_single_pass` recurses into `resolve_aws_variables` for nested
// `${${...}}` variables, so the future must be heap-allocated.
fn resolve_aws_variables_boxed<'a>(
    pattern: &'a str,
    resolver: &'a dyn PolicyVariableResolver,
) -> std::pin::Pin<Box<dyn Future<Output = Vec<String>> + Send + 'a>> {
    Box::pin(resolve_aws_variables(pattern, resolver))
}
/// Single pass resolution of variables in a string
///
/// Scans `pattern` for `${...}` placeholders (brace-balanced, so nested
/// `${${a}}` forms are detected), resolves each via `resolver`, and fans out
/// into one result string per resolved value. Unresolvable variables are left
/// in place.
async fn resolve_single_pass(pattern: &str, resolver: &dyn PolicyVariableResolver) -> Vec<String> {
    // Find all ${...} format variables
    let mut results = vec![pattern.to_string()];
    // Process each result string
    let mut i = 0;
    while i < results.len() {
        let mut start = 0;
        let mut modified = false;
        // Find variables in current string
        while let Some(pos) = results[i][start..].find("${") {
            let actual_pos = start + pos;
            // Find the matching closing brace, taking nested braces into
            // account. `find` returns BYTE offsets and the slices below use
            // byte offsets, so the brace scan must walk bytes as well: the
            // previous `chars().nth(end_pos)` indexed by char count and
            // drifted on multi-byte UTF-8 input (besides being O(n) per
            // step). `{`/`}` are ASCII and can never occur inside a UTF-8
            // continuation byte, so byte comparison is exact.
            let mut brace_count = 1;
            let mut end_pos = actual_pos + 2; // Start after "${"
            let bytes = results[i].as_bytes();
            while end_pos < bytes.len() && brace_count > 0 {
                match bytes[end_pos] {
                    b'{' => brace_count += 1,
                    b'}' => brace_count -= 1,
                    _ => {}
                }
                if brace_count > 0 {
                    end_pos += 1;
                }
            }
            if brace_count == 0 {
                let var_name = &results[i][actual_pos + 2..end_pos];
                // Check if this is a nested variable (contains ${...} inside)
                if var_name.contains("${") {
                    // Resolve the inner variables first, then substitute each
                    // resolved inner form back into the outer string.
                    let resolved_inner = resolve_aws_variables_boxed(var_name, resolver).await;
                    let mut new_results = Vec::new();
                    for resolved_var_name in resolved_inner {
                        let prefix = &results[i][..actual_pos];
                        let suffix = &results[i][end_pos + 1..];
                        new_results.push(format!("{prefix}{resolved_var_name}{suffix}"));
                    }
                    if !new_results.is_empty() {
                        // Update result set
                        results.splice(i..i + 1, new_results);
                        modified = true;
                        break;
                    } else {
                        // If we couldn't resolve the nested variable, keep the original
                        start = end_pos + 1;
                    }
                } else {
                    // Regular variable resolution
                    if let Some(values) = resolver.resolve_multiple(var_name).await {
                        let prefix = &results[i][..actual_pos];
                        let suffix = &results[i][end_pos + 1..];
                        let new_results: Vec<String> = if values.is_empty() {
                            // Variable resolved to empty: drop the placeholder entirely.
                            vec![format!("{prefix}{suffix}")]
                        } else {
                            // One output string per resolved value (fan-out).
                            values.iter().map(|value| format!("{prefix}{value}{suffix}")).collect()
                        };
                        results.splice(i..i + 1, new_results);
                        modified = true;
                        break;
                    } else {
                        // Variable not found; skip past it and keep scanning.
                        start = end_pos + 1;
                    }
                }
            } else {
                // No matching closing brace found, break loop
                break;
            }
        }
        if !modified {
            i += 1;
        }
    }
    results
}
#[cfg(test)]
mod tests {
    use super::*;
    use serde_json::Value;
    use std::collections::HashMap;

    // Single-variable substitution from the context's username.
    #[tokio::test]
    async fn test_resolve_aws_variables_with_username() {
        let mut context = VariableContext::new();
        context.username = Some("testuser".to_string());
        let resolver = VariableResolver::new(context);
        let result = resolve_aws_variables("${aws:username}-bucket", &resolver).await;
        assert_eq!(result, vec!["testuser-bucket".to_string()]);
    }

    // aws:userid is sourced from the "sub" claim.
    #[tokio::test]
    async fn test_resolve_aws_variables_with_userid() {
        let mut claims = HashMap::new();
        claims.insert("sub".to_string(), Value::String("AIDACKCEVSQ6C2EXAMPLE".to_string()));
        let mut context = VariableContext::new();
        context.claims = Some(claims);
        let resolver = VariableResolver::new(context);
        let result = resolve_aws_variables("${aws:userid}-bucket", &resolver).await;
        assert_eq!(result, vec!["AIDACKCEVSQ6C2EXAMPLE-bucket".to_string()]);
    }

    // Two different variables in the same pattern resolve independently.
    #[tokio::test]
    async fn test_resolve_aws_variables_with_multiple_variables() {
        let mut claims = HashMap::new();
        claims.insert("sub".to_string(), Value::String("AIDACKCEVSQ6C2EXAMPLE".to_string()));
        let mut context = VariableContext::new();
        context.claims = Some(claims);
        context.username = Some("testuser".to_string());
        let resolver = VariableResolver::new(context);
        let result = resolve_aws_variables("${aws:username}-${aws:userid}-bucket", &resolver).await;
        assert_eq!(result, vec!["testuser-AIDACKCEVSQ6C2EXAMPLE-bucket".to_string()]);
    }

    // A pattern without ${...} placeholders passes through unchanged.
    #[tokio::test]
    async fn test_resolve_aws_variables_no_variables() {
        let context = VariableContext::new();
        let resolver = VariableResolver::new(context);
        let result = resolve_aws_variables("test-bucket", &resolver).await;
        assert_eq!(result, vec!["test-bucket".to_string()]);
    }

    // Dynamic (time-based) variables must bypass the cache and change over time.
    #[tokio::test]
    async fn test_cached_aws_variable_resolver_dynamic_variables() {
        let context = VariableContext::new();
        let cached_resolver = CachedAwsVariableResolver::new(context);
        // Dynamic variables should not be cached
        let result1 = resolve_aws_variables("${aws:EpochTime}-bucket", &cached_resolver).await;
        // Add a delay of 1 second to ensure different timestamps
        tokio::time::sleep(Duration::from_secs(1)).await;
        let result2 = resolve_aws_variables("${aws:EpochTime}-bucket", &cached_resolver).await;
        // Both results should be different (different timestamps)
        assert_ne!(result1, result2);
    }
}
| rust | Apache-2.0 | 666c0a9a38636eb6653dff7d9c98ff7122601ce2 | 2026-01-04T15:42:12.458416Z | false |
rustfs/rustfs | https://github.com/rustfs/rustfs/blob/666c0a9a38636eb6653dff7d9c98ff7122601ce2/crates/policy/src/policy/resource.rs | crates/policy/src/policy/resource.rs | // Copyright 2024 RustFS Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use crate::error::{Error, Result};
use serde::{
Deserialize, Deserializer, Serialize,
de::{self, Error as DeError, Visitor},
};
use std::{
collections::{HashMap, HashSet},
fmt,
hash::Hash,
ops::Deref,
};
use super::{
Error as IamError, Validator,
function::key_name::KeyName,
utils::{path, wildcard},
variables::PolicyVariableResolver,
};
/// A set of resource patterns; a request resource matches the set when it
/// matches any member.
#[derive(Serialize, Clone, Default, Debug)]
pub struct ResourceSet(pub HashSet<Resource>);

impl ResourceSet {
    /// Matches `resource` against the set with condition substitution but
    /// without policy-variable resolution.
    pub async fn is_match(&self, resource: &str, conditions: &HashMap<String, Vec<String>>) -> bool {
        self.is_match_with_resolver(resource, conditions, None).await
    }

    /// Matches `resource` against the set; when `resolver` is provided,
    /// `${...}` variables in the patterns are expanded first.
    pub async fn is_match_with_resolver(
        &self,
        resource: &str,
        conditions: &HashMap<String, Vec<String>>,
        resolver: Option<&dyn PolicyVariableResolver>,
    ) -> bool {
        for re in self.0.iter() {
            if re.is_match_with_resolver(resource, conditions, resolver).await {
                return true;
            }
        }
        false
    }

    /// Matches `resource` with neither conditions nor variable resolution.
    pub async fn match_resource(&self, resource: &str) -> bool {
        for re in self.0.iter() {
            if re.match_resource(resource).await {
                return true;
            }
        }
        false
    }
}
/// Exposes the underlying `HashSet` read-only API directly on `ResourceSet`.
impl Deref for ResourceSet {
    type Target = HashSet<Resource>;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl Validator for ResourceSet {
    type Error = Error;

    /// A set is valid only if every member resource is valid; the first
    /// invalid member's error is returned.
    fn is_valid(&self) -> Result<()> {
        self.0.iter().try_for_each(|resource| resource.is_valid())
    }
}
impl PartialEq for ResourceSet {
    /// Two sets are equal when they have the same size and every resource of
    /// `self` is contained in `other`.
    fn eq(&self, other: &Self) -> bool {
        self.0.len() == other.0.len() && self.0.is_subset(&other.0)
    }
}
impl<'de> Deserialize<'de> for ResourceSet {
    /// Accepts either a single resource string or an array of strings,
    /// matching the AWS policy JSON shape for `"Resource"`.
    fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        struct ResourceOrVecVisitor;
        impl<'de> Visitor<'de> for ResourceOrVecVisitor {
            type Value = ResourceSet;
            fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
                formatter.write_str("a string or an array of strings")
            }
            // `"Resource": "arn:aws:s3:::bucket"` form.
            fn visit_str<E>(self, value: &str) -> std::result::Result<Self::Value, E>
            where
                E: de::Error,
            {
                let resource = Resource::try_from(value).map_err(|e| E::custom(format!("invalid resource: {}", e)))?;
                let mut set = HashSet::new();
                set.insert(resource);
                Ok(ResourceSet(set))
            }
            // `"Resource": ["arn:aws:s3:::a", ...]` form; any invalid entry
            // fails the whole deserialization.
            fn visit_seq<A>(self, mut seq: A) -> std::result::Result<Self::Value, A::Error>
            where
                A: de::SeqAccess<'de>,
                A::Error: DeError,
            {
                let mut set = HashSet::with_capacity(seq.size_hint().unwrap_or(0));
                while let Some(value) = seq.next_element::<String>()? {
                    match Resource::try_from(value.as_str()) {
                        Ok(resource) => {
                            set.insert(resource);
                        }
                        Err(e) => {
                            return Err(A::Error::custom(format!("invalid resource: {}", e)));
                        }
                    }
                }
                Ok(ResourceSet(set))
            }
        }
        deserializer.deserialize_any(ResourceOrVecVisitor)
    }
}
/// A single resource pattern, tagged by service.
///
/// NOTE(review): `TryFrom<&str>` below only ever constructs the `S3`
/// variant; `Kms` resources are presumably built elsewhere — confirm
/// against callers.
#[derive(Hash, Eq, PartialEq, Clone, Debug)]
pub enum Resource {
    /// S3 pattern with the `arn:aws:s3:::` prefix stripped.
    S3(String),
    /// KMS pattern, stored verbatim.
    Kms(String),
}
impl Resource {
    /// ARN prefix identifying S3 resources.
    pub const S3_PREFIX: &'static str = "arn:aws:s3:::";

    /// Matches with condition substitution but no variable resolution.
    pub async fn is_match(&self, resource: &str, conditions: &HashMap<String, Vec<String>>) -> bool {
        self.is_match_with_resolver(resource, conditions, None).await
    }

    /// Matches `resource` against this pattern.
    ///
    /// The pattern is first expanded through the optional variable
    /// `resolver` (which may fan out into several candidate patterns); each
    /// candidate then has the first value of every common condition key
    /// substituted for its `${key}` form, and is finally compared both as a
    /// cleaned literal path and as a wildcard pattern.
    pub async fn is_match_with_resolver(
        &self,
        resource: &str,
        conditions: &HashMap<String, Vec<String>>,
        resolver: Option<&dyn PolicyVariableResolver>,
    ) -> bool {
        let pattern = match self {
            Resource::S3(s) => s.to_owned(),
            Resource::Kms(s) => s.to_owned(),
        };
        let patterns = if let Some(res) = resolver {
            super::variables::resolve_aws_variables(&pattern, res).await
        } else {
            vec![pattern.clone()]
        };
        for pattern in patterns {
            let mut resolved_pattern = pattern;
            // Apply condition substitutions
            if !conditions.is_empty() {
                for key in KeyName::COMMON_KEYS {
                    if let Some(rvalue) = conditions.get(key.name())
                        && matches!(rvalue.first().map(|c| !c.is_empty()), Some(true))
                    {
                        // Only the first non-empty condition value is used.
                        resolved_pattern = resolved_pattern.replace(&key.var_name(), &rvalue[0]);
                    }
                }
            }
            // Exact match on the normalized path (clean returns "." for empty).
            let cp = path::clean(resource);
            if cp != "." && cp == resolved_pattern.as_str() {
                return true;
            }
            if wildcard::is_match(resolved_pattern, resource) {
                return true;
            }
        }
        false
    }

    /// Matches `resource` with no conditions and no variable resolution.
    pub async fn match_resource(&self, resource: &str) -> bool {
        self.is_match(resource, &HashMap::new()).await
    }
}
impl TryFrom<&str> for Resource {
    type Error = Error;

    /// Parses an ARN string into a resource pattern and validates it.
    ///
    /// Only `arn:aws:s3:::...` ARNs are recognized here; everything else is
    /// rejected as an invalid resource.
    fn try_from(value: &str) -> std::result::Result<Self, Self::Error> {
        // A single strip_prefix replaces the previous starts_with +
        // strip_prefix().unwrap() double scan.
        let resource = match value.strip_prefix(Self::S3_PREFIX) {
            Some(rest) => Resource::S3(rest.into()),
            None => return Err(IamError::InvalidResource("unknown".into(), value.into()).into()),
        };
        resource.is_valid()?;
        Ok(resource)
    }
}
impl Validator for Resource {
    type Error = Error;

    /// Validates the resource pattern.
    ///
    /// S3 patterns must be non-empty and must not begin with `/`; KMS
    /// patterns must be non-empty and must not contain `/`, `\` or `.`.
    fn is_valid(&self) -> std::result::Result<(), Error> {
        match self {
            Self::S3(pattern) => {
                if pattern.is_empty() || pattern.starts_with('/') {
                    return Err(IamError::InvalidResource("s3".into(), pattern.into()).into());
                }
            }
            Self::Kms(pattern) => {
                // `chars().any` replaces the previous
                // char_indices().find().map().is_some() chain; the rejected
                // character set is unchanged.
                if pattern.is_empty() || pattern.chars().any(|c| matches!(c, '/' | '\\' | '.')) {
                    return Err(IamError::InvalidResource("kms".into(), pattern.into()).into());
                }
            }
        }
        Ok(())
    }
}
impl Serialize for Resource {
    /// Serializes back to the full ARN form: the S3 prefix is re-attached,
    /// KMS patterns are emitted verbatim.
    fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        match self {
            Resource::S3(s) => serializer.serialize_str(&format!("{}{}", Self::S3_PREFIX, s)),
            Resource::Kms(s) => serializer.serialize_str(s),
        }
    }
}
impl<'de> Deserialize<'de> for Resource {
    /// Deserializes from the ARN string form via `TryFrom<&str>`.
    fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        let value = String::deserialize(deserializer)?;
        Resource::try_from(value.as_str()).map_err(serde::de::Error::custom)
    }
}
#[cfg(test)]
mod tests {
    use crate::policy::resource::Resource;
    use std::collections::HashMap;
    use test_case::test_case;

    // Matching table: `resource` is the pattern ARN, `object` the concrete
    // "bucket[/object]" path being checked against it.
    #[test_case("arn:aws:s3:::*","mybucket" => true; "1")]
    #[test_case("arn:aws:s3:::*","mybucket/myobject" => true; "2")]
    #[test_case("arn:aws:s3:::mybucket*","mybucket" => true; "3")]
    #[test_case("arn:aws:s3:::mybucket*","mybucket/myobject" => true; "4")]
    #[test_case("arn:aws:s3:::*/*","mybucket/myobject"=> true; "5")]
    #[test_case("arn:aws:s3:::mybucket/*","mybucket/myobject" => true; "6")]
    #[test_case("arn:aws:s3:::mybucket*/myobject","mybucket/myobject" => true; "7")]
    #[test_case("arn:aws:s3:::mybucket*/myobject","mybucket100/myobject" => true; "8")]
    #[test_case("arn:aws:s3:::mybucket?0/2010/photos/*","mybucket20/2010/photos/1.jpg" => true; "9")]
    #[test_case("arn:aws:s3:::mybucket","mybucket" => true; "10")]
    #[test_case("arn:aws:s3:::mybucket?0","mybucket30" => true; "11")]
    #[test_case("arn:aws:s3:::*/*","mybucket" => false; "12")]
    #[test_case("arn:aws:s3:::mybucket/*","mybucket10/myobject" => false; "13")]
    #[test_case("arn:aws:s3:::mybucket?0/2010/photos/*","mybucket0/2010/photos/1.jpg" => false; "14")]
    #[test_case("arn:aws:s3:::mybucket","mybucket/myobject" => false; "15")]
    fn test_resource_is_match(resource: &str, object: &str) -> bool {
        let resource: Resource = resource.try_into().unwrap();
        // Matching is async; drive it to completion on the current thread.
        pollster::block_on(resource.is_match(object, &HashMap::new()))
    }
}
| rust | Apache-2.0 | 666c0a9a38636eb6653dff7d9c98ff7122601ce2 | 2026-01-04T15:42:12.458416Z | false |
rustfs/rustfs | https://github.com/rustfs/rustfs/blob/666c0a9a38636eb6653dff7d9c98ff7122601ce2/crates/policy/src/policy/id.rs | crates/policy/src/policy/id.rs | // Copyright 2024 RustFS Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use crate::error::{Error, Result};
use serde::{Deserialize, Serialize};
use std::ops::Deref;
use super::Validator;
/// Policy identifier: an arbitrary (possibly empty) string.
#[derive(Serialize, Deserialize, Clone, Default, Debug)]
pub struct ID(pub String);

impl Validator for ID {
    type Error = Error;
    /// if id is a valid utf string, then it is valid.
    fn is_valid(&self) -> Result<()> {
        Ok(())
    }
}

/// Any displayable value can be converted into an `ID`.
impl<T: ToString> From<T> for ID {
    fn from(value: T) -> Self {
        Self(value.to_string())
    }
}

/// Lets an `ID` be used wherever `&String` / `&str` is expected.
impl Deref for ID {
    type Target = String;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
| rust | Apache-2.0 | 666c0a9a38636eb6653dff7d9c98ff7122601ce2 | 2026-01-04T15:42:12.458416Z | false |
rustfs/rustfs | https://github.com/rustfs/rustfs/blob/666c0a9a38636eb6653dff7d9c98ff7122601ce2/crates/policy/src/policy/doc.rs | crates/policy/src/policy/doc.rs | // Copyright 2024 RustFS Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use serde::{Deserialize, Serialize, de::Error};
use time::OffsetDateTime;
use super::Policy;
/// A versioned wrapper around a [`Policy`] with creation/update timestamps.
#[derive(Serialize, Deserialize, Default, Clone)]
pub struct PolicyDoc {
    /// Revision counter, incremented on every `update`.
    pub version: i64,
    /// The wrapped policy document.
    pub policy: Policy,
    /// When the document was first created; `None` for built-in defaults.
    pub create_date: Option<OffsetDateTime>,
    /// When the document was last modified; `None` for built-in defaults.
    pub update_date: Option<OffsetDateTime>,
}
impl PolicyDoc {
    /// Wraps a freshly created policy at version 1.
    pub fn new(policy: Policy) -> Self {
        // Capture the clock once so create_date and update_date are exactly
        // equal on creation; the previous code called now_utc() twice, which
        // could produce two slightly different timestamps.
        let now = OffsetDateTime::now_utc();
        Self {
            version: 1,
            policy,
            create_date: Some(now),
            update_date: Some(now),
        }
    }

    /// Replaces the stored policy, bumping the version and the update
    /// timestamp; backfills `create_date` if it was never set.
    pub fn update(&mut self, policy: Policy) {
        self.version += 1;
        self.policy = policy;
        self.update_date = Some(OffsetDateTime::now_utc());
        if self.create_date.is_none() {
            self.create_date = self.update_date;
        }
    }

    /// Wraps a built-in default policy; such documents carry no timestamps.
    pub fn default_policy(policy: Policy) -> Self {
        Self {
            version: 1,
            policy,
            create_date: None,
            update_date: None,
        }
    }
}
impl TryFrom<Vec<u8>> for PolicyDoc {
type Error = serde_json::Error;
fn try_from(value: Vec<u8>) -> Result<Self, Self::Error> {
// Try to parse as PolicyDoc first
if let Ok(policy_doc) = serde_json::from_slice::<PolicyDoc>(&value) {
return Ok(policy_doc);
}
// Fall back to parsing as Policy and wrap in PolicyDoc
serde_json::from_slice::<Policy>(&value)
.map(|policy| Self {
policy,
..Default::default()
})
.map_err(|_| serde_json::Error::custom("Failed to parse as PolicyDoc or Policy".to_string()))
}
}
| rust | Apache-2.0 | 666c0a9a38636eb6653dff7d9c98ff7122601ce2 | 2026-01-04T15:42:12.458416Z | false |
rustfs/rustfs | https://github.com/rustfs/rustfs/blob/666c0a9a38636eb6653dff7d9c98ff7122601ce2/crates/policy/src/policy/policy.rs | crates/policy/src/policy/policy.rs | // Copyright 2024 RustFS Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use super::{Effect, Error as IamError, ID, Statement, action::Action, statement::BPStatement};
use crate::error::{Error, Result};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use std::collections::{HashMap, HashSet};
/// DEFAULT_VERSION is the default version.
/// https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_elements_version.html
pub const DEFAULT_VERSION: &str = "2012-10-17";

/// check the data is Validator
///
/// NOTE(review): the associated `Error` type is declared but the default
/// `is_valid` signature uses the crate-level `Result` alias instead —
/// confirm whether `Self::Error` is still needed.
pub trait Validator {
    type Error;
    /// Defaults to "always valid"; implementors override to enforce rules.
    fn is_valid(&self) -> Result<()> {
        Ok(())
    }
}
/// Inputs for evaluating an IAM [`Policy`] against one request.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Args<'a> {
    /// Account making the request.
    pub account: &'a str,
    /// Groups the account belongs to, if any.
    pub groups: &'a Option<Vec<String>>,
    /// Action being performed.
    pub action: Action,
    /// Target bucket name.
    pub bucket: &'a str,
    /// Request condition values keyed by condition name.
    pub conditions: &'a HashMap<String, Vec<String>>,
    /// Whether the caller owns the target resource.
    pub is_owner: bool,
    /// Target object key (may be empty for bucket-level actions).
    pub object: &'a str,
    /// Session/JWT claims attached to the request.
    pub claims: &'a HashMap<String, Value>,
    /// When true, only Deny statements are consulted (see `Policy::is_allowed`).
    pub deny_only: bool,
}

impl Args<'_> {
    /// Returns the `roleArn` claim as a string, if present.
    pub fn get_role_arn(&self) -> Option<&str> {
        self.claims.get("roleArn").and_then(|x| x.as_str())
    }

    /// Extracts the policy names stored under `policy_claim_name` in the claims.
    pub fn get_policies(&self, policy_claim_name: &str) -> (HashSet<String>, bool) {
        get_policies_from_claims(self.claims, policy_claim_name)
    }
}
/// An IAM policy: a list of statements evaluated under the
/// [`DEFAULT_VERSION`] policy-language rules.
#[derive(Serialize, Deserialize, Clone, Default, Debug)]
pub struct Policy {
    #[serde(default, rename = "ID")]
    pub id: ID,
    #[serde(rename = "Version")]
    pub version: String,
    #[serde(rename = "Statement")]
    pub statements: Vec<Statement>,
}
impl Policy {
    /// Evaluates the policy for `args`.
    ///
    /// Deny statements are checked first and short-circuit to `false`;
    /// `deny_only` checks and resource owners then succeed without
    /// consulting Allow statements; otherwise any matching Allow statement
    /// grants access.
    pub async fn is_allowed(&self, args: &Args<'_>) -> bool {
        for statement in self.statements.iter().filter(|s| matches!(s.effect, Effect::Deny)) {
            if !statement.is_allowed(args).await {
                return false;
            }
        }
        if args.deny_only || args.is_owner {
            return true;
        }
        for statement in self.statements.iter().filter(|s| matches!(s.effect, Effect::Allow)) {
            if statement.is_allowed(args).await {
                return true;
            }
        }
        false
    }

    /// Returns true if any statement's resource set matches `resource`.
    pub async fn match_resource(&self, resource: &str) -> bool {
        for statement in self.statements.iter() {
            if statement.resources.match_resource(resource).await {
                return true;
            }
        }
        false
    }

    /// Removes statements equal to an earlier statement, preserving the
    /// order of first occurrences.
    fn drop_duplicate_statements(&mut self) {
        let mut dups = HashSet::new();
        for i in 0..self.statements.len() {
            if dups.contains(&i) {
                // i is already a duplicate of some statement, so we do not need to
                // compare with it.
                continue;
            }
            for j in (i + 1)..self.statements.len() {
                if self.statements[i].eq(&self.statements[j]) {
                    // save duplicate statement index for removal.
                    dups.insert(j);
                }
            }
        }
        // Compact in place; `retain` replaces the previous manual
        // clone-and-truncate loop and avoids a clone per surviving element.
        let mut idx = 0;
        self.statements.retain(|_| {
            let keep = !dups.contains(&idx);
            idx += 1;
            keep
        });
    }

    /// Concatenates the statements of all input policies into one policy,
    /// keeping the first non-empty version string and dropping duplicate
    /// statements.
    pub fn merge_policies(inputs: Vec<Policy>) -> Policy {
        let mut merged = Policy::default();
        for p in inputs {
            if merged.version.is_empty() {
                merged.version = p.version;
            }
            // `p` is owned, so its statements can be moved rather than cloned.
            merged.statements.extend(p.statements);
        }
        merged.drop_duplicate_statements();
        merged
    }

    /// True when the policy contains no statements.
    pub fn is_empty(&self) -> bool {
        self.statements.is_empty()
    }

    /// Validates the policy (alias for `is_valid`).
    pub fn validate(&self) -> Result<()> {
        self.is_valid()
    }

    /// Parses and validates a policy from raw JSON bytes.
    pub fn parse_config(data: &[u8]) -> Result<Policy> {
        let policy: Policy = serde_json::from_slice(data)?;
        policy.validate()?;
        Ok(policy)
    }
}
impl Validator for Policy {
    type Error = Error;

    /// A policy is valid when its version is empty or equals the supported
    /// default, and every statement passes its own validation.
    fn is_valid(&self) -> Result<()> {
        let version = self.version.as_str();
        if !(version.is_empty() || version == DEFAULT_VERSION) {
            return Err(IamError::InvalidVersion(self.version.clone()).into());
        }
        self.statements.iter().try_for_each(|statement| statement.is_valid())
    }
}
/// Arguments for evaluating a bucket policy against a single request.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct BucketPolicyArgs<'a> {
    /// Requesting account (access key).
    pub account: &'a str,
    /// Groups the account belongs to, if any.
    pub groups: &'a Option<Vec<String>>,
    /// The action being performed.
    pub action: Action,
    /// Target bucket name.
    pub bucket: &'a str,
    /// Request condition values (e.g. source IP, header values).
    pub conditions: &'a HashMap<String, Vec<String>>,
    /// Whether the requester is the bucket owner (owners bypass `Allow` matching).
    pub is_owner: bool,
    /// Target object key; empty for bucket-level operations.
    pub object: &'a str,
}
/// A bucket policy document: like [`Policy`] but its statements carry a
/// `Principal` element (see `BPStatement`).
#[derive(Serialize, Deserialize, Clone, Default, Debug)]
pub struct BucketPolicy {
    /// Optional policy identifier ("ID" in the JSON form).
    #[serde(default, rename = "ID")]
    pub id: ID,
    /// Policy language version; validated against `DEFAULT_VERSION` (may be empty).
    #[serde(rename = "Version")]
    pub version: String,
    /// Bucket-policy statements ("Statement" in the JSON form).
    #[serde(rename = "Statement")]
    pub statements: Vec<BPStatement>,
}
impl BucketPolicy {
    /// Evaluate bucket-policy statements for the given request.
    ///
    /// Explicit `Deny` statements are checked first and veto the request
    /// (`Effect::Deny` inverts the match inside `BPStatement::is_allowed`).
    /// The bucket owner is then always allowed; otherwise at least one
    /// `Allow` statement must match.
    pub async fn is_allowed(&self, args: &BucketPolicyArgs<'_>) -> bool {
        for stmt in &self.statements {
            if matches!(stmt.effect, Effect::Deny) && !stmt.is_allowed(args).await {
                return false;
            }
        }

        if args.is_owner {
            return true;
        }

        for stmt in &self.statements {
            if matches!(stmt.effect, Effect::Allow) && stmt.is_allowed(args).await {
                return true;
            }
        }

        false
    }
}
impl Validator for BucketPolicy {
    type Error = Error;

    /// Valid when the version is empty or the supported default and every
    /// statement validates.
    fn is_valid(&self) -> Result<()> {
        match self.version.as_str() {
            "" => {}
            v if v == DEFAULT_VERSION => {}
            _ => return Err(IamError::InvalidVersion(self.version.clone()).into()),
        }
        self.statements.iter().try_for_each(|statement| statement.is_valid())
    }
}
/// Collect string values stored under `claim_name` in `claims`.
///
/// The claim may be a single JSON string or an array of strings; each string
/// is treated as a comma-separated list whose entries are trimmed, with empty
/// entries skipped. Returns the collected set plus a flag indicating whether
/// the claim existed with a usable type.
///
/// The split/trim/insert logic is now shared through a closure instead of
/// being duplicated in the array and string branches.
fn get_values_from_claims(claims: &HashMap<String, Value>, claim_name: &str) -> (HashSet<String>, bool) {
    let mut s = HashSet::new();
    let Some(pname) = claims.get(claim_name) else {
        return (s, false);
    };

    let mut insert_csv = |raw: &str| {
        for part in raw.split(',').map(str::trim) {
            if !part.is_empty() {
                s.insert(part.to_string());
            }
        }
    };

    let found = if let Some(items) = pname.as_array() {
        // Non-string array elements are ignored, matching the original behavior.
        for item in items {
            if let Some(raw) = item.as_str() {
                insert_csv(raw);
            }
        }
        true
    } else if let Some(raw) = pname.as_str() {
        insert_csv(raw);
        true
    } else {
        // Claim exists but is neither a string nor an array of strings.
        false
    };

    (s, found)
}
/// Extract a set of policy names from JWT-style claims. The claim value may
/// be a comma-separated string or an array of such strings; the boolean
/// reports whether the claim key was present with a usable type.
pub fn get_policies_from_claims(claims: &HashMap<String, Value>, policy_claim_name: &str) -> (HashSet<String>, bool) {
    get_values_from_claims(claims, policy_claim_name)
}
/// Claim key under which service-account embedded policies are stored.
pub fn iam_policy_claim_name_sa() -> String {
    String::from(rustfs_credentials::IAM_POLICY_CLAIM_NAME_SA)
}
pub mod default {
    //! Built-in policies shipped with the server (readwrite, readonly,
    //! writeonly, diagnostics, consoleAdmin).

    use std::sync::LazyLock;

    use crate::policy::{
        ActionSet, DEFAULT_VERSION, Effect, Functions, ResourceSet, Statement,
        action::{Action, AdminAction, KmsAction, S3Action},
        resource::Resource,
    };

    use super::Policy;

    /// Build an `Allow` statement from owned action / resource lists.
    fn allow(actions: Vec<Action>, resources: Vec<Resource>) -> Statement {
        Statement {
            sid: "".into(),
            effect: Effect::Allow,
            actions: ActionSet(actions.into_iter().collect()),
            not_actions: ActionSet(Default::default()),
            resources: ResourceSet(resources.into_iter().collect()),
            conditions: Functions::default(),
            ..Default::default()
        }
    }

    /// Wrap statements in a policy carrying the default version string.
    fn policy(statements: Vec<Statement>) -> Policy {
        Policy {
            id: "".into(),
            version: DEFAULT_VERSION.into(),
            statements,
        }
    }

    /// The wildcard S3 resource (`*`) used by all built-in policies.
    fn all_s3_resources() -> Vec<Resource> {
        vec![Resource::S3("*".into())]
    }

    #[allow(clippy::incompatible_msrv)]
    pub static DEFAULT_POLICIES: LazyLock<[(&'static str, Policy); 6]> = LazyLock::new(|| {
        [
            (
                "readwrite",
                policy(vec![allow(vec![Action::S3Action(S3Action::AllActions)], all_s3_resources())]),
            ),
            (
                "readonly",
                policy(vec![allow(
                    vec![
                        Action::S3Action(S3Action::GetBucketLocationAction),
                        Action::S3Action(S3Action::GetObjectAction),
                    ],
                    all_s3_resources(),
                )]),
            ),
            (
                "writeonly",
                policy(vec![allow(vec![Action::S3Action(S3Action::PutObjectAction)], all_s3_resources())]),
            ),
            // NOTE(review): this "writeonly" entry duplicates the previous one
            // verbatim. It is preserved so the array length (and therefore the
            // public type of DEFAULT_POLICIES) is unchanged, but one of the two
            // is almost certainly unintended — confirm and deduplicate.
            (
                "writeonly",
                policy(vec![allow(vec![Action::S3Action(S3Action::PutObjectAction)], all_s3_resources())]),
            ),
            (
                "diagnostics",
                policy(vec![allow(
                    vec![
                        Action::AdminAction(AdminAction::ProfilingAdminAction),
                        Action::AdminAction(AdminAction::TraceAdminAction),
                        Action::AdminAction(AdminAction::ConsoleLogAdminAction),
                        Action::AdminAction(AdminAction::ServerInfoAdminAction),
                        Action::AdminAction(AdminAction::TopLocksAdminAction),
                        Action::AdminAction(AdminAction::HealthInfoAdminAction),
                        Action::AdminAction(AdminAction::PrometheusAdminAction),
                        Action::AdminAction(AdminAction::BandwidthMonitorAction),
                    ],
                    all_s3_resources(),
                )]),
            ),
            (
                "consoleAdmin",
                policy(vec![
                    // Admin and KMS statements are not resource-scoped.
                    allow(vec![Action::AdminAction(AdminAction::AllAdminActions)], vec![]),
                    allow(vec![Action::KmsAction(KmsAction::AllActions)], vec![]),
                    allow(vec![Action::S3Action(S3Action::AllActions)], all_s3_resources()),
                ]),
            ),
        ]
    });
}
#[cfg(test)]
mod test {
    use super::*;
    use crate::error::Result;

    /// Round-trips a multi-statement policy through parse -> serialize -> parse.
    #[tokio::test]
    async fn test_parse_policy() -> Result<()> {
        let data = r#"
        {
            "Version": "2012-10-17",
            "Statement": [
                {
                    "Effect": "Allow",
                    "Action": ["s3:GetObject"],
                    "Resource": ["arn:aws:s3:::dada/*"],
                    "Condition": {
                        "StringEquals": {
                            "s3:ExistingObjectTag/security": "public"
                        }
                    }
                },
                {
                    "Effect": "Allow",
                    "Action": ["s3:DeleteObjectTagging"],
                    "Resource": ["arn:aws:s3:::dada/*"],
                    "Condition": {
                        "StringEquals": {
                            "s3:ExistingObjectTag/security": "public"
                        }
                    }
                },
                {
                    "Effect": "Allow",
                    "Action": ["s3:DeleteObject"],
                    "Resource": ["arn:aws:s3:::dada/*"]
                },
                {
                    "Effect": "Allow",
                    "Action": [
                        "s3:PutObject"
                    ],
                    "Resource": [
                        "arn:aws:s3:::dada/*"
                    ],
                    "Condition": {
                        "ForAllValues:StringLike": {
                            "s3:RequestObjectTagKeys": [
                                "security",
                                "virus"
                            ]
                        }
                    }
                }
            ]
        }
        "#;
        let p = Policy::parse_config(data.as_bytes())?;
        let str = serde_json::to_string(&p)?;
        let _p2 = Policy::parse_config(str.as_bytes())?;
        // assert_eq!(p, p2);
        Ok(())
    }

    /// AWS IAM accepts both a bare string and an array for `Action` and
    /// `Resource`; both forms must parse to equivalent policies.
    #[tokio::test]
    async fn test_parse_policy_with_single_string_action_and_resource() -> Result<()> {
        // Single-string form.
        let data = r#"
        {
            "Version": "2012-10-17",
            "Statement": [
                {
                    "Effect": "Allow",
                    "Action": "s3:GetObject",
                    "Resource": "arn:aws:s3:::test/analytics/customers/*"
                }
            ]
        }
        "#;
        let p = Policy::parse_config(data.as_bytes())?;
        assert!(!p.statements.is_empty());
        assert!(!p.statements[0].actions.is_empty());
        assert!(!p.statements[0].resources.is_empty());

        // Array form (should still work).
        let data_array = r#"
        {
            "Version": "2012-10-17",
            "Statement": [
                {
                    "Effect": "Allow",
                    "Action": ["s3:GetObject"],
                    "Resource": ["arn:aws:s3:::test/analytics/customers/*"]
                }
            ]
        }
        "#;
        let p2 = Policy::parse_config(data_array.as_bytes())?;
        assert!(!p2.statements.is_empty());
        assert!(!p2.statements[0].actions.is_empty());
        assert!(!p2.statements[0].resources.is_empty());

        // Both formats must produce equivalent results.
        assert_eq!(
            p.statements.len(),
            p2.statements.len(),
            "Both policies should have the same number of statements"
        );
        assert_eq!(
            p.statements[0].actions, p2.statements[0].actions,
            "ActionSet from string format should equal ActionSet from array format"
        );
        assert_eq!(
            p.statements[0].resources, p2.statements[0].resources,
            "ResourceSet from string format should equal ResourceSet from array format"
        );
        assert_eq!(
            p.statements[0].effect, p2.statements[0].effect,
            "Effect should be the same in both formats"
        );

        // Verify specific content.
        assert_eq!(p.statements[0].actions.len(), 1, "ActionSet should contain exactly one action");
        assert_eq!(p.statements[0].resources.len(), 1, "ResourceSet should contain exactly one resource");
        Ok(())
    }

    /// `${aws:username}` in a resource must resolve from the caller's claims.
    ///
    /// These tests previously used `pollster::block_on` inside async tokio
    /// tests, which blocks the runtime thread; they now simply `.await`.
    #[tokio::test]
    async fn test_aws_username_policy_variable() -> Result<()> {
        let data = r#"
        {
            "Version": "2012-10-17",
            "Statement": [
                {
                    "Effect": "Allow",
                    "Action": ["s3:ListBucket"],
                    "Resource": ["arn:aws:s3:::${aws:username}-*"]
                }
            ]
        }
        "#;
        let policy = Policy::parse_config(data.as_bytes())?;
        let conditions = HashMap::new();

        // Allowed: user `testuser` accessing `testuser-bucket`.
        let mut claims1 = HashMap::new();
        claims1.insert("username".to_string(), Value::String("testuser".to_string()));
        let args1 = Args {
            account: "testuser",
            groups: &None,
            action: Action::S3Action(crate::policy::action::S3Action::ListBucketAction),
            bucket: "testuser-bucket",
            conditions: &conditions,
            is_owner: false,
            object: "",
            claims: &claims1,
            deny_only: false,
        };

        // Denied: user `otheruser` accessing `testuser-bucket`.
        let mut claims2 = HashMap::new();
        claims2.insert("username".to_string(), Value::String("otheruser".to_string()));
        let args2 = Args {
            account: "otheruser",
            groups: &None,
            action: Action::S3Action(crate::policy::action::S3Action::ListBucketAction),
            bucket: "testuser-bucket",
            conditions: &conditions,
            is_owner: false,
            object: "",
            claims: &claims2,
            deny_only: false,
        };

        assert!(policy.is_allowed(&args1).await);
        assert!(!policy.is_allowed(&args2).await);
        Ok(())
    }

    /// `${aws:userid}` must resolve from the `sub` claim.
    #[tokio::test]
    async fn test_aws_userid_policy_variable() -> Result<()> {
        let data = r#"
        {
            "Version": "2012-10-17",
            "Statement": [
                {
                    "Effect": "Allow",
                    "Action": ["s3:ListBucket"],
                    "Resource": ["arn:aws:s3:::${aws:userid}-bucket"]
                }
            ]
        }
        "#;
        let policy = Policy::parse_config(data.as_bytes())?;
        let mut claims = HashMap::new();
        claims.insert("sub".to_string(), Value::String("AIDACKCEVSQ6C2EXAMPLE".to_string()));
        let conditions = HashMap::new();

        // Allowed case.
        let args1 = Args {
            account: "testuser",
            groups: &None,
            action: Action::S3Action(crate::policy::action::S3Action::ListBucketAction),
            bucket: "AIDACKCEVSQ6C2EXAMPLE-bucket",
            conditions: &conditions,
            is_owner: false,
            object: "",
            claims: &claims,
            deny_only: false,
        };

        // Denied case.
        let args2 = Args {
            account: "testuser",
            groups: &None,
            action: Action::S3Action(crate::policy::action::S3Action::ListBucketAction),
            bucket: "OTHERUSER-bucket",
            conditions: &conditions,
            is_owner: false,
            object: "",
            claims: &claims,
            deny_only: false,
        };

        assert!(policy.is_allowed(&args1).await);
        assert!(!policy.is_allowed(&args2).await);
        Ok(())
    }

    /// Multiple variables may be concatenated in a single resource pattern.
    #[tokio::test]
    async fn test_aws_policy_variables_concatenation() -> Result<()> {
        let data = r#"
        {
            "Version": "2012-10-17",
            "Statement": [
                {
                    "Effect": "Allow",
                    "Action": ["s3:ListBucket"],
                    "Resource": ["arn:aws:s3:::${aws:username}-${aws:userid}-bucket"]
                }
            ]
        }
        "#;
        let policy = Policy::parse_config(data.as_bytes())?;
        let mut claims = HashMap::new();
        claims.insert("username".to_string(), Value::String("testuser".to_string()));
        claims.insert("sub".to_string(), Value::String("AIDACKCEVSQ6C2EXAMPLE".to_string()));
        let conditions = HashMap::new();

        // Allowed case.
        let args1 = Args {
            account: "testuser",
            groups: &None,
            action: Action::S3Action(crate::policy::action::S3Action::ListBucketAction),
            bucket: "testuser-AIDACKCEVSQ6C2EXAMPLE-bucket",
            conditions: &conditions,
            is_owner: false,
            object: "",
            claims: &claims,
            deny_only: false,
        };

        // Denied case.
        let args2 = Args {
            account: "testuser",
            groups: &None,
            action: Action::S3Action(crate::policy::action::S3Action::ListBucketAction),
            bucket: "otheruser-AIDACKCEVSQ6C2EXAMPLE-bucket",
            conditions: &conditions,
            is_owner: false,
            object: "",
            claims: &claims,
            deny_only: false,
        };

        assert!(policy.is_allowed(&args1).await);
        assert!(!policy.is_allowed(&args2).await);
        Ok(())
    }

    /// Nested variable expressions must resolve inner variables first.
    #[tokio::test]
    async fn test_aws_policy_variables_nested() -> Result<()> {
        let data = r#"
        {
            "Version": "2012-10-17",
            "Statement": [
                {
                    "Effect": "Allow",
                    "Action": ["s3:ListBucket"],
                    "Resource": ["arn:aws:s3:::${${aws:PrincipalType}-${aws:userid}}"]
                }
            ]
        }
        "#;
        let policy = Policy::parse_config(data.as_bytes())?;
        let mut claims = HashMap::new();
        claims.insert("sub".to_string(), Value::String("AIDACKCEVSQ6C2EXAMPLE".to_string()));
        // PrincipalType defaults to "User" when not explicitly set.
        let conditions = HashMap::new();

        // Allowed case.
        let args1 = Args {
            account: "testuser",
            groups: &None,
            action: Action::S3Action(crate::policy::action::S3Action::ListBucketAction),
            bucket: "User-AIDACKCEVSQ6C2EXAMPLE",
            conditions: &conditions,
            is_owner: false,
            object: "",
            claims: &claims,
            deny_only: false,
        };

        // Denied case.
        let args2 = Args {
            account: "testuser",
            groups: &None,
            action: Action::S3Action(crate::policy::action::S3Action::ListBucketAction),
            bucket: "User-OTHERUSER",
            conditions: &conditions,
            is_owner: false,
            object: "",
            claims: &claims,
            deny_only: false,
        };

        assert!(policy.is_allowed(&args1).await);
        assert!(!policy.is_allowed(&args2).await);
        Ok(())
    }

    /// When a claim holds multiple values, matching any of them suffices.
    #[tokio::test]
    async fn test_aws_policy_variables_multi_value() -> Result<()> {
        let data = r#"
        {
            "Version": "2012-10-17",
            "Statement": [
                {
                    "Effect": "Allow",
                    "Action": ["s3:ListBucket"],
                    "Resource": ["arn:aws:s3:::${aws:username}-bucket"]
                }
            ]
        }
        "#;
        let policy = Policy::parse_config(data.as_bytes())?;
        let mut claims = HashMap::new();
        // Array value for the username claim.
        claims.insert(
            "username".to_string(),
            Value::Array(vec![Value::String("user1".to_string()), Value::String("user2".to_string())]),
        );
        let conditions = HashMap::new();

        let args1 = Args {
            account: "user1",
            groups: &None,
            action: Action::S3Action(crate::policy::action::S3Action::ListBucketAction),
            bucket: "user1-bucket",
            conditions: &conditions,
            is_owner: false,
            object: "",
            claims: &claims,
            deny_only: false,
        };
        let args2 = Args {
            account: "user2",
            groups: &None,
            action: Action::S3Action(crate::policy::action::S3Action::ListBucketAction),
            bucket: "user2-bucket",
            conditions: &conditions,
            is_owner: false,
            object: "",
            claims: &claims,
            deny_only: false,
        };

        // Either user1 or user2 should be allowed.
        assert!(policy.is_allowed(&args1).await || policy.is_allowed(&args2).await);
        Ok(())
    }
}
| rust | Apache-2.0 | 666c0a9a38636eb6653dff7d9c98ff7122601ce2 | 2026-01-04T15:42:12.458416Z | false |
rustfs/rustfs | https://github.com/rustfs/rustfs/blob/666c0a9a38636eb6653dff7d9c98ff7122601ce2/crates/policy/src/policy/function.rs | crates/policy/src/policy/function.rs | // Copyright 2024 RustFS Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use crate::policy::function::condition::Condition;
use crate::policy::variables::PolicyVariableResolver;
use serde::ser::SerializeMap;
use serde::{Deserialize, Serialize, Serializer, de};
use std::collections::HashMap;
use std::collections::HashSet;
pub mod addr;
pub mod binary;
pub mod bool_null;
pub mod condition;
pub mod date;
pub mod func;
pub mod key;
pub mod key_name;
pub mod number;
pub mod string;
/// The `Condition` element of a policy statement, with the conditions grouped
/// by their IAM set-operator qualifier.
#[derive(Clone, Default, Debug)]
pub struct Functions {
    /// Conditions written with the `ForAnyValue:` qualifier.
    for_any_value: Vec<Condition>,
    /// Conditions written with the `ForAllValues:` qualifier.
    for_all_values: Vec<Condition>,
    /// Unqualified conditions.
    for_normal: Vec<Condition>,
}
impl Functions {
    /// Evaluate every condition against `values` without policy-variable
    /// resolution.
    pub async fn evaluate(&self, values: &HashMap<String, Vec<String>>) -> bool {
        self.evaluate_with_resolver(values, None).await
    }

    /// Evaluate every condition; the result is `true` only when all of them
    /// hold. `ForAllValues`-qualified conditions are evaluated in "for all
    /// values" mode; the other two groups in "for any value" mode.
    pub async fn evaluate_with_resolver(
        &self,
        values: &HashMap<String, Vec<String>>,
        resolver: Option<&dyn PolicyVariableResolver>,
    ) -> bool {
        // Pair each condition with its "for all values" flag, preserving the
        // original evaluation order across the three groups.
        let groups = self
            .for_any_value
            .iter()
            .map(|c| (c, false))
            .chain(self.for_all_values.iter().map(|c| (c, true)))
            .chain(self.for_normal.iter().map(|c| (c, false)));

        for (cond, for_all) in groups {
            if !cond.evaluate_with_resolver(for_all, values, resolver).await {
                return false;
            }
        }
        true
    }

    /// True when no condition is present in any group.
    pub fn is_empty(&self) -> bool {
        self.for_normal.is_empty() && self.for_any_value.is_empty() && self.for_all_values.is_empty()
    }
}
impl Serialize for Functions {
    /// Serialize as a single JSON map whose keys are condition-operator
    /// names, prefixed with `ForAllValues:` / `ForAnyValue:` for the
    /// qualified groups.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        // The map length is the total number of conditions across all groups.
        let mut se =
            serializer.serialize_map(Some(self.for_any_value.len() + self.for_all_values.len() + self.for_normal.len()))?;
        // Qualified groups are emitted first, then the unqualified conditions.
        for conditions in self.for_all_values.iter() {
            se.serialize_key(format!("ForAllValues:{}", conditions.to_key()).as_str())?;
            conditions.serialize_map(&mut se)?;
        }
        for conditions in self.for_any_value.iter() {
            se.serialize_key(format!("ForAnyValue:{}", conditions.to_key()).as_str())?;
            conditions.serialize_map(&mut se)?;
        }
        for conditions in self.for_normal.iter() {
            se.serialize_key(conditions.to_key())?;
            conditions.serialize_map(&mut se)?;
        }
        se.end()
    }
}
impl<'de> Deserialize<'de> for Functions {
    /// Deserialize from a JSON map of `[Qualifier:]Operator -> key/value`
    /// entries, e.g. `{"ForAllValues:StringLike": {...}, "Null": {...}}`.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        struct FuncVisitor;

        use serde::de::Visitor;

        impl<'de> Visitor<'de> for FuncVisitor {
            type Value = Functions;

            fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
                formatter.write_str("Functions")
            }

            fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error>
            where
                A: de::MapAccess<'de>,
            {
                use serde::de::Error;

                // Track already-seen operator keys so duplicates are rejected.
                let mut hash = HashSet::with_capacity(map.size_hint().unwrap_or_default());
                let mut inner_data = Functions::default();

                while let Some(key) = map.next_key::<&str>()? {
                    if hash.contains(&key) {
                        return Err(Error::custom(format!("duplicate condition operator `{key}`")));
                    }
                    hash.insert(key);

                    // Split `Qualifier:Operator`; a bare operator has no qualifier.
                    let mut tokens = key.split(":");
                    let mut qualifier = tokens.next();
                    let mut name = tokens.next();
                    if name.is_none() {
                        name = qualifier;
                        qualifier = None;
                    }
                    // More than one `:` in the key is malformed.
                    if tokens.next().is_some() {
                        return Err(Error::custom("invalid condition operator"));
                    }

                    let Some(name) = name else { return Err(Error::custom("has no condition operator")) };

                    // Delegate the value parsing to the operator itself.
                    let condition = Condition::from_deserializer(name, &mut map)?;

                    match qualifier {
                        Some("ForAnyValue") => inner_data.for_any_value.push(condition),
                        Some("ForAllValues") => inner_data.for_all_values.push(condition),
                        Some(q) => return Err(Error::custom(format!("invalid qualifier `{q}`"))),
                        None => inner_data.for_normal.push(condition),
                    }
                }

                /* if inner_data.is_empty() {
                    return Err(Error::custom("has no condition element"));
                } */

                Ok(inner_data)
            }
        }

        deserializer.deserialize_map(FuncVisitor)
    }
}
impl PartialEq for Functions {
fn eq(&self, other: &Self) -> bool {
if !(self.for_all_values.len() == other.for_all_values.len()
&& self.for_any_value.len() == other.for_any_value.len()
&& self.for_normal.len() == other.for_normal.len())
{
return false;
}
self.for_any_value.iter().all(|x| other.for_any_value.contains(x))
&& self.for_all_values.iter().all(|x| other.for_all_values.contains(x))
&& self.for_normal.iter().all(|x| other.for_normal.contains(x))
}
}
/// Placeholder type kept for (de)serialization plumbing; currently unused
/// (hence the `dead_code` allowance). NOTE(review): confirm whether this can
/// be removed.
#[derive(Clone, Serialize, Deserialize)]
#[allow(dead_code)]
pub struct Value;
#[cfg(test)]
mod tests {
    //! (De)serialization round-trip tests for `Functions`.

    use crate::policy::Functions;
    use crate::policy::function::condition::Condition::*;
    use crate::policy::function::func::FuncKeyValue;
    use crate::policy::function::key::Key;
    use crate::policy::function::string::StringFunc;
    use crate::policy::function::string::StringFuncValue;
    use test_case::test_case;

    // Case 1: duplicate "Null" operator keys must be rejected by the visitor.
    #[test_case(
        r#"{
        "Null": {
            "s3:x-amz-server-side-encryption-customer-algorithm": true
        },
        "Null": {
            "s3:x-amz-server-side-encryption-customer-algorithm": "true"
        }
    }"# => false; "1")]
    // Case 2: an empty condition map is accepted.
    #[test_case(r#"{}"# => true; "2")]
    // Case 3: a mix of string/IP/null operators parses successfully.
    #[test_case(
        r#"{
        "StringLike": {
            "s3:x-amz-metadata-directive": "REPL*"
        },
        "StringEquals": {
            "s3:x-amz-copy-source": "mybucket/myobject"
        },
        "StringNotEquals": {
            "s3:x-amz-server-side-encryption": "AES256"
        },
        "NotIpAddress": {
            "aws:SourceIp": [
                "10.1.10.0/24",
                "10.10.1.0/24"
            ]
        },
        "StringNotLike": {
            "s3:x-amz-storage-class": "STANDARD",
            "s3:x-amz-server-side-encryption": "AES256"
        },
        "Null": {
            "s3:x-amz-server-side-encryption-customer-algorithm": true
        },
        "IpAddress": {
            "aws:SourceIp": [
                "192.168.1.0/24",
                "192.168.2.0/24"
            ]
        }
    }"# => true; "3"
    )]
    // Case 4: operators with multiple keys and mixed scalar/array values.
    #[test_case(
        r#"{
        "StringLike": {
            "s3:x-amz-metadata-directive": "REPL*"
        },
        "StringEquals": {
            "s3:x-amz-copy-source": "mybucket/myobject",
            "s3:prefix": [
                "",
                "home/"
            ],
            "s3:delimiter": [
                "/"
            ]
        },
        "StringNotEquals": {
            "s3:x-amz-server-side-encryption": "AES256"
        },
        "NotIpAddress": {
            "aws:SourceIp": [
                "10.1.10.0/24",
                "10.10.1.0/24"
            ]
        },
        "StringNotLike": {
            "s3:x-amz-storage-class": "STANDARD"
        },
        "Null": {
            "s3:x-amz-server-side-encryption-customer-algorithm": true
        },
        "IpAddress": {
            "aws:SourceIp": [
                "192.168.1.0/24",
                "192.168.2.0/24"
            ]
        }
    }"# => true; "4"
    )]
    // Case 5: all values in array form.
    #[test_case(
        r#"{
        "IpAddress": {
            "aws:SourceIp": [
                "192.168.1.0/24"
            ]
        },
        "NotIpAddress": {
            "aws:SourceIp": [
                "10.1.10.0/24"
            ]
        },
        "Null": {
            "s3:x-amz-server-side-encryption-customer-algorithm": [
                true
            ]
        },
        "StringEquals": {
            "s3:x-amz-copy-source": [
                "mybucket/myobject"
            ]
        },
        "StringLike": {
            "s3:x-amz-metadata-directive": [
                "REPL*"
            ]
        },
        "StringNotEquals": {
            "s3:x-amz-server-side-encryption": [
                "AES256"
            ]
        },
        "StringNotLike": {
            "s3:x-amz-storage-class": [
                "STANDARD"
            ]
        }
    }"# => true;
    "5"
    )]
    // Case 6: like case 5 but with a different StringNotEquals value.
    #[test_case(
        r#"{
        "IpAddress": {
            "aws:SourceIp": [
                "192.168.1.0/24"
            ]
        },
        "NotIpAddress": {
            "aws:SourceIp": [
                "10.1.10.0/24"
            ]
        },
        "Null": {
            "s3:x-amz-server-side-encryption-customer-algorithm": [
                true
            ]
        },
        "StringEquals": {
            "s3:x-amz-copy-source": [
                "mybucket/myobject"
            ]
        },
        "StringLike": {
            "s3:x-amz-metadata-directive": [
                "REPL*"
            ]
        },
        "StringNotEquals": {
            "s3:x-amz-server-side-encryption": [
                "aws:kms"
            ]
        },
        "StringNotLike": {
            "s3:x-amz-storage-class": [
                "STANDARD"
            ]
        }
    }"# => true;
    "6"
    )]
    // Returns whether the input deserializes into `Functions` at all.
    fn test_de(input: &str) -> bool {
        serde_json::from_str::<Functions>(input)
            .map_err(|e| eprintln!("{e:?}"))
            .is_ok()
    }

    // Unqualified conditions serialize without a qualifier prefix.
    #[test_case(
        Functions {
            for_normal: vec![StringNotLike(StringFunc {
                0: vec![FuncKeyValue {
                    key: Key::try_from("s3:LocationConstraint").unwrap(),
                    values: StringFuncValue(vec!["us-east-1"].into_iter().map(ToOwned::to_owned).collect()),
                }],
            })],
            ..Default::default()
        },
        r#"{"StringNotLike":{"s3:LocationConstraint":"us-east-1"}}"#;
        "1"
    )]
    // `for_all_values` conditions gain the `ForAllValues:` prefix.
    #[test_case(
        Functions {
            for_all_values: vec![StringNotLike(StringFunc {
                0: vec![FuncKeyValue {
                    key: Key::try_from("s3:LocationConstraint").unwrap(),
                    values: StringFuncValue(vec!["us-east-1"].into_iter().map(ToOwned::to_owned).collect()),
                }],
            })],
            ..Default::default()
        },
        r#"{"ForAllValues:StringNotLike":{"s3:LocationConstraint":"us-east-1"}}"#;
        "2"
    )]
    // All three groups serialize in ForAllValues, ForAnyValue, normal order.
    #[test_case(
        Functions {
            for_any_value: vec![StringNotLike(StringFunc {
                0: vec![FuncKeyValue {
                    key: Key::try_from("s3:LocationConstraint").unwrap(),
                    values: StringFuncValue(vec!["us-east-1", "us-east-2"].into_iter().map(ToOwned::to_owned).collect()),
                }],
            })],
            for_all_values: vec![StringNotLike(StringFunc {
                0: vec![FuncKeyValue {
                    key: Key::try_from("s3:LocationConstraint").unwrap(),
                    values: StringFuncValue(vec!["us-east-1"].into_iter().map(ToOwned::to_owned).collect()),
                }],
            })],
            for_normal: vec![StringNotLike(StringFunc {
                0: vec![FuncKeyValue {
                    key: Key::try_from("s3:LocationConstraint").unwrap(),
                    values: StringFuncValue(vec!["us-east-1"].into_iter().map(ToOwned::to_owned).collect()),
                }],
            })],
        },
        r#"{"ForAllValues:StringNotLike":{"s3:LocationConstraint":"us-east-1"},"ForAnyValue:StringNotLike":{"s3:LocationConstraint":["us-east-1","us-east-2"]},"StringNotLike":{"s3:LocationConstraint":"us-east-1"}}"#;
        "3"
    )]
    // Asserts the exact serialized JSON of the given `Functions`.
    fn test_ser(input: Functions, expect: &str) {
        assert_eq!(serde_json::to_string(&input).unwrap(), expect);
    }
}
| rust | Apache-2.0 | 666c0a9a38636eb6653dff7d9c98ff7122601ce2 | 2026-01-04T15:42:12.458416Z | false |
rustfs/rustfs | https://github.com/rustfs/rustfs/blob/666c0a9a38636eb6653dff7d9c98ff7122601ce2/crates/policy/src/policy/utils.rs | crates/policy/src/policy/utils.rs | // Copyright 2024 RustFS Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::collections::HashMap;
use serde_json::Value;
pub mod path;
pub mod wildcard;
/// Collect comma-separated string values from `claim[claim_name]`.
///
/// The claim value may be a JSON string or an array of strings; every string
/// is split on `,`, each segment trimmed, and empty segments dropped.
/// Returns the collected values plus `true` when the claim exists with a
/// usable type. (The parameter was previously misspelled `chaim_name`; Rust
/// callers pass arguments positionally, so the rename is interface-compatible.)
pub fn _get_values_from_claims(claim: &HashMap<String, Value>, claim_name: &str) -> (Vec<String>, bool) {
    let mut result = vec![];
    let Some(pname) = claim.get(claim_name) else {
        return (result, false);
    };

    // Shared split/trim/filter step for both the string and array branches.
    let mut push_csv = |raw: &str| {
        result.extend(raw.split(',').map(str::trim).filter(|s| !s.is_empty()).map(|s| s.to_owned()));
    };

    let ok = if let Some(items) = pname.as_array() {
        // Non-string array elements are skipped, as before.
        for item in items {
            if let Some(raw) = item.as_str() {
                push_csv(raw);
            }
        }
        true
    } else if let Some(raw) = pname.as_str() {
        push_csv(raw);
        true
    } else {
        // Claim exists but is neither a string nor an array.
        false
    };

    (result, ok)
}
/// Split `path` just after a `/` separator: after the first `/` when
/// `second_index` is false, after the second `/` when it is true. The
/// returned head keeps its trailing `/`. When the path does not contain
/// enough separators, the whole path is returned with an empty tail.
pub fn _split_path(path: &str, second_index: bool) -> (&str, &str) {
    let split_at = if second_index {
        match path.find('/') {
            Some(first) => match path[first + 1..].find('/') {
                // Offset of the second '/' within the full string.
                Some(second) => Some(first + second + 1),
                None => None,
            },
            None => None,
        }
    } else {
        path.find('/')
    };

    match split_at {
        Some(i) => (&path[..=i], &path[i + 1..]),
        None => (path, ""),
    }
}
#[cfg(test)]
mod tests {
    use super::_split_path;

    // `second_index = false` splits after the first '/', `true` after the
    // second; inputs without enough separators come back whole with an empty
    // tail. The LDAP-style cases check that later '/' characters inside the
    // tail are left untouched.
    #[test_case::test_case("format.json", false => ("format.json", ""))]
    #[test_case::test_case("users/tester.json", false => ("users/", "tester.json"))]
    #[test_case::test_case("groups/test/group.json", false => ("groups/", "test/group.json"))]
    #[test_case::test_case("policydb/groups/testgroup.json", true => ("policydb/groups/", "testgroup.json"))]
    #[test_case::test_case(
        "policydb/sts-users/uid=slash/user,ou=people,ou=swengg,dc=min,dc=io.json", true =>
        ("policydb/sts-users/", "uid=slash/user,ou=people,ou=swengg,dc=min,dc=io.json"))
    ]
    #[test_case::test_case(
        "policydb/sts-users/uid=slash/user/twice,ou=people,ou=swengg,dc=min,dc=io.json", true =>
        ("policydb/sts-users/", "uid=slash/user/twice,ou=people,ou=swengg,dc=min,dc=io.json"))
    ]
    #[test_case::test_case(
        "policydb/groups/cn=project/d,ou=groups,ou=swengg,dc=min,dc=io.json", true =>
        ("policydb/groups/", "cn=project/d,ou=groups,ou=swengg,dc=min,dc=io.json"))
    ]
    fn test_split_path(path: &str, second_index: bool) -> (&str, &str) {
        _split_path(path, second_index)
    }
}
| rust | Apache-2.0 | 666c0a9a38636eb6653dff7d9c98ff7122601ce2 | 2026-01-04T15:42:12.458416Z | false |
rustfs/rustfs | https://github.com/rustfs/rustfs/blob/666c0a9a38636eb6653dff7d9c98ff7122601ce2/crates/policy/src/policy/statement.rs | crates/policy/src/policy/statement.rs | // Copyright 2024 RustFS Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use super::{
ActionSet, Args, BucketPolicyArgs, Effect, Error as IamError, Functions, ID, Principal, ResourceSet, Validator,
action::Action,
variables::{VariableContext, VariableResolver},
};
use crate::error::{Error, Result};
use serde::{Deserialize, Serialize};
/// A single IAM policy statement: an effect plus action/resource matchers and
/// optional conditions.
#[derive(Serialize, Deserialize, Clone, Default, Debug)]
pub struct Statement {
    /// Optional statement identifier.
    #[serde(rename = "Sid", default)]
    pub sid: ID,
    /// `Allow` or `Deny`; applied to the match result in `is_allowed`.
    #[serde(rename = "Effect")]
    pub effect: Effect,
    /// Actions the statement applies to.
    #[serde(rename = "Action")]
    pub actions: ActionSet,
    /// Actions explicitly excluded from the statement.
    #[serde(rename = "NotAction", default)]
    pub not_actions: ActionSet,
    /// Resources the statement applies to.
    #[serde(rename = "Resource", default)]
    pub resources: ResourceSet,
    /// Resources explicitly excluded from the statement.
    #[serde(rename = "NotResource", default)]
    pub not_resources: ResourceSet,
    /// Condition functions that must all evaluate to true.
    #[serde(rename = "Condition", default)]
    pub conditions: Functions,
}
impl Statement {
    /// True when any action in the statement is a KMS action.
    fn is_kms(&self) -> bool {
        self.actions.iter().any(|act| matches!(act, Action::KmsAction(_)))
    }

    /// True when any action in the statement is an admin action.
    fn is_admin(&self) -> bool {
        self.actions.iter().any(|act| matches!(act, Action::AdminAction(_)))
    }

    /// True when any action in the statement is an STS action.
    fn is_sts(&self) -> bool {
        self.actions.iter().any(|act| matches!(act, Action::StsAction(_)))
    }

    /// Evaluate this statement against the request `args`.
    ///
    /// Builds a policy-variable resolver from the caller's claims and request
    /// conditions, matches the action and resource sets, evaluates the
    /// statement conditions, and finally applies the effect (`Deny` inverts
    /// the match result inside `Effect::is_allowed`).
    pub async fn is_allowed(&self, args: &Args<'_>) -> bool {
        let mut context = VariableContext::new();
        context.claims = Some(args.claims.clone());
        context.conditions = args.conditions.clone();
        context.account_id = Some(args.account.to_string());

        // For temp credentials or service-account credentials the effective
        // username is the parent user; for regular credentials it is the
        // access key itself.
        let username = args
            .claims
            .get("parent")
            .and_then(|v| v.as_str())
            .map(str::to_string)
            .unwrap_or_else(|| args.account.to_string());
        context.username = Some(username);

        let resolver = VariableResolver::new(context);

        let check = 'c: {
            // The action must be covered by `Action` (or `Action` be empty)
            // and must not be excluded by `NotAction`.
            if (!self.actions.is_match(&args.action) && !self.actions.is_empty()) || self.not_actions.is_match(&args.action) {
                break 'c false;
            }

            // Resource string is "<bucket>/<object>"; a bucket-level request
            // becomes "<bucket>/".
            let mut resource = String::from(args.bucket);
            if !args.object.is_empty() {
                if !args.object.starts_with('/') {
                    resource.push('/');
                }
                resource.push_str(args.object);
            } else {
                resource.push('/');
            }

            // KMS statements without a concrete resource only need their
            // conditions evaluated.
            if self.is_kms() && (resource == "/" || self.resources.is_empty()) {
                break 'c self.conditions.evaluate_with_resolver(args.conditions, Some(&resolver)).await;
            }

            // Admin/STS statements are not resource-scoped; every other
            // statement must match the resource set.
            if !self
                .resources
                .is_match_with_resolver(&resource, args.conditions, Some(&resolver))
                .await
                && !self.is_admin()
                && !self.is_sts()
            {
                break 'c false;
            }

            self.conditions.evaluate_with_resolver(args.conditions, Some(&resolver)).await
        };

        self.effect.is_allowed(check)
    }
}
impl Validator for Statement {
    type Error = Error;

    /// Validate effect, sid, and the action/resource sets. A statement must
    /// declare at least one of `Action`/`NotAction` and at least one resource.
    fn is_valid(&self) -> Result<()> {
        self.effect.is_valid()?;
        self.sid.is_valid()?;

        if self.actions.is_empty() && self.not_actions.is_empty() {
            return Err(IamError::NonAction.into());
        }
        if self.resources.is_empty() {
            return Err(IamError::NonResource.into());
        }

        self.actions.is_valid()?;
        self.not_actions.is_valid()?;
        self.resources.is_valid()
    }
}
impl PartialEq for Statement {
    // Equality compares only effect, actions, not_actions, resources and
    // conditions. `sid` is identifier metadata, but `not_resources` is a
    // matching clause — NOTE(review): confirm excluding it here is intended,
    // since two statements differing only in NotResource compare equal.
    fn eq(&self, other: &Self) -> bool {
        self.effect == other.effect
            && self.actions == other.actions
            && self.not_actions == other.not_actions
            && self.resources == other.resources
            && self.conditions == other.conditions
    }
}
/// A single statement of an S3 bucket policy (principal-based), as opposed to
/// [`Statement`], which is used for identity (IAM) policies.
#[derive(Debug, Deserialize, Serialize, Default, Clone)]
#[serde(rename_all = "PascalCase", default)]
pub struct BPStatement {
    // Optional statement identifier.
    #[serde(rename = "Sid", default)]
    pub sid: ID,
    // Allow or Deny.
    #[serde(rename = "Effect")]
    pub effect: Effect,
    // Accounts the statement applies to (supports wildcard patterns).
    #[serde(rename = "Principal")]
    pub principal: Principal,
    // Actions the statement covers.
    #[serde(rename = "Action")]
    pub actions: ActionSet,
    // Actions explicitly excluded from the statement.
    #[serde(rename = "NotAction", default)]
    pub not_actions: ActionSet,
    // Resources the statement covers.
    #[serde(rename = "Resource", default)]
    pub resources: ResourceSet,
    // Resources explicitly excluded from the statement.
    #[serde(rename = "NotResource", default)]
    pub not_resources: ResourceSet,
    // Condition functions that must all evaluate to true.
    #[serde(rename = "Condition", default)]
    pub conditions: Functions,
}
impl BPStatement {
    /// Evaluates this bucket-policy statement against the request described
    /// by `args`, combining the clause check with the statement's effect.
    pub async fn is_allowed(&self, args: &BucketPolicyArgs<'_>) -> bool {
        self.effect.is_allowed(self.matches(args).await)
    }
    /// Returns whether every clause of the statement matches `args`.
    async fn matches(&self, args: &BucketPolicyArgs<'_>) -> bool {
        if !self.principal.is_match(args.account) {
            return false;
        }
        // An empty action set matches everything; a NotAction hit always
        // disqualifies the statement.
        let action_hit = self.actions.is_match(&args.action) || self.actions.is_empty();
        if !action_hit || self.not_actions.is_match(&args.action) {
            return false;
        }
        let resource = Self::request_resource(args.bucket, args.object);
        if !self.resources.is_empty() && !self.resources.is_match(&resource, args.conditions).await {
            return false;
        }
        if !self.not_resources.is_empty() && self.not_resources.is_match(&resource, args.conditions).await {
            return false;
        }
        self.conditions.evaluate(args.conditions).await
    }
    /// Builds the "bucket/object" resource path used for matching; a
    /// bucket-only request becomes "bucket/".
    fn request_resource(bucket: &str, object: &str) -> String {
        let mut resource = String::from(bucket);
        if object.is_empty() {
            resource.push('/');
        } else {
            if !object.starts_with('/') {
                resource.push('/');
            }
            resource.push_str(object);
        }
        resource
    }
}
impl Validator for BPStatement {
    type Error = Error;
    /// Structural validation only; does not evaluate the statement.
    fn is_valid(&self) -> Result<()> {
        // Sub-component validity first.
        self.effect.is_valid()?;
        self.sid.is_valid()?;
        self.principal.is_valid()?;
        // At least one of Action / NotAction must be present.
        let missing_actions = self.actions.is_empty() && self.not_actions.is_empty();
        if missing_actions {
            return Err(IamError::NonAction.into());
        }
        // A Resource clause is mandatory.
        // NOTE(review): `not_resources` is not validated here — confirm.
        if self.resources.is_empty() {
            return Err(IamError::NonResource.into());
        }
        self.actions.is_valid()?;
        self.not_actions.is_valid()?;
        self.resources.is_valid()?;
        Ok(())
    }
}
| rust | Apache-2.0 | 666c0a9a38636eb6653dff7d9c98ff7122601ce2 | 2026-01-04T15:42:12.458416Z | false |
rustfs/rustfs | https://github.com/rustfs/rustfs/blob/666c0a9a38636eb6653dff7d9c98ff7122601ce2/crates/policy/src/policy/opa.rs | crates/policy/src/policy/opa.rs | // Copyright 2024 RustFS Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use crate::policy::Args as PArgs;
use rustfs_config::{ENV_PREFIX, opa::*};
use serde::Deserialize;
use serde_json::json;
use std::{collections::HashMap, env, time::Duration};
use tracing::{error, info};
/// Runtime configuration for the OPA (Open Policy Agent) authorization plugin.
#[derive(Debug, Clone, Default)]
pub struct Args {
    // Endpoint of the OPA decision API; empty means the plugin is disabled.
    pub url: String,
    // Optional token sent as `Authorization: Bearer <token>`.
    pub auth_token: String,
}
impl Args {
    /// Returns true when OPA integration is configured (non-empty URL).
    pub fn enable(&self) -> bool {
        !self.url.is_empty()
    }
}
/// Authorization plugin that delegates policy decisions to an external OPA server.
#[derive(Debug, Clone)]
pub struct AuthZPlugin {
    // Reused HTTP client (connection pooling, keep-alive configured in `new`).
    client: reqwest::Client,
    // Plugin configuration (endpoint + optional auth token).
    args: Args,
}
/// Validates the OPA-related environment variables.
///
/// Requires `ENV_POLICY_PLUGIN_OPA_URL`; tolerates the optional
/// `ENV_POLICY_PLUGIN_AUTH_TOKEN`; rejects any other variable sharing the
/// policy-plugin prefix so that typos are surfaced early.
fn check() -> Result<(), String> {
    let prefix = format!("{ENV_PREFIX}{POLICY_PLUGIN_SUB_SYS}").to_uppercase();
    // Collect every env var in the plugin namespace. `env::vars()` already
    // yields owned Strings, so no extra `to_string()` clone is needed.
    let mut candidate: HashMap<String, String> = env::vars().filter(|(key, _)| key.starts_with(&prefix)).collect();
    // Required env var.
    if candidate.remove(ENV_POLICY_PLUGIN_OPA_URL).is_none() {
        return Err(format!("Missing required env var: {ENV_POLICY_PLUGIN_OPA_URL}"));
    }
    // Optional env var.
    candidate.remove(ENV_POLICY_PLUGIN_AUTH_TOKEN);
    // Anything left over is unknown and therefore an error.
    if !candidate.is_empty() {
        return Err(format!("Invalid env vars: {candidate:?}"));
    }
    Ok(())
}
/// Probes the configured OPA endpoint and fails unless it answers 200 OK.
async fn validate(config: &Args) -> Result<(), String> {
    let resp = reqwest::Client::new()
        .post(&config.url)
        .send()
        .await
        .map_err(|err| format!("Error connecting to OPA: {err}"))?;
    if resp.status() != reqwest::StatusCode::OK {
        return Err(format!("OPA returned an error: {}", resp.status()));
    }
    info!("OPA is ready to accept requests.");
    Ok(())
}
/// Loads the OPA plugin configuration from the environment.
///
/// Returns a disabled (default) `Args` when `ENV_POLICY_PLUGIN_OPA_URL` is
/// unset; otherwise validates the env var set and probes the endpoint.
pub async fn lookup_config() -> Result<Args, String> {
    // Absent URL means OPA is simply not enabled — not an error.
    let Ok(url) = env::var(ENV_POLICY_PLUGIN_OPA_URL) else {
        info!("OPA is not enabled.");
        return Ok(Args::default());
    };
    check()?;
    let args = Args {
        url,
        auth_token: env::var(ENV_POLICY_PLUGIN_AUTH_TOKEN).unwrap_or_default(),
    };
    validate(&args).await?;
    Ok(args)
}
impl AuthZPlugin {
    /// Creates a plugin instance with a pooled, keep-alive HTTP client.
    pub fn new(config: Args) -> Self {
        let client = reqwest::Client::builder()
            .timeout(Duration::from_secs(5))
            .connect_timeout(Duration::from_secs(1))
            .pool_max_idle_per_host(10)
            .pool_idle_timeout(Some(Duration::from_secs(60)))
            .tcp_keepalive(Some(Duration::from_secs(30)))
            .tcp_nodelay(true)
            .http2_keep_alive_interval(Some(Duration::from_secs(30)))
            .http2_keep_alive_timeout(Duration::from_secs(15))
            .build()
            // The builder can only fail on invalid static configuration, so a
            // failure here is a programming error, not a runtime condition.
            .expect("static reqwest client configuration must be valid");
        Self { client, args: config }
    }
    /// Asks the OPA server whether the request described by `args` is allowed.
    ///
    /// Fails closed: any transport error, non-success status or unparsable
    /// response yields `false`.
    pub async fn is_allowed(&self, args: &PArgs<'_>) -> bool {
        let payload = self.build_opa_input(args);
        // Borrow the URL instead of cloning a String on every request.
        let mut request = self.client.post(self.args.url.as_str()).json(&payload);
        if !self.args.auth_token.is_empty() {
            request = request.header("Authorization", format!("Bearer {}", self.args.auth_token));
        }
        match request.send().await {
            Ok(resp) => {
                let status = resp.status();
                if !status.is_success() {
                    error!("OPA returned non-success status: {}", status);
                    return false;
                }
                // OPA may answer either `{"result": bool}` or
                // `{"result": {"allow": bool}}` depending on the queried rule.
                match resp.json::<OpaResponseEnum>().await {
                    Ok(response_enum) => match response_enum {
                        OpaResponseEnum::SimpleResult(result) => result.result,
                        OpaResponseEnum::AllowResult(response) => response.result.allow,
                    },
                    Err(err) => {
                        error!("Error parsing OPA response: {:?}", err);
                        false
                    }
                }
            }
            Err(err) => {
                error!("Error sending request to OPA: {:?}", err);
                false
            }
        }
    }
    /// Builds the OPA `input` document describing the request.
    fn build_opa_input(&self, args: &PArgs<'_>) -> serde_json::Value {
        let groups = match args.groups {
            Some(g) => g.clone(),
            None => vec![],
        };
        let action_str: &str = (&args.action).into();
        json!({
            // Core authorization parameters for OPA policy evaluation
            "input":{
                "identity": {
                    "account": args.account,
                    "groups": groups,
                    "is_owner": args.is_owner,
                    "claims": args.claims
                },
                "resource": {
                    "bucket": args.bucket,
                    "object": args.object,
                    "arn": if args.object.is_empty() {
                        format!("arn:aws:s3:::{}", args.bucket)
                    } else {
                        format!("arn:aws:s3:::{}/{}", args.bucket, args.object)
                    }
                },
                "action": action_str,
                "context": {
                    "conditions": args.conditions,
                    "deny_only": args.deny_only,
                    "timestamp": chrono::Utc::now().to_rfc3339()
                }
            }
        })
    }
}
// Wire formats for OPA responses. Depending on the rule queried, OPA answers
// either `{"result": <bool>}` or `{"result": {"allow": <bool>}}`.
#[derive(Deserialize, Default)]
struct OpaResultAllow {
    allow: bool,
}
#[derive(Deserialize, Default)]
struct OpaResult {
    result: bool,
}
#[derive(Deserialize, Default)]
struct OpaResponse {
    result: OpaResultAllow,
}
// Untagged: serde tries each variant in order until one matches the JSON.
#[derive(Deserialize)]
#[serde(untagged)]
enum OpaResponseEnum {
    SimpleResult(OpaResult),
    AllowResult(OpaResponse),
}
#[cfg(test)]
mod tests {
    use super::*;
    use temp_env;
    // Happy path: required URL plus optional token is accepted.
    #[test]
    fn test_check_valid_config() {
        // Use temp_env to temporarily set environment variables
        temp_env::with_vars(
            [
                ("RUSTFS_POLICY_PLUGIN_URL", Some("http://localhost:8181/v1/data/rustfs/authz/allow")),
                ("RUSTFS_POLICY_PLUGIN_AUTH_TOKEN", Some("test-token")),
            ],
            || {
                assert!(check().is_ok());
            },
        );
    }
    // A token without the required URL must be rejected.
    #[test]
    fn test_check_missing_required_env() {
        temp_env::with_var_unset("RUSTFS_POLICY_PLUGIN_URL", || {
            temp_env::with_var("RUSTFS_POLICY_PLUGIN_AUTH_TOKEN", Some("test-token"), || {
                let result = check();
                assert!(result.is_err());
                assert!(result.unwrap_err().contains("Missing required env var"));
            });
        });
    }
    // Unknown variables in the plugin namespace must be rejected.
    #[test]
    fn test_check_invalid_env_vars() {
        temp_env::with_vars(
            [
                ("RUSTFS_POLICY_PLUGIN_URL", Some("http://localhost:8181/v1/data/rustfs/authz/allow")),
                ("RUSTFS_POLICY_PLUGIN_INVALID", Some("invalid-value")),
            ],
            || {
                let result = check();
                assert!(result.is_err());
                assert!(result.unwrap_err().contains("Invalid env vars"));
            },
        );
    }
    // Without a URL, lookup_config returns a disabled default config.
    #[test]
    fn test_lookup_config_not_enabled() {
        temp_env::with_var_unset("RUSTFS_POLICY_PLUGIN_URL", || {
            let rt = tokio::runtime::Runtime::new().unwrap();
            let result = rt.block_on(async { lookup_config().await });
            // Should return the default empty Args
            assert!(result.is_ok());
            let args = result.unwrap();
            assert!(!args.enable());
            assert_eq!(args.url, "");
            assert_eq!(args.auth_token, "");
        });
    }
    #[test]
    fn test_args_enable() {
        // Test Args enable method
        let args_enabled = Args {
            url: "http://localhost:8181".to_string(),
            auth_token: "token".to_string(),
        };
        assert!(args_enabled.enable());
        let args_disabled = Args {
            url: "".to_string(),
            auth_token: "".to_string(),
        };
        assert!(!args_disabled.enable());
    }
}
| rust | Apache-2.0 | 666c0a9a38636eb6653dff7d9c98ff7122601ce2 | 2026-01-04T15:42:12.458416Z | false |
rustfs/rustfs | https://github.com/rustfs/rustfs/blob/666c0a9a38636eb6653dff7d9c98ff7122601ce2/crates/policy/src/policy/action.rs | crates/policy/src/policy/action.rs | // Copyright 2024 RustFS Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use crate::error::{Error, Result};
use serde::{
Deserialize, Deserializer, Serialize,
de::{self, Error as DeError, Visitor},
};
use std::{collections::HashSet, fmt, ops::Deref};
use strum::{EnumString, IntoStaticStr};
use super::{Error as IamError, Validator, utils::wildcard};
/// A set of policy actions; matching supports wildcards via [`Action::is_match`].
#[derive(Serialize, Clone, Default, Debug)]
pub struct ActionSet(pub HashSet<Action>);
impl ActionSet {
    /// Returns true when `action` is matched by any member of the set.
    ///
    /// As a special case, `s3:GetObjectVersion` in the set also grants
    /// `s3:GetObject`.
    pub fn is_match(&self, action: &Action) -> bool {
        self.0.iter().any(|act| {
            act.is_match(action)
                || (matches!(act, Action::S3Action(S3Action::GetObjectVersionAction))
                    && matches!(action, Action::S3Action(S3Action::GetObjectAction)))
        })
    }
}
impl Deref for ActionSet {
    type Target = HashSet<Action>;
    // Expose the inner set's read-only API (iter, len, contains, …) directly.
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl Validator for ActionSet {
    type Error = Error;
    // Action strings are already validated at parse time (`Action::try_from`),
    // so a constructed set is always valid.
    fn is_valid(&self) -> Result<()> {
        Ok(())
    }
}
impl PartialEq for ActionSet {
    /// Two sets are equal when they contain exactly the same actions.
    fn eq(&self, other: &Self) -> bool {
        // `HashSet`'s own `PartialEq` already performs the length check plus
        // element-wise containment that was previously hand-rolled here.
        self.0 == other.0
    }
}
impl<'de> Deserialize<'de> for ActionSet {
    // Accepts either a single action string ("s3:GetObject") or an array of
    // action strings, mirroring the AWS policy JSON shape.
    fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        struct ActionOrVecVisitor;
        impl<'de> Visitor<'de> for ActionOrVecVisitor {
            type Value = ActionSet;
            fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
                formatter.write_str("a string or an array of strings")
            }
            // Single string form: a one-element set.
            fn visit_str<E>(self, value: &str) -> std::result::Result<Self::Value, E>
            where
                E: de::Error,
            {
                let action = Action::try_from(value).map_err(|e| E::custom(format!("invalid action: {}", e)))?;
                let mut set = HashSet::new();
                set.insert(action);
                Ok(ActionSet(set))
            }
            // Array form: every element must parse; duplicates collapse.
            fn visit_seq<A>(self, mut seq: A) -> std::result::Result<Self::Value, A::Error>
            where
                A: de::SeqAccess<'de>,
                A::Error: DeError,
            {
                let mut set = HashSet::with_capacity(seq.size_hint().unwrap_or(0));
                while let Some(value) = seq.next_element::<String>()? {
                    match Action::try_from(value.as_str()) {
                        Ok(action) => {
                            set.insert(action);
                        }
                        Err(e) => {
                            return Err(A::Error::custom(format!("invalid action: {}", e)));
                        }
                    }
                }
                Ok(ActionSet(set))
            }
        }
        deserializer.deserialize_any(ActionOrVecVisitor)
    }
}
/// A policy action, namespaced by service prefix (`s3:`, `admin:`, `sts:`, `kms:`).
#[derive(Serialize, Deserialize, Hash, PartialEq, Eq, Clone, Debug, Copy)]
#[serde(try_from = "&str", untagged)]
pub enum Action {
    S3Action(S3Action),
    AdminAction(AdminAction),
    StsAction(StsAction),
    KmsAction(KmsAction),
    // Placeholder for "no action"; converts to the empty string.
    None,
}
impl Action {
    /// Returns true when `self` (treated as a wildcard pattern, e.g. `s3:*`)
    /// matches the concrete `action`.
    pub fn is_match(&self, action: &Action) -> bool {
        wildcard::is_match::<&str, &str>(self.into(), action.into())
    }
}
impl From<&Action> for &str {
    // Returns the canonical string form of the action (e.g. "s3:GetObject");
    // `Action::None` maps to the empty string.
    fn from(value: &Action) -> &'static str {
        match value {
            Action::S3Action(s) => s.into(),
            Action::AdminAction(s) => s.into(),
            Action::StsAction(s) => s.into(),
            Action::KmsAction(s) => s.into(),
            Action::None => "",
        }
    }
}
impl Action {
    // Service prefixes used to dispatch parsing in `TryFrom<&str>`.
    const S3_PREFIX: &'static str = "s3:";
    const ADMIN_PREFIX: &'static str = "admin:";
    const STS_PREFIX: &'static str = "sts:";
    const KMS_PREFIX: &'static str = "kms:";
}
impl TryFrom<&str> for Action {
type Error = Error;
fn try_from(value: &str) -> std::result::Result<Self, Self::Error> {
if value.starts_with(Self::S3_PREFIX) {
Ok(Self::S3Action(
S3Action::try_from(value).map_err(|_| IamError::InvalidAction(value.into()))?,
))
} else if value.starts_with(Self::ADMIN_PREFIX) {
Ok(Self::AdminAction(
AdminAction::try_from(value).map_err(|_| IamError::InvalidAction(value.into()))?,
))
} else if value.starts_with(Self::STS_PREFIX) {
Ok(Self::StsAction(
StsAction::try_from(value).map_err(|_| IamError::InvalidAction(value.into()))?,
))
} else if value.starts_with(Self::KMS_PREFIX) {
Ok(Self::KmsAction(
KmsAction::try_from(value).map_err(|_| IamError::InvalidAction(value.into()))?,
))
} else {
Err(IamError::InvalidAction(value.into()).into())
}
}
}
/// S3 policy actions (the `s3:` namespace); string forms are defined by the
/// `strum` attributes. `AllActions` (`s3:*`) matches every S3 action through
/// wildcard matching.
#[derive(Serialize, Deserialize, Hash, PartialEq, Eq, Clone, EnumString, IntoStaticStr, Debug, Copy)]
#[cfg_attr(test, derive(Default))]
#[serde(try_from = "&str", into = "&str")]
pub enum S3Action {
    #[cfg_attr(test, default)]
    #[strum(serialize = "s3:*")]
    AllActions,
    #[strum(serialize = "s3:AbortMultipartUpload")]
    AbortMultipartUploadAction,
    #[strum(serialize = "s3:CreateBucket")]
    CreateBucketAction,
    #[strum(serialize = "s3:DeleteBucket")]
    DeleteBucketAction,
    #[strum(serialize = "s3:ForceDeleteBucket")]
    ForceDeleteBucketAction,
    #[strum(serialize = "s3:DeleteBucketPolicy")]
    DeleteBucketPolicyAction,
    #[strum(serialize = "s3:DeleteBucketCors")]
    DeleteBucketCorsAction,
    #[strum(serialize = "s3:DeleteObject")]
    DeleteObjectAction,
    #[strum(serialize = "s3:GetBucketLocation")]
    GetBucketLocationAction,
    #[strum(serialize = "s3:GetBucketNotification")]
    GetBucketNotificationAction,
    #[strum(serialize = "s3:GetBucketPolicy")]
    GetBucketPolicyAction,
    #[strum(serialize = "s3:GetBucketCors")]
    GetBucketCorsAction,
    #[strum(serialize = "s3:GetObject")]
    GetObjectAction,
    #[strum(serialize = "s3:GetObjectAttributes")]
    GetObjectAttributesAction,
    #[strum(serialize = "s3:HeadBucket")]
    HeadBucketAction,
    #[strum(serialize = "s3:ListAllMyBuckets")]
    ListAllMyBucketsAction,
    #[strum(serialize = "s3:ListBucket")]
    ListBucketAction,
    #[strum(serialize = "s3:GetBucketPolicyStatus")]
    GetBucketPolicyStatusAction,
    #[strum(serialize = "s3:ListBucketVersions")]
    ListBucketVersionsAction,
    #[strum(serialize = "s3:ListBucketMultipartUploads")]
    ListBucketMultipartUploadsAction,
    #[strum(serialize = "s3:ListenNotification")]
    ListenNotificationAction,
    #[strum(serialize = "s3:ListenBucketNotification")]
    ListenBucketNotificationAction,
    #[strum(serialize = "s3:ListMultipartUploadParts")]
    ListMultipartUploadPartsAction,
    #[strum(serialize = "s3:PutBucketLifecycle")]
    PutBucketLifecycleAction,
    #[strum(serialize = "s3:GetBucketLifecycle")]
    GetBucketLifecycleAction,
    #[strum(serialize = "s3:PutBucketNotification")]
    PutBucketNotificationAction,
    #[strum(serialize = "s3:PutBucketPolicy")]
    PutBucketPolicyAction,
    #[strum(serialize = "s3:PutBucketCors")]
    PutBucketCorsAction,
    #[strum(serialize = "s3:PutObject")]
    PutObjectAction,
    #[strum(serialize = "s3:DeleteObjectVersion")]
    DeleteObjectVersionAction,
    #[strum(serialize = "s3:DeleteObjectVersionTagging")]
    DeleteObjectVersionTaggingAction,
    #[strum(serialize = "s3:GetObjectVersion")]
    GetObjectVersionAction,
    #[strum(serialize = "s3:GetObjectVersionAttributes")]
    GetObjectVersionAttributesAction,
    #[strum(serialize = "s3:GetObjectVersionTagging")]
    GetObjectVersionTaggingAction,
    #[strum(serialize = "s3:PutObjectVersionTagging")]
    PutObjectVersionTaggingAction,
    #[strum(serialize = "s3:BypassGovernanceRetention")]
    BypassGovernanceRetentionAction,
    #[strum(serialize = "s3:PutObjectRetention")]
    PutObjectRetentionAction,
    #[strum(serialize = "s3:GetObjectRetention")]
    GetObjectRetentionAction,
    #[strum(serialize = "s3:GetObjectLegalHold")]
    GetObjectLegalHoldAction,
    #[strum(serialize = "s3:PutObjectLegalHold")]
    PutObjectLegalHoldAction,
    #[strum(serialize = "s3:GetBucketObjectLockConfiguration")]
    GetBucketObjectLockConfigurationAction,
    #[strum(serialize = "s3:PutBucketObjectLockConfiguration")]
    PutBucketObjectLockConfigurationAction,
    #[strum(serialize = "s3:GetBucketTagging")]
    GetBucketTaggingAction,
    #[strum(serialize = "s3:PutBucketTagging")]
    PutBucketTaggingAction,
    #[strum(serialize = "s3:GetObjectTagging")]
    GetObjectTaggingAction,
    #[strum(serialize = "s3:PutObjectTagging")]
    PutObjectTaggingAction,
    #[strum(serialize = "s3:DeleteObjectTagging")]
    DeleteObjectTaggingAction,
    #[strum(serialize = "s3:PutBucketEncryption")]
    PutBucketEncryptionAction,
    #[strum(serialize = "s3:GetBucketEncryption")]
    GetBucketEncryptionAction,
    #[strum(serialize = "s3:PutBucketVersioning")]
    PutBucketVersioningAction,
    #[strum(serialize = "s3:GetBucketVersioning")]
    GetBucketVersioningAction,
    #[strum(serialize = "s3:GetReplicationConfiguration")]
    GetReplicationConfigurationAction,
    #[strum(serialize = "s3:PutReplicationConfiguration")]
    PutReplicationConfigurationAction,
    #[strum(serialize = "s3:ReplicateObject")]
    ReplicateObjectAction,
    #[strum(serialize = "s3:ReplicateDelete")]
    ReplicateDeleteAction,
    #[strum(serialize = "s3:ReplicateTags")]
    ReplicateTagsAction,
    #[strum(serialize = "s3:GetObjectVersionForReplication")]
    GetObjectVersionForReplicationAction,
    #[strum(serialize = "s3:RestoreObject")]
    RestoreObjectAction,
    #[strum(serialize = "s3:ResetBucketReplicationState")]
    ResetBucketReplicationStateAction,
    #[strum(serialize = "s3:PutObjectFanOut")]
    PutObjectFanOutAction,
}
// #[derive(Serialize, Deserialize, Hash, PartialEq, Eq, Clone, EnumString, IntoStaticStr, Debug, Copy)]
// #[serde(try_from = "&str", into = "&str")]
// pub enum AdminAction {
// #[strum(serialize = "admin:*")]
// AllActions,
// #[strum(serialize = "admin:Profiling")]
// ProfilingAdminAction,
// #[strum(serialize = "admin:ServerTrace")]
// TraceAdminAction,
// #[strum(serialize = "admin:ConsoleLog")]
// ConsoleLogAdminAction,
// #[strum(serialize = "admin:ServerInfo")]
// ServerInfoAdminAction,
// #[strum(serialize = "admin:OBDInfo")]
// HealthInfoAdminAction,
// #[strum(serialize = "admin:TopLocksInfo")]
// TopLocksAdminAction,
// #[strum(serialize = "admin:LicenseInfo")]
// LicenseInfoAdminAction,
// #[strum(serialize = "admin:BandwidthMonitor")]
// BandwidthMonitorAction,
// #[strum(serialize = "admin:InspectData")]
// InspectDataAction,
// #[strum(serialize = "admin:Prometheus")]
// PrometheusAdminAction,
// #[strum(serialize = "admin:ListServiceAccounts")]
// ListServiceAccountsAdminAction,
// #[strum(serialize = "admin:CreateServiceAccount")]
// CreateServiceAccountAdminAction,
// }
/// AdminAction - admin policy action (the `admin:` namespace). String forms
/// are defined by the `strum` attributes; `AllAdminActions` is `admin:*`.
#[derive(Serialize, Deserialize, Hash, PartialEq, Eq, Clone, EnumString, IntoStaticStr, Debug, Copy)]
#[serde(try_from = "&str", into = "&str")]
pub enum AdminAction {
    #[strum(serialize = "admin:Heal")]
    HealAdminAction,
    #[strum(serialize = "admin:Decommission")]
    DecommissionAdminAction,
    #[strum(serialize = "admin:Rebalance")]
    RebalanceAdminAction,
    #[strum(serialize = "admin:StorageInfo")]
    StorageInfoAdminAction,
    #[strum(serialize = "admin:Prometheus")]
    PrometheusAdminAction,
    #[strum(serialize = "admin:DataUsageInfo")]
    DataUsageInfoAdminAction,
    #[strum(serialize = "admin:ForceUnlock")]
    ForceUnlockAdminAction,
    #[strum(serialize = "admin:TopLocksInfo")]
    TopLocksAdminAction,
    #[strum(serialize = "admin:Profiling")]
    ProfilingAdminAction,
    #[strum(serialize = "admin:ServerTrace")]
    TraceAdminAction,
    #[strum(serialize = "admin:ConsoleLog")]
    ConsoleLogAdminAction,
    #[strum(serialize = "admin:KMSCreateKey")]
    KMSCreateKeyAdminAction,
    #[strum(serialize = "admin:KMSKeyStatus")]
    KMSKeyStatusAdminAction,
    #[strum(serialize = "admin:ServerInfo")]
    ServerInfoAdminAction,
    #[strum(serialize = "admin:OBDInfo")]
    HealthInfoAdminAction,
    #[strum(serialize = "admin:LicenseInfo")]
    LicenseInfoAdminAction,
    #[strum(serialize = "admin:BandwidthMonitor")]
    BandwidthMonitorAction,
    #[strum(serialize = "admin:InspectData")]
    InspectDataAction,
    #[strum(serialize = "admin:ServerUpdate")]
    ServerUpdateAdminAction,
    #[strum(serialize = "admin:ServiceRestart")]
    ServiceRestartAdminAction,
    #[strum(serialize = "admin:ServiceStop")]
    ServiceStopAdminAction,
    #[strum(serialize = "admin:ServiceFreeze")]
    ServiceFreezeAdminAction,
    #[strum(serialize = "admin:ConfigUpdate")]
    ConfigUpdateAdminAction,
    #[strum(serialize = "admin:CreateUser")]
    CreateUserAdminAction,
    #[strum(serialize = "admin:DeleteUser")]
    DeleteUserAdminAction,
    #[strum(serialize = "admin:ListUsers")]
    ListUsersAdminAction,
    #[strum(serialize = "admin:EnableUser")]
    EnableUserAdminAction,
    #[strum(serialize = "admin:DisableUser")]
    DisableUserAdminAction,
    #[strum(serialize = "admin:GetUser")]
    GetUserAdminAction,
    #[strum(serialize = "admin:SiteReplicationAdd")]
    SiteReplicationAddAction,
    #[strum(serialize = "admin:SiteReplicationDisable")]
    SiteReplicationDisableAction,
    #[strum(serialize = "admin:SiteReplicationRemove")]
    SiteReplicationRemoveAction,
    #[strum(serialize = "admin:SiteReplicationResync")]
    SiteReplicationResyncAction,
    #[strum(serialize = "admin:SiteReplicationInfo")]
    SiteReplicationInfoAction,
    #[strum(serialize = "admin:SiteReplicationOperation")]
    SiteReplicationOperationAction,
    #[strum(serialize = "admin:CreateServiceAccount")]
    CreateServiceAccountAdminAction,
    #[strum(serialize = "admin:UpdateServiceAccount")]
    UpdateServiceAccountAdminAction,
    #[strum(serialize = "admin:RemoveServiceAccount")]
    RemoveServiceAccountAdminAction,
    #[strum(serialize = "admin:ListServiceAccounts")]
    ListServiceAccountsAdminAction,
    #[strum(serialize = "admin:ListTemporaryAccounts")]
    ListTemporaryAccountsAdminAction,
    #[strum(serialize = "admin:AddUserToGroup")]
    AddUserToGroupAdminAction,
    #[strum(serialize = "admin:RemoveUserFromGroup")]
    RemoveUserFromGroupAdminAction,
    #[strum(serialize = "admin:GetGroup")]
    GetGroupAdminAction,
    #[strum(serialize = "admin:ListGroups")]
    ListGroupsAdminAction,
    #[strum(serialize = "admin:EnableGroup")]
    EnableGroupAdminAction,
    #[strum(serialize = "admin:DisableGroup")]
    DisableGroupAdminAction,
    #[strum(serialize = "admin:CreatePolicy")]
    CreatePolicyAdminAction,
    #[strum(serialize = "admin:DeletePolicy")]
    DeletePolicyAdminAction,
    #[strum(serialize = "admin:GetPolicy")]
    GetPolicyAdminAction,
    #[strum(serialize = "admin:AttachUserOrGroupPolicy")]
    AttachPolicyAdminAction,
    #[strum(serialize = "admin:UpdatePolicyAssociation")]
    UpdatePolicyAssociationAction,
    #[strum(serialize = "admin:ListUserPolicies")]
    ListUserPoliciesAdminAction,
    #[strum(serialize = "admin:SetBucketQuota")]
    SetBucketQuotaAdminAction,
    #[strum(serialize = "admin:GetBucketQuota")]
    GetBucketQuotaAdminAction,
    #[strum(serialize = "admin:SetBucketTarget")]
    SetBucketTargetAction,
    #[strum(serialize = "admin:GetBucketTarget")]
    GetBucketTargetAction,
    #[strum(serialize = "admin:ReplicationDiff")]
    ReplicationDiff,
    #[strum(serialize = "admin:ImportBucketMetadata")]
    ImportBucketMetadataAction,
    #[strum(serialize = "admin:ExportBucketMetadata")]
    ExportBucketMetadataAction,
    #[strum(serialize = "admin:SetTier")]
    SetTierAction,
    #[strum(serialize = "admin:ListTier")]
    ListTierAction,
    #[strum(serialize = "admin:ExportIAM")]
    ExportIAMAction,
    #[strum(serialize = "admin:ImportIAM")]
    ImportIAMAction,
    #[strum(serialize = "admin:ListBatchJobs")]
    ListBatchJobsAction,
    #[strum(serialize = "admin:DescribeBatchJob")]
    DescribeBatchJobAction,
    #[strum(serialize = "admin:StartBatchJob")]
    StartBatchJobAction,
    #[strum(serialize = "admin:CancelBatchJob")]
    CancelBatchJobAction,
    #[strum(serialize = "admin:*")]
    AllAdminActions,
}
impl AdminAction {
    // IsValid - checks if action is valid or not.
    // NOTE(review): this list currently enumerates every variant of the enum,
    // so it returns true for all values. It only gates variants that might be
    // added to the enum later without being added here — confirm whether that
    // gatekeeping is intentional before simplifying this to `true`.
    pub fn is_valid(&self) -> bool {
        matches!(
            self,
            AdminAction::HealAdminAction
                | AdminAction::DecommissionAdminAction
                | AdminAction::RebalanceAdminAction
                | AdminAction::StorageInfoAdminAction
                | AdminAction::PrometheusAdminAction
                | AdminAction::DataUsageInfoAdminAction
                | AdminAction::ForceUnlockAdminAction
                | AdminAction::TopLocksAdminAction
                | AdminAction::ProfilingAdminAction
                | AdminAction::TraceAdminAction
                | AdminAction::ConsoleLogAdminAction
                | AdminAction::KMSCreateKeyAdminAction
                | AdminAction::KMSKeyStatusAdminAction
                | AdminAction::ServerInfoAdminAction
                | AdminAction::HealthInfoAdminAction
                | AdminAction::LicenseInfoAdminAction
                | AdminAction::BandwidthMonitorAction
                | AdminAction::InspectDataAction
                | AdminAction::ServerUpdateAdminAction
                | AdminAction::ServiceRestartAdminAction
                | AdminAction::ServiceStopAdminAction
                | AdminAction::ServiceFreezeAdminAction
                | AdminAction::ConfigUpdateAdminAction
                | AdminAction::CreateUserAdminAction
                | AdminAction::DeleteUserAdminAction
                | AdminAction::ListUsersAdminAction
                | AdminAction::EnableUserAdminAction
                | AdminAction::DisableUserAdminAction
                | AdminAction::GetUserAdminAction
                | AdminAction::SiteReplicationAddAction
                | AdminAction::SiteReplicationDisableAction
                | AdminAction::SiteReplicationRemoveAction
                | AdminAction::SiteReplicationResyncAction
                | AdminAction::SiteReplicationInfoAction
                | AdminAction::SiteReplicationOperationAction
                | AdminAction::CreateServiceAccountAdminAction
                | AdminAction::UpdateServiceAccountAdminAction
                | AdminAction::RemoveServiceAccountAdminAction
                | AdminAction::ListServiceAccountsAdminAction
                | AdminAction::ListTemporaryAccountsAdminAction
                | AdminAction::AddUserToGroupAdminAction
                | AdminAction::RemoveUserFromGroupAdminAction
                | AdminAction::GetGroupAdminAction
                | AdminAction::ListGroupsAdminAction
                | AdminAction::EnableGroupAdminAction
                | AdminAction::DisableGroupAdminAction
                | AdminAction::CreatePolicyAdminAction
                | AdminAction::DeletePolicyAdminAction
                | AdminAction::GetPolicyAdminAction
                | AdminAction::AttachPolicyAdminAction
                | AdminAction::UpdatePolicyAssociationAction
                | AdminAction::ListUserPoliciesAdminAction
                | AdminAction::SetBucketQuotaAdminAction
                | AdminAction::GetBucketQuotaAdminAction
                | AdminAction::SetBucketTargetAction
                | AdminAction::GetBucketTargetAction
                | AdminAction::ReplicationDiff
                | AdminAction::ImportBucketMetadataAction
                | AdminAction::ExportBucketMetadataAction
                | AdminAction::SetTierAction
                | AdminAction::ListTierAction
                | AdminAction::ExportIAMAction
                | AdminAction::ImportIAMAction
                | AdminAction::ListBatchJobsAction
                | AdminAction::DescribeBatchJobAction
                | AdminAction::StartBatchJobAction
                | AdminAction::CancelBatchJobAction
                | AdminAction::AllAdminActions
        )
    }
}
/// STS policy actions (the `sts:` namespace). Currently empty, so every
/// `sts:`-prefixed string fails `StsAction::try_from` and is rejected by
/// `Action::try_from`.
#[derive(Serialize, Deserialize, Hash, PartialEq, Eq, Clone, EnumString, IntoStaticStr, Debug, Copy)]
#[serde(try_from = "&str", into = "&str")]
pub enum StsAction {}
/// KMS policy actions (the `kms:` namespace). Only the wildcard `kms:*` is
/// currently defined.
#[derive(Serialize, Deserialize, Hash, PartialEq, Eq, Clone, EnumString, IntoStaticStr, Debug, Copy)]
#[serde(try_from = "&str", into = "&str")]
pub enum KmsAction {
    #[strum(serialize = "kms:*")]
    AllActions,
}
| rust | Apache-2.0 | 666c0a9a38636eb6653dff7d9c98ff7122601ce2 | 2026-01-04T15:42:12.458416Z | false |
rustfs/rustfs | https://github.com/rustfs/rustfs/blob/666c0a9a38636eb6653dff7d9c98ff7122601ce2/crates/policy/src/policy/principal.rs | crates/policy/src/policy/principal.rs | // Copyright 2024 RustFS Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use super::{Validator, utils::wildcard};
use crate::error::Error;
use serde::Serialize;
use std::collections::HashSet;
/// The `Principal` element of a bucket policy: the set of AWS account
/// patterns the statement applies to (patterns may use wildcards; `"*"`
/// matches everyone).
#[derive(Debug, Clone, Serialize, Default, PartialEq, Eq)]
#[serde(rename_all = "PascalCase", default)]
pub struct Principal {
    #[serde(rename = "AWS")]
    aws: HashSet<String>,
}
// Intermediate shapes used to deserialize `Principal`, which may appear in
// policy JSON as `"*"`, `{"AWS": "value"}` or `{"AWS": ["v1", "v2"]}`.
#[derive(serde::Deserialize)]
#[serde(untagged)]
enum PrincipalFormat {
    Wildcard(String),
    AwsObject(PrincipalAwsObject),
}
#[derive(serde::Deserialize)]
struct PrincipalAwsObject {
    #[serde(rename = "AWS")]
    aws: AwsValues,
}
#[derive(serde::Deserialize)]
#[serde(untagged)]
enum AwsValues {
    Single(String),
    Multiple(HashSet<String>),
}
impl<'de> serde::Deserialize<'de> for Principal {
fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
where
D: serde::Deserializer<'de>,
{
let format = PrincipalFormat::deserialize(deserializer)?;
match format {
PrincipalFormat::Wildcard(s) => {
if s == "*" {
Ok(Principal {
aws: HashSet::from(["*".to_string()]),
})
} else {
Err(serde::de::Error::custom(format!(
"invalid wildcard principal value: expected \"*\", got \"{}\"",
s
)))
}
}
PrincipalFormat::AwsObject(obj) => {
let aws = match obj.aws {
AwsValues::Single(s) => HashSet::from([s]),
AwsValues::Multiple(set) => set,
};
Ok(Principal { aws })
}
}
}
}
impl Principal {
    /// Returns true when `principal` matches any configured AWS pattern
    /// (patterns may contain wildcards).
    ///
    /// Note: parameter typo "parincipal" fixed; Rust has no named arguments,
    /// so call sites are unaffected.
    pub fn is_match(&self, principal: &str) -> bool {
        self.aws.iter().any(|pattern| wildcard::is_simple_match(pattern, principal))
    }
}
impl Validator for Principal {
type Error = Error;
fn is_valid(&self) -> Result<(), Error> {
if self.aws.is_empty() {
return Err(Error::other("Principal is empty"));
}
Ok(())
}
}
#[cfg(test)]
mod test {
    use super::*;
    use serde_json;
    use test_case::test_case;
    #[test_case(r#""*""#, true ; "wildcard_string")]
    #[test_case(r#"{"AWS": "*"}"#, true ; "aws_object_single_string")]
    #[test_case(r#"{"AWS": ["*"]}"#, true ; "aws_object_array")]
    #[test_case(r#""invalid""#, false ; "invalid_string")]
    #[test_case(r#""""#, false ; "empty_string")]
    #[test_case(r#"{"Other": "*"}"#, false ; "wrong_field")]
    #[test_case(r#"{}"#, false ; "empty_object")]
    fn test_principal_parsing(json: &str, should_succeed: bool) {
        match serde_json::from_str::<Principal>(json) {
            Ok(principal) => {
                // Every fixture that parses uses the wildcard principal.
                assert!(principal.aws.contains("*"));
                assert!(should_succeed);
            }
            Err(_) => assert!(!should_succeed),
        }
    }
}
| rust | Apache-2.0 | 666c0a9a38636eb6653dff7d9c98ff7122601ce2 | 2026-01-04T15:42:12.458416Z | false |
rustfs/rustfs | https://github.com/rustfs/rustfs/blob/666c0a9a38636eb6653dff7d9c98ff7122601ce2/crates/policy/src/policy/utils/wildcard.rs | crates/policy/src/policy/utils/wildcard.rs | // Copyright 2024 RustFS Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#[allow(dead_code)]
pub fn is_simple_match<P, N>(pattern: P, name: N) -> bool
where
P: AsRef<str>,
N: AsRef<str>,
{
inner_match(pattern, name, true)
}
pub fn is_match<P, N>(pattern: P, name: N) -> bool
where
P: AsRef<str>,
N: AsRef<str>,
{
inner_match(pattern, name, false)
}
/// Returns true when `text` could still grow into a string matched by
/// `pattern`, i.e. `text` is a prefix-compatible candidate: a `*` anywhere in
/// the compared region matches unconditionally, `?` matches any single byte,
/// and once the compared region ends `text` must not be longer than `pattern`.
#[allow(dead_code)]
pub fn is_match_as_pattern_prefix<P, N>(pattern: P, text: N) -> bool
where
    P: AsRef<str>,
    N: AsRef<str>,
{
    let pat = pattern.as_ref().as_bytes();
    let txt = text.as_ref().as_bytes();
    for (&pc, &tc) in pat.iter().zip(txt.iter()) {
        match pc {
            // '*' can absorb everything that follows.
            b'*' => return true,
            // '?' matches any one byte.
            b'?' => {}
            // Literal bytes must agree.
            _ if pc != tc => return false,
            _ => {}
        }
    }
    // One side ran out: text remains a viable prefix only if it is no longer
    // than the pattern.
    txt.len() <= pat.len()
}
/// Shared entry point for the two public match modes. Handles the trivial
/// patterns ("" matches only the empty name; "*" matches everything) before
/// delegating to the byte-level matcher.
#[inline]
fn inner_match(pattern: impl AsRef<str>, name: impl AsRef<str>, simple: bool) -> bool {
    let pattern = pattern.as_ref();
    let name = name.as_ref();
    match pattern {
        "" => name.is_empty(),
        "*" => true,
        _ => deep_match(pattern.as_bytes(), name.as_bytes(), simple),
    }
}
/// Byte-level wildcard matcher. `*` matches any (possibly empty) run and `?`
/// matches one byte; when `simple` is true, a `?` reached after `name` is
/// exhausted counts as a match. Backtracking for `*` is done via recursion.
fn deep_match(mut pattern: &[u8], mut name: &[u8], simple: bool) -> bool {
    loop {
        let Some((&op, rest)) = pattern.split_first() else {
            // Pattern consumed: match only if the name is consumed too.
            return name.is_empty();
        };
        match op {
            b'?' => {
                // '?' against an exhausted name succeeds only in simple mode.
                if name.is_empty() {
                    return simple;
                }
            }
            b'*' => {
                // Trailing '*' matches anything; otherwise try matching zero
                // characters first, then one-or-more by advancing the name.
                return rest.is_empty()
                    || deep_match(rest, name, simple)
                    || (!name.is_empty() && deep_match(pattern, &name[1..], simple));
            }
            literal => {
                if name.first() != Some(&literal) {
                    return false;
                }
            }
        }
        // Both '?' (non-empty name) and a matched literal consume one byte.
        pattern = rest;
        name = &name[1..];
    }
}
#[cfg(test)]
mod tests {
    use super::{is_match, is_match_as_pattern_prefix, is_simple_match};
    // Table-driven cases in the form `pattern, text => expected`.
    // Case 14 is marked `ignore`: the '*' backtracking does not yet cover it.
    #[test_case::test_case("*", "s3:GetObject" => true ; "1")]
    #[test_case::test_case("", "s3:GetObject" => false ; "2")]
    #[test_case::test_case("", "" => true; "3")]
    #[test_case::test_case("s3:*", "s3:ListMultipartUploadParts" => true; "4")]
    #[test_case::test_case("s3:ListBucketMultipartUploads", "s3:ListBucket" => false; "5")]
    #[test_case::test_case("s3:ListBucket", "s3:ListBucket" => true; "6")]
    #[test_case::test_case("s3:ListBucketMultipartUploads", "s3:ListBucketMultipartUploads" => true; "7")]
    #[test_case::test_case("my-bucket/oo*", "my-bucket/oo" => true; "8")]
    #[test_case::test_case("my-bucket/In*", "my-bucket/India/Karnataka/" => true; "9")]
    #[test_case::test_case("my-bucket/In*", "my-bucket/Karnataka/India/" => false; "10")]
    #[test_case::test_case("my-bucket/In*/Ka*/Ban", "my-bucket/India/Karnataka/Ban" => true; "11")]
    #[test_case::test_case("my-bucket/In*/Ka*/Ban", "my-bucket/India/Karnataka/Ban/Ban/Ban/Ban/Ban" => true; "12")]
    #[test_case::test_case("my-bucket/In*/Ka*/Ban", "my-bucket/India/Karnataka/Area1/Area2/Area3/Ban" => true; "13")]
    #[test_case::test_case( "my-bucket/In*/Ka*/Ba", "my-bucket/India/State1/State2/Karnataka/Area1/Area2/Area3/Ban" => ignore["will fail"] true; "14")]
    #[test_case::test_case("my-bucket/In*/Ka*/Ban", "my-bucket/India/Karnataka/Bangalore" => false; "15")]
    #[test_case::test_case("my-bucket/In*/Ka*/Ban*", "my-bucket/India/Karnataka/Bangalore" => true; "16")]
    #[test_case::test_case("my-bucket/*", "my-bucket/India" => true; "17")]
    #[test_case::test_case("my-bucket/oo*", "my-bucket/odo" => false; "18")]
    #[test_case::test_case("my-bucket?/abc*", "mybucket/abc" => false; "19")]
    #[test_case::test_case("my-bucket?/abc*", "my-bucket1/abc" => true; "20")]
    #[test_case::test_case("my-?-bucket/abc*", "my--bucket/abc" => false; "21")]
    #[test_case::test_case("my-?-bucket/abc*", "my-1-bucket/abc" => true; "22")]
    #[test_case::test_case("my-?-bucket/abc*", "my-k-bucket/abc" => true; "23")]
    #[test_case::test_case("my??bucket/abc*", "mybucket/abc" => false; "24")]
    #[test_case::test_case("my??bucket/abc*", "my4abucket/abc" => true; "25")]
    #[test_case::test_case("my-bucket?abc*", "my-bucket/abc" => true; "26")]
    #[test_case::test_case("my-bucket/abc?efg", "my-bucket/abcdefg" => true; "27")]
    #[test_case::test_case("my-bucket/abc?efg", "my-bucket/abc/efg" => true; "28")]
    #[test_case::test_case("my-bucket/abc????", "my-bucket/abcde" => false; "29")]
    #[test_case::test_case("my-bucket/abc????", "my-bucket/abcdefg" => true; "30")]
    #[test_case::test_case("my-bucket/abc?", "my-bucket/abc" => false; "31")]
    #[test_case::test_case("my-bucket/abc?", "my-bucket/abcd" => true; "32")]
    #[test_case::test_case("my-bucket/abc?", "my-bucket/abcde" => false; "33")]
    #[test_case::test_case("my-bucket/mnop*?", "my-bucket/mnop" => false; "34")]
    #[test_case::test_case("my-bucket/mnop*?", "my-bucket/mnopqrst/mnopqr" => true; "35")]
    #[test_case::test_case("my-bucket/mnop*?", "my-bucket/mnopqrst/mnopqrs" => true; "36")]
    #[test_case::test_case("my-bucket/mnop*?", "my-bucket/mnop" => false; "37")]
    #[test_case::test_case("my-bucket/mnop*?", "my-bucket/mnopq" => true; "38")]
    #[test_case::test_case("my-bucket/mnop*?", "my-bucket/mnopqr" => true; "39")]
    #[test_case::test_case("my-bucket/mnop*?and", "my-bucket/mnopqand" => true; "40")]
    #[test_case::test_case("my-bucket/mnop*?and", "my-bucket/mnopand" => false; "41")]
    #[test_case::test_case("my-bucket/mnop*?and", "my-bucket/mnopqand" => true; "42")]
    #[test_case::test_case("my-bucket/mnop*?", "my-bucket/mn" => false; "43")]
    #[test_case::test_case("my-bucket/mnop*?", "my-bucket/mnopqrst/mnopqrs" => true; "44")]
    #[test_case::test_case("my-bucket/mnop*??", "my-bucket/mnopqrst" => true; "45")]
    #[test_case::test_case("my-bucket/mnop*qrst", "my-bucket/mnopabcdegqrst" => true; "46")]
    #[test_case::test_case("my-bucket/mnop*?and", "my-bucket/mnopqand" => true; "47")]
    #[test_case::test_case("my-bucket/mnop*?and", "my-bucket/mnopand" => false; "48")]
    #[test_case::test_case("my-bucket/mnop*?and?", "my-bucket/mnopqanda" => true; "49")]
    #[test_case::test_case("my-bucket/mnop*?and", "my-bucket/mnopqanda" => false; "50")]
    #[test_case::test_case("my-?-bucket/abc*", "my-bucket/mnopqanda" => false; "51")]
    #[test_case::test_case("a?", "a" => false; "52")]
    #[test_case::test_case("*", "mybucket/myobject" => true; "53")]
    fn test_is_match(pattern: &str, text: &str) -> bool {
        is_match(pattern, text)
    }
    // Same matcher in "simple" mode: note case 22 flips vs strict case 52
    // above, because '?' may match the empty remainder here.
    #[test_case::test_case("*", "s3:GetObject" => true ; "1")]
    #[test_case::test_case("", "s3:GetObject" => false ; "2")]
    #[test_case::test_case("", "" => true ; "3")]
    #[test_case::test_case("s3:*", "s3:ListMultipartUploadParts" => true ; "4")]
    #[test_case::test_case("s3:ListBucketMultipartUploads", "s3:ListBucket" => false ; "5")]
    #[test_case::test_case("s3:ListBucket", "s3:ListBucket" => true ; "6")]
    #[test_case::test_case("s3:ListBucketMultipartUploads", "s3:ListBucketMultipartUploads" => true ; "7")]
    #[test_case::test_case("my-bucket/oo*", "my-bucket/oo" => true ; "8")]
    #[test_case::test_case("my-bucket/In*", "my-bucket/India/Karnataka/" => true ; "9")]
    #[test_case::test_case("my-bucket/In*", "my-bucket/Karnataka/India/" => false ; "10")]
    #[test_case::test_case("my-bucket/In*/Ka*/Ban", "my-bucket/India/Karnataka/Ban" => true ; "11")]
    #[test_case::test_case("my-bucket/In*/Ka*/Ban", "my-bucket/India/Karnataka/Ban/Ban/Ban/Ban/Ban" => true ; "12")]
    #[test_case::test_case("my-bucket/In*/Ka*/Ban", "my-bucket/India/Karnataka/Area1/Area2/Area3/Ban" => true ; "13")]
    #[test_case::test_case("my-bucket/In*/Ka*/Ban", "my-bucket/India/State1/State2/Karnataka/Area1/Area2/Area3/Ban" => true ; "14")]
    #[test_case::test_case("my-bucket/In*/Ka*/Ban", "my-bucket/India/Karnataka/Bangalore" => false ; "15")]
    #[test_case::test_case("my-bucket/In*/Ka*/Ban*", "my-bucket/India/Karnataka/Bangalore" => true ; "16")]
    #[test_case::test_case("my-bucket/*", "my-bucket/India" => true ; "17")]
    #[test_case::test_case("my-bucket/oo*", "my-bucket/odo" => false ; "18")]
    #[test_case::test_case("my-bucket/oo?*", "my-bucket/oo???" => true ; "19")]
    #[test_case::test_case("my-bucket/oo??*", "my-bucket/odo" => false ; "20")]
    #[test_case::test_case("?h?*", "?h?hello" => true ; "21")]
    #[test_case::test_case("a?", "a" => true ; "22")]
    fn test_is_simple_match(pattern: &str, text: &str) -> bool {
        is_simple_match(pattern, text)
    }
    // Prefix-compatibility checks: `text` could still grow into a match.
    #[test_case::test_case("", "" => true ; "1")]
    #[test_case::test_case("a", "" => true ; "2")]
    #[test_case::test_case("a", "b" => false ; "3")]
    #[test_case::test_case("abc", "ab" => true ; "4")]
    #[test_case::test_case("ab*", "ab" => true ; "5")]
    #[test_case::test_case("abc*", "ab" => true ; "6")]
    #[test_case::test_case("abc?", "ab" => true ; "7")]
    #[test_case::test_case("abc*", "abd" => false ; "8")]
    #[test_case::test_case("abc*c", "abcd" => true ; "9")]
    #[test_case::test_case("ab*??d", "abxxc" => true ; "10")]
    #[test_case::test_case("ab*??", "abxc" => true ; "11")]
    #[test_case::test_case("ab??", "abxc" => true ; "12")]
    #[test_case::test_case("ab??", "abx" => true ; "13")]
    #[test_case::test_case("ab??d", "abcxd" => true ; "14")]
    #[test_case::test_case("ab??d", "abcxdd" => false ; "15")]
    #[test_case::test_case("", "b" => false ; "16")]
    fn test_is_match_as_pattern_prefix(pattern: &str, text: &str) -> bool {
        is_match_as_pattern_prefix(pattern, text)
    }
}
| rust | Apache-2.0 | 666c0a9a38636eb6653dff7d9c98ff7122601ce2 | 2026-01-04T15:42:12.458416Z | false |
rustfs/rustfs | https://github.com/rustfs/rustfs/blob/666c0a9a38636eb6653dff7d9c98ff7122601ce2/crates/policy/src/policy/utils/path.rs | crates/policy/src/policy/utils/path.rs | // Copyright 2024 RustFS Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/// Copy-on-write byte buffer over a source string, mirroring the `lazybuf`
/// helper in Go's `path.Clean`: no allocation happens while the output is
/// still a prefix of the input.
struct LazyBuf<'a> {
    // Original source string.
    s: &'a str,
    // Backing buffer, allocated only once output diverges from `s`.
    buf: Option<Vec<u8>>,
    // Number of bytes written so far (valid in `buf` or as a prefix of `s`).
    w: usize,
}
impl<'a> LazyBuf<'a> {
    /// Wraps `s` without allocating.
    pub fn new(s: &'a str) -> Self {
        Self { s, buf: None, w: 0 }
    }
    /// Byte at position `i` of the output written so far.
    fn index(&self, i: usize) -> u8 {
        self.buf.as_ref().map(|x| x[i]).unwrap_or_else(|| self.s.as_bytes()[i])
    }
    /// Appends one byte. While the output still equals a prefix of the
    /// source, this only advances `w`; the backing buffer is allocated
    /// lazily on the first byte that differs.
    fn append(&mut self, c: u8) {
        if self.buf.is_none() {
            if self.w < self.s.len() && self.s.as_bytes()[self.w] == c {
                self.w += 1;
                return;
            }
            self.buf = Some({
                // First divergence: materialize the buffer and copy the
                // already-matched prefix into it.
                let mut buf = vec![0u8; self.s.len()];
                buf[..self.w].copy_from_slice(&self.s.as_bytes()[..self.w]);
                buf
            });
        }
        self.buf.as_mut().unwrap()[self.w] = c;
        self.w += 1;
    }
    /// The output accumulated so far as an owned `String`.
    /// NOTE(review): `from_utf8_lossy` would mangle output if `w` ever split
    /// a multi-byte UTF-8 sequence; `clean` only writes ASCII separators and
    /// whole source bytes, so this appears unreachable — confirm.
    fn string(&self) -> String {
        match self.buf {
            Some(ref s) => String::from_utf8_lossy(&s[..self.w]).to_string(),
            None => String::from_utf8_lossy(&self.s.as_bytes()[..self.w]).to_string(),
        }
    }
}
/// Lexically canonicalizes a slash-separated path; a direct port of Go's
/// `path.Clean`:
/// 1. collapses repeated slashes,
/// 2. drops `.` elements,
/// 3. resolves `..` against a preceding element,
/// 4. drops `..` elements that would climb above the root of a rooted path.
/// Trailing slashes are removed; an empty result becomes ".".
pub fn clean(path: &str) -> String {
    if path.is_empty() {
        return ".".into();
    }
    let p = path.as_bytes();
    // rooted: path starts with '/'; r: read index into `p`;
    // dotdot: write index below which ".." may not roll the output back;
    // out: copy-on-write output buffer.
    let (rooted, n, mut out, mut r, mut dotdot) = (p[0] == b'/', path.len(), LazyBuf::new(path), 0, 0);
    if rooted {
        out.append(b'/');
        r = 1;
        dotdot = 1;
    }
    while r < n {
        if p[r] == b'/' || (p[r] == b'.' && (r + 1 == n || p[r + 1] == b'/')) {
            // Empty element or "." element: skip it.
            r += 1;
        } else if p[r] == b'.' && p[r + 1] == b'.' && (r + 2 == n || p[r + 2] == b'/') {
            // ".." element. (Indexing p[r + 1] is safe: if p[r] == '.' and
            // r + 1 == n, the previous branch already matched.)
            r += 2;
            if out.w > dotdot {
                // Roll the output back over the previous element.
                out.w -= 1;
                while out.w > dotdot && out.index(out.w) != b'/' {
                    out.w -= 1;
                }
            } else if !rooted {
                // Cannot back up: keep a literal ".." (only when not rooted;
                // a rooted path silently discards leading "..").
                if out.w > 0 {
                    out.append(b'/');
                }
                out.append(b'.');
                out.append(b'.');
                dotdot = out.w;
            }
        } else {
            // Regular element: write a '/' separator unless at the start,
            // then copy the element bytes verbatim.
            if rooted && out.w != 1 || !rooted && out.w != 0 {
                out.append(b'/');
            }
            while r < n && p[r] != b'/' {
                out.append(p[r]);
                r += 1;
            }
        }
    }
    if out.w == 0 { ".".into() } else { out.string() }
}
#[cfg(test)]
mod tests {
    use super::clean;
    // Cases mirror Go's path.Clean test table: `input, expected`. The test
    // also checks idempotence: clean(expected) == expected.
    #[test_case::test_case("", "."; "1")]
    #[test_case::test_case("abc", "abc"; "2")]
    #[test_case::test_case("abc/def", "abc/def"; "3")]
    #[test_case::test_case("a/b/c", "a/b/c"; "4")]
    #[test_case::test_case(".", "."; "5")]
    #[test_case::test_case("..", ".."; "6")]
    #[test_case::test_case("../..", "../.."; "7")]
    #[test_case::test_case("../../abc", "../../abc"; "8")]
    #[test_case::test_case("/abc", "/abc"; "9")]
    #[test_case::test_case("/", "/"; "10")]
    #[test_case::test_case("abc/", "abc"; "11")]
    #[test_case::test_case("abc/def/", "abc/def"; "12")]
    #[test_case::test_case("a/b/c/", "a/b/c"; "13")]
    #[test_case::test_case("./", "."; "14")]
    #[test_case::test_case("../", ".."; "15")]
    #[test_case::test_case("../../", "../.."; "16")]
    #[test_case::test_case("/abc/", "/abc"; "17")]
    #[test_case::test_case("abc//def//ghi", "abc/def/ghi"; "18")]
    #[test_case::test_case("//abc", "/abc"; "19")]
    #[test_case::test_case("///abc", "/abc"; "20")]
    #[test_case::test_case("//abc//", "/abc"; "21")]
    #[test_case::test_case("abc//", "abc"; "22")]
    #[test_case::test_case("abc/./def", "abc/def"; "23")]
    #[test_case::test_case("/./abc/def", "/abc/def"; "24")]
    #[test_case::test_case("abc/.", "abc"; "25")]
    #[test_case::test_case("abc/def/ghi/../jkl", "abc/def/jkl"; "26")]
    #[test_case::test_case("abc/def/../ghi/../jkl", "abc/jkl"; "27")]
    #[test_case::test_case("abc/def/..", "abc"; "28")]
    #[test_case::test_case("abc/def/../..", "."; "29")]
    #[test_case::test_case("/abc/def/../..", "/"; "30")]
    #[test_case::test_case("abc/def/../../..", ".."; "31")]
    #[test_case::test_case("/abc/def/../../..", "/"; "32")]
    #[test_case::test_case("abc/def/../../../ghi/jkl/../../../mno", "../../mno"; "33")]
    #[test_case::test_case("abc/./../def", "def"; "34")]
    #[test_case::test_case("abc//./../def", "def"; "35")]
    #[test_case::test_case("abc/../../././../def", "../../def"; "36")]
    fn test_clean(path: &str, result: &str) {
        assert_eq!(clean(path), result.to_owned());
        // clean must be idempotent on its own output.
        assert_eq!(clean(result), result.to_owned());
    }
}
| rust | Apache-2.0 | 666c0a9a38636eb6653dff7d9c98ff7122601ce2 | 2026-01-04T15:42:12.458416Z | false |
rustfs/rustfs | https://github.com/rustfs/rustfs/blob/666c0a9a38636eb6653dff7d9c98ff7122601ce2/crates/policy/src/policy/function/func.rs | crates/policy/src/policy/function/func.rs | // Copyright 2024 RustFS Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::marker::PhantomData;
use serde::{
Deserialize, Deserializer, Serialize,
de::{self, Visitor},
};
use super::key::Key;
/// Ordered list of `(condition key, values)` entries shared by every typed
/// condition function (string, number, date, ...). Serialized as a JSON map.
#[derive(PartialEq, Eq, Debug)]
pub struct InnerFunc<T>(pub(crate) Vec<FuncKeyValue<T>>);
/// A single condition entry: one condition key mapped to its expected values.
#[derive(PartialEq, Eq, Debug)]
pub struct FuncKeyValue<T> {
    // Condition key (e.g. `aws:username`, optionally with a `/variable` suffix).
    pub key: Key,
    // Expected value(s); the concrete type depends on the condition function.
    pub values: T,
}
impl<T: Clone> Clone for FuncKeyValue<T> {
fn clone(&self) -> Self {
Self {
key: self.key.clone(),
values: self.values.clone(),
}
}
}
impl<T: Clone> Clone for InnerFunc<T> {
fn clone(&self) -> Self {
Self(self.0.clone())
}
}
impl<T: Serialize> Serialize for InnerFunc<T> {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
use serde::ser::SerializeMap;
let mut map = serializer.serialize_map(Some(self.0.len()))?;
for kv in self.0.iter() {
map.serialize_key(&kv.key)?;
map.serialize_value(&kv.values)?;
}
map.end()
}
}
impl<'de, T> Deserialize<'de> for InnerFunc<T>
where
    T: Deserialize<'de>,
{
    /// Deserializes from a map of condition-key -> values, preserving entry
    /// order and rejecting an empty map.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        // PhantomData carries the value type `T` into the visitor.
        struct FuncVisitor<T>(PhantomData<T>);
        impl<'v, T> Visitor<'v> for FuncVisitor<T>
        where
            T: Deserialize<'v>,
        {
            type Value = InnerFunc<T>;
            fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
                formatter.write_str("struct StringFunc")
            }
            fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error>
            where
                A: de::MapAccess<'v>,
            {
                use serde::de::Error;
                let mut inner = Vec::with_capacity(map.size_hint().unwrap_or(0));
                while let Some((key, values)) = map.next_entry::<Key, T>()? {
                    inner.push(FuncKeyValue { key, values });
                }
                // A condition function with no keys is meaningless; reject it.
                if inner.is_empty() {
                    return Err(Error::custom("has no condition key"));
                }
                Ok(InnerFunc(inner))
            }
        }
        deserializer.deserialize_map(FuncVisitor::<T>(PhantomData))
    }
}
| rust | Apache-2.0 | 666c0a9a38636eb6653dff7d9c98ff7122601ce2 | 2026-01-04T15:42:12.458416Z | false |
rustfs/rustfs | https://github.com/rustfs/rustfs/blob/666c0a9a38636eb6653dff7d9c98ff7122601ce2/crates/policy/src/policy/function/key.rs | crates/policy/src/policy/function/key.rs | // Copyright 2024 RustFS Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use super::key_name::KeyName;
use crate::error::Error;
use crate::policy::{Error as PolicyError, Validator};
use serde::{Deserialize, Serialize};
/// A condition key: a well-known key name plus an optional `/variable`
/// suffix. Serialized to, and parsed from, the `name[/variable]` string form.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
#[serde(into = "String")]
#[serde(try_from = "&str")]
pub struct Key {
    // The well-known key name (validated while parsing).
    pub name: KeyName,
    // Optional variable suffix: everything after the first `/`.
    pub variable: Option<String>,
}
// A Key is fully validated while parsing (`TryFrom<&str>`), so the default
// `Validator` implementation needs no extra checks.
impl Validator for Key {
    type Error = Error;
}
impl Key {
    /// Whether this key's name equals `other` (the variable suffix is ignored).
    pub fn is(&self, other: &KeyName) -> bool {
        &self.name == other
    }
    /// The `${name}`-style variable form of the key name.
    pub fn var_name(&self) -> String {
        self.name.var_name()
    }
    /// The full textual form, `name` or `name/variable`.
    pub fn name(&self) -> String {
        match &self.variable {
            Some(suffix) => format!("{}/{}", self.name.name(), suffix),
            None => self.name.name().to_owned(),
        }
    }
}
impl From<Key> for String {
    /// Renders the key back to its `name[/variable]` string form
    /// (used by `#[serde(into = "String")]`).
    fn from(value: Key) -> Self {
        let name: &str = Into::<&str>::into(&value.name);
        match value.variable {
            Some(ref suffix) => format!("{name}/{suffix}"),
            None => name.to_owned(),
        }
    }
}
impl TryFrom<&str> for Key {
    type Error = Error;
    /// Parses `name[/variable]`; everything after the first `/` becomes the
    /// variable suffix. Fails when the name is not a known `KeyName`.
    fn try_from(value: &str) -> Result<Self, Self::Error> {
        let mut parts = value.splitn(2, '/');
        // `splitn` always yields at least one item, so this branch is
        // effectively unreachable; kept for defensive parity.
        let name = match parts.next() {
            Some(n) => n,
            None => return Err(PolicyError::InvalidKey(value.to_string()).into()),
        };
        Ok(Self {
            name: KeyName::try_from(name)?,
            variable: parts.next().map(str::to_owned),
        })
    }
}
#[cfg(test)]
mod tests {
    use super::Key;
    use test_case::test_case;
    // Builds a Key from a known-good name string; panics on an invalid name.
    fn new_key(name: &str, value: Option<&str>) -> Key {
        Key {
            name: name.try_into().unwrap(),
            variable: value.map(ToString::to_string),
        }
    }
    // Round-trip: Key -> JSON string in `"name[/variable]"` form.
    #[test_case(new_key("s3:x-amz-copy-source", Some("aaa")), r#""s3:x-amz-copy-source/aaa""#)]
    #[test_case(new_key("s3:x-amz-copy-source", None), r#""s3:x-amz-copy-source""#)]
    #[test_case(new_key("aws:Referer", Some("bbb")), r#""aws:Referer/bbb""#)]
    #[test_case(new_key("aws:Referer", None), r#""aws:Referer""#)]
    #[test_case(new_key("jwt:website", None), r#""jwt:website""#)]
    #[test_case(new_key("jwt:website", Some("aaa")), r#""jwt:website/aaa""#)]
    #[test_case(new_key("svc:DurationSeconds", None), r#""svc:DurationSeconds""#)]
    #[test_case(new_key("svc:DurationSeconds", Some("aaa")), r#""svc:DurationSeconds/aaa""#)]
    fn test_serialize_successful(key: Key, except: &str) -> Result<(), serde_json::Error> {
        let val = serde_json::to_string(&key)?;
        assert_eq!(val.as_str(), except);
        Ok(())
    }
    // Misspelled prefixes/names must be rejected by KeyName parsing.
    #[test_case("s3:x-amz-copy-source1/aaa")]
    #[test_case("s33:x-amz-copy-source")]
    #[test_case("aw2s:Referer/bbb")]
    #[test_case("aws:Referera")]
    #[test_case("jwdt:website")]
    #[test_case("jwt:dwebsite/aaa")]
    #[test_case("sfvc:DuratdionSeconds")]
    #[test_case("svc:DursationSeconds/aaa")]
    fn test_deserialize_failed(key: &str) {
        let val = serde_json::from_str::<Key>(key);
        assert!(val.is_err());
    }
    // Round-trip in the other direction: JSON string -> Key.
    #[test_case(new_key("s3:x-amz-copy-source", Some("aaa")), r#""s3:x-amz-copy-source/aaa""#)]
    #[test_case(new_key("s3:x-amz-copy-source", None), r#""s3:x-amz-copy-source""#)]
    #[test_case(new_key("aws:Referer", Some("bbb")), r#""aws:Referer/bbb""#)]
    #[test_case(new_key("aws:Referer", None), r#""aws:Referer""#)]
    #[test_case(new_key("jwt:website", None), r#""jwt:website""#)]
    #[test_case(new_key("jwt:website", Some("aaa")), r#""jwt:website/aaa""#)]
    #[test_case(new_key("svc:DurationSeconds", None), r#""svc:DurationSeconds""#)]
    #[test_case(new_key("svc:DurationSeconds", Some("aaa")), r#""svc:DurationSeconds/aaa""#)]
    fn test_deserialize(except: Key, input: &str) -> Result<(), serde_json::Error> {
        let v = serde_json::from_str::<Key>(input)?;
        assert_eq!(v.name, except.name);
        assert_eq!(v.variable, except.variable);
        Ok(())
    }
}
| rust | Apache-2.0 | 666c0a9a38636eb6653dff7d9c98ff7122601ce2 | 2026-01-04T15:42:12.458416Z | false |
rustfs/rustfs | https://github.com/rustfs/rustfs/blob/666c0a9a38636eb6653dff7d9c98ff7122601ce2/crates/policy/src/policy/function/date.rs | crates/policy/src/policy/function/date.rs | // Copyright 2024 RustFS Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use super::func::InnerFunc;
use serde::{Deserialize, Deserializer, Serialize, de};
use std::{collections::HashMap, fmt};
use time::{OffsetDateTime, format_description::well_known::Rfc3339};
/// Date condition function (e.g. `DateEquals`, `DateLessThan`).
pub type DateFunc = InnerFunc<DateFuncValue>;
impl DateFunc {
    /// Evaluates every entry: the first request value for each condition key
    /// must exist, parse as RFC 3339, and satisfy
    /// `op(condition_time, request_time)`. Any missing or unparsable value
    /// fails the whole function.
    pub fn evaluate(&self, op: impl Fn(&OffsetDateTime, &OffsetDateTime) -> bool, values: &HashMap<String, Vec<String>>) -> bool {
        self.0.iter().all(|inner| {
            values
                .get(inner.key.name().as_str())
                .and_then(|vals| vals.first())
                .and_then(|raw| OffsetDateTime::parse(raw, &Rfc3339).ok())
                .map_or(false, |request_time| op(&inner.values.0, &request_time))
        })
    }
}
/// Wrapper around the parsed RFC 3339 timestamp a date condition compares against.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct DateFuncValue(OffsetDateTime);
impl Serialize for DateFuncValue {
    /// Serializes the timestamp as an RFC 3339 string.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        use serde::ser::Error;
        let formatted = self
            .0
            .format(&Rfc3339)
            .map_err(|e| S::Error::custom(format!("format datetime failed: {e:?}")))?;
        serializer.serialize_str(&formatted)
    }
}
impl<'de> Deserialize<'de> for DateFuncValue {
    /// Deserializes from an RFC 3339 string (e.g. "2009-11-10T15:00:00Z").
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        struct DateVisitor;
        impl de::Visitor<'_> for DateVisitor {
            type Value = DateFuncValue;
            fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
                formatter.write_str("a data string that is representable in RFC 3339 format.")
            }
            fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>
            where
                E: de::Error,
            {
                // Parse eagerly so invalid timestamps fail at policy load time.
                Ok(DateFuncValue(
                    OffsetDateTime::parse(value, &Rfc3339).map_err(|e| E::custom(format!("{e:?}")))?,
                ))
            }
        }
        deserializer.deserialize_str(DateVisitor)
    }
}
#[cfg(test)]
mod tests {
    use super::{DateFunc, DateFuncValue};
    use crate::policy::function::func::FuncKeyValue;
    use crate::policy::function::{
        key::Key,
        key_name::KeyName::{self, *},
        key_name::S3KeyName::*,
    };
    use test_case::test_case;
    use time::{OffsetDateTime, format_description::well_known::Rfc3339};
    // Builds a single-entry DateFunc; `value` must be valid RFC 3339.
    fn new_func(name: KeyName, variable: Option<String>, value: &str) -> DateFunc {
        DateFunc {
            0: vec![FuncKeyValue {
                key: Key { name, variable },
                values: DateFuncValue(OffsetDateTime::parse(value, &Rfc3339).unwrap()),
            }],
        }
    }
    // JSON map -> DateFunc round trip, with and without a key variable suffix.
    #[test_case(r#"{"s3:object-lock-retain-until-date": "2009-11-10T15:00:00Z"}"#, new_func(S3(S3ObjectLockRetainUntilDate), None, "2009-11-10T15:00:00Z"); "1")]
    #[test_case(r#"{"s3:object-lock-retain-until-date/a": "2009-11-10T15:00:00Z"}"#, new_func(S3(S3ObjectLockRetainUntilDate), Some("a".into()), "2009-11-10T15:00:00Z"); "2")]
    fn test_deser(input: &str, expect: DateFunc) -> Result<(), serde_json::Error> {
        let v: DateFunc = serde_json::from_str(input)?;
        assert_eq!(v, expect);
        Ok(())
    }
    // DateFunc -> JSON map round trip.
    #[test_case(r#"{"s3:object-lock-retain-until-date":"2009-11-10T15:00:00Z"}"#, new_func(S3(S3ObjectLockRetainUntilDate), None, "2009-11-10T15:00:00Z"); "1")]
    #[test_case(r#"{"s3:object-lock-retain-until-date/a":"2009-11-10T15:00:00Z"}"#, new_func(S3(S3ObjectLockRetainUntilDate), Some("a".into()), "2009-11-10T15:00:00Z"); "2")]
    fn test_ser(expect: &str, input: DateFunc) -> Result<(), serde_json::Error> {
        let v = serde_json::to_string(&input)?;
        assert_eq!(v, expect);
        Ok(())
    }
}
| rust | Apache-2.0 | 666c0a9a38636eb6653dff7d9c98ff7122601ce2 | 2026-01-04T15:42:12.458416Z | false |
rustfs/rustfs | https://github.com/rustfs/rustfs/blob/666c0a9a38636eb6653dff7d9c98ff7122601ce2/crates/policy/src/policy/function/number.rs | crates/policy/src/policy/function/number.rs | // Copyright 2024 RustFS Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::collections::HashMap;
use super::func::InnerFunc;
use serde::{
Deserialize, Deserializer, Serialize,
de::{Error, Visitor},
};
/// Numeric condition function (e.g. `NumericEquals`, `NumericLessThan`).
pub type NumberFunc = InnerFunc<NumberFuncValue>;
/// Wrapper around the signed 64-bit value a numeric condition compares against.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct NumberFuncValue(i64);
impl NumberFunc {
    /// Evaluates every entry against the request `values`:
    /// - a missing value for any key short-circuits to `if_exists`
    ///   (supports the `...IfExists` condition variants),
    /// - a value that does not parse as i64 fails,
    /// - otherwise `op(request_value, condition_value)` must hold.
    pub fn evaluate(&self, op: impl Fn(&i64, &i64) -> bool, if_exists: bool, values: &HashMap<String, Vec<String>>) -> bool {
        for inner in &self.0 {
            let raw = match values.get(inner.key.name().as_str()).and_then(|vals| vals.first()) {
                Some(r) => r,
                None => return if_exists,
            };
            match raw.parse::<i64>() {
                Ok(parsed) if op(&parsed, &inner.values.0) => {}
                _ => return false,
            }
        }
        true
    }
}
impl Serialize for NumberFuncValue {
    /// Serializes the number as a string (the form policy documents emit).
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        let text = self.0.to_string();
        serializer.serialize_str(&text)
    }
}
impl<'de> Deserialize<'de> for NumberFuncValue {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
struct NumberVisitor;
impl Visitor<'_> for NumberVisitor {
type Value = NumberFuncValue;
fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
formatter.write_str("a number or a string that can be represented as a number.")
}
fn visit_i64<E>(self, value: i64) -> Result<Self::Value, E>
where
E: Error,
{
Ok(NumberFuncValue(value))
}
fn visit_u64<E>(self, value: u64) -> Result<Self::Value, E>
where
E: Error,
{
Ok(NumberFuncValue(value as i64))
}
fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>
where
E: Error,
{
Ok(NumberFuncValue(value.parse().map_err(|e| E::custom(format!("{e:?}")))?))
}
}
deserializer.deserialize_any(NumberVisitor)
}
}
#[cfg(test)]
mod tests {
    use super::{NumberFunc, NumberFuncValue};
    use crate::policy::function::func::FuncKeyValue;
    use crate::policy::function::{
        key::Key,
        key_name::KeyName::{self, *},
        key_name::S3KeyName::*,
    };
    use test_case::test_case;
    // Builds a single-entry NumberFunc for the given key/variable/value.
    fn new_func(name: KeyName, variable: Option<String>, value: i64) -> NumberFunc {
        NumberFunc {
            0: vec![FuncKeyValue {
                key: Key { name, variable },
                values: NumberFuncValue(value),
            }],
        }
    }
    // Deserialization accepts both JSON numbers and numeric strings.
    #[test_case(r#"{"s3:max-keys": 1}"#, new_func(S3(S3MaxKeys), None, 1); "1")]
    #[test_case(r#"{"s3:max-keys/a": 1}"#, new_func(S3(S3MaxKeys), Some("a".into()), 1); "2")]
    #[test_case(r#"{"s3:max-keys": "1"}"#, new_func(S3(S3MaxKeys), None, 1); "3")]
    #[test_case(r#"{"s3:max-keys/a": "1"}"#, new_func(S3(S3MaxKeys), Some("a".into()), 1); "4")]
    fn test_deser(input: &str, expect: NumberFunc) -> Result<(), serde_json::Error> {
        let v: NumberFunc = serde_json::from_str(input)?;
        assert_eq!(v, expect);
        Ok(())
    }
    // Serialization always emits the string form.
    #[test_case(r#"{"s3:max-keys":"1"}"#, new_func(S3(S3MaxKeys), None, 1); "1")]
    #[test_case(r#"{"s3:max-keys/a":"1"}"#, new_func(S3(S3MaxKeys), Some("a".into()), 1); "2")]
    fn test_ser(expect: &str, input: NumberFunc) -> Result<(), serde_json::Error> {
        let v = serde_json::to_string(&input)?;
        assert_eq!(v, expect);
        Ok(())
    }
}
| rust | Apache-2.0 | 666c0a9a38636eb6653dff7d9c98ff7122601ce2 | 2026-01-04T15:42:12.458416Z | false |
rustfs/rustfs | https://github.com/rustfs/rustfs/blob/666c0a9a38636eb6653dff7d9c98ff7122601ce2/crates/policy/src/policy/function/string.rs | crates/policy/src/policy/function/string.rs | // Copyright 2024 RustFS Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#[cfg(test)]
use std::collections::BTreeSet as Set;
#[cfg(not(test))]
use std::collections::HashSet as Set;
use std::fmt;
use std::{borrow::Cow, collections::HashMap};
use crate::policy::function::func::FuncKeyValue;
use crate::policy::utils::wildcard;
use futures::future;
use serde::{Deserialize, Deserializer, Serialize, de, ser::SerializeSeq};
use super::{func::InnerFunc, key_name::KeyName};
use crate::policy::variables::PolicyVariableResolver;
/// String condition function (StringEquals/StringLike and their variants).
pub type StringFunc = InnerFunc<StringFuncValue>;
impl StringFunc {
    /// Evaluates every condition entry; all must pass for the function to
    /// match. `like` selects wildcard matching, `ignore_case` lowercases both
    /// sides, `negate` inverts each entry's result (the `Not` variants), and
    /// `resolver` (if any) expands AWS policy variables in condition values.
    #[allow(clippy::too_many_arguments)]
    pub(crate) async fn evaluate_with_resolver(
        &self,
        for_all: bool,
        ignore_case: bool,
        like: bool,
        negate: bool,
        values: &HashMap<String, Vec<String>>,
        resolver: Option<&dyn PolicyVariableResolver>,
    ) -> bool {
        for entry in self.0.iter() {
            let matched = if like {
                entry.eval_like(for_all, values, resolver).await
            } else {
                entry.eval(for_all, ignore_case, values, resolver).await
            };
            // `matched ^ negate` must hold for every entry; it is false
            // exactly when `matched == negate`.
            if matched == negate {
                return false;
            }
        }
        true
    }
}
impl FuncKeyValue<StringFuncValue> {
    /// Exact (set-based) comparison of the request values for this key
    /// against the condition values, after resolving policy variables and
    /// substituting common-key templates.
    /// `for_all`: every request value must be a condition value (vacuously
    /// true when no request values exist); otherwise one overlap suffices.
    async fn eval(
        &self,
        for_all: bool,
        ignore_case: bool,
        values: &HashMap<String, Vec<String>>,
        resolver: Option<&dyn PolicyVariableResolver>,
    ) -> bool {
        // Request-side values for this condition key, lowercased if requested.
        let rvalues = values
            // http.CanonicalHeaderKey ?
            .get(self.key.name().as_str())
            .map(|t| {
                t.iter()
                    .map(|x| {
                        if ignore_case {
                            Cow::Owned(x.to_lowercase())
                        } else {
                            Cow::from(x)
                        }
                    })
                    .collect::<Set<_>>()
            })
            .unwrap_or_default();
        // Expand AWS policy variables in each condition value; without a
        // resolver a value passes through unchanged.
        let resolved_values: Vec<Vec<String>> = futures::future::join_all(self.values.0.iter().map(|c| async {
            if let Some(res) = resolver {
                super::super::variables::resolve_aws_variables(c, res).await
            } else {
                vec![c.to_string()]
            }
        }))
        .await;
        let fvalues = resolved_values
            .into_iter()
            .flatten()
            .map(|resolved_c| {
                let mut c = Cow::from(resolved_c);
                // NOTE(review): substitutes only the FIRST common key that has
                // a non-empty request value (the `return` exits the closure);
                // confirm multi-variable values are not expected here.
                for key in KeyName::COMMON_KEYS {
                    match values.get(key.name()).and_then(|x| x.first()) {
                        Some(v) if !v.is_empty() => return Cow::Owned(c.to_mut().replace(&key.var_name(), v)),
                        _ => continue,
                    };
                }
                c
            })
            .map(|x| if ignore_case { Cow::Owned(x.to_lowercase()) } else { x })
            .collect::<Set<_>>();
        // Overlap between request values and condition values.
        let ivalues = rvalues.intersection(&fvalues);
        if for_all {
            rvalues.is_empty() || rvalues.len() == ivalues.count()
        } else {
            ivalues.count() > 0
        }
    }
    /// Wildcard (StringLike) comparison: each condition value is treated as a
    /// `*`/`?` pattern matched against the request values, with the same
    /// variable resolution and common-key substitution as `eval`.
    /// Returns `for_all` when no request values exist for the key.
    async fn eval_like(
        &self,
        for_all: bool,
        values: &HashMap<String, Vec<String>>,
        resolver: Option<&dyn PolicyVariableResolver>,
    ) -> bool {
        if let Some(rvalues) = values.get(self.key.name().as_str()) {
            for v in rvalues.iter() {
                // Resolve policy variables in every pattern (per request value;
                // results are not cached across iterations).
                let resolved_futures: Vec<_> = self
                    .values
                    .0
                    .iter()
                    .map(|c| async {
                        if let Some(res) = resolver {
                            super::super::variables::resolve_aws_variables(c, res).await
                        } else {
                            vec![c.to_string()]
                        }
                    })
                    .collect();
                let resolved_values = future::join_all(resolved_futures).await;
                let matched = resolved_values
                    .into_iter()
                    .flatten()
                    .map(|resolved_c| {
                        let mut c = Cow::from(resolved_c);
                        // Same first-match common-key substitution as `eval`.
                        for key in KeyName::COMMON_KEYS {
                            match values.get(key.name()).and_then(|x| x.first()) {
                                Some(v) if !v.is_empty() => return Cow::Owned(c.to_mut().replace(&key.var_name(), v)),
                                _ => continue,
                            };
                        }
                        c
                    })
                    .any(|x| wildcard::is_match(x, v));
                // for_all: every request value must match some pattern;
                // otherwise a single match suffices.
                if for_all {
                    if !matched {
                        return false;
                    }
                } else if matched {
                    return true;
                }
            }
        }
        for_all
    }
}
/// Set of literal/pattern strings for a string condition function.
/// Under `cfg(test)` the backing `Set` is a `BTreeSet` so serialization
/// order is deterministic; otherwise it is a `HashSet`.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct StringFuncValue(pub Set<String>);
impl Serialize for StringFuncValue {
    /// A single value serializes as a bare string; multiple values serialize
    /// as a sequence (mirroring the two accepted input shapes).
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        match self.0.len() {
            1 => serializer.serialize_some(&self.0.iter().next()),
            n => {
                let mut seq = serializer.serialize_seq(Some(n))?;
                for item in &self.0 {
                    seq.serialize_element(item)?;
                }
                seq.end()
            }
        }
    }
}
impl<'d> Deserialize<'d> for StringFuncValue {
    /// Accepts either a bare string or a non-empty array of strings, collecting
    /// them into a set. An empty array is rejected with a "empty" error.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'d>,
    {
        /// Visitor handling both the scalar and the sequence form.
        struct ScalarOrSeqVisitor;
        impl<'de> de::Visitor<'de> for ScalarOrSeqVisitor {
            type Value = StringFuncValue;
            fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
                formatter.write_str("a string or an array of strings")
            }
            fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>
            where
                E: de::Error,
            {
                // A lone string becomes a one-element set.
                let mut single = Set::new();
                single.insert(value.to_string());
                Ok(StringFuncValue(single))
            }
            fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
            where
                A: de::SeqAccess<'de>,
            {
                // Under test, skip preallocation so behavior stays deterministic
                // regardless of the sequence's size hint; otherwise reserve upfront.
                #[cfg(test)]
                let mut collected = Set::new();
                #[cfg(not(test))]
                let mut collected = Set::with_capacity(seq.size_hint().unwrap_or(0));
                while let Some(item) = seq.next_element::<String>()? {
                    collected.insert(item);
                }
                Ok(StringFuncValue(collected))
            }
        }
        let parsed = deserializer.deserialize_any(ScalarOrSeqVisitor)?;
        // Reject empty value sets: a condition with no values is meaningless.
        if parsed.0.is_empty() {
            use serde::de::Error;
            return Err(Error::custom("empty"));
        }
        Ok(parsed)
    }
}
#[cfg(test)]
mod tests {
    use super::{StringFunc, StringFuncValue};
    use crate::policy::function::func::FuncKeyValue;
    use crate::policy::function::{
        key::Key,
        key_name::AwsKeyName::*,
        key_name::KeyName::{self, *},
    };
    use std::collections::HashMap;
    use crate::policy::function::key_name::S3KeyName::S3LocationConstraint;
    use test_case::test_case;
    /// Builds a `StringFunc` with a single key/values entry for (de)serialization tests.
    fn new_func(name: KeyName, variable: Option<String>, values: Vec<&str>) -> StringFunc {
        StringFunc {
            0: vec![FuncKeyValue {
                key: Key { name, variable },
                values: StringFuncValue(values.into_iter().map(|x| x.to_owned()).collect()),
            }],
        }
    }
    // Deserialization: single string, string array, and `/variable` key suffix forms.
    #[test_case(r#"{"aws:username": "johndoe"}"#,
    new_func(Aws(AWSUsername), None, vec!["johndoe"])
    )]
    #[test_case(r#"{"aws:username": ["johndoe", "aaa"]}"#, new_func(Aws(AWSUsername), None, vec!["johndoe", "aaa"]
    ))]
    #[test_case(r#"{"aws:username/value": "johndoe"}"#, new_func(Aws(AWSUsername), Some("value".into()), vec!["johndoe"]
    ))]
    #[test_case(r#"{"aws:username/value": ["johndoe", "aaa"]}"#, new_func(Aws(AWSUsername), Some("value".into()), vec!["johndoe", "aaa"]
    ))]
    fn test_deser(input: &str, expect: StringFunc) -> Result<(), serde_json::Error> {
        let v: StringFunc = serde_json::from_str(input)?;
        assert_eq!(v, expect);
        Ok(())
    }
    // Deserialization failures: unknown key names, empty arrays, bare strings.
    #[test_case(r#"{"aws:usernamea":"johndoe"}"#)]
    #[test_case(r#"{"aws:username":[]}"#)] // Empty
    #[test_case(r#"{"aws:usernamea/value":"johndoe"}"#)]
    #[test_case(r#"{"aws:usernamea/value":["johndoe", "aaa"]}"#)]
    #[test_case(r#""aaa""#)]
    fn test_deser_failed(input: &str) {
        assert!(serde_json::from_str::<StringFunc>(input).is_err());
    }
    // Serialization round-trip: one value serializes as a string, several as an array.
    #[test_case(r#"{"aws:username":"johndoe"}"#, new_func(Aws(AWSUsername), None, vec!["johndoe"]))]
    #[test_case(r#"{"aws:username":["aaa","johndoe"]}"#, new_func(Aws(AWSUsername), None, vec!["johndoe", "aaa"]))]
    #[test_case(r#"{"aws:username/value":"johndoe"}"#, new_func(Aws(AWSUsername), Some("value".into()), vec!["johndoe"]))]
    #[test_case(r#"{"aws:username/value":["aaa","johndoe"]}"#, new_func(Aws(AWSUsername), Some("value".into()), vec!["johndoe", "aaa"]))]
    fn test_ser(expect: &str, input: StringFunc) -> Result<(), serde_json::Error> {
        let v = serde_json::to_string(&input)?;
        assert_eq!(v.as_str(), expect);
        Ok(())
    }
    /// Builds a single key/values pair for evaluation tests.
    fn new_fkv(name: &str, values: Vec<&str>) -> FuncKeyValue<StringFuncValue> {
        FuncKeyValue {
            key: name.try_into().unwrap(),
            values: StringFuncValue(values.into_iter().map(ToOwned::to_owned).collect()),
        }
    }
    /// Drives `eval` synchronously; `negate` XORs the result to model Not* operators.
    fn test_eval(
        s: FuncKeyValue<StringFuncValue>,
        for_all: bool,
        ignore_case: bool,
        negate: bool,
        values: Vec<(&str, Vec<&str>)>,
    ) -> bool {
        let map: HashMap<String, Vec<String>> = values
            .into_iter()
            .map(|(k, v)| (k.to_owned(), v.into_iter().map(ToOwned::to_owned).collect::<Vec<String>>()))
            .collect();
        let result = s.eval(for_all, ignore_case, &map, None);
        pollster::block_on(result) ^ negate
    }
    #[test_case(new_fkv("s3:x-amz-copy-source", vec!["mybucket/myobject"]), false, vec![("x-amz-copy-source", vec!["mybucket/myobject"])] => true ; "1")]
    #[test_case(new_fkv("s3:x-amz-copy-source", vec!["mybucket/myobject"]), false, vec![("x-amz-copy-source", vec!["yourbucket/myobject"])] => false ; "2")]
    #[test_case(new_fkv("s3:x-amz-copy-source", vec!["mybucket/myobject"]), false, vec![] => false ; "3")]
    #[test_case(new_fkv("s3:x-amz-copy-source", vec!["mybucket/myobject"]), false, vec![("delimiter", vec!["/"])] => false ; "4")]
    #[test_case(new_fkv("s3:LocationConstraint", vec!["eu-west-1", "ap-southeast-1"]), false, vec![("LocationConstraint", vec!["eu-west-1"])] => true ; "5")]
    #[test_case(new_fkv("s3:LocationConstraint", vec!["eu-west-1", "ap-southeast-1"]), false, vec![("LocationConstraint", vec!["ap-southeast-1"])] => true ; "6")]
    #[test_case(new_fkv("s3:LocationConstraint", vec!["eu-west-1", "ap-southeast-1"]), false, vec![("LocationConstraint", vec!["us-east-1"])] => false ; "7")]
    #[test_case(new_fkv("s3:LocationConstraint", vec!["eu-west-1", "ap-southeast-1"]), false, vec![] => false ; "8")]
    #[test_case(new_fkv("s3:LocationConstraint", vec!["eu-west-1", "ap-southeast-1"]), false, vec![("delimiter", vec!["/"])] => false ; "9")]
    #[test_case(new_fkv("jwt:groups", vec!["prod", "art"]), true, vec![("groups", vec!["prod", "art"])] => true ; "10")]
    #[test_case(new_fkv("jwt:groups", vec!["prod", "art"]), true, vec![("groups", vec!["art"])] => true ; "11")]
    #[test_case(new_fkv("jwt:groups", vec!["prod", "art"]), true, vec![] => true ; "12")]
    #[test_case(new_fkv("jwt:groups", vec!["prod", "art"]), true, vec![("delimiter", vec!["/"])] => true ; "13")]
    #[test_case(new_fkv("jwt:groups", vec!["prod", "art"]), false, vec![("groups", vec!["prod", "art"])] => true ; "14")]
    #[test_case(new_fkv("jwt:groups", vec!["prod", "art"]), false, vec![("groups", vec!["art"])] => true ; "15")]
    #[test_case(new_fkv("jwt:groups", vec!["prod", "art"]), false, vec![] => false ; "16")]
    #[test_case(new_fkv("jwt:groups", vec!["prod", "art"]), false, vec![("delimiter", vec!["/"])] => false ; "17")]
    #[test_case(new_fkv("s3:LocationConstraint", vec![KeyName::S3(S3LocationConstraint).var_name().as_str()]), false, vec![("LocationConstraint", vec!["us-west-1"])] => true ; "18")]
    #[test_case(new_fkv("s3:ExistingObjectTag/security", vec!["public"]), false, vec![("ExistingObjectTag/security", vec!["public"])] => true ; "19")]
    #[test_case(new_fkv("s3:ExistingObjectTag/security", vec!["public"]), false, vec![("ExistingObjectTag/security", vec!["private"])] => false ; "20")]
    #[test_case(new_fkv("s3:ExistingObjectTag/security", vec!["public"]), false, vec![("ExistingObjectTag/project", vec!["webapp"])] => false ; "21")]
    fn test_string_equals(s: FuncKeyValue<StringFuncValue>, for_all: bool, values: Vec<(&str, Vec<&str>)>) -> bool {
        test_eval(s, for_all, false, false, values)
    }
    #[test_case(new_fkv("s3:x-amz-copy-source", vec!["mybucket/myobject"]), false, vec![("x-amz-copy-source", vec!["mybucket/myobject"])] => false ; "1")]
    #[test_case(new_fkv("s3:x-amz-copy-source", vec!["mybucket/myobject"]), false, vec![("x-amz-copy-source", vec!["yourbucket/myobject"])] => true ; "2")]
    #[test_case(new_fkv("s3:x-amz-copy-source", vec!["mybucket/myobject"]), false, vec![] => true ; "3")]
    #[test_case(new_fkv("s3:x-amz-copy-source", vec!["mybucket/myobject"]), false, vec![("delimiter", vec!["/"])] => true ; "4")]
    #[test_case(new_fkv("s3:LocationConstraint", vec!["eu-west-1", "ap-southeast-1"]), false, vec![("LocationConstraint", vec!["eu-west-1"])] => false ; "5")]
    #[test_case(new_fkv("s3:LocationConstraint", vec!["eu-west-1", "ap-southeast-1"]), false, vec![("LocationConstraint", vec!["ap-southeast-1"])] => false ; "6")]
    #[test_case(new_fkv("s3:LocationConstraint", vec!["eu-west-1", "ap-southeast-1"]), false, vec![("LocationConstraint", vec!["us-east-1"])] => true ; "7")]
    #[test_case(new_fkv("s3:LocationConstraint", vec!["eu-west-1", "ap-southeast-1"]), false, vec![] => true ; "8")]
    #[test_case(new_fkv("s3:LocationConstraint", vec!["eu-west-1", "ap-southeast-1"]), false, vec![("delimiter", vec!["/"])] => true ; "9")]
    #[test_case(new_fkv("jwt:groups", vec!["prod", "art"]), true, vec![("groups", vec!["prod", "art"])] => false ; "10")]
    #[test_case(new_fkv("jwt:groups", vec!["prod", "art"]), true, vec![("groups", vec!["art"])] => false ; "11")]
    #[test_case(new_fkv("jwt:groups", vec!["prod", "art"]), true, vec![] => false ; "12")]
    #[test_case(new_fkv("jwt:groups", vec!["prod", "art"]), true, vec![("delimiter", vec!["/"])] => false ; "13")]
    #[test_case(new_fkv("jwt:groups", vec!["prod", "art"]), false, vec![("groups", vec!["prod", "art"])] => false ; "14")]
    #[test_case(new_fkv("jwt:groups", vec!["prod", "art"]), false, vec![("groups", vec!["art"])] => false ; "15")]
    #[test_case(new_fkv("jwt:groups", vec!["prod", "art"]), false, vec![] => true ; "16")]
    #[test_case(new_fkv("jwt:groups", vec!["prod", "art"]), false, vec![("delimiter", vec!["/"])] => true ; "17")]
    fn test_string_not_equals(s: FuncKeyValue<StringFuncValue>, for_all: bool, values: Vec<(&str, Vec<&str>)>) -> bool {
        test_eval(s, for_all, false, true, values)
    }
    #[test_case(new_fkv("s3:x-amz-copy-source", vec!["mybucket/MYOBJECT"]), false, vec![("x-amz-copy-source", vec!["mybucket/myobject"])] => true ; "1")]
    #[test_case(new_fkv("s3:x-amz-copy-source", vec!["mybucket/MYOBJECT"]), false, vec![("x-amz-copy-source", vec!["yourbucket/myobject"])] => false ; "2")]
    #[test_case(new_fkv("s3:x-amz-copy-source", vec!["mybucket/MYOBJECT"]), false, vec![] => false ; "3")]
    #[test_case(new_fkv("s3:x-amz-copy-source", vec!["mybucket/MYOBJECT"]), false, vec![("delimiter", vec!["/"])] => false ; "4")]
    #[test_case(new_fkv("s3:LocationConstraint", vec!["EU-WEST-1", "AP-southeast-1"]), false, vec![("LocationConstraint", vec!["eu-west-1"])] => true ; "5")]
    #[test_case(new_fkv("s3:LocationConstraint", vec!["EU-WEST-1", "AP-southeast-1"]), false, vec![("LocationConstraint", vec!["ap-southeast-1"])] => true ; "6")]
    #[test_case(new_fkv("s3:LocationConstraint", vec!["EU-WEST-1", "AP-southeast-1"]), false, vec![("LocationConstraint", vec!["us-east-1"])] => false ; "7")]
    #[test_case(new_fkv("s3:LocationConstraint", vec!["EU-WEST-1", "AP-southeast-1"]), false, vec![] => false ; "8")]
    #[test_case(new_fkv("s3:LocationConstraint", vec!["EU-WEST-1", "AP-southeast-1"]), false, vec![("delimiter", vec!["/"])] => false ; "9")]
    #[test_case(new_fkv("jwt:groups", vec!["Prod", "Art"]), true, vec![("groups", vec!["prod", "art"])] => true ; "10")]
    #[test_case(new_fkv("jwt:groups", vec!["Prod", "Art"]), true, vec![("groups", vec!["art"])] => true ; "11")]
    #[test_case(new_fkv("jwt:groups", vec!["Prod", "Art"]), true, vec![] => true ; "12")]
    #[test_case(new_fkv("jwt:groups", vec!["Prod", "Art"]), true, vec![("delimiter", vec!["/"])] => true ; "13")]
    #[test_case(new_fkv("jwt:groups", vec!["Prod", "Art"]), false, vec![("groups", vec!["prod", "art"])] => true ; "14")]
    #[test_case(new_fkv("jwt:groups", vec!["Prod", "Art"]), false, vec![("groups", vec!["art"])] => true ; "15")]
    #[test_case(new_fkv("jwt:groups", vec!["Prod", "Art"]), false, vec![] => false ; "16")]
    #[test_case(new_fkv("jwt:groups", vec!["Prod", "Art"]), false, vec![("delimiter", vec!["/"])] => false ; "17")]
    fn test_string_equals_ignore_case(s: FuncKeyValue<StringFuncValue>, for_all: bool, values: Vec<(&str, Vec<&str>)>) -> bool {
        test_eval(s, for_all, true, false, values)
    }
    #[test_case(new_fkv("s3:x-amz-copy-source", vec!["mybucket/MYOBJECT"]), false, vec![("x-amz-copy-source", vec!["mybucket/myobject"])] => false ; "1")]
    #[test_case(new_fkv("s3:x-amz-copy-source", vec!["mybucket/MYOBJECT"]), false, vec![("x-amz-copy-source", vec!["yourbucket/myobject"])] => true ; "2")]
    #[test_case(new_fkv("s3:x-amz-copy-source", vec!["mybucket/MYOBJECT"]), false, vec![] => true ; "3")]
    #[test_case(new_fkv("s3:x-amz-copy-source", vec!["mybucket/MYOBJECT"]), false, vec![("delimiter", vec!["/"])] => true ; "4")]
    #[test_case(new_fkv("s3:LocationConstraint", vec!["EU-WEST-1", "AP-southeast-1"]), false, vec![("LocationConstraint", vec!["eu-west-1"])] => false ; "5")]
    #[test_case(new_fkv("s3:LocationConstraint", vec!["EU-WEST-1", "AP-southeast-1"]), false, vec![("LocationConstraint", vec!["ap-southeast-1"])] => false ; "6")]
    #[test_case(new_fkv("s3:LocationConstraint", vec!["EU-WEST-1", "AP-southeast-1"]), false, vec![("LocationConstraint", vec!["us-east-1"])] => true ; "7")]
    #[test_case(new_fkv("s3:LocationConstraint", vec!["EU-WEST-1", "AP-southeast-1"]), false, vec![] => true ; "8")]
    #[test_case(new_fkv("s3:LocationConstraint", vec!["EU-WEST-1", "AP-southeast-1"]), false, vec![("delimiter", vec!["/"])] => true ; "9")]
    #[test_case(new_fkv("jwt:groups", vec!["Prod", "Art"]), true, vec![("groups", vec!["prod", "art"])] => false ; "10")]
    #[test_case(new_fkv("jwt:groups", vec!["Prod", "Art"]), true, vec![("groups", vec!["art"])] => false ; "11")]
    #[test_case(new_fkv("jwt:groups", vec!["Prod", "Art"]), true, vec![] => false ; "12")]
    #[test_case(new_fkv("jwt:groups", vec!["Prod", "Art"]), true, vec![("delimiter", vec!["/"])] => false ; "13")]
    #[test_case(new_fkv("jwt:groups", vec!["Prod", "Art"]), false, vec![("groups", vec!["prod", "art"])] => false ; "14")]
    #[test_case(new_fkv("jwt:groups", vec!["Prod", "Art"]), false, vec![("groups", vec!["art"])] => false ; "15")]
    #[test_case(new_fkv("jwt:groups", vec!["Prod", "Art"]), false, vec![] => true ; "16")]
    #[test_case(new_fkv("jwt:groups", vec!["Prod", "Art"]), false, vec![("delimiter", vec!["/"])] => true ; "17")]
    fn test_string_not_equals_ignore_case(
        s: FuncKeyValue<StringFuncValue>,
        for_all: bool,
        values: Vec<(&str, Vec<&str>)>,
    ) -> bool {
        test_eval(s, for_all, true, true, values)
    }
    /// Drives `eval_like` synchronously; `negate` XORs the result to model StringNotLike.
    fn test_eval_like(s: FuncKeyValue<StringFuncValue>, for_all: bool, negate: bool, values: Vec<(&str, Vec<&str>)>) -> bool {
        let map: HashMap<String, Vec<String>> = values
            .into_iter()
            .map(|(k, v)| (k.to_owned(), v.into_iter().map(ToOwned::to_owned).collect::<Vec<String>>()))
            .collect();
        let result = s.eval_like(for_all, &map, None);
        pollster::block_on(result) ^ negate
    }
    #[test_case(new_fkv("s3:x-amz-copy-source", vec!["mybucket/myobject"]), false, vec![("x-amz-copy-source", vec!["mybucket/myobject"])] => true ; "1")]
    #[test_case(new_fkv("s3:x-amz-copy-source", vec!["mybucket/myobject"]), false, vec![("x-amz-copy-source", vec!["yourbucket/myobject"])] => false ; "2")]
    #[test_case(new_fkv("s3:x-amz-copy-source", vec!["mybucket/myobject"]), false, vec![] => false ; "3")]
    #[test_case(new_fkv("s3:x-amz-copy-source", vec!["mybucket/myobject"]), false, vec![("delimiter", vec!["/"])] => false ; "4")]
    #[test_case(new_fkv("s3:LocationConstraint", vec!["eu-west-*", "ap-southeast-1"]), false, vec![("LocationConstraint", vec!["eu-west-1"])] => true ; "5")]
    #[test_case(new_fkv("s3:LocationConstraint", vec!["eu-west-*", "ap-southeast-1"]), false, vec![("LocationConstraint", vec!["ap-southeast-1"])] => true ; "6")]
    #[test_case(new_fkv("s3:LocationConstraint", vec!["eu-west-*", "ap-southeast-1"]), false, vec![("LocationConstraint", vec!["us-east-1"])] => false ; "7")]
    #[test_case(new_fkv("s3:LocationConstraint", vec!["eu-west-*", "ap-southeast-1"]), false, vec![] => false ; "8")]
    #[test_case(new_fkv("s3:LocationConstraint", vec!["eu-west-*", "ap-southeast-1"]), false, vec![("delimiter", vec!["/"])] => false ; "9")]
    #[test_case(new_fkv("s3:LocationConstraint", vec!["eu-west-*", "ap-southeast-1"]), false, vec![("LocationConstraint", vec!["eu-west-2"])] => true ; "10")]
    #[test_case(new_fkv("jwt:groups", vec!["prod", "art*"]), true, vec![("groups", vec!["prod", "art"])] => true ; "11")]
    #[test_case(new_fkv("jwt:groups", vec!["prod", "art*"]), true, vec![("groups", vec!["art"])] => true ; "12")]
    #[test_case(new_fkv("jwt:groups", vec!["prod", "art*"]), true, vec![] => true ; "13")]
    #[test_case(new_fkv("jwt:groups", vec!["prod", "art*"]), true, vec![("delimiter", vec!["/"])] => true ; "14")]
    #[test_case(new_fkv("jwt:groups", vec!["prod*", "art"]), false, vec![("groups", vec!["prod", "art"])] => true ; "15")]
    #[test_case(new_fkv("jwt:groups", vec!["prod*", "art"]), false, vec![("groups", vec!["art"])] => true ; "16")]
    #[test_case(new_fkv("jwt:groups", vec!["prod*", "art"]), false, vec![] => false ; "17")]
    #[test_case(new_fkv("jwt:groups", vec!["prod*", "art"]), false, vec![("delimiter", vec!["/"])] => false ; "18")]
    fn test_string_like(s: FuncKeyValue<StringFuncValue>, for_all: bool, values: Vec<(&str, Vec<&str>)>) -> bool {
        test_eval_like(s, for_all, false, values)
    }
    #[test_case(new_fkv("s3:x-amz-copy-source", vec!["mybucket/myobject"]), false, vec![("x-amz-copy-source", vec!["mybucket/myobject"])] => false ; "1")]
    #[test_case(new_fkv("s3:x-amz-copy-source", vec!["mybucket/myobject"]), false, vec![("x-amz-copy-source", vec!["yourbucket/myobject"])] => true ; "2")]
    #[test_case(new_fkv("s3:x-amz-copy-source", vec!["mybucket/myobject"]), false, vec![] => true ; "3")]
    #[test_case(new_fkv("s3:x-amz-copy-source", vec!["mybucket/myobject"]), false, vec![("delimiter", vec!["/"])] => true ; "4")]
    #[test_case(new_fkv("s3:LocationConstraint", vec!["eu-west-*", "ap-southeast-1"]), false, vec![("LocationConstraint", vec!["eu-west-1"])] => false ; "5")]
    #[test_case(new_fkv("s3:LocationConstraint", vec!["eu-west-*", "ap-southeast-1"]), false, vec![("LocationConstraint", vec!["ap-southeast-1"])] => false ; "6")]
    #[test_case(new_fkv("s3:LocationConstraint", vec!["eu-west-*", "ap-southeast-1"]), false, vec![("LocationConstraint", vec!["us-east-1"])] => true ; "7")]
    #[test_case(new_fkv("s3:LocationConstraint", vec!["eu-west-*", "ap-southeast-1"]), false, vec![] => true ; "8")]
    #[test_case(new_fkv("s3:LocationConstraint", vec!["eu-west-*", "ap-southeast-1"]), false, vec![("delimiter", vec!["/"])] => true ; "9")]
    #[test_case(new_fkv("s3:LocationConstraint", vec!["eu-west-*", "ap-southeast-1"]), false, vec![("LocationConstraint", vec!["eu-west-2"])] => false ; "10")]
    #[test_case(new_fkv("jwt:groups", vec!["prod", "art*"]), true, vec![("groups", vec!["prod", "art"])] => false ; "11")]
    #[test_case(new_fkv("jwt:groups", vec!["prod", "art*"]), true, vec![("groups", vec!["art"])] => false ; "12")]
    #[test_case(new_fkv("jwt:groups", vec!["prod", "art*"]), true, vec![] => false ; "13")]
    #[test_case(new_fkv("jwt:groups", vec!["prod", "art*"]), true, vec![("delimiter", vec!["/"])] => false ; "14")]
    #[test_case(new_fkv("jwt:groups", vec!["prod*", "art"]), false, vec![("groups", vec!["prod", "art"])] => false ; "15")]
    #[test_case(new_fkv("jwt:groups", vec!["prod*", "art"]), false, vec![("groups", vec!["art"])] => false ; "16")]
    #[test_case(new_fkv("jwt:groups", vec!["prod*", "art"]), false, vec![] => true ; "17")]
    #[test_case(new_fkv("jwt:groups", vec!["prod*", "art"]), false, vec![("delimiter", vec!["/"])] => true ; "18")]
    fn test_string_not_like(s: FuncKeyValue<StringFuncValue>, for_all: bool, values: Vec<(&str, Vec<&str>)>) -> bool {
        test_eval_like(s, for_all, true, values)
    }
}
| rust | Apache-2.0 | 666c0a9a38636eb6653dff7d9c98ff7122601ce2 | 2026-01-04T15:42:12.458416Z | false |
rustfs/rustfs | https://github.com/rustfs/rustfs/blob/666c0a9a38636eb6653dff7d9c98ff7122601ce2/crates/policy/src/policy/function/binary.rs | crates/policy/src/policy/function/binary.rs | // Copyright 2024 RustFS Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::collections::HashMap;
use serde::{Deserialize, Serialize};
use super::func::InnerFunc;
/// Condition function backing the `BinaryEquals` policy operator.
pub type BinaryFunc = InnerFunc<BinaryFuncValue>;
// todo implement it
/// Raw value for a binary comparison (presumably base64-encoded, as in AWS
/// `BinaryEquals` — TODO confirm once `evaluate` is implemented).
#[derive(Serialize, Deserialize, Clone, PartialEq, Eq, Debug)]
#[serde(transparent)]
pub struct BinaryFuncValue(String);
impl BinaryFunc {
    /// Evaluates the binary condition against the request `values`.
    ///
    /// NOT IMPLEMENTED: always panics via `todo!()`. Callers must not route
    /// `BinaryEquals` conditions here until this is filled in.
    pub fn evaluate(&self, _values: &HashMap<String, Vec<String>>) -> bool {
        todo!()
    }
}
| rust | Apache-2.0 | 666c0a9a38636eb6653dff7d9c98ff7122601ce2 | 2026-01-04T15:42:12.458416Z | false |
rustfs/rustfs | https://github.com/rustfs/rustfs/blob/666c0a9a38636eb6653dff7d9c98ff7122601ce2/crates/policy/src/policy/function/condition.rs | crates/policy/src/policy/function/condition.rs | // Copyright 2024 RustFS Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use crate::policy::variables::PolicyVariableResolver;
use serde::Deserialize;
use serde::de::{Error, MapAccess};
use serde::ser::SerializeMap;
use std::collections::HashMap;
use time::OffsetDateTime;
use super::{addr::AddrFunc, binary::BinaryFunc, bool_null::BoolFunc, date::DateFunc, number::NumberFunc, string::StringFunc};
/// One condition operator of a policy statement, pairing the operator name
/// (e.g. `StringEquals`, `IpAddress`, `DateLessThan`) with its typed function.
///
/// Variant names mirror the operator strings used in policy JSON; see
/// `Condition::to_key` and `Condition::from_deserializer` for the mapping.
/// `PartialEq` is implemented manually further down in this file.
#[derive(Clone, Deserialize, Debug)]
pub enum Condition {
    // String operators
    StringEquals(StringFunc),
    StringNotEquals(StringFunc),
    StringEqualsIgnoreCase(StringFunc),
    StringNotEqualsIgnoreCase(StringFunc),
    StringLike(StringFunc),
    StringNotLike(StringFunc),
    // Binary operator (evaluation not implemented yet)
    BinaryEquals(BinaryFunc),
    // IP address operators
    IpAddress(AddrFunc),
    NotIpAddress(AddrFunc),
    // Existence / boolean operators
    Null(BoolFunc),
    Bool(BoolFunc),
    // Numeric operators
    NumericEquals(NumberFunc),
    NumericNotEquals(NumberFunc),
    NumericLessThan(NumberFunc),
    NumericLessThanEquals(NumberFunc),
    NumericGreaterThan(NumberFunc),
    NumericGreaterThanIfExists(NumberFunc),
    NumericGreaterThanEquals(NumberFunc),
    // Date operators
    DateEquals(DateFunc),
    DateNotEquals(DateFunc),
    DateLessThan(DateFunc),
    DateLessThanEquals(DateFunc),
    DateGreaterThan(DateFunc),
    DateGreaterThanEquals(DateFunc),
}
impl Condition {
    /// Constructs a [`Condition`] from one operator entry of a policy document's
    /// `Condition` map (operator name plus its value, read from the map being
    /// deserialized).
    ///
    /// The accepted names must stay in sync with [`Condition::to_key`], which
    /// produces the names used during serialization. Unknown names error out.
    pub fn from_deserializer<'a, D: MapAccess<'a>>(key: &str, d: &mut D) -> Result<Self, D::Error> {
        Ok(match key {
            "StringEquals" => Self::StringEquals(d.next_value()?),
            "StringNotEquals" => Self::StringNotEquals(d.next_value()?),
            "StringEqualsIgnoreCase" => Self::StringEqualsIgnoreCase(d.next_value()?),
            "StringNotEqualsIgnoreCase" => Self::StringNotEqualsIgnoreCase(d.next_value()?),
            "StringLike" => Self::StringLike(d.next_value()?),
            "StringNotLike" => Self::StringNotLike(d.next_value()?),
            "BinaryEquals" => Self::BinaryEquals(d.next_value()?),
            "IpAddress" => Self::IpAddress(d.next_value()?),
            "NotIpAddress" => Self::NotIpAddress(d.next_value()?),
            "Null" => Self::Null(d.next_value()?),
            "Bool" => Self::Bool(d.next_value()?),
            "NumericEquals" => Self::NumericEquals(d.next_value()?),
            "NumericNotEquals" => Self::NumericNotEquals(d.next_value()?),
            "NumericLessThan" => Self::NumericLessThan(d.next_value()?),
            // Bug fix: this arm was missing, so policies serialized via `to_key`
            // ("NumericLessThanEquals") could not be deserialized back.
            "NumericLessThanEquals" => Self::NumericLessThanEquals(d.next_value()?),
            "NumericGreaterThan" => Self::NumericGreaterThan(d.next_value()?),
            "NumericGreaterThanIfExists" => Self::NumericGreaterThanIfExists(d.next_value()?),
            "NumericGreaterThanEquals" => Self::NumericGreaterThanEquals(d.next_value()?),
            "DateEquals" => Self::DateEquals(d.next_value()?),
            "DateNotEquals" => Self::DateNotEquals(d.next_value()?),
            // Bug fix: "DateLessThan" was also missing and failed to round-trip.
            "DateLessThan" => Self::DateLessThan(d.next_value()?),
            "DateLessThanEquals" => Self::DateLessThanEquals(d.next_value()?),
            "DateGreaterThan" => Self::DateGreaterThan(d.next_value()?),
            "DateGreaterThanEquals" => Self::DateGreaterThanEquals(d.next_value()?),
            _ => Err(Error::custom(format!("unknown key: {key}")))?,
        })
    }
    /// Returns the policy-document operator name for this condition variant.
    pub fn to_key(&self) -> &'static str {
        match self {
            Condition::StringEquals(_) => "StringEquals",
            Condition::StringNotEquals(_) => "StringNotEquals",
            Condition::StringEqualsIgnoreCase(_) => "StringEqualsIgnoreCase",
            Condition::StringNotEqualsIgnoreCase(_) => "StringNotEqualsIgnoreCase",
            Condition::StringLike(_) => "StringLike",
            Condition::StringNotLike(_) => "StringNotLike",
            Condition::BinaryEquals(_) => "BinaryEquals",
            Condition::IpAddress(_) => "IpAddress",
            Condition::NotIpAddress(_) => "NotIpAddress",
            Condition::Null(_) => "Null",
            Condition::Bool(_) => "Bool",
            Condition::NumericEquals(_) => "NumericEquals",
            Condition::NumericNotEquals(_) => "NumericNotEquals",
            Condition::NumericLessThan(_) => "NumericLessThan",
            Condition::NumericLessThanEquals(_) => "NumericLessThanEquals",
            Condition::NumericGreaterThan(_) => "NumericGreaterThan",
            Condition::NumericGreaterThanIfExists(_) => "NumericGreaterThanIfExists",
            Condition::NumericGreaterThanEquals(_) => "NumericGreaterThanEquals",
            Condition::DateEquals(_) => "DateEquals",
            Condition::DateNotEquals(_) => "DateNotEquals",
            Condition::DateLessThan(_) => "DateLessThan",
            Condition::DateLessThanEquals(_) => "DateLessThanEquals",
            Condition::DateGreaterThan(_) => "DateGreaterThan",
            Condition::DateGreaterThanEquals(_) => "DateGreaterThanEquals",
        }
    }
    /// Evaluates this condition against the request `values`, optionally using
    /// `resolver` to expand policy variables in string conditions.
    ///
    /// NOTE(review): `StringNotEquals`/`StringNotEqualsIgnoreCase` pass
    /// `negate = true` into the string evaluator AND are flipped again via
    /// `is_negate()` below — confirm the evaluator's negate flag does not itself
    /// invert the result, otherwise these double-negate.
    /// NOTE(review): `NumericGreaterThanIfExists` evaluates with `i64::ge`,
    /// identical to `NumericGreaterThanEquals` — confirm the intended
    /// "IfExists" semantics.
    pub async fn evaluate_with_resolver(
        &self,
        for_all: bool,
        values: &HashMap<String, Vec<String>>,
        resolver: Option<&dyn PolicyVariableResolver>,
    ) -> bool {
        use Condition::*;
        let r = match self {
            StringEquals(s) => s.evaluate_with_resolver(for_all, false, false, false, values, resolver).await,
            StringNotEquals(s) => s.evaluate_with_resolver(for_all, false, false, true, values, resolver).await,
            StringEqualsIgnoreCase(s) => s.evaluate_with_resolver(for_all, true, false, false, values, resolver).await,
            StringNotEqualsIgnoreCase(s) => s.evaluate_with_resolver(for_all, true, false, true, values, resolver).await,
            StringLike(s) => s.evaluate_with_resolver(for_all, false, true, false, values, resolver).await,
            StringNotLike(s) => s.evaluate_with_resolver(for_all, false, true, true, values, resolver).await,
            BinaryEquals(s) => s.evaluate(values),
            IpAddress(s) => s.evaluate(values),
            NotIpAddress(s) => s.evaluate(values),
            Null(s) => s.evaluate_null(values),
            Bool(s) => s.evaluate_bool(values),
            NumericEquals(s) => s.evaluate(i64::eq, false, values),
            NumericNotEquals(s) => s.evaluate(i64::ne, false, values),
            NumericLessThan(s) => s.evaluate(i64::lt, false, values),
            NumericLessThanEquals(s) => s.evaluate(i64::le, false, values),
            NumericGreaterThan(s) => s.evaluate(i64::gt, false, values),
            NumericGreaterThanIfExists(s) => s.evaluate(i64::ge, true, values),
            NumericGreaterThanEquals(s) => s.evaluate(i64::ge, false, values),
            DateEquals(s) => s.evaluate(OffsetDateTime::eq, values),
            DateNotEquals(s) => s.evaluate(OffsetDateTime::ne, values),
            DateLessThan(s) => s.evaluate(OffsetDateTime::lt, values),
            DateLessThanEquals(s) => s.evaluate(OffsetDateTime::le, values),
            DateGreaterThan(s) => s.evaluate(OffsetDateTime::gt, values),
            DateGreaterThanEquals(s) => s.evaluate(OffsetDateTime::ge, values),
        };
        if self.is_negate() { !r } else { r }
    }
    /// Returns true for variants whose raw evaluation result is inverted in
    /// `evaluate_with_resolver`. Note this deliberately excludes
    /// `StringNotLike`, `NumericNotEquals` and `DateNotEquals`, whose negation
    /// is handled inside their evaluators — see the review note above.
    #[inline]
    pub fn is_negate(&self) -> bool {
        use Condition::*;
        matches!(self, StringNotEquals(_) | StringNotEqualsIgnoreCase(_) | NotIpAddress(_))
    }
    /// Serializes the inner function as the value half of a `Condition` map
    /// entry; the key half comes from `to_key`.
    pub fn serialize_map<T: SerializeMap>(&self, se: &mut T) -> Result<(), T::Error> {
        match self {
            Condition::StringEquals(s) => se.serialize_value(s),
            Condition::StringNotEquals(s) => se.serialize_value(s),
            Condition::StringEqualsIgnoreCase(s) => se.serialize_value(s),
            Condition::StringNotEqualsIgnoreCase(s) => se.serialize_value(s),
            Condition::StringLike(s) => se.serialize_value(s),
            Condition::StringNotLike(s) => se.serialize_value(s),
            Condition::BinaryEquals(s) => se.serialize_value(s),
            Condition::IpAddress(s) => se.serialize_value(s),
            Condition::NotIpAddress(s) => se.serialize_value(s),
            Condition::Null(s) => se.serialize_value(s),
            Condition::Bool(s) => se.serialize_value(s),
            Condition::NumericEquals(s) => se.serialize_value(s),
            Condition::NumericNotEquals(s) => se.serialize_value(s),
            Condition::NumericLessThan(s) => se.serialize_value(s),
            Condition::NumericLessThanEquals(s) => se.serialize_value(s),
            Condition::NumericGreaterThan(s) => se.serialize_value(s),
            Condition::NumericGreaterThanIfExists(s) => se.serialize_value(s),
            Condition::NumericGreaterThanEquals(s) => se.serialize_value(s),
            Condition::DateEquals(s) => se.serialize_value(s),
            Condition::DateNotEquals(s) => se.serialize_value(s),
            Condition::DateLessThan(s) => se.serialize_value(s),
            Condition::DateLessThanEquals(s) => se.serialize_value(s),
            Condition::DateGreaterThan(s) => se.serialize_value(s),
            Condition::DateGreaterThanEquals(s) => se.serialize_value(s),
        }
    }
}
// Manual structural equality: same variant AND equal inner functions.
// NOTE(review): this is exactly what `#[derive(PartialEq)]` on `Condition`
// would generate; the manual impl could be replaced by the derive unless
// custom semantics are planned — confirm before changing.
impl PartialEq for Condition {
    fn eq(&self, other: &Self) -> bool {
        match (self, other) {
            (Self::StringEquals(l0), Self::StringEquals(r0)) => l0 == r0,
            (Self::StringNotEquals(l0), Self::StringNotEquals(r0)) => l0 == r0,
            (Self::StringEqualsIgnoreCase(l0), Self::StringEqualsIgnoreCase(r0)) => l0 == r0,
            (Self::StringNotEqualsIgnoreCase(l0), Self::StringNotEqualsIgnoreCase(r0)) => l0 == r0,
            (Self::StringLike(l0), Self::StringLike(r0)) => l0 == r0,
            (Self::StringNotLike(l0), Self::StringNotLike(r0)) => l0 == r0,
            (Self::BinaryEquals(l0), Self::BinaryEquals(r0)) => l0 == r0,
            (Self::IpAddress(l0), Self::IpAddress(r0)) => l0 == r0,
            (Self::NotIpAddress(l0), Self::NotIpAddress(r0)) => l0 == r0,
            (Self::Null(l0), Self::Null(r0)) => l0 == r0,
            (Self::Bool(l0), Self::Bool(r0)) => l0 == r0,
            (Self::NumericEquals(l0), Self::NumericEquals(r0)) => l0 == r0,
            (Self::NumericNotEquals(l0), Self::NumericNotEquals(r0)) => l0 == r0,
            (Self::NumericLessThan(l0), Self::NumericLessThan(r0)) => l0 == r0,
            (Self::NumericLessThanEquals(l0), Self::NumericLessThanEquals(r0)) => l0 == r0,
            (Self::NumericGreaterThan(l0), Self::NumericGreaterThan(r0)) => l0 == r0,
            (Self::NumericGreaterThanIfExists(l0), Self::NumericGreaterThanIfExists(r0)) => l0 == r0,
            (Self::NumericGreaterThanEquals(l0), Self::NumericGreaterThanEquals(r0)) => l0 == r0,
            (Self::DateEquals(l0), Self::DateEquals(r0)) => l0 == r0,
            (Self::DateNotEquals(l0), Self::DateNotEquals(r0)) => l0 == r0,
            (Self::DateLessThan(l0), Self::DateLessThan(r0)) => l0 == r0,
            (Self::DateLessThanEquals(l0), Self::DateLessThanEquals(r0)) => l0 == r0,
            (Self::DateGreaterThan(l0), Self::DateGreaterThan(r0)) => l0 == r0,
            (Self::DateGreaterThanEquals(l0), Self::DateGreaterThanEquals(r0)) => l0 == r0,
            // Different variants are never equal.
            _ => false,
        }
    }
}
| rust | Apache-2.0 | 666c0a9a38636eb6653dff7d9c98ff7122601ce2 | 2026-01-04T15:42:12.458416Z | false |
rustfs/rustfs | https://github.com/rustfs/rustfs/blob/666c0a9a38636eb6653dff7d9c98ff7122601ce2/crates/policy/src/policy/function/addr.rs | crates/policy/src/policy/function/addr.rs | // Copyright 2024 RustFS Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use super::func::InnerFunc;
use ipnetwork::IpNetwork;
use serde::{Deserialize, Serialize, de::Visitor};
use std::{borrow::Cow, collections::HashMap, net::IpAddr};
/// Condition function backing the `IpAddress` / `NotIpAddress` operators.
pub type AddrFunc = InnerFunc<AddrFuncValue>;
impl AddrFunc {
    /// Returns true when any request value for a configured condition key is an
    /// IP address contained in one of that key's CIDR networks.
    ///
    /// A request value that is not a syntactically valid IP address fails the
    /// whole condition immediately (strict parsing, same as the original).
    pub(crate) fn evaluate(&self, values: &HashMap<String, Vec<String>>) -> bool {
        for entry in self.0.iter() {
            // Missing keys contribute an empty iterator and are skipped.
            let request_values = values.get(entry.key.name().as_str()).map(|v| v.iter()).unwrap_or_default();
            for raw in request_values {
                match raw.parse::<IpAddr>() {
                    Ok(ip) => {
                        if entry.values.0.iter().any(|network| network.contains(ip)) {
                            return true;
                        }
                    }
                    // Unparseable value: strict failure for the whole condition.
                    Err(_) => return false,
                }
            }
        }
        false
    }
}
/// The set of CIDR networks configured for one IP-address condition key.
/// Populated by the custom `Deserialize` impl below, which accepts a single
/// CIDR string or an array of them.
#[derive(Serialize, Clone, PartialEq, Eq, Debug)]
#[serde(transparent)]
pub struct AddrFuncValue(Vec<IpNetwork>);
impl<'de> Deserialize<'de> for AddrFuncValue {
    /// Deserializes either a single CIDR string or a sequence of CIDR strings.
    ///
    /// A bare address without a `/prefix` is normalized by appending `/32`.
    /// FIXME(review): `/32` is only the single-host prefix for IPv4; a bare
    /// IPv6 address also gets `/32`, which matches a huge range instead of one
    /// host (`/128` would). Existing tests pin this behavior, so it is only
    /// documented here — confirm intent before tightening.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        struct AddrFuncValueVisitor;
        impl<'d> Visitor<'d> for AddrFuncValueVisitor {
            type Value = AddrFuncValue;
            fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
                formatter.write_str("cidr string")
            }
            fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
            where
                E: serde::de::Error,
            {
                Ok(AddrFuncValue(vec![Self::cidr::<E>(v)?]))
            }
            fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
            where
                A: serde::de::SeqAccess<'d>,
            {
                let mut data = Vec::with_capacity(seq.size_hint().unwrap_or_default());
                // Robustness fix: request owned `String`s rather than borrowed
                // `&str`. Borrowed-str deserialization fails whenever the input
                // needs unescaping (e.g. JSON escape sequences) or the reader
                // cannot hand out borrowed data (e.g. `from_reader`).
                while let Some(v) = seq.next_element::<String>()? {
                    data.push(Self::cidr::<A::Error>(&v)?)
                }
                Ok(AddrFuncValue(data))
            }
        }
        impl AddrFuncValueVisitor {
            /// Parses one CIDR string, appending `/32` when no prefix is given.
            fn cidr<E: serde::de::Error>(v: &str) -> Result<IpNetwork, E> {
                let mut cidr_str = Cow::from(v);
                if v.find('/').is_none() {
                    cidr_str.to_mut().push_str("/32");
                }
                cidr_str
                    .parse::<IpNetwork>()
                    .map_err(|_| E::custom(format!("{v} can not be parsed to CIDR")))
            }
        }
        deserializer.deserialize_any(AddrFuncValueVisitor)
    }
}
#[cfg(test)]
mod tests {
    use super::{AddrFunc, AddrFuncValue};
    use crate::policy::function::func::FuncKeyValue;
    use crate::policy::function::{
        key::Key,
        key_name::AwsKeyName::*,
        key_name::KeyName::{self, *},
    };
    use test_case::test_case;
    /// Builds an [`AddrFunc`] holding one key/value clause.
    ///
    /// The `{ 0: ... }` field syntax is used because `AddrFunc` is a type
    /// alias, and tuple-struct constructors cannot be invoked through an
    /// alias. Unparseable CIDR strings are silently dropped (all test
    /// inputs here are valid).
    fn new_func(name: KeyName, variable: Option<String>, value: Vec<&str>) -> AddrFunc {
        AddrFunc {
            0: vec![FuncKeyValue {
                key: Key { name, variable },
                values: AddrFuncValue(value.into_iter().filter_map(|x| x.parse().ok()).collect()),
            }],
        }
    }
    // Deserialization: bare addresses (IPv4 and IPv6 alike) gain a "/32"
    // suffix, and both scalar and array forms are accepted, with and
    // without a key variable ("/a").
    #[test_case(r#"{"aws:SourceIp": "203.0.113.0/24"}"#, new_func(Aws(AWSSourceIP), None, vec!["203.0.113.0/24"]); "1")]
    #[test_case(r#"{"aws:SourceIp": "203.0.113.0"}"#, new_func(Aws(AWSSourceIP), None, vec!["203.0.113.0/32"]); "2")]
    #[test_case(r#"{"aws:SourceIp": "2001:DB8:1234:5678::/64"}"#, new_func(Aws(AWSSourceIP),None, vec!["2001:DB8:1234:5678::/64"]); "3")]
    #[test_case(r#"{"aws:SourceIp": "2001:DB8:1234:5678::"}"#, new_func(Aws(AWSSourceIP), None, vec!["2001:DB8:1234:5678::/32"]); "4")]
    #[test_case(r#"{"aws:SourceIp": ["203.0.113.0/24","203.0.113.0"]}"#, new_func(Aws(AWSSourceIP), None, vec!["203.0.113.0/24", "203.0.113.0/32"]); "5")]
    #[test_case(r#"{"aws:SourceIp": ["2001:DB8:1234:5678::/64","203.0.113.0/24"]}"#, new_func(Aws(AWSSourceIP), None, vec!["2001:DB8:1234:5678::/64", "203.0.113.0/24"]); "6")]
    #[test_case(r#"{"aws:SourceIp": ["2001:DB8:1234:5678::/64", "2001:DB8:1234:5678::"]}"#, new_func(Aws(AWSSourceIP),None, vec!["2001:DB8:1234:5678::/64", "2001:DB8:1234:5678::/32"]); "7")]
    #[test_case(r#"{"aws:SourceIp": ["2001:DB8:1234:5678::", "203.0.113.0"]}"#, new_func(Aws(AWSSourceIP), None, vec!["2001:DB8:1234:5678::/32", "203.0.113.0/32"]); "8")]
    #[test_case(r#"{"aws:SourceIp/a": "203.0.113.0/24"}"#, new_func(Aws(AWSSourceIP), Some("a".into()), vec!["203.0.113.0/24"]); "9")]
    #[test_case(r#"{"aws:SourceIp/a": "203.0.113.0/24"}"#, new_func(Aws(AWSSourceIP), Some("a".into()), vec!["203.0.113.0/24"]); "10")]
    #[test_case(r#"{"aws:SourceIp/a": "203.0.113.0"}"#, new_func(Aws(AWSSourceIP), Some("a".into()), vec!["203.0.113.0/32"]); "11")]
    #[test_case(r#"{"aws:SourceIp/a": "2001:DB8:1234:5678::/64"}"#, new_func(Aws(AWSSourceIP),Some("a".into()), vec!["2001:DB8:1234:5678::/64"]); "12")]
    #[test_case(r#"{"aws:SourceIp/a": "2001:DB8:1234:5678::"}"#, new_func(Aws(AWSSourceIP), Some("a".into()), vec!["2001:DB8:1234:5678::/32"]); "13")]
    #[test_case(r#"{"aws:SourceIp/a": ["203.0.113.0/24", "203.0.113.0"]}"#, new_func(Aws(AWSSourceIP), Some("a".into()), vec!["203.0.113.0/24", "203.0.113.0/32"]); "14")]
    #[test_case(r#"{"aws:SourceIp/a": ["2001:DB8:1234:5678::/64", "203.0.113.0/24"]}"#, new_func(Aws(AWSSourceIP), Some("a".into()), vec!["2001:DB8:1234:5678::/64", "203.0.113.0/24"]); "15")]
    #[test_case(r#"{"aws:SourceIp/a": ["2001:DB8:1234:5678::/64", "2001:DB8:1234:5678::"]}"#, new_func(Aws(AWSSourceIP),Some("a".into()), vec!["2001:DB8:1234:5678::/64", "2001:DB8:1234:5678::/32"]); "16")]
    #[test_case(r#"{"aws:SourceIp/a": ["2001:DB8:1234:5678::", "203.0.113.0"]}"#, new_func(Aws(AWSSourceIP), Some("a".into()), vec!["2001:DB8:1234:5678::/32", "203.0.113.0/32"]); "17")]
    fn test_deser(input: &str, expect: AddrFunc) -> Result<(), serde_json::Error> {
        let v: AddrFunc = serde_json::from_str(input)?;
        assert_eq!(v, expect);
        Ok(())
    }
    // Serialization always emits the array form, with networks normalized
    // by `ipnetwork` (lowercase hex, explicit prefix length).
    #[test_case(r#"{"aws:SourceIp":["203.0.113.0/24"]}"#, new_func(Aws(AWSSourceIP), None, vec!["203.0.113.0/24"]); "1")]
    #[test_case(r#"{"aws:SourceIp":["203.0.113.0/32"]}"#, new_func(Aws(AWSSourceIP), None, vec!["203.0.113.0/32"]); "2")]
    #[test_case(r#"{"aws:SourceIp":["2001:db8:1234:5678::/64"]}"#, new_func(Aws(AWSSourceIP),None, vec!["2001:DB8:1234:5678::/64"]); "3")]
    #[test_case(r#"{"aws:SourceIp":["2001:db8:1234:5678::/32"]}"#, new_func(Aws(AWSSourceIP), None, vec!["2001:DB8:1234:5678::/32"]); "4")]
    #[test_case(r#"{"aws:SourceIp":["203.0.113.0/24","203.0.113.0/32"]}"#, new_func(Aws(AWSSourceIP), None, vec!["203.0.113.0/24", "203.0.113.0/32"]); "5")]
    #[test_case(r#"{"aws:SourceIp":["2001:db8:1234:5678::/64","203.0.113.0/24"]}"#, new_func(Aws(AWSSourceIP), None, vec!["2001:DB8:1234:5678::/64", "203.0.113.0/24"]); "6")]
    #[test_case(r#"{"aws:SourceIp":["2001:db8:1234:5678::/64","2001:db8:1234:5678::/32"]}"#, new_func(Aws(AWSSourceIP),None, vec!["2001:DB8:1234:5678::/64", "2001:DB8:1234:5678::/32"]); "7")]
    #[test_case(r#"{"aws:SourceIp":["2001:db8:1234:5678::/32","203.0.113.0/32"]}"#, new_func(Aws(AWSSourceIP), None, vec!["2001:DB8:1234:5678::/32", "203.0.113.0/32"]); "8")]
    #[test_case(r#"{"aws:SourceIp/a":["203.0.113.0/24"]}"#, new_func(Aws(AWSSourceIP), Some("a".into()), vec!["203.0.113.0/24"]); "9")]
    #[test_case(r#"{"aws:SourceIp/a":["203.0.113.0/24"]}"#, new_func(Aws(AWSSourceIP), Some("a".into()), vec!["203.0.113.0/24"]); "10")]
    #[test_case(r#"{"aws:SourceIp/a":["203.0.113.0/32"]}"#, new_func(Aws(AWSSourceIP), Some("a".into()), vec!["203.0.113.0/32"]); "11")]
    #[test_case(r#"{"aws:SourceIp/a":["2001:db8:1234:5678::/64"]}"#, new_func(Aws(AWSSourceIP),Some("a".into()), vec!["2001:DB8:1234:5678::/64"]); "12")]
    #[test_case(r#"{"aws:SourceIp/a":["2001:db8:1234:5678::/32"]}"#, new_func(Aws(AWSSourceIP), Some("a".into()), vec!["2001:DB8:1234:5678::/32"]); "13")]
    #[test_case(r#"{"aws:SourceIp/a":["203.0.113.0/24","203.0.113.0/32"]}"#, new_func(Aws(AWSSourceIP), Some("a".into()), vec!["203.0.113.0/24", "203.0.113.0/32"]); "14")]
    #[test_case(r#"{"aws:SourceIp/a":["2001:db8:1234:5678::/64","203.0.113.0/24"]}"#, new_func(Aws(AWSSourceIP), Some("a".into()), vec!["2001:DB8:1234:5678::/64", "203.0.113.0/24"]); "15")]
    #[test_case(r#"{"aws:SourceIp/a":["2001:db8:1234:5678::/64","2001:db8:1234:5678::/32"]}"#, new_func(Aws(AWSSourceIP),Some("a".into()), vec!["2001:DB8:1234:5678::/64", "2001:DB8:1234:5678::/32"]); "16")]
    #[test_case(r#"{"aws:SourceIp/a":["2001:db8:1234:5678::/32","203.0.113.0/32"]}"#, new_func(Aws(AWSSourceIP), Some("a".into()), vec!["2001:DB8:1234:5678::/32", "203.0.113.0/32"]); "17")]
    fn test_ser(expect: &str, input: AddrFunc) -> Result<(), serde_json::Error> {
        let v = serde_json::to_string(&input)?;
        assert_eq!(v, expect);
        Ok(())
    }
}
| rust | Apache-2.0 | 666c0a9a38636eb6653dff7d9c98ff7122601ce2 | 2026-01-04T15:42:12.458416Z | false |
rustfs/rustfs | https://github.com/rustfs/rustfs/blob/666c0a9a38636eb6653dff7d9c98ff7122601ce2/crates/policy/src/policy/function/key_name.rs | crates/policy/src/policy/function/key_name.rs | // Copyright 2024 RustFS Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use crate::policy::Error::{self, InvalidKeyName};
use serde::{Deserialize, Serialize};
use strum::{EnumString, IntoStaticStr};
/// A policy condition key name, namespaced by its scope prefix
/// (`aws:`, `jwt:`, `ldap:`, `sts:`, `svc:`, `s3:`).
///
/// `try_from = "&str"` + `untagged` lets serde parse any prefixed string
/// form (e.g. "aws:SourceIp", "jwt:sub") directly into the right variant.
#[derive(Clone, Eq, PartialEq, Debug, Serialize, Deserialize)]
#[serde(try_from = "&str", untagged)]
pub enum KeyName {
    Aws(AwsKeyName),
    Jwt(JwtKeyName),
    Ldap(LdapKeyName),
    Sts(StsKeyName),
    Svc(SvcKeyName),
    S3(S3KeyName),
}
impl TryFrom<&str> for KeyName {
    type Error = Error;

    /// Parses a condition key name by dispatching on its scope prefix and
    /// delegating to the matching sub-enum's strum-generated parser. Any
    /// unknown prefix or unrecognized suffix yields [`InvalidKeyName`].
    fn try_from(value: &str) -> Result<Self, Self::Error> {
        // Single place that builds the error, so every branch reports the
        // original input verbatim.
        let invalid = || InvalidKeyName(value.into());
        if value.starts_with("s3:") {
            S3KeyName::try_from(value).map(Self::S3).map_err(|_| invalid())
        } else if value.starts_with("aws:") {
            AwsKeyName::try_from(value).map(Self::Aws).map_err(|_| invalid())
        } else if value.starts_with("ldap:") {
            LdapKeyName::try_from(value).map(Self::Ldap).map_err(|_| invalid())
        } else if value.starts_with("sts:") {
            StsKeyName::try_from(value).map(Self::Sts).map_err(|_| invalid())
        } else if value.starts_with("jwt:") {
            JwtKeyName::try_from(value).map(Self::Jwt).map_err(|_| invalid())
        } else if value.starts_with("svc:") {
            SvcKeyName::try_from(value).map(Self::Svc).map_err(|_| invalid())
        } else {
            Err(invalid())
        }
    }
}
impl KeyName {
    /// Condition keys that are meaningful for every request; used as the
    /// default key set for generic condition validation.
    pub const COMMON_KEYS: &'static [KeyName] = &[
        // s3
        KeyName::S3(S3KeyName::S3SignatureVersion),
        KeyName::S3(S3KeyName::S3AuthType),
        KeyName::S3(S3KeyName::S3SignatureAge),
        KeyName::S3(S3KeyName::S3XAmzContentSha256),
        KeyName::S3(S3KeyName::S3LocationConstraint),
        //aws
        KeyName::Aws(AwsKeyName::AWSReferer),
        KeyName::Aws(AwsKeyName::AWSSourceIP),
        KeyName::Aws(AwsKeyName::AWSUserAgent),
        KeyName::Aws(AwsKeyName::AWSSecureTransport),
        KeyName::Aws(AwsKeyName::AWSCurrentTime),
        KeyName::Aws(AwsKeyName::AWSEpochTime),
        KeyName::Aws(AwsKeyName::AWSPrincipalType),
        KeyName::Aws(AwsKeyName::AWSUserID),
        KeyName::Aws(AwsKeyName::AWSUsername),
        KeyName::Aws(AwsKeyName::AWSGroups),
        // ldap
        KeyName::Ldap(LdapKeyName::User),
        KeyName::Ldap(LdapKeyName::Username),
        KeyName::Ldap(LdapKeyName::Groups),
        // jwt
        KeyName::Jwt(JwtKeyName::JWTSub),
        KeyName::Jwt(JwtKeyName::JWTIss),
        KeyName::Jwt(JwtKeyName::JWTAud),
        KeyName::Jwt(JwtKeyName::JWTJti),
        KeyName::Jwt(JwtKeyName::JWTName),
        KeyName::Jwt(JwtKeyName::JWTUpn),
        KeyName::Jwt(JwtKeyName::JWTGroups),
        KeyName::Jwt(JwtKeyName::JWTGivenName),
        KeyName::Jwt(JwtKeyName::JWTFamilyName),
        KeyName::Jwt(JwtKeyName::JWTMiddleName),
        KeyName::Jwt(JwtKeyName::JWTNickName),
        KeyName::Jwt(JwtKeyName::JWTPrefUsername),
        KeyName::Jwt(JwtKeyName::JWTProfile),
        KeyName::Jwt(JwtKeyName::JWTPicture),
        KeyName::Jwt(JwtKeyName::JWTWebsite),
        KeyName::Jwt(JwtKeyName::JWTEmail),
        KeyName::Jwt(JwtKeyName::JWTGender),
        KeyName::Jwt(JwtKeyName::JWTBirthdate),
        KeyName::Jwt(JwtKeyName::JWTPhoneNumber),
        KeyName::Jwt(JwtKeyName::JWTAddress),
        KeyName::Jwt(JwtKeyName::JWTScope),
        KeyName::Jwt(JwtKeyName::JWTClientID),
    ];

    /// Byte length of this key's scope prefix, including the trailing ':'.
    /// Used by [`KeyName::name`] to strip the prefix off the full name.
    pub const fn prefix(&self) -> usize {
        match self {
            KeyName::Aws(_) => "aws:".len(),
            KeyName::Jwt(_) => "jwt:".len(),
            KeyName::Ldap(_) => "ldap:".len(),
            KeyName::Sts(_) => "sts:".len(),
            KeyName::Svc(_) => "svc:".len(),
            KeyName::S3(_) => "s3:".len(),
        }
    }

    /// The key name without its scope prefix, e.g. "SourceIp" for
    /// `aws:SourceIp`.
    pub fn name(&self) -> &str {
        &Into::<&str>::into(self)[self.prefix()..]
    }

    /// The policy-variable form of this key, e.g. "${aws:username}".
    ///
    /// Fix: the strum serialization of the inner enum already carries the
    /// scope prefix ("aws:username"), so the previous per-variant
    /// `format!("${{aws:{}}}", ...)` doubled it, producing
    /// "${aws:aws:username}". Wrapping the full prefixed name directly
    /// yields the correct variable form for every scope.
    pub fn var_name(&self) -> String {
        format!("${{{}}}", Into::<&str>::into(self))
    }
}
// Returns the full, prefix-qualified static name (e.g. "aws:SourceIp") by
// delegating to the strum-generated `IntoStaticStr` conversion of the
// inner enum.
impl From<&KeyName> for &'static str {
    fn from(k: &KeyName) -> Self {
        match k {
            KeyName::Aws(aws) => aws.into(),
            KeyName::Jwt(jwt) => jwt.into(),
            KeyName::Ldap(ldap) => ldap.into(),
            KeyName::Sts(sts) => sts.into(),
            KeyName::Svc(svc) => svc.into(),
            KeyName::S3(s3) => s3.into(),
        }
    }
}
/// Condition keys in the `s3:` scope.
///
/// The strum serializations include the "s3:" prefix; `KeyName::name` and
/// `KeyName::prefix` rely on that when stripping the scope.
#[derive(Clone, EnumString, Debug, IntoStaticStr, Eq, PartialEq, Serialize, Deserialize)]
#[serde(try_from = "&str", into = "&str")]
pub enum S3KeyName {
    #[strum(serialize = "s3:x-amz-copy-source")]
    S3XAmzCopySource,
    #[strum(serialize = "s3:x-amz-server-side-encryption")]
    S3XAmzServerSideEncryption,
    #[strum(serialize = "s3:x-amz-server-side-encryption-customer-algorithm")]
    S3XAmzServerSideEncryptionCustomerAlgorithm,
    #[strum(serialize = "s3:signatureversion")]
    S3SignatureVersion,
    #[strum(serialize = "s3:authType")]
    S3AuthType,
    #[strum(serialize = "s3:signatureAge")]
    S3SignatureAge,
    #[strum(serialize = "s3:x-amz-content-sha256")]
    S3XAmzContentSha256,
    #[strum(serialize = "s3:LocationConstraint")]
    S3LocationConstraint,
    #[strum(serialize = "s3:object-lock-retain-until-date")]
    S3ObjectLockRetainUntilDate,
    #[strum(serialize = "s3:max-keys")]
    S3MaxKeys,
    #[strum(serialize = "s3:x-amz-metadata-directive")]
    S3XAmzMetadataDirective,
    #[strum(serialize = "s3:x-amz-storage-class")]
    S3XAmzStorageClass,
    #[strum(serialize = "s3:prefix")]
    S3Prefix,
    #[strum(serialize = "s3:delimiter")]
    S3Delimiter,
    #[strum(serialize = "s3:ExistingObjectTag")]
    S3ExistingObjectTag,
    #[strum(serialize = "s3:RequestObjectTagKeys")]
    S3RequestObjectTagKeys,
    #[strum(serialize = "s3:RequestObjectTag")]
    S3RequestObjectTag,
}
/// Condition keys in the `jwt:` scope — standard OIDC/JWT claim names,
/// prefixed with "jwt:" in the strum serialization.
#[derive(Clone, EnumString, Debug, IntoStaticStr, Eq, PartialEq, Serialize, Deserialize)]
#[serde(try_from = "&str", into = "&str")]
pub enum JwtKeyName {
    #[strum(serialize = "jwt:sub")]
    JWTSub,
    #[strum(serialize = "jwt:iss")]
    JWTIss,
    #[strum(serialize = "jwt:aud")]
    JWTAud,
    #[strum(serialize = "jwt:jti")]
    JWTJti,
    #[strum(serialize = "jwt:name")]
    JWTName,
    #[strum(serialize = "jwt:upn")]
    JWTUpn,
    #[strum(serialize = "jwt:groups")]
    JWTGroups,
    #[strum(serialize = "jwt:given_name")]
    JWTGivenName,
    #[strum(serialize = "jwt:family_name")]
    JWTFamilyName,
    #[strum(serialize = "jwt:middle_name")]
    JWTMiddleName,
    #[strum(serialize = "jwt:nickname")]
    JWTNickName,
    #[strum(serialize = "jwt:preferred_username")]
    JWTPrefUsername,
    #[strum(serialize = "jwt:profile")]
    JWTProfile,
    #[strum(serialize = "jwt:picture")]
    JWTPicture,
    #[strum(serialize = "jwt:website")]
    JWTWebsite,
    #[strum(serialize = "jwt:email")]
    JWTEmail,
    #[strum(serialize = "jwt:gender")]
    JWTGender,
    #[strum(serialize = "jwt:birthdate")]
    JWTBirthdate,
    #[strum(serialize = "jwt:phone_number")]
    JWTPhoneNumber,
    #[strum(serialize = "jwt:address")]
    JWTAddress,
    #[strum(serialize = "jwt:scope")]
    JWTScope,
    #[strum(serialize = "jwt:client_id")]
    JWTClientID,
}
/// Condition keys in the `svc:` (service account) scope.
#[derive(Clone, EnumString, Debug, IntoStaticStr, Eq, PartialEq, Serialize, Deserialize)]
#[serde(try_from = "&str", into = "&str")]
pub enum SvcKeyName {
    #[strum(serialize = "svc:DurationSeconds")]
    SVCDurationSeconds,
}
/// Condition keys in the `ldap:` scope.
#[derive(Clone, EnumString, Debug, IntoStaticStr, Eq, PartialEq, Serialize, Deserialize)]
#[serde(try_from = "&str", into = "&str")]
pub enum LdapKeyName {
    #[strum(serialize = "ldap:user")]
    User,
    #[strum(serialize = "ldap:username")]
    Username,
    #[strum(serialize = "ldap:groups")]
    Groups,
}
/// Condition keys in the `sts:` scope.
#[derive(Clone, EnumString, Debug, IntoStaticStr, Eq, PartialEq, Serialize, Deserialize)]
#[serde(try_from = "&str", into = "&str")]
pub enum StsKeyName {
    #[strum(serialize = "sts:DurationSeconds")]
    STSDurationSeconds,
}
/// Condition keys in the `aws:` scope.
///
/// Note the mixed casing is deliberate and significant for parsing:
/// some keys are CamelCase ("aws:SourceIp") while others are lowercase
/// ("aws:principaltype", "aws:userid", "aws:username", "aws:groups").
#[derive(Clone, EnumString, Debug, IntoStaticStr, Eq, PartialEq, Serialize, Deserialize)]
#[serde(try_from = "&str", into = "&str")]
pub enum AwsKeyName {
    #[strum(serialize = "aws:Referer")]
    AWSReferer,
    #[strum(serialize = "aws:SourceIp")]
    AWSSourceIP,
    #[strum(serialize = "aws:UserAgent")]
    AWSUserAgent,
    #[strum(serialize = "aws:SecureTransport")]
    AWSSecureTransport,
    #[strum(serialize = "aws:CurrentTime")]
    AWSCurrentTime,
    #[strum(serialize = "aws:EpochTime")]
    AWSEpochTime,
    #[strum(serialize = "aws:principaltype")]
    AWSPrincipalType,
    #[strum(serialize = "aws:userid")]
    AWSUserID,
    #[strum(serialize = "aws:username")]
    AWSUsername,
    #[strum(serialize = "aws:groups")]
    AWSGroups,
}
#[cfg(test)]
mod tests {
    use super::*;
    use serde::Deserialize;
    use test_case::test_case;

    /// One representative key per scope must round-trip through `TryFrom`.
    #[test_case("s3:x-amz-copy-source", KeyName::S3(S3KeyName::S3XAmzCopySource))]
    #[test_case("aws:SecureTransport", KeyName::Aws(AwsKeyName::AWSSecureTransport))]
    #[test_case("jwt:sub", KeyName::Jwt(JwtKeyName::JWTSub))]
    #[test_case("ldap:user", KeyName::Ldap(LdapKeyName::User))]
    #[test_case("sts:DurationSeconds", KeyName::Sts(StsKeyName::STSDurationSeconds))]
    #[test_case("svc:DurationSeconds", KeyName::Svc(SvcKeyName::SVCDurationSeconds))]
    fn key_name_from_str_successful(val: &str, except: KeyName) {
        assert_eq!(KeyName::try_from(val), Ok(except));
    }

    /// Wrong case, truncated suffixes, and missing prefixes are rejected.
    #[test_case("S3:x-amz-copy-source")]
    #[test_case("aWs:SecureTransport")]
    #[test_case("jwt:suB")]
    #[test_case("ldap:us")]
    #[test_case("DurationSeconds")]
    fn key_name_from_str_failed(val: &str) {
        let parsed = KeyName::try_from(val);
        assert_eq!(parsed, Err(InvalidKeyName(val.to_string())));
    }

    /// serde deserialization goes through the same `TryFrom` path.
    #[test_case("s3:x-amz-copy-source", KeyName::S3(S3KeyName::S3XAmzCopySource))]
    #[test_case("aws:SecureTransport", KeyName::Aws(AwsKeyName::AWSSecureTransport))]
    #[test_case("jwt:sub", KeyName::Jwt(JwtKeyName::JWTSub))]
    #[test_case("ldap:user", KeyName::Ldap(LdapKeyName::User))]
    #[test_case("sts:DurationSeconds", KeyName::Sts(StsKeyName::STSDurationSeconds))]
    #[test_case("svc:DurationSeconds", KeyName::Svc(SvcKeyName::SVCDurationSeconds))]
    fn key_name_deserialize(val: &str, except: KeyName) {
        #[derive(Deserialize)]
        struct Wrapper {
            data: KeyName,
        }
        let payload = format!("{{\"data\":\"{val}\"}}");
        let parsed: Wrapper = serde_json::from_str(payload.as_str()).expect("unmarshal failed");
        assert_eq!(parsed.data, except);
    }

    /// serde serialization emits the full prefixed string form.
    #[test_case("s3:x-amz-copy-source", KeyName::S3(S3KeyName::S3XAmzCopySource))]
    #[test_case("aws:SecureTransport", KeyName::Aws(AwsKeyName::AWSSecureTransport))]
    #[test_case("jwt:sub", KeyName::Jwt(JwtKeyName::JWTSub))]
    #[test_case("ldap:user", KeyName::Ldap(LdapKeyName::User))]
    #[test_case("sts:DurationSeconds", KeyName::Sts(StsKeyName::STSDurationSeconds))]
    #[test_case("svc:DurationSeconds", KeyName::Svc(SvcKeyName::SVCDurationSeconds))]
    fn key_name_serialize(except: &str, value: KeyName) {
        #[derive(Serialize)]
        struct Wrapper {
            data: KeyName,
        }
        let wanted = format!("{{\"data\":\"{except}\"}}");
        let got = serde_json::to_string(&Wrapper { data: value }).expect("marshal failed");
        assert_eq!(got, wanted);
    }
}
| rust | Apache-2.0 | 666c0a9a38636eb6653dff7d9c98ff7122601ce2 | 2026-01-04T15:42:12.458416Z | false |
rustfs/rustfs | https://github.com/rustfs/rustfs/blob/666c0a9a38636eb6653dff7d9c98ff7122601ce2/crates/policy/src/policy/function/bool_null.rs | crates/policy/src/policy/function/bool_null.rs | // Copyright 2024 RustFS Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use super::func::InnerFunc;
use serde::de::{Error, IgnoredAny, SeqAccess};
use serde::{Deserialize, Deserializer, Serialize, de};
use std::{collections::HashMap, fmt};
pub type BoolFunc = InnerFunc<BoolFuncValue>;
impl BoolFunc {
    /// Evaluates a `Bool` condition: every clause must have its first
    /// request value textually equal to the configured boolean
    /// ("true" / "false"). A clause whose key is absent or has no values
    /// fails the whole evaluation.
    pub fn evaluate_bool(&self, values: &HashMap<String, Vec<String>>) -> bool {
        self.0.iter().all(|inner| {
            // Compare against a static literal; the previous code allocated
            // a fresh String via `bool::to_string` for every clause on every
            // evaluation.
            let expected = if inner.values.0 { "true" } else { "false" };
            values
                .get(inner.key.name().as_str())
                .and_then(|v| v.first())
                .is_some_and(|actual| actual.as_str() == expected)
        })
    }

    /// Evaluates a `Null` condition: for each clause, a configured `true`
    /// requires the key to be absent (or have zero values), while `false`
    /// requires the key to be present with at least one value.
    pub fn evaluate_null(&self, values: &HashMap<String, Vec<String>>) -> bool {
        self.0.iter().all(|inner| {
            let len = values.get(inner.key.name().as_str()).map(Vec::len).unwrap_or(0);
            if inner.values.0 { len == 0 } else { len != 0 }
        })
    }
}
/// The configured boolean of a `Bool`/`Null` condition clause; serialized
/// as the string "true"/"false", deserialized from a bool, a string, or a
/// one-element array of either (see the impls below).
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct BoolFuncValue(bool);
impl Serialize for BoolFuncValue {
    /// Serializes the boolean as the string `"true"` or `"false"`,
    /// matching the quoted form used in policy condition documents.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        // Emit a static literal instead of allocating via `bool::to_string`;
        // the output bytes are identical.
        serializer.serialize_str(if self.0 { "true" } else { "false" })
    }
}
impl<'de> Deserialize<'de> for BoolFuncValue {
    /// Accepts a JSON boolean, a "true"/"false" string, or a one-element
    /// array containing either of those.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        struct BoolOrStringVisitor;
        impl<'de> de::Visitor<'de> for BoolOrStringVisitor {
            type Value = BoolFuncValue;
            fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
                formatter.write_str("a boolean or a string representing 'true' or 'false'")
            }
            fn visit_bool<E>(self, value: bool) -> Result<Self::Value, E>
            where
                E: Error,
            {
                Ok(BoolFuncValue(value))
            }
            // String form is parsed with `bool::from_str`, so only the exact
            // lowercase "true"/"false" are accepted.
            fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>
            where
                E: Error,
            {
                Ok(BoolFuncValue(value.parse::<bool>().map_err(|e| E::custom(format!("{e:?}")))?))
            }
            // Array form: deserialize the first element recursively, then
            // reject any trailing element so `[true, false]` is an error.
            fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
            where
                A: SeqAccess<'de>,
            {
                let Some(v) = seq.next_element::<BoolFuncValue>()? else {
                    return Err(Error::custom("no value for boolean"));
                };
                if seq.next_element::<IgnoredAny>()?.is_some() {
                    return Err(Error::custom("only allow one boolean value"));
                }
                Ok(v)
            }
        }
        deserializer.deserialize_any(BoolOrStringVisitor)
    }
}
#[cfg(test)]
mod tests {
    use super::{BoolFunc, BoolFuncValue};
    use crate::policy::function::func::FuncKeyValue;
    use crate::policy::function::{
        key::Key,
        key_name::AwsKeyName::*,
        key_name::KeyName::{self, *},
    };
    use test_case::test_case;
    /// Builds a [`BoolFunc`] holding one key/value clause.
    ///
    /// The `{ 0: ... }` field syntax is used because `BoolFunc` is a type
    /// alias, and tuple-struct constructors cannot be invoked through an
    /// alias.
    fn new_func(name: KeyName, variable: Option<String>, value: bool) -> BoolFunc {
        BoolFunc {
            0: vec![FuncKeyValue {
                key: Key { name, variable },
                values: BoolFuncValue(value),
            }],
        }
    }
    // Accepted input shapes: JSON bool, "true"/"false" string, and
    // single-element arrays of either, with and without a key variable.
    #[test_case(r#"{"aws:SecureTransport": "true"}"#, new_func(Aws(AWSSecureTransport), None, true); "1")]
    #[test_case(r#"{"aws:SecureTransport": "false"}"#, new_func(Aws(AWSSecureTransport), None, false); "2")]
    #[test_case(r#"{"aws:SecureTransport": true}"#, new_func(Aws(AWSSecureTransport), None, true); "3")]
    #[test_case(r#"{"aws:SecureTransport": false}"#, new_func(Aws(AWSSecureTransport), None, false); "4")]
    #[test_case(r#"{"aws:SecureTransport/a": "true"}"#, new_func(Aws(AWSSecureTransport), Some("a".into()), true); "9")]
    #[test_case(r#"{"aws:SecureTransport/a": "false"}"#, new_func(Aws(AWSSecureTransport), Some("a".into()), false); "10")]
    #[test_case(r#"{"aws:SecureTransport/a": true}"#, new_func(Aws(AWSSecureTransport), Some("a".into()), true); "11")]
    #[test_case(r#"{"aws:SecureTransport/a": false}"#, new_func(Aws(AWSSecureTransport), Some("a".into()), false); "12")]
    #[test_case(r#"{"aws:SecureTransport/a": [true]}"#, new_func(Aws(AWSSecureTransport), Some("a".into()), true); "13")]
    #[test_case(r#"{"aws:SecureTransport/a": ["false"]}"#, new_func(Aws(AWSSecureTransport), Some("a".into()), false); "14")]
    fn test_deser(input: &str, expect: BoolFunc) -> Result<(), serde_json::Error> {
        let v: BoolFunc = serde_json::from_str(input)?;
        assert_eq!(v, expect);
        Ok(())
    }
    // Rejected inputs: unknown keys, empty arrays, multi-element arrays,
    // non-boolean strings, and non-object top level.
    #[test_case(r#"{"aws:usernamea":"johndoe"}"#)]
    #[test_case(r#"{"aws:username":[]}"#)] // Empty
    #[test_case(r#"{"aws:usernamea/value":"johndoe"}"#)]
    #[test_case(r#"{"aws:usernamea/value":["johndoe", "aaa"]}"#)]
    #[test_case(r#""aaa""#)]
    #[test_case(r#"{"aws:SecureTransport/a": ["false", "true"]}"#)]
    #[test_case(r#"{"aws:SecureTransport/a": [true, false]}"#)]
    #[test_case(r#"{"aws:SecureTransport/a": ["aa"]}"#)]
    fn test_deser_failed(input: &str) {
        assert!(serde_json::from_str::<BoolFunc>(input).is_err());
    }
    // Serialization always emits the quoted-string form.
    #[test_case(r#"{"aws:SecureTransport":"true"}"#, new_func(Aws(AWSSecureTransport), None, true); "1")]
    #[test_case(r#"{"aws:SecureTransport":"false"}"#, new_func(Aws(AWSSecureTransport), None, false);"2")]
    #[test_case(r#"{"aws:SecureTransport/aa":"true"}"#, new_func(Aws(AWSSecureTransport),Some("aa".into()), true);"3")]
    #[test_case(r#"{"aws:SecureTransport/aa":"false"}"#, new_func(Aws(AWSSecureTransport), Some("aa".into()), false);"4")]
    // NOTE(review): the attribute below has a stray space after `#` (valid
    // syntax, but inconsistent) and case "5" duplicates case "4" exactly —
    // tidy both in a follow-up.
    # [test_case(r#"{"aws:SecureTransport/aa":"false"}"#, new_func(Aws(AWSSecureTransport), Some("aa".into()), false); "5")]
    fn test_ser(expect: &str, input: BoolFunc) -> Result<(), serde_json::Error> {
        let v = serde_json::to_string(&input)?;
        assert_eq!(v.as_str(), expect);
        Ok(())
    }
}
| rust | Apache-2.0 | 666c0a9a38636eb6653dff7d9c98ff7122601ce2 | 2026-01-04T15:42:12.458416Z | false |
rustfs/rustfs | https://github.com/rustfs/rustfs/blob/666c0a9a38636eb6653dff7d9c98ff7122601ce2/crates/policy/tests/policy_is_allowed.rs | crates/policy/tests/policy_is_allowed.rs | // Copyright 2024 RustFS Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use rustfs_policy::policy::Effect::*;
use rustfs_policy::policy::action::S3Action::*;
use rustfs_policy::policy::*;
use serde_json::Value;
use std::collections::HashMap;
use test_case::test_case;
/// Bag of request attributes for building `Policy::is_allowed` test inputs;
/// `Default` lets each test case set only the fields it cares about.
/// NOTE(review): field meanings below are inferred from the crate's
/// evaluation arguments — confirm against the `Args` type they feed.
#[derive(Default)]
struct ArgsBuilder {
    pub account: String,                        // requesting account / access key
    pub groups: Vec<String>,                    // groups the account belongs to
    pub action: String,                         // action string, e.g. "s3:GetObject"
    pub bucket: String,                         // target bucket name
    pub conditions: HashMap<String, Vec<String>>, // request condition values
    pub is_owner: bool,                         // whether the caller owns the resource
    pub object: String,                         // target object key (may be empty)
    pub claims: HashMap<String, Value>,         // JWT/STS claims attached to the request
    pub deny_only: bool,                        // only evaluate explicit denies
}
#[test_case(
Policy{
version: DEFAULT_VERSION.into(),
statements: vec![
rustfs_policy::policy::Statement{
effect: Allow,
actions: ActionSet(vec![rustfs_policy::policy::action::Action::S3Action(PutObjectAction), rustfs_policy::policy::action::Action::S3Action(GetBucketLocationAction)].into_iter().collect()),
resources: ResourceSet(vec!["arn:aws:s3:::*".try_into().unwrap()].into_iter().collect()),
..Default::default()
}
],
..Default::default()
},
ArgsBuilder{
account: "Q3AM3UQ867SPQQA43P2F".into(),
action: "s3:GetBucketLocation".into(),
bucket: "mybucket".into(),
..Default::default()
} => true;
"1"
)]
#[test_case(
Policy{
version: DEFAULT_VERSION.into(),
statements: vec![
Statement{
effect: Allow,
actions: ActionSet(vec![rustfs_policy::policy::action::Action::S3Action(PutObjectAction), rustfs_policy::policy::action::Action::S3Action(GetBucketLocationAction)].into_iter().collect()),
resources: ResourceSet(vec!["arn:aws:s3:::*".try_into().unwrap()].into_iter().collect()),
..Default::default()
}
],
..Default::default()
},
ArgsBuilder{
account: "Q3AM3UQ867SPQQA43P2F".into(),
action: "s3:PutObject".into(),
bucket: "mybucket".into(),
conditions: {
let mut h = HashMap::new();
h.insert("x-amz-copy-source".into(), vec!["mybucket/myobject".into()]);
h.insert("SourceIp".into(), vec!["192.168.1.10".into()]);
h
},
..Default::default()
} => true;
"2"
)]
#[test_case(
Policy{
version: DEFAULT_VERSION.into(),
statements: vec![
Statement{
effect: Allow,
actions: ActionSet(vec![rustfs_policy::policy::action::Action::S3Action(PutObjectAction), rustfs_policy::policy::action::Action::S3Action(GetBucketLocationAction)].into_iter().collect()),
resources: ResourceSet(vec!["arn:aws:s3:::*".try_into().unwrap()].into_iter().collect()),
..Default::default()
}
],
..Default::default()
},
ArgsBuilder{
account: "Q3AM3UQ867SPQQA43P2F".into(),
action: "s3:GetObject".into(),
bucket: "mybucket".into(),
object: "myobject".into(),
..Default::default()
} => false;
"3"
)]
#[test_case(
Policy{
version: DEFAULT_VERSION.into(),
statements: vec![
Statement{
effect: Allow,
actions: ActionSet(vec![rustfs_policy::policy::action::Action::S3Action(PutObjectAction), rustfs_policy::policy::action::Action::S3Action(GetBucketLocationAction)].into_iter().collect()),
resources: ResourceSet(vec!["arn:aws:s3:::*".try_into().unwrap()].into_iter().collect()),
..Default::default()
}
],
..Default::default()
},
ArgsBuilder{
account: "Q3AM3UQ867SPQQA43P2F".into(),
action: "s3:GetBucketLocation".into(),
bucket: "mybucket".into(),
..Default::default()
} => true;
"4"
)]
#[test_case(
Policy{
version: DEFAULT_VERSION.into(),
statements: vec![
Statement{
effect: Allow,
actions: ActionSet(vec![rustfs_policy::policy::action::Action::S3Action(PutObjectAction), rustfs_policy::policy::action::Action::S3Action(GetBucketLocationAction)].into_iter().collect()),
resources: ResourceSet(vec!["arn:aws:s3:::*".try_into().unwrap()].into_iter().collect()),
..Default::default()
}
],
..Default::default()
},
ArgsBuilder{
account: "Q3AM3UQ867SPQQA43P2F".into(),
action: "s3:PutObject".into(),
bucket: "mybucket".into(),
object: "myobject".into(),
conditions: {
let mut h = HashMap::new();
h.insert("x-amz-copy-source".into(), vec!["mybucket/myobject".into()]);
h.insert("SourceIp".into(), vec!["192.168.1.10".into()]);
h
},
..Default::default()
} => true;
"5"
)]
#[test_case(
Policy{
version: DEFAULT_VERSION.into(),
statements: vec![
Statement{
effect: Allow,
actions: ActionSet(vec![rustfs_policy::policy::action::Action::S3Action(PutObjectAction), rustfs_policy::policy::action::Action::S3Action(GetBucketLocationAction)].into_iter().collect()),
resources: ResourceSet(vec!["arn:aws:s3:::*".try_into().unwrap()].into_iter().collect()),
..Default::default()
}
],
..Default::default()
},
ArgsBuilder{
account: "Q3AM3UQ867SPQQA43P2F".into(),
action: "s3:GetObject".into(),
bucket: "mybucket".into(),
object: "myobject".into(),
..Default::default()
} => false;
"6"
)]
#[test_case(
Policy{
version: DEFAULT_VERSION.into(),
statements: vec![
Statement{
effect: Allow,
actions: ActionSet(vec![rustfs_policy::policy::action::Action::S3Action(GetObjectAction), rustfs_policy::policy::action::Action::S3Action(PutObjectAction)].into_iter().collect()),
resources: ResourceSet(vec!["arn:aws:s3:::mybucket/myobject*".try_into().unwrap()].into_iter().collect()),
..Default::default()
}
],
..Default::default()
},
ArgsBuilder{
account: "Q3AM3UQ867SPQQA43P2F".into(),
action: "s3:GetBucketLocation".into(),
bucket: "mybucket".into(),
..Default::default()
} => false;
"7"
)]
#[test_case(
Policy{
version: DEFAULT_VERSION.into(),
statements: vec![
Statement{
effect: Allow,
actions: ActionSet(vec![rustfs_policy::policy::action::Action::S3Action(GetObjectAction), rustfs_policy::policy::action::Action::S3Action(PutObjectAction)].into_iter().collect()),
resources: ResourceSet(vec!["arn:aws:s3:::mybucket/myobject*".try_into().unwrap()].into_iter().collect()),
..Default::default()
}
],
..Default::default()
},
ArgsBuilder{
account: "Q3AM3UQ867SPQQA43P2F".into(),
action: "s3:PutObject".into(),
bucket: "mybucket".into(),
object: "myobject".into(),
conditions: {
let mut h = HashMap::new();
h.insert("x-amz-copy-source".into(), vec!["mybucket/myobject".into()]);
h.insert("SourceIp".into(), vec!["192.168.1.10".into()]);
h
},
..Default::default()
} => true;
"8"
)]
#[test_case(
Policy{
version: DEFAULT_VERSION.into(),
statements: vec![
Statement{
effect: Allow,
actions: ActionSet(vec![rustfs_policy::policy::action::Action::S3Action(GetObjectAction), rustfs_policy::policy::action::Action::S3Action(PutObjectAction)].into_iter().collect()),
resources: ResourceSet(vec!["arn:aws:s3:::mybucket/myobject*".try_into().unwrap()].into_iter().collect()),
..Default::default()
}
],
..Default::default()
},
ArgsBuilder{
account: "Q3AM3UQ867SPQQA43P2F".into(),
action: "s3:GetObject".into(),
bucket: "mybucket".into(),
object: "myobject".into(),
..Default::default()
} => true;
"9"
)]
#[test_case(
Policy{
version: DEFAULT_VERSION.into(),
statements: vec![
Statement{
effect: Allow,
actions: ActionSet(vec![rustfs_policy::policy::action::Action::S3Action(GetObjectAction), rustfs_policy::policy::action::Action::S3Action(PutObjectAction)].into_iter().collect()),
resources: ResourceSet(vec!["arn:aws:s3:::mybucket/myobject*".try_into().unwrap()].into_iter().collect()),
..Default::default()
}
],
..Default::default()
},
ArgsBuilder{
account: "Q3AM3UQ867SPQQA43P2F".into(),
action: "s3:GetBucketLocation".into(),
bucket: "mybucket".into(),
..Default::default()
} => false;
"10"
)]
#[test_case(
Policy{
version: DEFAULT_VERSION.into(),
statements: vec![
Statement{
effect: Allow,
actions: ActionSet(vec![rustfs_policy::policy::action::Action::S3Action(GetObjectAction), rustfs_policy::policy::action::Action::S3Action(PutObjectAction)].into_iter().collect()),
resources: ResourceSet(vec!["arn:aws:s3:::mybucket/myobject*".try_into().unwrap()].into_iter().collect()),
..Default::default()
}
],
..Default::default()
},
ArgsBuilder{
account: "Q3AM3UQ867SPQQA43P2F".into(),
action: "s3:PutObject".into(),
bucket: "mybucket".into(),
object: "myobject".into(),
conditions: {
let mut h = HashMap::new();
h.insert("x-amz-copy-source".into(), vec!["mybucket/myobject".into()]);
h.insert("SourceIp".into(), vec!["192.168.1.10".into()]);
h
},
..Default::default()
} => true;
"11"
)]
#[test_case(
Policy{
version: DEFAULT_VERSION.into(),
statements: vec![
Statement{
effect: Allow,
actions: ActionSet(vec![rustfs_policy::policy::action::Action::S3Action(GetObjectAction), rustfs_policy::policy::action::Action::S3Action(PutObjectAction)].into_iter().collect()),
resources: ResourceSet(vec!["arn:aws:s3:::mybucket/myobject*".try_into().unwrap()].into_iter().collect()),
..Default::default()
}
],
..Default::default()
},
ArgsBuilder{
account: "Q3AM3UQ867SPQQA43P2F".into(),
action: "s3:GetObject".into(),
bucket: "mybucket".into(),
object: "myobject".into(),
..Default::default()
} => true;
"12"
)]
#[test_case(
Policy{
version: DEFAULT_VERSION.into(),
statements: vec![
Statement{
effect: Allow,
actions: ActionSet(vec![rustfs_policy::policy::action::Action::S3Action(GetObjectAction), rustfs_policy::policy::action::Action::S3Action(PutObjectAction)].into_iter().collect()),
resources: ResourceSet(vec!["arn:aws:s3:::mybucket/myobject*".try_into().unwrap()].into_iter().collect()),
conditions: serde_json::from_str(r#"{"IpAddress": {"aws:SourceIp": "192.168.1.0/24"}}"#).unwrap(),
..Default::default()
}
],
..Default::default()
},
ArgsBuilder{
account: "Q3AM3UQ867SPQQA43P2F".into(),
action: "s3:GetBucketLocation".into(),
bucket: "mybucket".into(),
..Default::default()
} => false;
"13"
)]
#[test_case(
Policy{
version: DEFAULT_VERSION.into(),
statements: vec![
Statement{
effect: Allow,
actions: ActionSet(vec![rustfs_policy::policy::action::Action::S3Action(GetObjectAction), rustfs_policy::policy::action::Action::S3Action(PutObjectAction)].into_iter().collect()),
resources: ResourceSet(vec!["arn:aws:s3:::mybucket/myobject*".try_into().unwrap()].into_iter().collect()),
conditions: serde_json::from_str(r#"{"IpAddress": {"aws:SourceIp": "192.168.1.0/24"}}"#).unwrap(),
..Default::default()
}
],
..Default::default()
},
ArgsBuilder{
account: "Q3AM3UQ867SPQQA43P2F".into(),
action: "s3:PutObject".into(),
bucket: "mybucket".into(),
object: "myobject".into(),
conditions: {
let mut h = HashMap::new();
h.insert("x-amz-copy-source".into(), vec!["mybucket/myobject".into()]);
h.insert("SourceIp".into(), vec!["192.168.1.10".into()]);
h
},
..Default::default()
} => true;
"14"
)]
#[test_case(
Policy{
version: DEFAULT_VERSION.into(),
statements: vec![
Statement{
effect: Allow,
actions: ActionSet(vec![rustfs_policy::policy::action::Action::S3Action(GetObjectAction), rustfs_policy::policy::action::Action::S3Action(PutObjectAction)].into_iter().collect()),
resources: ResourceSet(vec!["arn:aws:s3:::mybucket/myobject*".try_into().unwrap()].into_iter().collect()),
conditions: serde_json::from_str(r#"{"IpAddress": {"aws:SourceIp": "192.168.1.0/24"}}"#).unwrap(),
..Default::default()
}
],
..Default::default()
},
ArgsBuilder{
account: "Q3AM3UQ867SPQQA43P2F".into(),
action: "s3:GetObject".into(),
bucket: "mybucket".into(),
object: "myobject".into(),
..Default::default()
} => false;
"15"
)]
#[test_case(
Policy{
version: DEFAULT_VERSION.into(),
statements: vec![
Statement{
effect: Allow,
actions: ActionSet(vec![rustfs_policy::policy::action::Action::S3Action(GetObjectAction), rustfs_policy::policy::action::Action::S3Action(PutObjectAction)].into_iter().collect()),
resources: ResourceSet(vec!["arn:aws:s3:::mybucket/myobject*".try_into().unwrap()].into_iter().collect()),
conditions: serde_json::from_str(r#"{"IpAddress": {"aws:SourceIp": "192.168.1.0/24"}}"#).unwrap(),
..Default::default()
}
],
..Default::default()
},
ArgsBuilder{
account: "Q3AM3UQ867SPQQA43P2F".into(),
action: "s3:GetBucketLocation".into(),
bucket: "mybucket".into(),
..Default::default()
} => false;
"16"
)]
#[test_case(
Policy{
version: DEFAULT_VERSION.into(),
statements: vec![
Statement{
effect: Allow,
actions: ActionSet(vec![rustfs_policy::policy::action::Action::S3Action(GetObjectAction), rustfs_policy::policy::action::Action::S3Action(PutObjectAction)].into_iter().collect()),
resources: ResourceSet(vec!["arn:aws:s3:::mybucket/myobject*".try_into().unwrap()].into_iter().collect()),
conditions: serde_json::from_str(r#"{"IpAddress": {"aws:SourceIp": "192.168.1.0/24"}}"#).unwrap(),
..Default::default()
}
],
..Default::default()
},
ArgsBuilder{
account: "Q3AM3UQ867SPQQA43P2F".into(),
action: "s3:PutObject".into(),
bucket: "mybucket".into(),
object: "myobject".into(),
conditions: {
let mut h = HashMap::new();
h.insert("x-amz-copy-source".into(), vec!["mybucket/myobject".into()]);
h.insert("SourceIp".into(), vec!["192.168.1.10".into()]);
h
},
..Default::default()
} => true;
"17"
)]
#[test_case(
Policy{
version: DEFAULT_VERSION.into(),
statements: vec![
Statement{
effect: Allow,
actions: ActionSet(vec![rustfs_policy::policy::action::Action::S3Action(GetObjectAction), rustfs_policy::policy::action::Action::S3Action(PutObjectAction)].into_iter().collect()),
resources: ResourceSet(vec!["arn:aws:s3:::mybucket/myobject*".try_into().unwrap()].into_iter().collect()),
conditions: serde_json::from_str(r#"{"IpAddress": {"aws:SourceIp": "192.168.1.0/24"}}"#).unwrap(),
..Default::default()
}
],
..Default::default()
},
ArgsBuilder{
account: "Q3AM3UQ867SPQQA43P2F".into(),
action: "s3:GetObject".into(),
bucket: "mybucket".into(),
object: "myobject".into(),
..Default::default()
} => false;
"18"
)]
#[test_case(
Policy{
version: DEFAULT_VERSION.into(),
statements: vec![
Statement{
effect: Deny,
actions: ActionSet(vec![rustfs_policy::policy::action::Action::S3Action(GetObjectAction), rustfs_policy::policy::action::Action::S3Action(PutObjectAction)].into_iter().collect()),
resources: ResourceSet(vec!["arn:aws:s3:::mybucket/myobject*".try_into().unwrap()].into_iter().collect()),
conditions: serde_json::from_str(r#"{"IpAddress": {"aws:SourceIp": "192.168.1.0/24"}}"#).unwrap(),
..Default::default()
}
],
..Default::default()
},
ArgsBuilder{
account: "Q3AM3UQ867SPQQA43P2F".into(),
action: "s3:GetBucketLocation".into(),
bucket: "mybucket".into(),
..Default::default()
} => false;
"19"
)]
#[test_case(
Policy{
version: DEFAULT_VERSION.into(),
statements: vec![
Statement{
effect: Deny,
actions: ActionSet(vec![rustfs_policy::policy::action::Action::S3Action(GetObjectAction), rustfs_policy::policy::action::Action::S3Action(PutObjectAction)].into_iter().collect()),
resources: ResourceSet(vec!["arn:aws:s3:::mybucket/myobject*".try_into().unwrap()].into_iter().collect()),
conditions: serde_json::from_str(r#"{"IpAddress": {"aws:SourceIp": "192.168.1.0/24"}}"#).unwrap(),
..Default::default()
}
],
..Default::default()
},
ArgsBuilder{
account: "Q3AM3UQ867SPQQA43P2F".into(),
action: "s3:PutObject".into(),
bucket: "mybucket".into(),
object: "myobject".into(),
conditions: {
let mut h = HashMap::new();
h.insert("x-amz-copy-source".into(), vec!["mybucket/myobject".into()]);
h.insert("SourceIp".into(), vec!["192.168.1.10".into()]);
h
},
..Default::default()
} => false;
"20"
)]
#[test_case(
Policy{
version: DEFAULT_VERSION.into(),
statements: vec![
Statement{
effect: Deny,
actions: ActionSet(vec![rustfs_policy::policy::action::Action::S3Action(GetObjectAction), rustfs_policy::policy::action::Action::S3Action(PutObjectAction)].into_iter().collect()),
resources: ResourceSet(vec!["arn:aws:s3:::mybucket/myobject*".try_into().unwrap()].into_iter().collect()),
conditions: serde_json::from_str(r#"{"IpAddress": {"aws:SourceIp": "192.168.1.0/24"}}"#).unwrap(),
..Default::default()
}
],
..Default::default()
},
ArgsBuilder{
account: "Q3AM3UQ867SPQQA43P2F".into(),
action: "s3:GetObject".into(),
bucket: "mybucket".into(),
object: "myobject".into(),
..Default::default()
} => false;
"21"
)]
#[test_case(
Policy{
version: DEFAULT_VERSION.into(),
statements: vec![
Statement{
effect: Deny,
actions: ActionSet(vec![rustfs_policy::policy::action::Action::S3Action(GetObjectAction), rustfs_policy::policy::action::Action::S3Action(PutObjectAction)].into_iter().collect()),
resources: ResourceSet(vec!["arn:aws:s3:::mybucket/myobject*".try_into().unwrap()].into_iter().collect()),
conditions: serde_json::from_str(r#"{"IpAddress": {"aws:SourceIp": "192.168.1.0/24"}}"#).unwrap(),
..Default::default()
}
],
..Default::default()
},
ArgsBuilder{
account: "Q3AM3UQ867SPQQA43P2F".into(),
action: "s3:GetBucketLocation".into(),
bucket: "mybucket".into(),
..Default::default()
} => false;
"22"
)]
#[test_case(
Policy{
version: DEFAULT_VERSION.into(),
statements: vec![
Statement{
effect: Deny,
actions: ActionSet(vec![rustfs_policy::policy::action::Action::S3Action(GetObjectAction), rustfs_policy::policy::action::Action::S3Action(PutObjectAction)].into_iter().collect()),
resources: ResourceSet(vec!["arn:aws:s3:::mybucket/myobject*".try_into().unwrap()].into_iter().collect()),
conditions: serde_json::from_str(r#"{"IpAddress": {"aws:SourceIp": "192.168.1.0/24"}}"#).unwrap(),
..Default::default()
}
],
..Default::default()
},
ArgsBuilder{
account: "Q3AM3UQ867SPQQA43P2F".into(),
action: "s3:PutObject".into(),
bucket: "mybucket".into(),
object: "myobject".into(),
conditions: {
let mut h = HashMap::new();
h.insert("x-amz-copy-source".into(), vec!["mybucket/myobject".into()]);
h.insert("SourceIp".into(), vec!["192.168.1.10".into()]);
h
},
..Default::default()
} => false;
"23"
)]
#[test_case(
Policy{
version: DEFAULT_VERSION.into(),
statements: vec![
Statement{
effect: Deny,
actions: ActionSet(vec![rustfs_policy::policy::action::Action::S3Action(GetObjectAction), rustfs_policy::policy::action::Action::S3Action(PutObjectAction)].into_iter().collect()),
resources: ResourceSet(vec!["arn:aws:s3:::mybucket/myobject*".try_into().unwrap()].into_iter().collect()),
conditions: serde_json::from_str(r#"{"IpAddress": {"aws:SourceIp": "192.168.1.0/24"}}"#).unwrap(),
..Default::default()
}
],
..Default::default()
},
ArgsBuilder{
account: "Q3AM3UQ867SPQQA43P2F".into(),
action: "s3:GetObject".into(),
bucket: "mybucket".into(),
object: "myobject".into(),
..Default::default()
} => false;
"24"
)]
// Shared driver for the `#[test_case]` matrix above: evaluates `policy`
// against the request described by `args` and returns the access decision.
fn policy_is_allowed(policy: Policy, args: ArgsBuilder) -> bool {
    // `is_allowed` is async but the test harness is sync, so drive the
    // future to completion on the current thread with `pollster`.
    pollster::block_on(policy.is_allowed(&Args {
        account: &args.account,
        // `Args` takes `Option<_>` for groups: an empty builder list is
        // mapped to `None` ("no group information") rather than `Some(vec![])`.
        groups: &{
            if args.groups.is_empty() {
                None
            } else {
                Some(args.groups.clone())
            }
        },
        // Parse the textual action (e.g. "s3:GetObject") into the typed
        // action; test inputs are known-valid, hence the `unwrap`.
        action: args.action.as_str().try_into().unwrap(),
        bucket: &args.bucket,
        conditions: &args.conditions,
        is_owner: args.is_owner,
        object: &args.object,
        claims: &args.claims,
        deny_only: args.deny_only,
    }))
}
| rust | Apache-2.0 | 666c0a9a38636eb6653dff7d9c98ff7122601ce2 | 2026-01-04T15:42:12.458416Z | false |
gleam-lang/gleam | https://github.com/gleam-lang/gleam/blob/f424547f02e621f1c5f28749786e05eda7feb098/gleam-bin/build.rs | gleam-bin/build.rs | fn main() {
#[cfg(windows)]
static_vcruntime::metabuild();
}
| rust | Apache-2.0 | f424547f02e621f1c5f28749786e05eda7feb098 | 2026-01-04T15:40:22.554517Z | false |
gleam-lang/gleam | https://github.com/gleam-lang/gleam/blob/f424547f02e621f1c5f28749786e05eda7feb098/gleam-bin/src/main.rs | gleam-bin/src/main.rs | pub fn main() {
gleam_cli::main();
}
| rust | Apache-2.0 | f424547f02e621f1c5f28749786e05eda7feb098 | 2026-01-04T15:40:22.554517Z | false |
gleam-lang/gleam | https://github.com/gleam-lang/gleam/blob/f424547f02e621f1c5f28749786e05eda7feb098/language-server/src/reference.rs | language-server/src/reference.rs | use std::collections::{HashMap, HashSet};
use ecow::EcoString;
use lsp_types::Location;
use gleam_core::{
analyse,
ast::{
self, ArgNames, BitArraySize, CustomType, Function, ModuleConstant, Pattern,
RecordConstructor, SrcSpan, TypedExpr, TypedModule, visit::Visit,
},
build::Located,
type_::{
ModuleInterface, ModuleValueConstructor, Type, ValueConstructor, ValueConstructorVariant,
error::{Named, VariableOrigin},
},
};
use super::{
compiler::ModuleSourceInformation, rename::RenameTarget, src_span_to_lsp_range, url_from_path,
};
/// A named entity that an AST node was found to refer to.
///
/// Produced by [`reference_for_ast_node`] and consumed by the rename and
/// find-references language-server features.
pub enum Referenced {
    /// A variable bound locally: a let binding, a pattern binding, or a
    /// named function argument.
    LocalVariable {
        /// Where the variable is bound.
        definition_location: SrcSpan,
        /// Where this particular mention occurs.
        location: SrcSpan,
        /// How the variable was introduced, when known (absent for pattern
        /// aliases and plain arguments).
        origin: Option<VariableOrigin>,
        name: EcoString,
    },
    /// A module-level value: a function, constant, or record constructor.
    ModuleValue {
        module: EcoString,
        name: EcoString,
        location: SrcSpan,
        /// What kind of named thing this is (function vs. custom type variant).
        name_kind: Named,
        /// Whether the mention was qualified, unqualified, or the definition.
        target_kind: RenameTarget,
    },
    /// A module-level type.
    ModuleType {
        module: EcoString,
        name: EcoString,
        location: SrcSpan,
        target_kind: RenameTarget,
    },
}
/// Determines which named entity, if any, the located AST node refers to.
///
/// `current_module` is used when the node is itself a definition site, since
/// definitions belong to the module being analysed. Returns `None` for nodes
/// that cannot be renamed or searched for (discards, unresolved constructors,
/// imports, etc.).
pub fn reference_for_ast_node(
    found: Located<'_>,
    current_module: &EcoString,
) -> Option<Referenced> {
    match found {
        // A variable expression that resolved to a local variable.
        Located::Expression {
            expression:
                TypedExpr::Var {
                    constructor:
                        ValueConstructor {
                            variant:
                                ValueConstructorVariant::LocalVariable {
                                    location: definition_location,
                                    origin,
                                },
                            ..
                        },
                    location,
                    name,
                },
            ..
        } => Some(Referenced::LocalVariable {
            definition_location: *definition_location,
            location: *location,
            origin: Some(origin.clone()),
            name: name.clone(),
        }),
        // A variable binding inside a pattern: it is its own definition.
        Located::Pattern(Pattern::Variable {
            location,
            origin,
            name,
            ..
        }) => Some(Referenced::LocalVariable {
            definition_location: *location,
            location: *location,
            origin: Some(origin.clone()),
            name: name.clone(),
        }),
        // A variable used as a bit array segment size; only meaningful when
        // the constructor resolved to a local variable.
        Located::Pattern(Pattern::BitArraySize(BitArraySize::Variable {
            constructor,
            location,
            name,
            ..
        })) => constructor
            .as_ref()
            .and_then(|constructor| match &constructor.variant {
                ValueConstructorVariant::LocalVariable {
                    location: definition_location,
                    origin,
                } => Some(Referenced::LocalVariable {
                    definition_location: *definition_location,
                    location: *location,
                    origin: Some(origin.clone()),
                    name: name.clone(),
                }),
                ValueConstructorVariant::ModuleConstant { .. }
                | ValueConstructorVariant::ModuleFn { .. }
                | ValueConstructorVariant::Record { .. } => None,
            }),
        // A pattern alias (`pattern as name`): the alias is its own definition.
        Located::Pattern(Pattern::Assign { location, name, .. }) => {
            Some(Referenced::LocalVariable {
                definition_location: *location,
                location: *location,
                origin: None,
                name: name.clone(),
            })
        }
        // A named function argument (labelled or not); discards have no name
        // to reference.
        Located::Arg(arg) => match &arg.names {
            ArgNames::Named { location, name }
            | ArgNames::NamedLabelled {
                name_location: location,
                name,
                ..
            } => Some(Referenced::LocalVariable {
                definition_location: *location,
                location: *location,
                origin: None,
                name: name.clone(),
            }),
            ArgNames::Discard { .. } | ArgNames::LabelledDiscard { .. } => None,
        },
        // An unqualified reference to a module constant or function.
        Located::Expression {
            expression:
                TypedExpr::Var {
                    constructor:
                        ValueConstructor {
                            variant:
                                ValueConstructorVariant::ModuleConstant { module, .. }
                                | ValueConstructorVariant::ModuleFn { module, .. },
                            ..
                        },
                    name,
                    location,
                    ..
                },
            ..
        } => Some(Referenced::ModuleValue {
            module: module.clone(),
            name: name.clone(),
            location: *location,
            name_kind: Named::Function,
            target_kind: RenameTarget::Unqualified,
        }),
        // A qualified reference (`module.value`) to a constant or function.
        Located::Expression {
            expression:
                TypedExpr::ModuleSelect {
                    module_name,
                    label,
                    constructor:
                        ModuleValueConstructor::Fn { .. } | ModuleValueConstructor::Constant { .. },
                    location,
                    field_start,
                    ..
                },
            ..
        } => Some(Referenced::ModuleValue {
            module: module_name.clone(),
            name: label.clone(),
            // Span only the selected field, not the module qualifier.
            location: SrcSpan::new(*field_start, location.end),
            name_kind: Named::Function,
            target_kind: RenameTarget::Qualified,
        }),
        // The definition site of a module function or constant.
        Located::ModuleFunction(Function {
            name: Some((location, name)),
            ..
        })
        | Located::ModuleConstant(ModuleConstant {
            name,
            name_location: location,
            ..
        }) => Some(Referenced::ModuleValue {
            module: current_module.clone(),
            name: name.clone(),
            location: *location,
            name_kind: Named::Function,
            target_kind: RenameTarget::Definition,
        }),
        // An unqualified reference to a record (custom type variant) constructor.
        Located::Expression {
            expression:
                TypedExpr::Var {
                    constructor:
                        ValueConstructor {
                            variant: ValueConstructorVariant::Record { module, name, .. },
                            ..
                        },
                    location,
                    ..
                },
            ..
        } => Some(Referenced::ModuleValue {
            module: module.clone(),
            name: name.clone(),
            location: *location,
            name_kind: Named::CustomTypeVariant,
            target_kind: RenameTarget::Unqualified,
        }),
        // A qualified reference to a record constructor.
        Located::Expression {
            expression:
                TypedExpr::ModuleSelect {
                    module_name,
                    label,
                    constructor: ModuleValueConstructor::Record { .. },
                    location,
                    field_start,
                    ..
                },
            ..
        } => Some(Referenced::ModuleValue {
            module: module_name.clone(),
            name: label.clone(),
            location: SrcSpan::new(*field_start, location.end),
            name_kind: Named::CustomTypeVariant,
            target_kind: RenameTarget::Qualified,
        }),
        // The definition site of a custom type variant constructor.
        Located::VariantConstructorDefinition(RecordConstructor {
            name,
            name_location,
            ..
        }) => Some(Referenced::ModuleValue {
            module: current_module.clone(),
            name: name.clone(),
            location: *name_location,
            name_kind: Named::CustomTypeVariant,
            target_kind: RenameTarget::Definition,
        }),
        // A constructor used in a pattern — only when inference resolved it.
        Located::Pattern(Pattern::Constructor {
            constructor: analyse::Inferred::Known(constructor),
            module: module_select,
            name_location: location,
            ..
        }) => Some(Referenced::ModuleValue {
            module: constructor.module.clone(),
            name: constructor.name.clone(),
            location: *location,
            name_kind: Named::CustomTypeVariant,
            // The presence of a module select tells us whether the pattern
            // was written qualified.
            target_kind: if module_select.is_some() {
                RenameTarget::Qualified
            } else {
                RenameTarget::Unqualified
            },
        }),
        // A type annotation: only named types resolve to something renameable.
        Located::Annotation { ast, type_ } => match type_.named_type_name() {
            Some((module, name)) => {
                let (target_kind, location) = match ast {
                    ast::TypeAst::Constructor(constructor) => {
                        let kind = if constructor.module.is_some() {
                            RenameTarget::Qualified
                        } else {
                            RenameTarget::Unqualified
                        };
                        (kind, constructor.name_location)
                    }
                    ast::TypeAst::Fn(_)
                    | ast::TypeAst::Var(_)
                    | ast::TypeAst::Tuple(_)
                    | ast::TypeAst::Hole(_) => (RenameTarget::Unqualified, ast.location()),
                };
                Some(Referenced::ModuleType {
                    module,
                    name,
                    location,
                    target_kind,
                })
            }
            None => None,
        },
        // The definition site of a custom type.
        Located::ModuleCustomType(CustomType {
            name,
            name_location,
            ..
        }) => Some(Referenced::ModuleType {
            module: current_module.clone(),
            name: name.clone(),
            location: *name_location,
            target_kind: RenameTarget::Definition,
        }),
        // Everything else carries no reference we can act on. Note this arm
        // must stay last: it overlaps several of the specific arms above.
        Located::Pattern(_)
        | Located::PatternSpread { .. }
        | Located::Statement(_)
        | Located::Expression { .. }
        | Located::FunctionBody(_)
        | Located::UnqualifiedImport(_)
        | Located::Label(..)
        | Located::ModuleName { .. }
        | Located::Constant(_)
        | Located::ModuleFunction(_)
        | Located::ModuleImport(_)
        | Located::ModuleTypeAlias(_) => None,
    }
}
/// Collects every location in the compiled modules that references the
/// value or type `name` defined in `module_name`, on the given `layer`.
pub fn find_module_references(
    module_name: EcoString,
    name: EcoString,
    modules: &im::HashMap<EcoString, ModuleInterface>,
    sources: &HashMap<EcoString, ModuleSourceInformation>,
    layer: ast::Layer,
) -> Vec<Location> {
    let mut locations = Vec::new();

    // Only the defining module itself and modules that import it can
    // possibly contain references.
    let candidates = modules.values().filter(|module| {
        module.name == module_name || module.references.imported_modules.contains(&module_name)
    });

    for module in candidates {
        // Modules with no source information on record are skipped.
        let Some(source_information) = sources.get(&module.name) else {
            continue;
        };
        find_references_in_module(
            &module_name,
            &name,
            module,
            source_information,
            &mut locations,
            layer,
        );
    }

    locations
}
/// Appends to `reference_locations` every spot in `module` that references
/// `name` from `module_name`, using the reference table for `layer`.
fn find_references_in_module(
    module_name: &EcoString,
    name: &EcoString,
    module: &ModuleInterface,
    source_information: &ModuleSourceInformation,
    reference_locations: &mut Vec<Location>,
    layer: ast::Layer,
) {
    // Pick the reference table matching the requested layer.
    let reference_map = match layer {
        ast::Layer::Value => &module.references.value_references,
        ast::Layer::Type => &module.references.type_references,
    };

    // No entry means this module never mentions the target.
    let references = match reference_map.get(&(module_name.clone(), name.clone())) {
        Some(references) => references,
        None => return,
    };

    // Without a valid URI for the module's source file we cannot report
    // locations at all.
    let uri = match url_from_path(source_information.path.as_str()) {
        Some(uri) => uri,
        None => return,
    };

    reference_locations.extend(references.iter().map(|reference| Location {
        uri: uri.clone(),
        range: src_span_to_lsp_range(reference.location, &source_information.line_numbers),
    }));
}
/// A single usage of a local variable, as collected by
/// [`FindVariableReferences`].
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct VariableReference {
    /// The span of this usage in the source.
    pub location: SrcSpan,
    /// How the variable is written at this usage site.
    pub kind: VariableReferenceKind,
}
/// How a variable appears at a reference site.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum VariableReferenceKind {
    /// An ordinary mention of the variable by name.
    Variable,
    /// A call argument written with label shorthand, where the argument
    /// label and the variable share a name.
    LabelShorthand,
}
/// How to treat variables defined in alternative patterns
enum AlternativeVariable {
    /// Record same-named bindings as alternative definitions of the target
    /// variable while visiting a clause's patterns.
    Track,
    /// Ignore bindings in alternative patterns.
    Ignore,
}
/// AST visitor that collects every reference to a single local variable,
/// identified by its name and definition location.
pub struct FindVariableReferences {
    // Due to the structure of some AST nodes (for example, record updates),
    // when we traverse the AST it is possible to accidentally duplicate references.
    // To avoid this, we use a `HashSet` instead of a `Vec` here.
    // See: https://github.com/gleam-lang/gleam/issues/4859 and the linked PR.
    references: HashSet<VariableReference>,
    // Where the target variable is defined; see `DefinitionLocation` for the
    // alternative-pattern subtleties.
    definition_location: DefinitionLocation,
    // Whether bindings in alternative patterns should currently be treated
    // as definitions of the target variable.
    alternative_variable: AlternativeVariable,
    // The name of the variable being searched for.
    name: EcoString,
}
/// Where the variable we're finding references for is defined.
///
enum DefinitionLocation {
    /// This is the location where the variable is defined, nothing special is
    /// going on here. For example:
    ///
    /// ```gleam
    /// let wibble = 1
    /// //  ^^^^^^ Definition location for `wibble`
    /// wibble + 1
    /// // ^^^^^^ `wibble` used here, defined earlier
    /// ```
    ///
    Regular { location: SrcSpan },

    /// When dealing with alternative patterns and aliases we need special care:
    /// each usage will always reference the first alternative where a variable
    /// is defined and not the following ones. For example:
    ///
    /// ```gleam
    /// case wibble {
    ///   [] as var | [_] as var -> var
    /// //      ^^^              If we look where `var` thinks it's defined
    /// //                       it will say it's defined here!
    /// }
    /// ```
    ///
    /// This poses a problem if we start the renaming from the second
    /// alternative pattern:
    ///
    /// ```gleam
    /// case wibble {
    ///   [] as var | [_] as var -> var
    /// //                 ^^^ Since `var` uses the first alternative as its
    /// //                     definition location, this would not be considered
    /// //                     a reference to that same var.
    /// }
    /// ```
    ///
    /// So we keep track of the location of this definition, but we also need
    /// to store the location of the first definition in the alternative case
    /// (that's `first_alternative_location`), so that when we look for
    /// references we can check against this one that is canonically used by
    /// expressions in the AST
    ///
    Alternative {
        location: SrcSpan,
        first_alternative_location: SrcSpan,
    },
}
impl FindVariableReferences {
    /// Creates a visitor that searches for references to the variable named
    /// `variable_name`, defined at `variable_definition_location`.
    pub fn new(variable_definition_location: SrcSpan, variable_name: EcoString) -> Self {
        Self {
            references: HashSet::new(),
            // Start out assuming a plain definition; alternative-pattern
            // origins are discovered during traversal.
            definition_location: DefinitionLocation::Regular {
                location: variable_definition_location,
            },
            alternative_variable: AlternativeVariable::Ignore,
            name: variable_name,
        }
    }

    /// Where the definition for which we're accumulating references is
    /// originally defined. In case of alternative patterns this will point to
    /// the first occurrence of that name! Look at the docs for
    /// `DefinitionLocation` to learn more on why this is needed.
    ///
    fn definition_origin_location(&self) -> SrcSpan {
        match self.definition_location {
            DefinitionLocation::Regular { location }
            | DefinitionLocation::Alternative {
                first_alternative_location: location,
                ..
            } => location,
        }
    }

    /// This is the location of the definition for which we're accumulating
    /// references. In most cases you'll want to use `definition_origin_location`.
    /// The difference between the two is explained in greater detail in the docs
    /// for `DefinitionLocation`.
    ///
    fn definition_location(&self) -> SrcSpan {
        match self.definition_location {
            DefinitionLocation::Regular { location }
            | DefinitionLocation::Alternative { location, .. } => location,
        }
    }

    /// Records `alternative_location` as a candidate "first alternative"
    /// definition, keeping whichever definition appears earliest in the
    /// source.
    fn update_alternative_origin(&mut self, alternative_location: SrcSpan) {
        match self.definition_location {
            // We've found the location of the origin of an alternative pattern.
            DefinitionLocation::Regular { location } if alternative_location < location => {
                self.definition_location = DefinitionLocation::Alternative {
                    location,
                    first_alternative_location: alternative_location,
                };
            }
            // Since the new alternative location we've found is smaller, that
            // is the actual first one for the alternative pattern!
            DefinitionLocation::Alternative {
                location,
                first_alternative_location,
            } if alternative_location < first_alternative_location => {
                self.definition_location = DefinitionLocation::Alternative {
                    location,
                    first_alternative_location: alternative_location,
                };
            }
            // Otherwise the known origin is already the earliest: no change.
            DefinitionLocation::Regular { .. } | DefinitionLocation::Alternative { .. } => (),
        };
    }

    /// Consumes the visitor, traversing a whole module and returning every
    /// reference found.
    pub fn find_in_module(mut self, module: &TypedModule) -> HashSet<VariableReference> {
        self.visit_typed_module(module);
        self.references
    }

    /// Consumes the visitor, traversing a single expression and returning
    /// every reference found.
    pub fn find(mut self, expression: &TypedExpr) -> HashSet<VariableReference> {
        self.visit_typed_expr(expression);
        self.references
    }
}
impl<'ast> Visit<'ast> for FindVariableReferences {
    fn visit_typed_function(&mut self, fun: &'ast ast::TypedFunction) {
        // Only descend into the function whose span contains the target
        // definition; other functions cannot reference it.
        if fun
            .full_location()
            .contains(self.definition_origin_location().start)
        {
            ast::visit::visit_typed_function(self, fun);
        }
    }

    fn visit_typed_expr_var(
        &mut self,
        location: &'ast SrcSpan,
        constructor: &'ast ValueConstructor,
        _name: &'ast EcoString,
    ) {
        // A variable expression is a reference when it resolves to the same
        // definition location as the target.
        match constructor.variant {
            ValueConstructorVariant::LocalVariable {
                location: definition_location,
                ..
            } if definition_location == self.definition_origin_location() => {
                _ = self.references.insert(VariableReference {
                    location: *location,
                    kind: VariableReferenceKind::Variable,
                });
            }
            ValueConstructorVariant::LocalVariable { .. }
            | ValueConstructorVariant::ModuleConstant { .. }
            | ValueConstructorVariant::ModuleFn { .. }
            | ValueConstructorVariant::Record { .. } => {}
        }
    }

    fn visit_typed_clause_guard_var(
        &mut self,
        location: &'ast SrcSpan,
        _name: &'ast EcoString,
        _type_: &'ast std::sync::Arc<Type>,
        definition_location: &'ast SrcSpan,
    ) {
        // Guard variables carry their definition location directly.
        if *definition_location == self.definition_origin_location() {
            _ = self.references.insert(VariableReference {
                location: *location,
                kind: VariableReferenceKind::Variable,
            });
        }
    }

    fn visit_typed_clause(&mut self, clause: &'ast ast::TypedClause) {
        // If this alternative pattern contains the variable we are finding
        // references for, we track that so we can find alternative definitions
        // of the target variable.
        if clause
            .pattern_location()
            .contains(self.definition_origin_location().start)
        {
            self.alternative_variable = AlternativeVariable::Track;
        }

        for pattern in clause.pattern.iter() {
            self.visit_typed_pattern(pattern);
        }
        for patterns in clause.alternative_patterns.iter() {
            for pattern in patterns {
                self.visit_typed_pattern(pattern);
            }
        }
        // Tracking applies only while visiting this clause's own patterns.
        self.alternative_variable = AlternativeVariable::Ignore;

        if let Some(guard) = &clause.guard {
            self.visit_typed_clause_guard(guard);
        }
        self.visit_typed_expr(&clause.then);
    }

    fn visit_typed_pattern_variable(
        &mut self,
        location: &'ast SrcSpan,
        name: &'ast EcoString,
        _type_: &'ast std::sync::Arc<Type>,
        _origin: &'ast VariableOrigin,
    ) {
        match self.alternative_variable {
            // If we are inside the same alternative pattern as the target
            // variable and the name is the same, this is an alternative definition
            // of the same variable. We don't register the reference if this is
            // the exact variable though, as that would result in a duplicated
            // reference.
            AlternativeVariable::Track
                if *name == self.name && *location != self.definition_location() =>
            {
                self.update_alternative_origin(*location);
                _ = self.references.insert(VariableReference {
                    location: *location,
                    kind: VariableReferenceKind::Variable,
                });
            }
            AlternativeVariable::Track | AlternativeVariable::Ignore => {}
        }
    }

    fn visit_typed_pattern_assign(
        &mut self,
        location: &'ast SrcSpan,
        name: &'ast EcoString,
        pattern: &'ast ast::TypedPattern,
    ) {
        match self.alternative_variable {
            // If we are inside the same alternative pattern as the target
            // variable and the name is the same, this is an alternative definition
            // of the same variable. We don't register the reference if this is
            // the exact variable though, as that would result in a duplicated
            // reference.
            AlternativeVariable::Track
                if *name == self.name && *location != self.definition_location() =>
            {
                self.update_alternative_origin(*location);
                _ = self.references.insert(VariableReference {
                    location: *location,
                    kind: VariableReferenceKind::Variable,
                });
            }
            AlternativeVariable::Track | AlternativeVariable::Ignore => {}
        }
        // Keep descending: the aliased pattern may itself contain references.
        ast::visit::visit_typed_pattern_assign(self, location, name, pattern);
    }

    fn visit_typed_bit_array_size_variable(
        &mut self,
        location: &'ast SrcSpan,
        _name: &'ast EcoString,
        constructor: &'ast Option<Box<ValueConstructor>>,
        _type_: &'ast std::sync::Arc<Type>,
    ) {
        // Unresolved size variables carry no constructor and are skipped.
        let variant = match constructor {
            Some(constructor) => &constructor.variant,
            None => return,
        };
        match variant {
            ValueConstructorVariant::LocalVariable {
                location: definition_location,
                ..
            } if *definition_location == self.definition_origin_location() => {
                _ = self.references.insert(VariableReference {
                    location: *location,
                    kind: VariableReferenceKind::Variable,
                });
            }
            ValueConstructorVariant::LocalVariable { .. }
            | ValueConstructorVariant::ModuleConstant { .. }
            | ValueConstructorVariant::ModuleFn { .. }
            | ValueConstructorVariant::Record { .. } => {}
        }
    }

    fn visit_typed_call_arg(&mut self, arg: &'ast gleam_core::type_::TypedCallArg) {
        // A call argument using label shorthand mentions the variable via its
        // label; record it with the shorthand kind so it can be handled
        // differently from an ordinary mention.
        if let TypedExpr::Var {
            location,
            constructor,
            ..
        } = &arg.value
        {
            match &constructor.variant {
                ValueConstructorVariant::LocalVariable {
                    location: definition_location,
                    ..
                } if arg.uses_label_shorthand()
                    && *definition_location == self.definition_origin_location() =>
                {
                    _ = self.references.insert(VariableReference {
                        location: *location,
                        kind: VariableReferenceKind::LabelShorthand,
                    });
                    // Already recorded; don't visit the inner var expression
                    // too, which would add a duplicate plain reference.
                    return;
                }
                ValueConstructorVariant::LocalVariable { .. }
                | ValueConstructorVariant::ModuleConstant { .. }
                | ValueConstructorVariant::ModuleFn { .. }
                | ValueConstructorVariant::Record { .. } => {}
            }
        }
        ast::visit::visit_typed_call_arg(self, arg);
    }
}
| rust | Apache-2.0 | f424547f02e621f1c5f28749786e05eda7feb098 | 2026-01-04T15:40:22.554517Z | false |
gleam-lang/gleam | https://github.com/gleam-lang/gleam/blob/f424547f02e621f1c5f28749786e05eda7feb098/language-server/src/messages.rs | language-server/src/messages.rs | use camino::Utf8PathBuf;
use lsp::{
notification::{DidChangeWatchedFiles, DidOpenTextDocument},
request::GotoDefinition,
};
use lsp_types::{
self as lsp,
notification::{DidChangeTextDocument, DidCloseTextDocument, DidSaveTextDocument},
request::{
CodeActionRequest, Completion, DocumentSymbolRequest, Formatting, GotoTypeDefinition,
HoverRequest, PrepareRenameRequest, References, Rename, SignatureHelpRequest,
},
};
use std::time::Duration;
/// A client message after conversion from the raw LSP form.
#[derive(Debug)]
pub enum Message {
    /// A request, paired with the id the response must echo back.
    Request(lsp_server::RequestId, Request),
    /// A notification; no response is expected.
    Notification(Notification),
}
/// The LSP requests this language server handles, each carrying its
/// already-deserialised parameters.
#[derive(Debug)]
pub enum Request {
    Format(lsp::DocumentFormattingParams),
    Hover(lsp::HoverParams),
    GoToDefinition(lsp::GotoDefinitionParams),
    GoToTypeDefinition(lsp::GotoDefinitionParams),
    Completion(lsp::CompletionParams),
    CodeAction(lsp::CodeActionParams),
    SignatureHelp(lsp::SignatureHelpParams),
    DocumentSymbol(lsp::DocumentSymbolParams),
    PrepareRename(lsp::TextDocumentPositionParams),
    Rename(lsp::RenameParams),
    FindReferences(lsp::ReferenceParams),
}
impl Request {
    /// Converts a raw LSP request into one of the requests this server
    /// handles, returning `None` for unsupported methods.
    fn extract(request: lsp_server::Request) -> Option<Message> {
        // The id is needed for the eventual response, so grab it before the
        // request is consumed by deserialisation.
        let id = request.id.clone();
        let request = match request.method.as_str() {
            "textDocument/formatting" => Request::Format(cast_request::<Formatting>(request)),
            "textDocument/hover" => Request::Hover(cast_request::<HoverRequest>(request)),
            "textDocument/definition" => {
                Request::GoToDefinition(cast_request::<GotoDefinition>(request))
            }
            "textDocument/completion" => Request::Completion(cast_request::<Completion>(request)),
            "textDocument/codeAction" => {
                Request::CodeAction(cast_request::<CodeActionRequest>(request))
            }
            "textDocument/signatureHelp" => {
                Request::SignatureHelp(cast_request::<SignatureHelpRequest>(request))
            }
            "textDocument/documentSymbol" => {
                Request::DocumentSymbol(cast_request::<DocumentSymbolRequest>(request))
            }
            "textDocument/rename" => Request::Rename(cast_request::<Rename>(request)),
            "textDocument/prepareRename" => {
                Request::PrepareRename(cast_request::<PrepareRenameRequest>(request))
            }
            "textDocument/typeDefinition" => {
                Request::GoToTypeDefinition(cast_request::<GotoTypeDefinition>(request))
            }
            "textDocument/references" => {
                Request::FindReferences(cast_request::<References>(request))
            }
            _ => return None,
        };
        Some(Message::Request(id, request))
    }
}
/// The client notifications this language server reacts to.
#[derive(Debug)]
pub enum Notification {
    /// A Gleam file has been modified in memory, and the new text is provided.
    SourceFileChangedInMemory { path: Utf8PathBuf, text: String },
    /// A Gleam file has been saved or closed in the editor.
    SourceFileMatchesDisc { path: Utf8PathBuf },
    /// gleam.toml has changed.
    ConfigFileChanged { path: Utf8PathBuf },
    /// It's time to compile all open projects.
    CompilePlease,
}
impl Notification {
fn extract(notification: lsp_server::Notification) -> Option<Message> {
match notification.method.as_str() {
"textDocument/didOpen" => {
let params = cast_notification::<DidOpenTextDocument>(notification);
let notification = Notification::SourceFileChangedInMemory {
path: super::path(¶ms.text_document.uri),
text: params.text_document.text,
};
Some(Message::Notification(notification))
}
"textDocument/didChange" => {
let params = cast_notification::<DidChangeTextDocument>(notification);
let notification = Notification::SourceFileChangedInMemory {
path: super::path(¶ms.text_document.uri),
text: params.content_changes.into_iter().next_back()?.text,
};
Some(Message::Notification(notification))
}
"textDocument/didSave" => {
let params = cast_notification::<DidSaveTextDocument>(notification);
let notification = Notification::SourceFileMatchesDisc {
path: super::path(¶ms.text_document.uri),
};
Some(Message::Notification(notification))
}
"textDocument/didClose" => {
let params = cast_notification::<DidCloseTextDocument>(notification);
let notification = Notification::SourceFileMatchesDisc {
path: super::path(¶ms.text_document.uri),
};
Some(Message::Notification(notification))
}
"workspace/didChangeWatchedFiles" => {
let params = cast_notification::<DidChangeWatchedFiles>(notification);
let notification = Notification::ConfigFileChanged {
path: super::path(¶ms.changes.into_iter().next_back()?.uri),
};
Some(Message::Notification(notification))
}
_ => None,
}
}
}
/// What the caller of [`MessageBuffer::receive`] should do next.
pub enum Next {
    /// Nothing to process yet: keep pulling messages from the client.
    MorePlease,
    /// Process this batch of buffered messages now.
    Handle(Vec<Message>),
    /// A shutdown request was received: stop the server.
    Stop,
}
/// The message buffer pulls messages from the client until one of the following
/// happens:
/// - A shutdown request is received.
/// - A short pause in messages is detected, indicating the programmer has
///   stopped typing for a moment and would benefit from feedback.
/// - A request type message is received, which requires an immediate response.
///
pub struct MessageBuffer {
    /// Messages enqueued so far, in the order they were received.
    messages: Vec<Message>,
}
impl MessageBuffer {
    pub fn new() -> Self {
        Self {
            messages: Vec::new(),
        }
    }
    /// Block until there is something for the caller to do: handle a batch of
    /// buffered messages, or stop the server.
    pub fn receive(&mut self, conn: &lsp_server::Connection) -> Next {
        let pause = Duration::from_millis(100);
        // If the buffer is empty, wait indefinitely for the first message.
        // If the buffer is not empty, wait for a short time to see if more messages are
        // coming before processing the ones we have.
        let message = if self.messages.is_empty() {
            Some(conn.receiver.recv().expect("Receiving LSP message"))
        } else {
            conn.receiver.recv_timeout(pause).ok()
        };
        // If we have not received a message then it means there is a pause in the
        // messages from the client, implying the programmer has stopped typing. Process
        // the currently enqueued messages.
        let message = match message {
            Some(message) => message,
            None => {
                // A compile-please message is added during this pause in
                // activity so that the client gets feedback on the
                // state of the code as it is now.
                self.push_compile_please_message();
                return Next::Handle(self.take_messages());
            }
        };
        match message {
            lsp_server::Message::Request(r) if self.shutdown(conn, &r) => Next::Stop,
            lsp_server::Message::Request(r) => self.request(r),
            lsp_server::Message::Response(r) => self.response(r),
            lsp_server::Message::Notification(n) => self.notification(n),
        }
    }
    /// Buffer a request and hand the whole batch to the caller: requests
    /// require an immediate response, so no further waiting happens.
    fn request(&mut self, r: lsp_server::Request) -> Next {
        let Some(message) = Request::extract(r) else {
            return Next::MorePlease;
        };
        // Compile the code prior to attempting to process the response, to
        // ensure that the response is based on the latest code.
        self.push_compile_please_message();
        self.messages.push(message);
        Next::Handle(self.take_messages())
    }
    /// Buffer a notification; processing is deferred until a pause or request.
    fn notification(&mut self, n: lsp_server::Notification) -> Next {
        // A new notification telling us that an edit has been made, or
        // something along those lines.
        if let Some(message) = Notification::extract(n) {
            self.messages.push(message);
        }
        // Ask for more messages (or a pause), at which point we'll start processing.
        Next::MorePlease
    }
    fn response(&mut self, _: lsp_server::Response) -> Next {
        // We do not use or expect responses from the client currently.
        Next::MorePlease
    }
    /// Add a `CompilePlease` message which will prompt the engine to compile
    /// the projects.
    ///
    fn push_compile_please_message(&mut self) {
        let message = Notification::CompilePlease;
        let value = Message::Notification(message);
        self.messages.push(value);
    }
    /// Drain the buffer, leaving it empty for the next batch.
    fn take_messages(&mut self) -> Vec<Message> {
        std::mem::take(&mut self.messages)
    }
    /// Let `lsp_server` handle a possible shutdown request; true means it was
    /// a shutdown request and the server should stop.
    fn shutdown(
        &mut self,
        connection: &lsp_server::Connection,
        request: &lsp_server::Request,
    ) -> bool {
        connection.handle_shutdown(request).expect("LSP shutdown")
    }
}
/// Deserialize the parameters of a request known to be of LSP request type
/// `R`, discarding the request id.
///
/// Panics if the request's method or parameters do not match `R`, so callers
/// must have checked the method name first.
fn cast_request<R>(request: lsp_server::Request) -> R::Params
where
    R: lsp::request::Request,
    R::Params: serde::de::DeserializeOwned,
{
    let (_, params) = request.extract(R::METHOD).expect("cast request");
    params
}
/// Deserialize the parameters of a notification known to be of LSP
/// notification type `N`.
///
/// Panics if the notification's method or parameters do not match `N`, so
/// callers must have checked the method name first.
fn cast_notification<N>(notification: lsp_server::Notification) -> N::Params
where
    N: lsp::notification::Notification,
    N::Params: serde::de::DeserializeOwned,
{
    notification
        .extract::<N::Params>(N::METHOD)
        .expect("cast notification")
}
| rust | Apache-2.0 | f424547f02e621f1c5f28749786e05eda7feb098 | 2026-01-04T15:40:22.554517Z | false |
gleam-lang/gleam | https://github.com/gleam-lang/gleam/blob/f424547f02e621f1c5f28749786e05eda7feb098/language-server/src/router.rs | language-server/src/router.rs | use gleam_core::{
Error, Result,
build::SourceFingerprint,
error::{FileIoAction, FileKind},
io::{BeamCompiler, CommandExecutor, FileSystemReader, FileSystemWriter},
paths::ProjectPaths,
};
use std::{
collections::{HashMap, hash_map::Entry},
time::SystemTime,
};
use camino::{Utf8Path, Utf8PathBuf};
use super::{
DownloadDependencies, MakeLocker, engine::LanguageServerEngine, feedback::FeedbackBookKeeper,
files::FileSystemProxy, progress::ProgressReporter,
};
/// The language server instance serves a language client, typically a text
/// editor. The editor could have multiple Gleam projects open at once, so run
/// an instance of the language server engine for each project.
///
/// This router is responsible for finding or creating an engine for a given
/// file using the nearest parent `gleam.toml` file.
///
#[derive(Debug)]
pub(crate) struct Router<IO, Reporter> {
    io: FileSystemProxy<IO>,
    /// One engine per open project, keyed by the project's root directory.
    engines: HashMap<Utf8PathBuf, Project<IO, Reporter>>,
    progress_reporter: Reporter,
}
impl<IO, Reporter> Router<IO, Reporter>
where
    // IO to be supplied from outside of gleam-core
    IO: FileSystemReader
        + FileSystemWriter
        + BeamCompiler
        + CommandExecutor
        + DownloadDependencies
        + MakeLocker
        + Clone,
    // IO to be supplied from inside of gleam-core
    Reporter: ProgressReporter + Clone,
{
    pub fn new(progress_reporter: Reporter, io: FileSystemProxy<IO>) -> Self {
        Self {
            io,
            engines: HashMap::new(),
            progress_reporter,
        }
    }
    /// The root directory of the project containing `path`, if any.
    pub fn project_path(&self, path: &Utf8Path) -> Option<Utf8PathBuf> {
        find_gleam_project_parent(&self.io, path)
    }
    /// Find (or create) the project engine responsible for `path`, returning
    /// `Ok(None)` when the path is not inside any Gleam project.
    pub fn project_for_path(
        &mut self,
        path: Utf8PathBuf,
    ) -> Result<Option<&mut Project<IO, Reporter>>> {
        // If the path is the root of a known project then return it. Otherwise
        // find the nearest parent project.
        let path = if self.engines.contains_key(&path) {
            path
        } else {
            let Some(path) = find_gleam_project_parent(&self.io, &path) else {
                return Ok(None);
            };
            path
        };
        // If the gleam.toml has changed or the build directory is missing
        // (e.g. `gleam clean`), then discard the project as the target,
        // deps, etc may have changed and we need to rebuild taking them into
        // account.
        if let Some(project) = self.engines.get(&path) {
            let paths = ProjectPaths::new(path.clone());
            if !self.io.exists(&paths.build_directory())
                || Self::gleam_toml_changed(&paths, project, &self.io)?
            {
                let _ = self.engines.remove(&path);
            }
        }
        // Look up the project, creating a new one if it does not exist.
        Ok(Some(match self.engines.entry(path.clone()) {
            Entry::Occupied(entry) => entry.into_mut(),
            Entry::Vacant(entry) => {
                let project =
                    Self::new_project(path, self.io.clone(), self.progress_reporter.clone())?;
                entry.insert(project)
            }
        }))
    }
    /// Has gleam.toml changed since the last time we saw this project?
    fn gleam_toml_changed(
        paths: &ProjectPaths,
        project: &Project<IO, Reporter>,
        io: &FileSystemProxy<IO>,
    ) -> Result<bool, Error> {
        // Get the location of gleam.toml for this project
        let config_path = paths.root_config();
        // See if the file modification time has changed.
        if io.modification_time(&config_path)? == project.gleam_toml_modification_time {
            return Ok(false); // Not changed
        }
        // The mtime has changed. This might not be a content change, so let's
        // check the hash.
        let toml = io.read(&config_path)?;
        let gleam_toml_changed = project.gleam_toml_fingerprint != SourceFingerprint::new(&toml);
        Ok(gleam_toml_changed)
    }
    /// Drop the cached engine for the project containing `path`, if any.
    pub fn delete_engine_for_path(&mut self, path: &Utf8Path) {
        if let Some(path) = find_gleam_project_parent(&self.io, path) {
            _ = self.engines.remove(&path);
        }
    }
    /// Construct an engine for the project rooted at `path`, reading and
    /// parsing its gleam.toml.
    fn new_project(
        path: Utf8PathBuf,
        io: FileSystemProxy<IO>,
        progress_reporter: Reporter,
    ) -> Result<Project<IO, Reporter>, Error> {
        tracing::info!(?path, "creating_new_language_server_engine");
        let paths = ProjectPaths::new(path);
        let config_path = paths.root_config();
        let modification_time = io.modification_time(&config_path)?;
        let toml = io.read(&config_path)?;
        let config = toml::from_str(&toml).map_err(|e| Error::FileIo {
            action: FileIoAction::Parse,
            kind: FileKind::File,
            path: config_path,
            err: Some(e.to_string()),
        })?;
        let engine = LanguageServerEngine::new(config, progress_reporter, io, paths)?;
        let project = Project {
            engine,
            feedback: FeedbackBookKeeper::default(),
            gleam_toml_modification_time: modification_time,
            gleam_toml_fingerprint: SourceFingerprint::new(&toml),
        };
        Ok(project)
    }
}
/// Given a given path, find the nearest parent directory containing a
/// `gleam.toml` file.
///
/// If the path is a `.gleam` file then it must additionally reside in the
/// project's `src`, `test` or `dev` directory for the project to be found.
/// (Non-module paths, such as `gleam.toml` itself or a directory, carry no
/// such restriction — see the tests below.)
fn find_gleam_project_parent<IO>(io: &IO, path: &Utf8Path) -> Option<Utf8PathBuf>
where
    IO: FileSystemReader,
{
    let is_module = path.extension().map(|x| x == "gleam").unwrap_or(false);
    let mut directory = path.to_path_buf();
    // If we are finding the gleam project of a directory then we want to check the directory itself
    let is_directory = path.extension().is_none();
    if is_directory {
        // Push a dummy child so the loop below starts by inspecting the
        // directory itself (via `.parent()`) rather than its parent.
        directory.push("src");
    }
    while let Some(root) = directory.parent() {
        // If there's no gleam.toml in the root then we continue to the next parent.
        if !io.is_file(&root.join("gleam.toml")) {
            _ = directory.pop();
            continue;
        }
        // If it is a Gleam module then it must reside in the src, test or dev directory.
        if is_module
            && !(directory.ends_with("test")
                || directory.ends_with("src")
                || directory.ends_with("dev"))
        {
            _ = directory.pop();
            continue;
        }
        return Some(root.to_path_buf());
    }
    None
}
/// A single open Gleam project: its language server engine plus the state
/// needed to detect when its `gleam.toml` changes and the engine must be
/// rebuilt.
#[derive(Debug)]
pub(crate) struct Project<A, B> {
    pub engine: LanguageServerEngine<A, B>,
    pub feedback: FeedbackBookKeeper,
    /// Modification time of gleam.toml when this engine was created.
    pub gleam_toml_modification_time: SystemTime,
    /// Fingerprint of gleam.toml's contents when this engine was created.
    pub gleam_toml_fingerprint: SourceFingerprint,
}
// Tests for `find_gleam_project_parent`, exercised against an in-memory
// file system so no real disk access happens.
#[cfg(test)]
mod find_gleam_project_parent_tests {
    use super::*;
    use gleam_core::io::{FileSystemWriter, memory::InMemoryFileSystem};
    #[test]
    fn root() {
        let io = InMemoryFileSystem::new();
        assert_eq!(find_gleam_project_parent(&io, Utf8Path::new("/")), None);
    }
    #[test]
    fn outside_a_project() {
        let io = InMemoryFileSystem::new();
        assert_eq!(
            find_gleam_project_parent(&io, Utf8Path::new("/app/src/one.gleam")),
            None
        );
    }
    #[test]
    fn gleam_toml_itself() {
        let io = InMemoryFileSystem::new();
        io.write(Utf8Path::new("/app/gleam.toml"), "").unwrap();
        assert_eq!(
            find_gleam_project_parent(&io, Utf8Path::new("/app/gleam.toml")),
            Some(Utf8PathBuf::from("/app"))
        );
    }
    #[test]
    fn directory_with_gleam_toml() {
        let io = InMemoryFileSystem::new();
        io.write(Utf8Path::new("/app/gleam.toml"), "").unwrap();
        assert_eq!(
            find_gleam_project_parent(&io, Utf8Path::new("/app")),
            Some(Utf8PathBuf::from("/app"))
        );
    }
    #[test]
    fn test_module() {
        let io = InMemoryFileSystem::new();
        io.write(Utf8Path::new("/app/gleam.toml"), "").unwrap();
        assert_eq!(
            find_gleam_project_parent(&io, Utf8Path::new("/app/test/one/two/three.gleam")),
            Some(Utf8PathBuf::from("/app"))
        );
    }
    #[test]
    fn src_module() {
        let io = InMemoryFileSystem::new();
        io.write(Utf8Path::new("/app/gleam.toml"), "").unwrap();
        assert_eq!(
            find_gleam_project_parent(&io, Utf8Path::new("/app/src/one/two/three.gleam")),
            Some(Utf8PathBuf::from("/app"))
        );
    }
    #[test]
    fn dev_module() {
        let io = InMemoryFileSystem::new();
        io.write(Utf8Path::new("/app/gleam.toml"), "").unwrap();
        assert_eq!(
            find_gleam_project_parent(&io, Utf8Path::new("/app/dev/one/two/three.gleam")),
            Some(Utf8PathBuf::from("/app"))
        );
    }
    // https://github.com/gleam-lang/gleam/issues/2121
    #[test]
    fn module_in_project_but_not_src_or_test_or_dev() {
        let io = InMemoryFileSystem::new();
        io.write(Utf8Path::new("/app/gleam.toml"), "").unwrap();
        assert_eq!(
            find_gleam_project_parent(&io, Utf8Path::new("/app/other/one/two/three.gleam")),
            None,
        );
    }
    #[test]
    fn nested_projects() {
        let io = InMemoryFileSystem::new();
        io.write(Utf8Path::new("/app/gleam.toml"), "").unwrap();
        io.write(Utf8Path::new("/app/examples/wibble/gleam.toml"), "")
            .unwrap();
        assert_eq!(
            find_gleam_project_parent(&io, Utf8Path::new("/app/src/one.gleam")),
            Some(Utf8PathBuf::from("/app"))
        );
        assert_eq!(
            find_gleam_project_parent(&io, Utf8Path::new("/app/examples/wibble/src/one.gleam")),
            Some(Utf8PathBuf::from("/app/examples/wibble"))
        );
    }
}
| rust | Apache-2.0 | f424547f02e621f1c5f28749786e05eda7feb098 | 2026-01-04T15:40:22.554517Z | false |
gleam-lang/gleam | https://github.com/gleam-lang/gleam/blob/f424547f02e621f1c5f28749786e05eda7feb098/language-server/src/engine.rs | language-server/src/engine.rs | use camino::Utf8PathBuf;
use ecow::EcoString;
use gleam_core::{
Error, Result, Warning,
analyse::name::correct_name_case,
ast::{
self, Constant, CustomType, DefinitionLocation, ModuleConstant, PatternUnusedArguments,
SrcSpan, TypedArg, TypedConstant, TypedExpr, TypedFunction, TypedModule, TypedPattern,
},
build::{
ExpressionPosition, Located, Module, UnqualifiedImport, type_constructor_from_modules,
},
config::PackageConfig,
io::{BeamCompiler, CommandExecutor, FileSystemReader, FileSystemWriter},
line_numbers::LineNumbers,
paths::ProjectPaths,
type_::{
self, Deprecation, ModuleInterface, Type, TypeConstructor, ValueConstructor,
ValueConstructorVariant,
error::{Named, VariableSyntax},
printer::Printer,
},
};
use itertools::Itertools;
use lsp::CodeAction;
use lsp_server::ResponseError;
use lsp_types::{
self as lsp, DocumentSymbol, Hover, HoverContents, MarkedString, Position,
PrepareRenameResponse, Range, SignatureHelp, SymbolKind, SymbolTag, TextEdit, Url,
WorkspaceEdit,
};
use std::{collections::HashSet, sync::Arc};
use super::{
DownloadDependencies, MakeLocker,
code_action::{
AddAnnotations, AddOmittedLabels, AnnotateTopLevelDefinitions, CodeActionBuilder,
CollapseNestedCase, ConvertFromUse, ConvertToFunctionCall, ConvertToPipe, ConvertToUse,
ExpandFunctionCapture, ExtractConstant, ExtractFunction, ExtractVariable,
FillInMissingLabelledArgs, FillUnusedFields, FixBinaryOperation,
FixTruncatedBitArraySegment, GenerateDynamicDecoder, GenerateFunction, GenerateJsonEncoder,
GenerateVariant, InlineVariable, InterpolateString, LetAssertToCase, MergeCaseBranches,
PatternMatchOnValue, RedundantTupleInCaseSubject, RemoveBlock, RemoveEchos,
RemovePrivateOpaque, RemoveUnreachableCaseClauses, RemoveUnusedImports,
UseLabelShorthandSyntax, WrapInBlock, code_action_add_missing_patterns,
code_action_convert_qualified_constructor_to_unqualified,
code_action_convert_unqualified_constructor_to_qualified, code_action_import_module,
code_action_inexhaustive_let_to_case,
},
compiler::LspProjectCompiler,
completer::Completer,
files::FileSystemProxy,
progress::ProgressReporter,
reference::{
FindVariableReferences, Referenced, VariableReferenceKind, find_module_references,
reference_for_ast_node,
},
rename::{RenameOutcome, RenameTarget, Renamed, rename_local_variable, rename_module_entity},
signature_help, src_span_to_lsp_range,
};
/// The outcome of handling a single language server operation: the result
/// itself, plus any warnings and compilation information gathered while
/// producing it.
#[derive(Debug, PartialEq, Eq)]
pub struct Response<T> {
    pub result: Result<T, Error>,
    pub warnings: Vec<Warning>,
    pub compilation: Compilation,
}
/// Whether compilation happened while handling an operation, and if so which
/// modules were compiled.
#[derive(Debug, PartialEq, Eq)]
pub enum Compilation {
    /// Compilation was attempted and succeeded for these modules.
    Yes(Vec<Utf8PathBuf>),
    /// Compilation was not attempted for this operation.
    No,
}
/// The part of the language server specific to a single Gleam project. It
/// owns the compiler for that project and answers editor queries (hover,
/// completion, code actions, etc.) against the compiled code.
#[derive(Debug)]
pub struct LanguageServerEngine<IO, Reporter> {
    pub(crate) paths: ProjectPaths,
    /// A compiler for the project that supports repeat compilation of the root
    /// package.
    /// In the event the project config changes this will need to be
    /// discarded and reloaded to handle any changes to dependencies.
    pub(crate) compiler: LspProjectCompiler<FileSystemProxy<IO>>,
    // Modules compiled since feedback was last given to the client.
    modules_compiled_since_last_feedback: Vec<Utf8PathBuf>,
    // Whether a compile has been attempted since feedback was last given.
    compiled_since_last_feedback: bool,
    // The error from the most recent compile, if it failed; `None` otherwise.
    error: Option<Error>,
    // Used to publish progress notifications to the client without waiting for
    // the usual request-response loop.
    progress_reporter: Reporter,
    /// Used to know if to show the "View on HexDocs" link
    /// when hovering on an imported value
    hex_deps: HashSet<EcoString>,
}
impl<'a, IO, Reporter> LanguageServerEngine<IO, Reporter>
where
// IO to be supplied from outside of gleam-core
IO: FileSystemReader
+ FileSystemWriter
+ BeamCompiler
+ CommandExecutor
+ DownloadDependencies
+ MakeLocker
+ Clone,
// IO to be supplied from inside of gleam-core
Reporter: ProgressReporter + Clone + 'a,
{
    /// Create an engine for the project described by `config`, downloading
    /// dependencies and constructing a fresh compiler for it.
    pub fn new(
        config: PackageConfig,
        progress_reporter: Reporter,
        io: FileSystemProxy<IO>,
        paths: ProjectPaths,
    ) -> Result<Self> {
        let locker = io.inner().make_locker(&paths, config.target)?;
        // Download dependencies to ensure they are up-to-date for this new
        // configuration and new instance of the compiler
        progress_reporter.dependency_downloading_started();
        let manifest = io.inner().download_dependencies(&paths);
        progress_reporter.dependency_downloading_finished();
        // NOTE: This must come after the progress reporter has finished!
        let manifest = manifest?;
        let compiler: LspProjectCompiler<FileSystemProxy<IO>> =
            LspProjectCompiler::new(manifest, config, paths.clone(), io.clone(), locker)?;
        // Record which dependencies come from Hex so hover can offer a
        // "View on HexDocs" link for values imported from them.
        let hex_deps = compiler
            .project_compiler
            .packages
            .iter()
            .flat_map(|(k, v)| match &v.source {
                gleam_core::manifest::ManifestPackageSource::Hex { .. } => {
                    Some(EcoString::from(k.as_str()))
                }
                gleam_core::manifest::ManifestPackageSource::Git { .. }
                | gleam_core::manifest::ManifestPackageSource::Local { .. } => None,
            })
            .collect();
        Ok(Self {
            modules_compiled_since_last_feedback: vec![],
            compiled_since_last_feedback: false,
            progress_reporter,
            compiler,
            paths,
            error: None,
            hex_deps,
        })
    }
pub fn compile_please(&mut self) -> Response<()> {
self.respond(Self::compile)
}
/// Compile the project if we are in one. Otherwise do nothing.
fn compile(&mut self) -> Result<(), Error> {
self.compiled_since_last_feedback = true;
self.progress_reporter.compilation_started();
let outcome = self.compiler.compile();
self.progress_reporter.compilation_finished();
let result = outcome
// Register which modules have changed
.map(|modules| self.modules_compiled_since_last_feedback.extend(modules))
// Return the error, if present
.into_result();
self.error = match &result {
Ok(_) => None,
Err(error) => Some(error.clone()),
};
result
}
    /// Collect the warnings accumulated by the compiler since they were last
    /// taken.
    fn take_warnings(&mut self) -> Vec<Warning> {
        self.compiler.take_warnings()
    }
// TODO: implement unqualified imported module functions
//
pub fn goto_definition(
&mut self,
params: lsp::GotoDefinitionParams,
) -> Response<Option<lsp::Location>> {
self.respond(|this| {
let params = params.text_document_position_params;
let (line_numbers, node) = match this.node_at_position(¶ms) {
Some(location) => location,
None => return Ok(None),
};
let Some(location) =
node.definition_location(this.compiler.project_compiler.get_importable_modules())
else {
return Ok(None);
};
Ok(this.definition_location_to_lsp_location(&line_numbers, ¶ms, location))
})
}
pub(crate) fn goto_type_definition(
&mut self,
params: lsp_types::GotoDefinitionParams,
) -> Response<Vec<lsp::Location>> {
self.respond(|this| {
let params = params.text_document_position_params;
let (line_numbers, node) = match this.node_at_position(¶ms) {
Some(location) => location,
None => return Ok(vec![]),
};
let Some(locations) = node
.type_definition_locations(this.compiler.project_compiler.get_importable_modules())
else {
return Ok(vec![]);
};
let locations = locations
.into_iter()
.filter_map(|location| {
this.definition_location_to_lsp_location(&line_numbers, ¶ms, location)
})
.collect_vec();
Ok(locations)
})
}
    /// Convert a compiler `DefinitionLocation` into an LSP `Location`.
    ///
    /// A location without a module name refers to the current document; one
    /// with a module name is resolved against that module's source file.
    /// Returns `None` if the module's source cannot be found.
    fn definition_location_to_lsp_location(
        &self,
        line_numbers: &LineNumbers,
        params: &lsp_types::TextDocumentPositionParams,
        location: DefinitionLocation,
    ) -> Option<lsp::Location> {
        let (uri, line_numbers) = match location.module {
            None => (params.text_document.uri.clone(), line_numbers),
            Some(name) => {
                let module = self.compiler.get_source(&name)?;
                // NOTE(review): this assumes `module.path` has no leading
                // slash; an absolute path here would yield `file:////…` —
                // confirm against how `get_source` stores paths.
                let url = Url::parse(&format!("file:///{}", &module.path))
                    .expect("goto definition URL parse");
                (url, &module.line_numbers)
            }
        };
        let range = src_span_to_lsp_range(location.span, line_numbers);
        Some(lsp::Location { uri, range })
    }
    /// Compute completion suggestions for the position described by `params`,
    /// using `src` as the current (possibly unsaved) text of the document.
    /// Returns `Ok(None)` when no completions are appropriate at the cursor.
    pub fn completion(
        &mut self,
        params: lsp::TextDocumentPositionParams,
        src: EcoString,
    ) -> Response<Option<Vec<lsp::CompletionItem>>> {
        self.respond(|this| {
            let module = match this.module_for_uri(&params.text_document.uri) {
                Some(m) => m,
                None => return Ok(None),
            };
            let mut completer = Completer::new(&src, &params, &this.compiler, module);
            let byte_index = completer.module_line_numbers.byte_index(params.position);
            // If in comment context, do not provide completions
            if module.extra.is_within_comment(byte_index) {
                return Ok(None);
            }
            // Check current file contents if the user is writing an import
            // and handle separately from the rest of the completion flow
            // Check if an import is being written
            if let Some(value) = completer.import_completions() {
                return value;
            }
            let Some(found) = module.find_node(byte_index) else {
                return Ok(None);
            };
            // What to complete depends entirely on the kind of AST node the
            // cursor is on; `None` arms mean no completions are offered there.
            let completions = match found {
                Located::PatternSpread { .. } => None,
                Located::Pattern(_pattern) => None,
                // Do not show completions when typing inside a string.
                Located::Expression {
                    expression: TypedExpr::String { .. },
                    ..
                }
                | Located::Constant(Constant::String { .. }) => None,
                Located::Expression {
                    expression: TypedExpr::Call { fun, arguments, .. },
                    ..
                } => {
                    let mut completions = vec![];
                    completions.append(&mut completer.completion_values());
                    completions.append(&mut completer.completion_labels(fun, arguments));
                    Some(completions)
                }
                Located::Expression {
                    expression: TypedExpr::RecordAccess { record, type_, .. },
                    ..
                } => {
                    completer.expected_type = Some(type_.clone());
                    let mut completions = vec![];
                    completions.append(&mut completer.completion_values());
                    completions.append(&mut completer.completion_field_accessors(record.type_()));
                    Some(completions)
                }
                Located::Expression {
                    position:
                        ExpressionPosition::ArgumentOrLabel {
                            called_function,
                            function_arguments,
                        },
                    ..
                } => {
                    let mut completions = vec![];
                    completions.append(&mut completer.completion_values());
                    completions.append(
                        &mut completer.completion_labels(called_function, function_arguments),
                    );
                    Some(completions)
                }
                Located::Expression { expression, .. } => {
                    completer.expected_type = Some(expression.type_());
                    Some(completer.completion_values())
                }
                Located::ModuleFunction(_) => Some(completer.completion_types()),
                Located::Statement(_) => Some(completer.completion_values()),
                Located::FunctionBody(_) => Some(completer.completion_values()),
                Located::ModuleTypeAlias(_)
                | Located::ModuleCustomType(_)
                | Located::VariantConstructorDefinition(_) => Some(completer.completion_types()),
                // If the import completions returned no results and we are in an import then
                // we should try to provide completions for unqualified values
                Located::ModuleImport(import) => this
                    .compiler
                    .get_module_interface(import.module.as_str())
                    .map(|importing_module| {
                        completer.unqualified_completions_from_module(importing_module, true)
                    }),
                Located::ModuleConstant(_) | Located::Constant(_) => {
                    Some(completer.completion_values())
                }
                Located::UnqualifiedImport(_) => None,
                Located::Arg(_) => None,
                Located::Annotation { .. } => Some(completer.completion_types()),
                Located::Label(_, _) => None,
                Located::ModuleName {
                    layer: ast::Layer::Type,
                    ..
                } => Some(completer.completion_types()),
                Located::ModuleName {
                    layer: ast::Layer::Value,
                    ..
                } => Some(completer.completion_values()),
            };
            Ok(completions)
        })
    }
    /// Gather the code actions available for the range described by `params`,
    /// combining the suggestions of every registered code action provider.
    /// Returns `Ok(None)` when no provider produced anything.
    pub fn code_actions(
        &mut self,
        params: lsp::CodeActionParams,
    ) -> Response<Option<Vec<CodeAction>>> {
        self.respond(|this| {
            let mut actions = vec![];
            let Some(module) = this.module_for_uri(&params.text_document.uri) else {
                return Ok(None);
            };
            let lines = LineNumbers::new(&module.code);
            // NOTE(review): some providers below consult `this.error` (the
            // last compile error), so their output depends on compile state.
            code_action_unused_values(module, &lines, &params, &mut actions);
            actions.extend(RemoveUnusedImports::new(module, &lines, &params).code_actions());
            code_action_convert_qualified_constructor_to_unqualified(
                module,
                &this.compiler,
                &lines,
                &params,
                &mut actions,
            );
            code_action_convert_unqualified_constructor_to_qualified(
                module,
                &lines,
                &params,
                &mut actions,
            );
            code_action_fix_names(&lines, &params, &this.error, &mut actions);
            code_action_import_module(module, &lines, &params, &this.error, &mut actions);
            code_action_add_missing_patterns(module, &lines, &params, &this.error, &mut actions);
            actions
                .extend(RemoveUnreachableCaseClauses::new(module, &lines, &params).code_actions());
            actions.extend(CollapseNestedCase::new(module, &lines, &params).code_actions());
            code_action_inexhaustive_let_to_case(
                module,
                &lines,
                &params,
                &this.error,
                &mut actions,
            );
            actions.extend(MergeCaseBranches::new(module, &lines, &params).code_actions());
            actions.extend(FixBinaryOperation::new(module, &lines, &params).code_actions());
            actions
                .extend(FixTruncatedBitArraySegment::new(module, &lines, &params).code_actions());
            actions.extend(LetAssertToCase::new(module, &lines, &params).code_actions());
            actions
                .extend(RedundantTupleInCaseSubject::new(module, &lines, &params).code_actions());
            actions.extend(UseLabelShorthandSyntax::new(module, &lines, &params).code_actions());
            actions.extend(FillInMissingLabelledArgs::new(module, &lines, &params).code_actions());
            actions.extend(ConvertFromUse::new(module, &lines, &params).code_actions());
            actions.extend(RemoveEchos::new(module, &lines, &params).code_actions());
            actions.extend(ConvertToUse::new(module, &lines, &params).code_actions());
            actions.extend(ExpandFunctionCapture::new(module, &lines, &params).code_actions());
            actions.extend(FillUnusedFields::new(module, &lines, &params).code_actions());
            actions.extend(InterpolateString::new(module, &lines, &params).code_actions());
            actions.extend(ExtractVariable::new(module, &lines, &params).code_actions());
            actions.extend(ExtractConstant::new(module, &lines, &params).code_actions());
            actions.extend(
                GenerateFunction::new(module, &this.compiler.modules, &lines, &params)
                    .code_actions(),
            );
            actions.extend(
                GenerateVariant::new(module, &this.compiler, &lines, &params).code_actions(),
            );
            actions.extend(ConvertToPipe::new(module, &lines, &params).code_actions());
            actions.extend(ConvertToFunctionCall::new(module, &lines, &params).code_actions());
            actions.extend(
                PatternMatchOnValue::new(module, &lines, &params, &this.compiler).code_actions(),
            );
            actions.extend(AddOmittedLabels::new(module, &lines, &params).code_actions());
            actions.extend(InlineVariable::new(module, &lines, &params).code_actions());
            actions.extend(WrapInBlock::new(module, &lines, &params).code_actions());
            actions.extend(RemoveBlock::new(module, &lines, &params).code_actions());
            actions.extend(RemovePrivateOpaque::new(module, &lines, &params).code_actions());
            actions.extend(ExtractFunction::new(module, &lines, &params).code_actions());
            GenerateDynamicDecoder::new(module, &lines, &params, &mut actions).code_actions();
            GenerateJsonEncoder::new(
                module,
                &lines,
                &params,
                &mut actions,
                &this.compiler.project_compiler.config,
            )
            .code_actions();
            AddAnnotations::new(module, &lines, &params).code_action(&mut actions);
            actions
                .extend(AnnotateTopLevelDefinitions::new(module, &lines, &params).code_actions());
            Ok(if actions.is_empty() {
                None
            } else {
                Some(actions)
            })
        })
    }
    /// Produce the document outline — functions, type aliases, custom types
    /// and constants — for the module identified by `params`.
    pub fn document_symbol(
        &mut self,
        params: lsp::DocumentSymbolParams,
    ) -> Response<Vec<DocumentSymbol>> {
        self.respond(|this| {
            let mut symbols = vec![];
            let Some(module) = this.module_for_uri(&params.text_document.uri) else {
                return Ok(symbols);
            };
            let line_numbers = LineNumbers::new(&module.code);
            for function in &module.ast.definitions.functions {
                // By default, the function's location ends right after the return type.
                // For the full symbol range, have it end at the end of the body.
                // Also include the documentation, if available.
                //
                // By convention, the symbol span starts from the leading slash in the
                // documentation comment's marker ('///'), not from its content (of which
                // we have the position), so we must convert the content start position
                // to the leading slash's position using 'get_doc_marker_pos'.
                let full_function_span = SrcSpan {
                    start: function
                        .documentation
                        .as_ref()
                        .map(|(doc_start, _)| get_doc_marker_pos(*doc_start))
                        .unwrap_or(function.location.start),
                    end: function.end_position,
                };
                let (name_location, name) = function
                    .name
                    .as_ref()
                    .expect("Function in a definition must be named");
                // The 'deprecated' field is deprecated, but we have to specify it anyway
                // to be able to construct the 'DocumentSymbol' type, so
                // we suppress the warning. We specify 'None' as specifying 'Some'
                // is what is actually deprecated.
                #[allow(deprecated)]
                symbols.push(DocumentSymbol {
                    name: name.to_string(),
                    detail: Some(
                        Printer::new(&module.ast.names)
                            .print_type(&get_function_type(function))
                            .to_string(),
                    ),
                    kind: SymbolKind::FUNCTION,
                    tags: make_deprecated_symbol_tag(&function.deprecation),
                    deprecated: None,
                    range: src_span_to_lsp_range(full_function_span, &line_numbers),
                    selection_range: src_span_to_lsp_range(*name_location, &line_numbers),
                    children: None,
                });
            }
            for alias in &module.ast.definitions.type_aliases {
                let full_alias_span = match alias.documentation {
                    Some((doc_position, _)) => {
                        SrcSpan::new(get_doc_marker_pos(doc_position), alias.location.end)
                    }
                    None => alias.location,
                };
                // The 'deprecated' field is deprecated, but we have to specify it anyway
                // to be able to construct the 'DocumentSymbol' type, so
                // we suppress the warning. We specify 'None' as specifying 'Some'
                // is what is actually deprecated.
                #[allow(deprecated)]
                symbols.push(DocumentSymbol {
                    name: alias.alias.to_string(),
                    detail: Some(
                        Printer::new(&module.ast.names)
                            // If we print with aliases, we end up printing the alias which the user
                            // is currently hovering, which is not helpful. Instead, we print the
                            // raw type, so the user can see which type the alias represents
                            .print_type_without_aliases(&alias.type_)
                            .to_string(),
                    ),
                    kind: SymbolKind::CLASS,
                    tags: make_deprecated_symbol_tag(&alias.deprecation),
                    deprecated: None,
                    range: src_span_to_lsp_range(full_alias_span, &line_numbers),
                    selection_range: src_span_to_lsp_range(alias.name_location, &line_numbers),
                    children: None,
                });
            }
            // Custom types get their own helper as each variant becomes a
            // child symbol.
            for custom_type in &module.ast.definitions.custom_types {
                symbols.push(custom_type_symbol(custom_type, &line_numbers, module));
            }
            for constant in &module.ast.definitions.constants {
                // `ModuleConstant.location` ends at the constant's name or type.
                // For the full symbol span, necessary for `range`, we need to
                // include the constant value as well.
                // Also include the documentation at the start, if available.
                let full_constant_span = SrcSpan {
                    start: constant
                        .documentation
                        .as_ref()
                        .map(|(doc_start, _)| get_doc_marker_pos(*doc_start))
                        .unwrap_or(constant.location.start),
                    end: constant.value.location().end,
                };
                // The 'deprecated' field is deprecated, but we have to specify it anyway
                // to be able to construct the 'DocumentSymbol' type, so
                // we suppress the warning. We specify 'None' as specifying 'Some'
                // is what is actually deprecated.
                #[allow(deprecated)]
                symbols.push(DocumentSymbol {
                    name: constant.name.to_string(),
                    detail: Some(
                        Printer::new(&module.ast.names)
                            .print_type(&constant.type_)
                            .to_string(),
                    ),
                    kind: SymbolKind::CONSTANT,
                    tags: make_deprecated_symbol_tag(&constant.deprecation),
                    deprecated: None,
                    range: src_span_to_lsp_range(full_constant_span, &line_numbers),
                    selection_range: src_span_to_lsp_range(constant.name_location, &line_numbers),
                    children: None,
                });
            }
            Ok(symbols)
        })
    }
/// Check whether a particular module is in the same package as this one.
///
/// Modules that cannot be found among the importable modules are treated
/// as belonging to a different package.
fn is_same_package(&self, current_module: &Module, module_name: &str) -> bool {
    self.compiler
        .project_compiler
        .get_importable_modules()
        .get(module_name)
        // We can't rename values from other packages if we are not
        // aliasing an unqualified import.
        .map_or(false, |module| {
            module.package == current_module.ast.type_info.package
        })
}
/// Handles the LSP `textDocument/prepareRename` request: decides whether
/// the thing under the cursor can be renamed and, if so, returns the
/// source range the editor should highlight for the rename input box.
pub fn prepare_rename(
    &mut self,
    params: lsp::TextDocumentPositionParams,
) -> Response<Option<PrepareRenameResponse>> {
    self.respond(|this| {
        // Find the AST node under the cursor; nothing there means no rename.
        let (lines, found) = match this.node_at_position(&params) {
            Some(value) => value,
            None => return Ok(None),
        };

        let Some(current_module) = this.module_for_uri(&params.text_document.uri) else {
            return Ok(None);
        };

        // A successful response reports the span the editor should select.
        let success_response = |location| {
            Some(PrepareRenameResponse::Range(src_span_to_lsp_range(
                location, &lines,
            )))
        };

        let byte_index = lines.byte_index(params.position);

        Ok(match reference_for_ast_node(found, &current_module.name) {
            Some(Referenced::LocalVariable {
                location, origin, ..
            }) if location.contains(byte_index) => match origin.map(|origin| origin.syntax) {
                // Compiler-generated variables have no user-written name
                // in the source text, so they cannot be renamed.
                Some(VariableSyntax::Generated) => None,
                Some(
                    VariableSyntax::Variable(label) | VariableSyntax::LabelShorthand(label),
                ) => success_response(SrcSpan {
                    start: location.start,
                    // Highlight only the name itself; fall back to the
                    // full span if the label length doesn't fit in a u32.
                    end: label
                        .len()
                        .try_into()
                        .map(|len: u32| location.start + len)
                        .unwrap_or(location.end),
                }),
                Some(VariableSyntax::AssignmentPattern) | None => success_response(location),
            },
            Some(
                Referenced::ModuleValue {
                    module,
                    location,
                    target_kind,
                    ..
                }
                | Referenced::ModuleType {
                    module,
                    location,
                    target_kind,
                    ..
                },
            ) if location.contains(byte_index) => {
                // We can't rename types or values from other packages if we are not aliasing an unqualified import.
                let rename_allowed = match target_kind {
                    RenameTarget::Qualified => this.is_same_package(current_module, &module),
                    RenameTarget::Unqualified | RenameTarget::Definition => true,
                };

                if rename_allowed {
                    success_response(location)
                } else {
                    None
                }
            }
            _ => None,
        })
    })
}
/// Handles the LSP `textDocument/rename` request, producing the workspace
/// edit that renames the symbol under the cursor at its definition and at
/// every reference.
pub fn rename(
    &mut self,
    params: lsp::RenameParams,
) -> Response<Result<Option<WorkspaceEdit>, ResponseError>> {
    self.respond(|this| {
        let position = &params.text_document_position;

        let (lines, found) = match this.node_at_position(position) {
            Some(value) => value,
            None => return Ok(RenameOutcome::NoRenames.into_result()),
        };

        let Some(module) = this.module_for_uri(&position.text_document.uri) else {
            return Ok(RenameOutcome::NoRenames.into_result());
        };

        Ok(match reference_for_ast_node(found, &module.name) {
            Some(Referenced::LocalVariable {
                origin,
                definition_location,
                name,
                ..
            }) => {
                // Work out how each occurrence must be edited: label
                // shorthands need the name inserted after the label rather
                // than a straight replacement.
                let rename_kind = match origin.map(|origin| origin.syntax) {
                    // Generated variables have no source text to rename.
                    Some(VariableSyntax::Generated) => {
                        return Ok(RenameOutcome::NoRenames.into_result());
                    }
                    Some(VariableSyntax::LabelShorthand(_)) => {
                        VariableReferenceKind::LabelShorthand
                    }
                    Some(
                        VariableSyntax::AssignmentPattern | VariableSyntax::Variable { .. },
                    )
                    | None => VariableReferenceKind::Variable,
                };

                rename_local_variable(
                    module,
                    &lines,
                    &params,
                    definition_location,
                    name,
                    rename_kind,
                )
                .into_result()
            }
            Some(Referenced::ModuleValue {
                module: module_name,
                target_kind,
                name,
                name_kind,
                ..
            }) => rename_module_entity(
                &params,
                module,
                this.compiler.project_compiler.get_importable_modules(),
                &this.compiler.sources,
                Renamed {
                    module_name: &module_name,
                    name: &name,
                    name_kind,
                    target_kind,
                    layer: ast::Layer::Value,
                },
            )
            .into_result(),
            Some(Referenced::ModuleType {
                module: module_name,
                target_kind,
                name,
                ..
            }) => rename_module_entity(
                &params,
                module,
                this.compiler.project_compiler.get_importable_modules(),
                &this.compiler.sources,
                Renamed {
                    module_name: &module_name,
                    name: &name,
                    name_kind: Named::Type,
                    target_kind,
                    layer: ast::Layer::Type,
                },
            )
            .into_result(),
            None => RenameOutcome::NoRenames.into_result(),
        })
    })
}
pub fn find_references(
&mut self,
params: lsp::ReferenceParams,
) -> Response<Option<Vec<lsp::Location>>> {
self.respond(|this| {
let position = ¶ms.text_document_position;
let (lines, found) = match this.node_at_position(position) {
Some(value) => value,
None => return Ok(None),
};
let uri = position.text_document.uri.clone();
| rust | Apache-2.0 | f424547f02e621f1c5f28749786e05eda7feb098 | 2026-01-04T15:40:22.554517Z | true |
gleam-lang/gleam | https://github.com/gleam-lang/gleam/blob/f424547f02e621f1c5f28749786e05eda7feb098/language-server/src/lib.rs | language-server/src/lib.rs | #![warn(
clippy::all,
clippy::dbg_macro,
clippy::todo,
clippy::mem_forget,
clippy::filter_map_next,
clippy::needless_continue,
clippy::needless_borrow,
clippy::match_wildcard_for_single_variants,
clippy::imprecise_flops,
clippy::suboptimal_flops,
clippy::lossy_float_literal,
clippy::rest_pat_in_fully_bound_structs,
clippy::fn_params_excessive_bools,
clippy::inefficient_to_string,
clippy::linkedlist,
clippy::macro_use_imports,
clippy::option_option,
clippy::verbose_file_reads,
clippy::unnested_or_patterns,
rust_2018_idioms,
missing_debug_implementations,
missing_copy_implementations,
trivial_casts,
trivial_numeric_casts,
nonstandard_style,
unexpected_cfgs,
unused_import_braces,
unused_qualifications
)]
#![deny(
clippy::await_holding_lock,
clippy::disallowed_methods,
clippy::if_let_mutex,
clippy::indexing_slicing,
clippy::mem_forget,
clippy::ok_expect,
clippy::unimplemented,
clippy::unwrap_used,
unsafe_code,
unstable_features,
unused_results
)]
#![allow(
clippy::assign_op_pattern,
clippy::to_string_trait_impl,
clippy::match_single_binding,
clippy::match_like_matches_macro,
clippy::inconsistent_struct_constructor,
clippy::len_without_is_empty
)]
mod code_action;
mod compiler;
mod completer;
mod edits;
mod engine;
mod feedback;
mod files;
mod messages;
mod progress;
mod reference;
mod rename;
mod router;
mod server;
mod signature_help;
#[cfg(test)]
mod tests;
pub use server::LanguageServer;
use camino::Utf8PathBuf;
use gleam_core::{
Result, ast::SrcSpan, build::Target, line_numbers::LineNumbers, manifest::Manifest,
paths::ProjectPaths,
};
use lsp_types::{Position, Range, TextEdit, Url};
use std::any::Any;
/// Opaque guard returned by `Locker::lock_for_build`. The build lock is
/// held for as long as this value is alive and released when it is dropped.
#[derive(Debug)]
pub struct LockGuard(pub Box<dyn Any>);

/// Something that can take an exclusive lock over the build.
pub trait Locker {
    fn lock_for_build(&self) -> Result<LockGuard>;
}

/// Factory for creating `Locker`s for a given project and compile target.
pub trait MakeLocker {
    fn make_locker(&self, paths: &ProjectPaths, target: Target) -> Result<Box<dyn Locker>>;
}

/// Something that can download a project's dependencies, returning the
/// resulting manifest.
pub trait DownloadDependencies {
    fn download_dependencies(&self, paths: &ProjectPaths) -> Result<Manifest>;
}
/// Convert a compiler source span into an LSP `Range`.
///
/// `LineNumbers` produces 1-indexed line/column pairs while LSP positions
/// are 0-indexed, hence the `- 1` adjustments.
pub fn src_span_to_lsp_range(location: SrcSpan, line_numbers: &LineNumbers) -> Range {
    let to_position = |byte_index| {
        let point = line_numbers.line_and_column_number(byte_index);
        Position::new(point.line - 1, point.column - 1)
    };
    Range::new(to_position(location.start), to_position(location.end))
}
/// Convert an LSP `Range` back into a compiler source span of byte offsets.
pub fn lsp_range_to_src_span(range: Range, line_numbers: &LineNumbers) -> SrcSpan {
    SrcSpan {
        start: line_numbers.byte_index(range.start),
        end: line_numbers.byte_index(range.end),
    }
}
/// A little wrapper around LineNumbers to make it easier to build text edits.
///
#[derive(Debug)]
pub struct TextEdits<'a> {
    line_numbers: &'a LineNumbers,
    edits: Vec<TextEdit>,
}

impl<'a> TextEdits<'a> {
    pub fn new(line_numbers: &'a LineNumbers) -> Self {
        TextEdits {
            line_numbers,
            edits: Vec::new(),
        }
    }

    pub fn src_span_to_lsp_range(&self, location: SrcSpan) -> Range {
        src_span_to_lsp_range(location, self.line_numbers)
    }

    pub fn lsp_range_to_src_span(&self, range: Range) -> SrcSpan {
        lsp_range_to_src_span(range, self.line_numbers)
    }

    /// Queue an edit replacing the text at `location` with `new_text`.
    pub fn replace(&mut self, location: SrcSpan, new_text: String) {
        let range = src_span_to_lsp_range(location, self.line_numbers);
        self.edits.push(TextEdit { range, new_text })
    }

    /// Queue an insertion of `new_text` at byte offset `at`.
    pub fn insert(&mut self, at: u32, new_text: String) {
        self.replace(SrcSpan { start: at, end: at }, new_text)
    }

    /// Queue the deletion of the text covered by `location`.
    pub fn delete(&mut self, location: SrcSpan) {
        self.replace(location, String::new())
    }

    /// Queue the deletion of an already-computed LSP range.
    fn delete_range(&mut self, range: Range) {
        self.edits.push(TextEdit {
            range,
            new_text: String::new(),
        })
    }
}
/// Convert a file URL into a filesystem path.
///
/// Panics if the URL is not a valid file URL or the resulting path is not
/// valid UTF-8.
fn path(uri: &Url) -> Utf8PathBuf {
    // The to_file_path method is available on these platforms
    #[cfg(any(unix, windows, target_os = "redox", target_os = "wasi"))]
    return Utf8PathBuf::from_path_buf(uri.to_file_path().expect("URL file"))
        .expect("Non Utf8 Path");

    // On other platforms fall back to using the URL's path component verbatim.
    #[cfg(not(any(unix, windows, target_os = "redox", target_os = "wasi")))]
    return Utf8PathBuf::from_path_buf(uri.path().into()).expect("Non Utf8 Path");
}
/// Convert a filesystem path string into a file URL, returning `None` if
/// the conversion fails.
fn url_from_path(path: &str) -> Option<Url> {
    // The targets for which `from_file_path` is defined
    #[cfg(any(
        unix,
        windows,
        target_os = "redox",
        target_os = "wasi",
        target_os = "hermit"
    ))]
    let uri = Url::from_file_path(path).ok();

    // Elsewhere build the `file://` URL by hand and let the parser validate it.
    #[cfg(not(any(
        unix,
        windows,
        target_os = "redox",
        target_os = "wasi",
        target_os = "hermit"
    )))]
    let uri = Url::parse(&format!("file://{path}")).ok();

    uri
}
| rust | Apache-2.0 | f424547f02e621f1c5f28749786e05eda7feb098 | 2026-01-04T15:40:22.554517Z | false |
gleam-lang/gleam | https://github.com/gleam-lang/gleam/blob/f424547f02e621f1c5f28749786e05eda7feb098/language-server/src/tests.rs | language-server/src/tests.rs | mod action;
mod compilation;
mod completion;
mod definition;
mod document_symbols;
mod hover;
mod reference;
mod rename;
mod router;
mod signature_help;
use std::{
collections::{HashMap, HashSet},
sync::{Arc, Mutex},
time::SystemTime,
};
use ecow::EcoString;
use hexpm::version::{Range, Version};
use camino::{Utf8Path, Utf8PathBuf};
use itertools::Itertools;
use lsp_types::{Position, TextDocumentIdentifier, TextDocumentPositionParams, Url};
use gleam_core::{
Result,
config::PackageConfig,
io::{
BeamCompiler, Command, CommandExecutor, FileSystemReader, FileSystemWriter, ReadDir,
WrappedReader, memory::InMemoryFileSystem,
},
line_numbers::LineNumbers,
manifest::{Base16Checksum, Manifest, ManifestPackage, ManifestPackageSource},
paths::ProjectPaths,
requirement::Requirement,
};
use super::{
DownloadDependencies, LockGuard, Locker, MakeLocker, engine::LanguageServerEngine,
files::FileSystemProxy, progress::ProgressReporter,
};
// Name of the root package used by every in-memory test project.
pub const LSP_TEST_ROOT_PACKAGE_NAME: &str = "app";

/// Observable operations recorded by the test IO so tests can assert on
/// the sequence of actions the language server performed.
#[derive(Debug, Clone, PartialEq, Eq)]
enum Action {
    CompilationStarted,
    CompilationFinished,
    DependencyDownloadingStarted,
    DependencyDownloadingFinished,
    DownloadDependencies,
    LockBuild,
    UnlockBuild,
}

/// In-memory IO implementation used to drive the language server engine in
/// tests without touching the real filesystem, network, or build tools.
#[derive(Debug, Clone)]
struct LanguageServerTestIO {
    io: InMemoryFileSystem,
    paths: ProjectPaths,
    // Shared so that clones of this IO (and the lockers it creates) all
    // append to the same action log.
    actions: Arc<Mutex<Vec<Action>>>,
    manifest: Manifest,
}
/// A manifest package with defaults suitable for tests: version 1.0.0 and
/// a Hex source with an empty checksum.
fn default_manifest_package() -> ManifestPackage {
    ManifestPackage {
        name: Default::default(),
        build_tools: Default::default(),
        otp_app: Default::default(),
        requirements: Default::default(),
        version: Version::new(1, 0, 0),
        source: ManifestPackageSource::Hex {
            outer_checksum: Base16Checksum(vec![]),
        },
    }
}
impl LanguageServerTestIO {
    fn new() -> Self {
        Self {
            io: Default::default(),
            actions: Default::default(),
            paths: ProjectPaths::at_filesystem_root(),
            manifest: Manifest {
                requirements: HashMap::new(),
                packages: vec![],
            },
        }
    }

    /// Panics if there are other references to the actions.
    pub fn into_actions(self) -> Vec<Action> {
        Arc::try_unwrap(self.actions).unwrap().into_inner().unwrap()
    }

    /// Write a module named `name` into the root package's `src` directory,
    /// returning the path it was written to.
    pub fn src_module(&self, name: &str, code: &str) -> Utf8PathBuf {
        let src_dir = self.paths.src_directory();
        let path = src_dir.join(name).with_extension("gleam");
        self.module(&path, code);
        path
    }

    /// Write a module named `name` into the root package's `test` directory.
    pub fn test_module(&self, name: &str, code: &str) -> Utf8PathBuf {
        let test_dir = self.paths.test_directory();
        let path = test_dir.join(name).with_extension("gleam");
        self.module(&path, code);
        path
    }

    /// Write a module named `name` into the root package's `dev` directory.
    pub fn dev_module(&self, name: &str, code: &str) -> Utf8PathBuf {
        let dev_directory = self.paths.dev_directory();
        let path = dev_directory.join(name).with_extension("gleam");
        self.module(&path, code);
        path
    }

    /// Write a module belonging to the path dependency named `dep`.
    pub fn path_dep_module(&self, dep: &str, name: &str, code: &str) -> Utf8PathBuf {
        let dep_dir = self.paths.root().join(dep).join("src");
        let path = dep_dir.join(name).with_extension("gleam");
        self.module(&path, code);
        path
    }

    /// Write a module belonging to the hex dependency named `dep`.
    pub fn hex_dep_module(&self, dep: &str, name: &str, code: &str) -> Utf8PathBuf {
        let dep_dir = self.paths.build_packages_package(dep).join("src");
        let path = dep_dir.join(name).with_extension("gleam");
        self.module(&path, code);
        path
    }

    /// Register a hex package in the manifest that is returned when
    /// dependencies are "downloaded".
    pub fn add_hex_package(&mut self, name: &str) {
        self.manifest.packages.push(ManifestPackage {
            name: name.into(),
            source: ManifestPackageSource::Hex {
                outer_checksum: Base16Checksum(vec![]),
            },
            build_tools: vec!["gleam".into()],
            ..default_manifest_package()
        });
    }

    /// Write `code` to `path` and bump its modification time so the
    /// compiler treats the module as freshly changed.
    fn module(&self, path: &Utf8Path, code: &str) {
        self.io.write(path, code).unwrap();
        self.io
            .try_set_modification_time(path, SystemTime::now())
            .unwrap();
    }

    /// Append an action to the shared action log.
    fn record(&self, action: Action) {
        self.actions.lock().unwrap().push(action);
    }
}
// All filesystem reads are delegated to the in-memory filesystem.
impl FileSystemReader for LanguageServerTestIO {
    fn read_dir(&self, path: &Utf8Path) -> Result<ReadDir> {
        self.io.read_dir(path)
    }

    fn read(&self, path: &Utf8Path) -> Result<String> {
        self.io.read(path)
    }

    fn read_bytes(&self, path: &Utf8Path) -> Result<Vec<u8>> {
        self.io.read_bytes(path)
    }

    fn reader(&self, path: &Utf8Path) -> Result<WrappedReader> {
        self.io.reader(path)
    }

    fn is_file(&self, path: &Utf8Path) -> bool {
        self.io.is_file(path)
    }

    fn is_directory(&self, path: &Utf8Path) -> bool {
        self.io.is_directory(path)
    }

    fn modification_time(&self, path: &Utf8Path) -> Result<SystemTime> {
        self.io.modification_time(path)
    }

    fn canonicalise(&self, path: &Utf8Path) -> Result<Utf8PathBuf, gleam_core::Error> {
        self.io.canonicalise(path)
    }
}
// All filesystem writes are delegated to the in-memory filesystem.
impl FileSystemWriter for LanguageServerTestIO {
    fn mkdir(&self, path: &Utf8Path) -> Result<()> {
        self.io.mkdir(path)
    }

    fn delete_directory(&self, path: &Utf8Path) -> Result<()> {
        self.io.delete_directory(path)
    }

    fn copy(&self, from: &Utf8Path, to: &Utf8Path) -> Result<()> {
        self.io.copy(from, to)
    }

    fn copy_dir(&self, from: &Utf8Path, to: &Utf8Path) -> Result<()> {
        self.io.copy_dir(from, to)
    }

    fn hardlink(&self, from: &Utf8Path, to: &Utf8Path) -> Result<()> {
        self.io.hardlink(from, to)
    }

    fn symlink_dir(&self, from: &Utf8Path, to: &Utf8Path) -> Result<()> {
        self.io.symlink_dir(from, to)
    }

    fn delete_file(&self, path: &Utf8Path) -> Result<()> {
        self.io.delete_file(path)
    }

    fn write(&self, path: &Utf8Path, content: &str) -> Result<(), gleam_core::Error> {
        self.io.write(path, content)
    }

    fn write_bytes(&self, path: &Utf8Path, content: &[u8]) -> Result<(), gleam_core::Error> {
        self.io.write_bytes(path, content)
    }

    fn exists(&self, path: &Utf8Path) -> bool {
        self.io.exists(path)
    }
}
// "Downloading" dependencies just records the action and returns the
// manifest configured on the test IO.
impl DownloadDependencies for LanguageServerTestIO {
    fn download_dependencies(&self, _paths: &ProjectPaths) -> Result<Manifest> {
        self.record(Action::DownloadDependencies);
        Ok(self.manifest.clone())
    }
}
// Tests are never expected to execute external commands, so this panics
// with the details of whatever was attempted.
impl CommandExecutor for LanguageServerTestIO {
    fn exec(&self, command: Command) -> Result<i32> {
        let Command {
            program,
            args,
            env,
            cwd,
            stdio,
        } = command;
        panic!("exec({program:?}, {args:?}, {env:?}, {cwd:?}, {stdio:?}) is not implemented")
    }
}
// Tests are never expected to compile BEAM bytecode, so this panics with
// the details of whatever was attempted.
impl BeamCompiler for LanguageServerTestIO {
    fn compile_beam(
        &self,
        out: &Utf8Path,
        lib: &Utf8Path,
        modules: &HashSet<Utf8PathBuf>,
        stdio: gleam_core::io::Stdio,
    ) -> Result<Vec<String>> {
        panic!(
            "compile_beam({:?}, {:?}, {:?}, {:?}) is not implemented",
            out, lib, modules, stdio
        )
    }
}
// Lockers produced for tests share this IO's action log so that locking
// and unlocking can be asserted on.
impl MakeLocker for LanguageServerTestIO {
    fn make_locker(
        &self,
        _paths: &ProjectPaths,
        _target: gleam_core::build::Target,
    ) -> Result<Box<dyn Locker>> {
        Ok(Box::new(TestLocker {
            actions: self.actions.clone(),
        }))
    }
}
/// Locker implementation that records lock activity in the shared action
/// log instead of taking a real filesystem lock.
#[derive(Debug, Clone)]
struct TestLocker {
    actions: Arc<Mutex<Vec<Action>>>,
}

impl TestLocker {
    fn record(&self, action: Action) {
        self.actions.lock().unwrap().push(action);
    }
}

impl Locker for TestLocker {
    fn lock_for_build(&self) -> Result<LockGuard> {
        self.record(Action::LockBuild);
        Ok(LockGuard(Box::new(Guard(self.actions.clone()))))
    }
}

/// Guard whose drop records that the build lock was released.
struct Guard(Arc<Mutex<Vec<Action>>>);

impl Drop for Guard {
    fn drop(&mut self) {
        self.0.lock().unwrap().push(Action::UnlockBuild);
    }
}
// Progress notifications are recorded in the action log rather than being
// reported to a client.
impl ProgressReporter for LanguageServerTestIO {
    fn compilation_started(&self) {
        self.record(Action::CompilationStarted);
    }

    fn compilation_finished(&self) {
        self.record(Action::CompilationFinished);
    }

    fn dependency_downloading_started(&self) {
        self.record(Action::DependencyDownloadingStarted);
    }

    fn dependency_downloading_finished(&self) {
        self.record(Action::DependencyDownloadingFinished);
    }
}
/// Register `package` as a direct dependency of the project under test and
/// write its `gleam.toml` to `toml_path`.
fn add_package_from_manifest<B>(
    engine: &mut LanguageServerEngine<LanguageServerTestIO, B>,
    toml_path: Utf8PathBuf,
    package: ManifestPackage,
) {
    let compiler = &mut engine.compiler.project_compiler;

    // Translate the manifest source into the matching requirement kind.
    _ = compiler.config.dependencies.insert(
        package.name.clone(),
        match package.source {
            ManifestPackageSource::Hex { .. } => Requirement::Hex {
                version: Range::new("1.0.0".into()).unwrap(),
            },
            ManifestPackageSource::Local { ref path } => Requirement::Path { path: path.into() },
            ManifestPackageSource::Git {
                ref repo,
                ref commit,
            } => Requirement::Git {
                git: repo.clone(),
                ref_: commit.clone(),
            },
        },
    );

    write_toml_from_manifest(engine, toml_path, package);
}
/// Register `package` as a dev dependency of the project under test and
/// write its `gleam.toml` to `toml_path`. Mirrors
/// `add_package_from_manifest` but targets `dev_dependencies`.
fn add_dev_package_from_manifest<B>(
    engine: &mut LanguageServerEngine<LanguageServerTestIO, B>,
    toml_path: Utf8PathBuf,
    package: ManifestPackage,
) {
    let compiler = &mut engine.compiler.project_compiler;

    _ = compiler.config.dev_dependencies.insert(
        package.name.clone(),
        match package.source {
            ManifestPackageSource::Hex { .. } => Requirement::Hex {
                version: Range::new("1.0.0".into()).unwrap(),
            },
            ManifestPackageSource::Local { ref path } => Requirement::Path { path: path.into() },
            ManifestPackageSource::Git {
                ref repo,
                ref commit,
            } => Requirement::Git {
                git: repo.clone(),
                ref_: commit.clone(),
            },
        },
    );

    write_toml_from_manifest(engine, toml_path, package);
}
/// Write a minimal `gleam.toml` (name and version only) for `package` to
/// `toml_path`, and register the package with the project compiler.
fn write_toml_from_manifest<B>(
    engine: &mut LanguageServerEngine<LanguageServerTestIO, B>,
    toml_path: Utf8PathBuf,
    package: ManifestPackage,
) {
    let compiler = &mut engine.compiler.project_compiler;
    let toml = format!(
        r#"name = "{}"
version = "{}""#,
        &package.name, &package.version
    );
    _ = compiler.packages.insert(package.name.to_string(), package);
    compiler.io.write(toml_path.as_path(), &toml).unwrap();
}
/// Add a local path dependency named `name`, rooted next to the project
/// under test.
fn add_path_dep<B>(engine: &mut LanguageServerEngine<LanguageServerTestIO, B>, name: &str) {
    let path = engine.paths.root().join(name);
    add_package_from_manifest(
        engine,
        path.join("gleam.toml"),
        ManifestPackage {
            name: name.into(),
            version: Version::new(1, 0, 0),
            build_tools: vec!["gleam".into()],
            otp_app: None,
            requirements: vec![],
            source: ManifestPackageSource::Local { path: path.clone() },
        },
    )
}
/// Build a fresh engine whose root package is the test package, backed
/// entirely by the given in-memory IO.
fn setup_engine(
    io: &LanguageServerTestIO,
) -> LanguageServerEngine<LanguageServerTestIO, LanguageServerTestIO> {
    let mut config = PackageConfig::default();
    config.name = LSP_TEST_ROOT_PACKAGE_NAME.into();
    LanguageServerEngine::new(
        config,
        io.clone(),
        FileSystemProxy::new(io.clone()),
        io.paths.clone(),
    )
    .unwrap()
}
/// Declarative description of a test project: the main `app` module's
/// source plus any extra modules grouped by where they live.
struct TestProject<'a> {
    // Source of the root `app` module the cursor is usually placed in.
    src: &'a str,
    root_package_modules: Vec<(&'a str, &'a str)>,
    // Modules of the local path dependency `dep`.
    dependency_modules: Vec<(&'a str, &'a str)>,
    test_modules: Vec<(&'a str, &'a str)>,
    dev_modules: Vec<(&'a str, &'a str)>,
    // Modules of the direct hex dependency `hex`.
    hex_modules: Vec<(&'a str, &'a str)>,
    // Modules of the dev hex dependency `dev_hex`.
    dev_hex_modules: Vec<(&'a str, &'a str)>,
    // Modules of the transitive hex dependency `indirect_hex`.
    indirect_hex_modules: Vec<(&'a str, &'a str)>,
    // Modules of arbitrary extra hex packages, keyed by package name.
    package_modules: HashMap<&'a str, Vec<(&'a str, &'a str)>>,
}
impl<'a> TestProject<'a> {
    /// A project containing just the root `app` module with this source.
    pub fn for_source(src: &'a str) -> Self {
        TestProject {
            src,
            root_package_modules: vec![],
            dependency_modules: vec![],
            test_modules: vec![],
            dev_modules: vec![],
            hex_modules: vec![],
            dev_hex_modules: vec![],
            indirect_hex_modules: vec![],
            package_modules: HashMap::new(),
        }
    }

    /// Derive a module name from a file URL by taking the path segments
    /// after the `src` directory and dropping the `.gleam` extension.
    pub fn module_name_from_url(&self, url: &Url) -> Option<String> {
        Some(
            url.path_segments()?
                .skip_while(|segment| *segment != "src")
                .skip(1)
                .join("/")
                .trim_end_matches(".gleam")
                .into(),
        )
    }

    /// Find the source of the module a URL points at by searching the
    /// registered module collections.
    // NOTE(review): dev_modules and package_modules are not searched here —
    // confirm whether URLs for those modules ever need resolving.
    pub fn src_from_module_url(&self, url: &Url) -> Option<&str> {
        let module_name: EcoString = self.module_name_from_url(url)?.into();
        if module_name == "app" {
            return Some(self.src);
        }
        let find_module = |modules: &Vec<(&'a str, &'a str)>| {
            modules
                .iter()
                .find(|(name, _)| *name == module_name)
                .map(|(_, src)| *src)
        };

        find_module(&self.root_package_modules)
            .or_else(|| find_module(&self.dependency_modules))
            .or_else(|| find_module(&self.test_modules))
            .or_else(|| find_module(&self.hex_modules))
            .or_else(|| find_module(&self.dev_hex_modules))
            .or_else(|| find_module(&self.indirect_hex_modules))
    }

    /// Add a module to the root package.
    pub fn add_module(mut self, name: &'a str, src: &'a str) -> Self {
        self.root_package_modules.push((name, src));
        self
    }

    /// Add a module to the path dependency `dep`.
    pub fn add_dep_module(mut self, name: &'a str, src: &'a str) -> Self {
        self.dependency_modules.push((name, src));
        self
    }

    /// Add a module to the root package's `test` directory.
    pub fn add_test_module(mut self, name: &'a str, src: &'a str) -> Self {
        self.test_modules.push((name, src));
        self
    }

    /// Add a module to the root package's `dev` directory.
    pub fn add_dev_module(mut self, name: &'a str, src: &'a str) -> Self {
        self.dev_modules.push((name, src));
        self
    }

    /// Add a module to the hex dependency `hex`.
    pub fn add_hex_module(mut self, name: &'a str, src: &'a str) -> Self {
        self.hex_modules.push((name, src));
        self
    }

    /// Add a module to the dev hex dependency `dev_hex`.
    pub fn add_dev_hex_module(mut self, name: &'a str, src: &'a str) -> Self {
        self.dev_hex_modules.push((name, src));
        self
    }

    /// Add a module to the transitive hex dependency `indirect_hex`.
    pub fn add_indirect_hex_module(mut self, name: &'a str, src: &'a str) -> Self {
        self.indirect_hex_modules.push((name, src));
        self
    }

    /// Add a module to an arbitrary extra hex package named `package`.
    pub fn add_package_module(mut self, package: &'a str, name: &'a str, src: &'a str) -> Self {
        self.package_modules
            .entry(package)
            .or_default()
            .push((name, src));
        self
    }

    /// Construct an engine with all the declared modules, dependencies,
    /// and manifests written into the in-memory filesystem.
    pub fn build_engine(
        &self,
        io: &mut LanguageServerTestIO,
    ) -> LanguageServerEngine<LanguageServerTestIO, LanguageServerTestIO> {
        io.add_hex_package("hex");
        self.hex_modules.iter().for_each(|(name, code)| {
            _ = io.hex_dep_module("hex", name, code);
        });

        self.dev_hex_modules.iter().for_each(|(name, code)| {
            _ = io.hex_dep_module("dev_hex", name, code);
        });

        self.indirect_hex_modules.iter().for_each(|(name, code)| {
            _ = io.hex_dep_module("indirect_hex", name, code);
        });

        for (package, modules) in self.package_modules.iter() {
            io.add_hex_package(package);
            for (module, code) in modules {
                _ = io.hex_dep_module(package, module, code);
            }
        }

        let mut engine = setup_engine(io);

        // Add an external dependency and all its modules
        add_path_dep(&mut engine, "dep");
        self.dependency_modules.iter().for_each(|(name, code)| {
            let _ = io.path_dep_module("dep", name, code);
        });

        // Add all the modules belonging to the root package
        self.root_package_modules.iter().for_each(|(name, code)| {
            let _ = io.src_module(name, code);
        });

        // Add all the test modules
        self.test_modules.iter().for_each(|(name, code)| {
            let _ = io.test_module(name, code);
        });

        // Add all the dev modules
        self.dev_modules.iter().for_each(|(name, code)| {
            let _ = io.dev_module(name, code);
        });

        for package in &io.manifest.packages {
            let toml_path = engine.paths.build_packages_package_config(&package.name);
            add_package_from_manifest(&mut engine, toml_path, package.clone());
        }

        // Add an indirect dependency manifest
        let toml_path = engine.paths.build_packages_package_config("indirect_hex");
        write_toml_from_manifest(
            &mut engine,
            toml_path,
            ManifestPackage {
                name: "indirect_hex".into(),
                source: ManifestPackageSource::Hex {
                    outer_checksum: Base16Checksum(vec![]),
                },
                build_tools: vec!["gleam".into()],
                ..default_manifest_package()
            },
        );

        // Add a dev dependency
        let toml_path = engine.paths.build_packages_package_config("dev_hex");
        add_dev_package_from_manifest(
            &mut engine,
            toml_path,
            ManifestPackage {
                name: "dev_hex".into(),
                source: ManifestPackageSource::Hex {
                    outer_checksum: Base16Checksum(vec![]),
                },
                build_tools: vec!["gleam".into()],
                ..default_manifest_package()
            },
        );

        engine
    }

    /// Cursor position parameters pointing into the root `app` module.
    pub fn build_path(&self, position: Position) -> TextDocumentPositionParams {
        let path = Utf8PathBuf::from(if cfg!(target_family = "windows") {
            r"\\?\C:\src\app.gleam"
        } else {
            "/src/app.gleam"
        });

        let url = Url::from_file_path(path).unwrap();

        TextDocumentPositionParams::new(TextDocumentIdentifier::new(url), position)
    }

    /// Cursor position parameters pointing into a module in `test/`.
    pub fn build_test_path(
        &self,
        position: Position,
        test_name: &str,
    ) -> TextDocumentPositionParams {
        let path = Utf8PathBuf::from(if cfg!(target_family = "windows") {
            format!(r"\\?\C:\test\{test_name}.gleam")
        } else {
            format!("/test/{test_name}.gleam")
        });

        let url = Url::from_file_path(path).unwrap();

        TextDocumentPositionParams::new(TextDocumentIdentifier::new(url), position)
    }

    /// Cursor position parameters pointing into a module in `dev/`.
    pub fn build_dev_path(
        &self,
        position: Position,
        test_name: &str,
    ) -> TextDocumentPositionParams {
        let path = Utf8PathBuf::from(if cfg!(target_family = "windows") {
            format!(r"\\?\C:\dev\{test_name}.gleam")
        } else {
            format!("/dev/{test_name}.gleam")
        });

        let url = Url::from_file_path(path).unwrap();

        TextDocumentPositionParams::new(TextDocumentIdentifier::new(url), position)
    }

    /// Build the project, compile it, and return the engine together with
    /// position parameters for `position` in the `app` module.
    pub fn positioned_with_io(
        &self,
        position: Position,
    ) -> (
        LanguageServerEngine<LanguageServerTestIO, LanguageServerTestIO>,
        TextDocumentPositionParams,
    ) {
        let mut io = LanguageServerTestIO::new();
        let mut engine = self.build_engine(&mut io);

        // Add the final module we're going to be positioning the cursor in.
        _ = io.src_module("app", self.src);

        let _response = engine.compile_please();

        let param = self.build_path(position);

        (engine, param)
    }

    /// Like `positioned_with_io`, but the cursor is placed in a test
    /// module and compilation must succeed.
    pub fn positioned_with_io_in_test(
        &self,
        position: Position,
        test_name: &str,
    ) -> (
        LanguageServerEngine<LanguageServerTestIO, LanguageServerTestIO>,
        TextDocumentPositionParams,
    ) {
        let mut io = LanguageServerTestIO::new();
        let mut engine = self.build_engine(&mut io);

        // Add the final module we're going to be positioning the cursor in.
        _ = io.src_module("app", self.src);

        let response = engine.compile_please();
        assert!(response.result.is_ok());

        let param = self.build_test_path(position, test_name);

        (engine, param)
    }

    /// Like `positioned_with_io`, but the cursor is placed in a dev
    /// module and compilation must succeed.
    pub fn positioned_with_io_in_dev(
        &self,
        position: Position,
        test_name: &str,
    ) -> (
        LanguageServerEngine<LanguageServerTestIO, LanguageServerTestIO>,
        TextDocumentPositionParams,
    ) {
        let mut io = LanguageServerTestIO::new();
        let mut engine = self.build_engine(&mut io);

        // Add the final module we're going to be positioning the cursor in.
        _ = io.src_module("app", self.src);

        let response = engine.compile_please();
        assert!(response.result.is_ok());

        let param = self.build_dev_path(position, test_name);

        (engine, param)
    }

    /// Run `executor` against a freshly compiled project, with the cursor
    /// at `position` in the `app` module.
    pub fn at<T>(
        &self,
        position: Position,
        executor: impl FnOnce(
            &mut LanguageServerEngine<LanguageServerTestIO, LanguageServerTestIO>,
            TextDocumentPositionParams,
            EcoString,
        ) -> T,
    ) -> T {
        let (mut engine, params) = self.positioned_with_io(position);

        executor(&mut engine, params, self.src.into())
    }
}
/// Locates a position in source code as "the nth occurrence of `value`,
/// offset by `offset` bytes".
#[derive(Clone)]
pub struct PositionFinder {
    value: EcoString,
    offset: usize,
    nth_occurrence: usize,
}

/// A pair of `PositionFinder`s describing the start and end of a range.
pub struct RangeSelector {
    from: PositionFinder,
    to: PositionFinder,
}

impl RangeSelector {
    pub fn find_range(&self, src: &str) -> lsp_types::Range {
        lsp_types::Range {
            start: self.from.find_position(src),
            end: self.to.find_position(src),
        }
    }
}
impl PositionFinder {
    /// Offset the found position by `offset` bytes from the start of the match.
    pub fn with_char_offset(self, offset: usize) -> Self {
        Self {
            value: self.value,
            offset,
            nth_occurrence: self.nth_occurrence,
        }
    }

    /// Position the cursor on the first occurrence of `char` within the
    /// matched value (or at its start if the char is absent).
    pub fn under_char(self, char: char) -> Self {
        Self {
            offset: self.value.find(char).unwrap_or(0),
            value: self.value,
            nth_occurrence: self.nth_occurrence,
        }
    }

    /// Position the cursor on the last byte of the matched value.
    pub fn under_last_char(self) -> Self {
        let len = self.value.len();
        self.with_char_offset(len - 1)
    }

    /// Target the nth occurrence of the value (1-indexed).
    pub fn nth_occurrence(self, nth_occurrence: usize) -> Self {
        Self {
            value: self.value,
            offset: self.offset,
            nth_occurrence,
        }
    }

    /// Finder for the first occurrence of `value`, with no offset.
    pub fn for_value(value: &str) -> Self {
        Self {
            value: value.into(),
            offset: 0,
            nth_occurrence: 1,
        }
    }

    /// Resolve this finder against `src`, panicking if the value does not
    /// occur often enough.
    pub fn find_position(&self, src: &str) -> Position {
        let PositionFinder {
            value,
            offset,
            nth_occurrence,
        } = self;

        let byte_index = src
            .match_indices(value.as_str())
            .nth(nth_occurrence - 1)
            .expect("no match for position")
            .0;

        byte_index_to_position(src, byte_index + offset)
    }

    /// Combine with another finder to select a range from here to `end`.
    pub fn select_until(self, end: PositionFinder) -> RangeSelector {
        RangeSelector {
            from: self,
            to: end,
        }
    }

    /// An empty selection at this position.
    pub fn to_selection(self) -> RangeSelector {
        RangeSelector {
            from: self.clone(),
            to: self,
        }
    }
}
/// Convenience constructor for a `PositionFinder` targeting `value`.
pub fn find_position_of(value: &str) -> PositionFinder {
    PositionFinder::for_value(value)
}
/// Convert a byte offset within `src` into a 0-indexed line/column
/// `Position`.
///
/// Columns are counted in bytes, which is adequate for the ASCII sources
/// used in these tests.
fn byte_index_to_position(src: &str, byte_index: usize) -> Position {
    let mut position = Position::new(0, 0);
    for byte in src.bytes().take(byte_index) {
        if byte == b'\n' {
            position.line += 1;
            position.character = 0;
        } else {
            position.character += 1;
        }
    }
    position
}
/// This function replicates how the text editor applies TextEdit.
///
pub fn apply_code_edit(src: &str, mut change: Vec<lsp_types::TextEdit>) -> String {
    let mut result = src.to_string();
    let line_numbers = LineNumbers::new(src);
    // Running difference between offsets in the original source and in the
    // partially-edited `result`, so later edits (whose ranges were computed
    // against the original) can still be applied correctly.
    let mut offset = 0;

    // Apply edits in document order so the offset bookkeeping is valid.
    change.sort_by_key(|edit| (edit.range.start.line, edit.range.start.character));
    for edit in change {
        let start = line_numbers.byte_index(edit.range.start) as i32 - offset;
        let end = line_numbers.byte_index(edit.range.end) as i32 - offset;
        let range = (start as usize)..(end as usize);
        // Each edit removes (end - start) bytes and inserts new_text.
        offset += end - start;
        offset -= edit.new_text.len() as i32;
        result.replace_range(range, &edit.new_text);
    }
    result
}
| rust | Apache-2.0 | f424547f02e621f1c5f28749786e05eda7feb098 | 2026-01-04T15:40:22.554517Z | false |
gleam-lang/gleam | https://github.com/gleam-lang/gleam/blob/f424547f02e621f1c5f28749786e05eda7feb098/language-server/src/rename.rs | language-server/src/rename.rs | use std::collections::HashMap;
use ecow::EcoString;
use lsp_server::ResponseError;
use lsp_types::{Range, RenameParams, TextEdit, Url, WorkspaceEdit};
use gleam_core::{
analyse::name,
ast::{self, SrcSpan},
build::Module,
line_numbers::LineNumbers,
reference::ReferenceKind,
type_::{ModuleInterface, error::Named},
};
use super::{
TextEdits,
compiler::ModuleSourceInformation,
edits::{self, Newlines, add_newlines_after_import, position_of_first_definition_if_import},
reference::FindVariableReferences,
reference::VariableReferenceKind,
url_from_path,
};
/// Build a `WorkspaceEdit` applying the given edits to a single document.
fn workspace_edit(uri: Url, edits: Vec<TextEdit>) -> WorkspaceEdit {
    WorkspaceEdit {
        changes: Some(HashMap::from([(uri, edits)])),
        document_changes: None,
        change_annotations: None,
    }
}
/// The result of attempting a rename.
pub enum RenameOutcome {
    /// The requested new name is not valid for the kind of thing renamed.
    InvalidName { name: EcoString },
    /// Nothing could be renamed at the requested position.
    NoRenames,
    /// The rename succeeded, producing this workspace edit.
    Renamed { edit: WorkspaceEdit },
}

/// Error code for when a request has invalid params as described in:
/// https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#errorCodes
///
const INVALID_PARAMS: i32 = -32602;

impl RenameOutcome {
    /// Turns the outcome of renaming into a value that's suitable to be used as
    /// a response in the language server engine.
    ///
    pub fn into_result(self) -> Result<Option<WorkspaceEdit>, ResponseError> {
        match self {
            RenameOutcome::NoRenames => Ok(None),
            RenameOutcome::Renamed { edit } => Ok(Some(edit)),
            RenameOutcome::InvalidName { name } => Err(ResponseError {
                code: INVALID_PARAMS,
                message: format!("{name} is not a valid name"),
                data: None,
            }),
        }
    }
}
/// Rename a local variable (or a label-shorthand binding) at its definition
/// site and at every reference to it within the given module.
///
/// Returns `RenameOutcome::InvalidName` if the requested new name is not a
/// valid Gleam variable name.
pub fn rename_local_variable(
    module: &Module,
    line_numbers: &LineNumbers,
    params: &RenameParams,
    definition_location: SrcSpan,
    name: EcoString,
    kind: VariableReferenceKind,
) -> RenameOutcome {
    // NOTE: the original text here was corrupted to `¶ms.new_name` by an
    // HTML-entity mangling of `&params`; restored.
    let new_name = EcoString::from(&params.new_name);
    if name::check_name_case(Default::default(), &new_name, Named::Variable).is_err() {
        return RenameOutcome::InvalidName { name: new_name };
    }
    let uri = params.text_document_position.text_document.uri.clone();
    let mut edits = TextEdits::new(line_numbers);
    let references =
        FindVariableReferences::new(definition_location, name).find_in_module(&module.ast);
    match kind {
        VariableReferenceKind::Variable => {
            edits.replace(definition_location, params.new_name.clone())
        }
        // For a label shorthand we keep the label text and insert the new
        // variable name after it (presumably turning `label:` into
        // `label: new_name` — confirm against the shorthand syntax).
        VariableReferenceKind::LabelShorthand => {
            edits.insert(definition_location.end, format!(" {}", params.new_name))
        }
    }
    for reference in references {
        match reference.kind {
            VariableReferenceKind::Variable => {
                edits.replace(reference.location, params.new_name.clone())
            }
            VariableReferenceKind::LabelShorthand => {
                edits.insert(reference.location.end, format!(" {}", params.new_name))
            }
        }
    }
    RenameOutcome::Renamed {
        edit: workspace_edit(uri, edits.edits),
    }
}
/// How the entity being renamed is referenced at the cursor position.
pub enum RenameTarget {
    /// A module-qualified reference, e.g. `module.name`.
    Qualified,
    /// An unqualified reference imported into the current scope.
    Unqualified,
    /// The definition site itself.
    Definition,
}
/// Information about the module entity being renamed.
pub struct Renamed<'a> {
    /// The module the renamed entity is defined in.
    pub module_name: &'a EcoString,
    /// The entity's current name.
    pub name: &'a EcoString,
    /// What kind of named thing it is (used for name-case validation).
    pub name_kind: Named,
    /// How the entity is referenced at the rename location.
    pub target_kind: RenameTarget,
    /// Whether it lives in the value or type namespace.
    pub layer: ast::Layer,
}
/// Rename a type or value defined in some module, updating its definition and
/// every reference to it across all modules that import it.
///
/// If an unqualified reference to an entity from a *different* module is being
/// renamed, an `as` alias is introduced in the current module instead of
/// renaming the definition itself.
pub fn rename_module_entity(
    params: &RenameParams,
    current_module: &Module,
    modules: &im::HashMap<EcoString, ModuleInterface>,
    sources: &HashMap<EcoString, ModuleSourceInformation>,
    renamed: Renamed<'_>,
) -> RenameOutcome {
    // NOTE: restored `&params` / `&current_module` below; the original text
    // had been corrupted by HTML-entity mangling (`¶ms`, `¤t_module`).
    let new_name = EcoString::from(&params.new_name);
    if name::check_name_case(
        // We don't care about the actual error here, just whether the name is valid,
        // so we just use the default span.
        SrcSpan::default(),
        &new_name,
        renamed.name_kind,
    )
    .is_err()
    {
        return RenameOutcome::InvalidName { name: new_name };
    }
    match renamed.target_kind {
        RenameTarget::Unqualified if renamed.module_name != &current_module.name => {
            return alias_references_in_module(
                params,
                current_module,
                renamed.module_name,
                renamed.name,
                renamed.layer,
            );
        }
        RenameTarget::Unqualified | RenameTarget::Qualified | RenameTarget::Definition => {}
    }
    let mut workspace_edit = WorkspaceEdit {
        changes: Some(HashMap::new()),
        document_changes: None,
        change_annotations: None,
    };
    for module in modules.values() {
        // Only the defining module and modules that import it can contain
        // references to the renamed entity.
        if &module.name == renamed.module_name
            || module
                .references
                .imported_modules
                .contains(renamed.module_name)
        {
            let Some(source_information) = sources.get(&module.name) else {
                continue;
            };
            rename_references_in_module(
                module,
                source_information,
                &mut workspace_edit,
                renamed.module_name,
                renamed.name,
                params.new_name.clone(),
                renamed.layer,
            );
        }
    }
    RenameOutcome::Renamed {
        edit: workspace_edit,
    }
}
/// Replace every reference to `module_name.name` within a single module with
/// `new_name`, appending the resulting edits to `workspace_edit` under the
/// module's file URI.
fn rename_references_in_module(
    module: &ModuleInterface,
    source_information: &ModuleSourceInformation,
    workspace_edit: &mut WorkspaceEdit,
    module_name: &EcoString,
    name: &EcoString,
    new_name: String,
    layer: ast::Layer,
) {
    // Values and types live in separate namespaces with separate reference maps.
    let reference_map = match layer {
        ast::Layer::Value => &module.references.value_references,
        ast::Layer::Type => &module.references.type_references,
    };
    let Some(references) = reference_map.get(&(module_name.clone(), name.clone())) else {
        return;
    };
    let mut edits = TextEdits::new(&source_information.line_numbers);
    for reference in references {
        match reference.kind {
            // If the reference is an alias, the alias name will remain unchanged.
            ReferenceKind::Alias => {}
            ReferenceKind::Qualified
            | ReferenceKind::Unqualified
            | ReferenceKind::Import
            | ReferenceKind::Definition => edits.replace(reference.location, new_name.clone()),
        }
    }
    // If the module's path cannot be turned into a URI we silently skip it.
    let Some(uri) = url_from_path(source_information.path.as_str()) else {
        return;
    };
    if let Some(changes) = workspace_edit.changes.as_mut() {
        _ = changes.insert(uri, edits.edits);
    }
}
/// Rename unqualified references to an entity from another module by giving
/// it an `as` alias in this module, leaving the original definition untouched.
///
/// Qualified references keep the module qualifier and are not edited.
fn alias_references_in_module(
    params: &RenameParams,
    module: &Module,
    module_name: &EcoString,
    name: &EcoString,
    layer: ast::Layer,
) -> RenameOutcome {
    let reference_map = match layer {
        ast::Layer::Value => &module.ast.type_info.references.value_references,
        ast::Layer::Type => &module.ast.type_info.references.type_references,
    };
    let Some(references) = reference_map.get(&(module_name.clone(), name.clone())) else {
        return RenameOutcome::NoRenames;
    };
    let mut edits = TextEdits::new(&module.ast.type_info.line_numbers);
    let mut found_import = false;
    for reference in references {
        match reference.kind {
            // Qualified uses (`module.name`) are unaffected by an alias.
            ReferenceKind::Qualified => {}
            ReferenceKind::Unqualified | ReferenceKind::Alias => {
                edits.replace(reference.location, params.new_name.clone())
            }
            ReferenceKind::Import => {
                // Add `as new_name` after the existing unqualified import.
                edits.insert(reference.location.end, format!(" as {}", params.new_name));
                found_import = true;
            }
            ReferenceKind::Definition => {}
        }
    }
    // If we didn't find the import for the aliased type or value, then this is
    // a prelude value and we need to add the import so we can alias it.
    if !found_import {
        let unqualified_import = match layer {
            ast::Layer::Value => format!("{name} as {}", params.new_name),
            ast::Layer::Type => format!("type {name} as {}", params.new_name),
        };
        let import = module
            .ast
            .definitions
            .imports
            .iter()
            .find(|import| import.module == *module_name);
        if let Some(import) = import {
            // The module is already imported: extend its unqualified list.
            let (position, new_text) =
                edits::insert_unqualified_import(import, &module.code, unqualified_import);
            edits.insert(position, new_text);
        } else {
            // The module is not imported at all: add a fresh import statement.
            add_import(module, module_name, unqualified_import, &mut edits);
        }
    }
    RenameOutcome::Renamed {
        edit: workspace_edit(
            params.text_document_position.text_document.uri.clone(),
            edits.edits,
        ),
    }
}
/// Append an edit that inserts a new `import module.{unqualified_import}`
/// statement at the top of the module (before the first import if there is
/// one, otherwise at the default position).
fn add_import(
    module: &Module,
    module_name: &EcoString,
    unqualified_import: String,
    edits: &mut TextEdits<'_>,
) {
    let position_of_first_import_if_present =
        position_of_first_definition_if_import(module, &module.ast.type_info.line_numbers);
    let first_is_import = position_of_first_import_if_present.is_some();
    // Position::default() is the very start of the file.
    let import_location = position_of_first_import_if_present.unwrap_or_default();
    let after_import_newlines = add_newlines_after_import(
        import_location,
        first_is_import,
        &module.ast.type_info.line_numbers,
        &module.code,
    );
    let newlines = match after_import_newlines {
        Newlines::Single => "\n",
        Newlines::Double => "\n\n",
    };
    edits.edits.push(TextEdit {
        range: Range {
            start: import_location,
            end: import_location,
        },
        new_text: format!("import {module_name}.{{{unqualified_import}}}{newlines}",),
    });
}
| rust | Apache-2.0 | f424547f02e621f1c5f28749786e05eda7feb098 | 2026-01-04T15:40:22.554517Z | false |
gleam-lang/gleam | https://github.com/gleam-lang/gleam/blob/f424547f02e621f1c5f28749786e05eda7feb098/language-server/src/feedback.rs | language-server/src/feedback.rs | use gleam_core::{Error, Warning, diagnostic::Diagnostic};
use std::collections::{HashMap, HashSet};
use camino::Utf8PathBuf;
use super::engine::Compilation;
/// Diagnostics and messages to be sent to the client after an operation.
#[derive(Debug, Default, PartialEq, Eq)]
pub struct Feedback {
    /// Per-file diagnostics. An empty vector for a file clears the
    /// diagnostics previously shown for it on the client.
    pub diagnostics: HashMap<Utf8PathBuf, Vec<Diagnostic>>,
    /// Diagnostics not tied to any file, shown as plain messages.
    pub messages: Vec<Diagnostic>,
}
impl Feedback {
/// Set the diagnostics for a file to an empty vector. This will overwrite
/// any existing diagnostics on the client.
pub fn unset_existing_diagnostics(&mut self, path: Utf8PathBuf) {
_ = self.diagnostics.insert(path, vec![]);
}
pub fn append_diagnostic(&mut self, path: Utf8PathBuf, diagnostic: Diagnostic) {
self.diagnostics.entry(path).or_default().push(diagnostic);
}
/// No feedback at all.
///
pub fn none() -> Feedback {
Default::default()
}
/// Add all the content of another feedback to this feedback.
///
pub fn append_feedback(&mut self, feedback: Feedback) {
for (path, diagnostics) in feedback.diagnostics {
// Any new diagnostics for a file will overwrite any existing ones.
_ = self.diagnostics.insert(path, diagnostics);
}
for diagnostic in feedback.messages {
self.append_message(diagnostic);
}
}
fn append_message(&mut self, diagnostic: Diagnostic) {
self.messages.push(diagnostic);
}
}
/// When an operation succeeds or fails we want to send diagnostics and
/// messages to the client for displaying to the user. This object converts
/// Gleam warnings, errors, etc to these feedback items.
///
/// Gleam has incremental compilation so we cannot erase all previous
/// diagnostics and replace each time new diagnostics are available; if a file
/// has not been recompiled then any diagnostics it had previously are still
/// valid and must not be erased.
/// To do this we keep track of which files have diagnostics and only overwrite
/// them if the file has been recompiled.
///
#[derive(Debug, Default)]
pub struct FeedbackBookKeeper {
    // Files that currently have warning diagnostics displayed on the client.
    files_with_warnings: HashSet<Utf8PathBuf>,
    // Files that currently have error diagnostics displayed on the client.
    files_with_errors: HashSet<Utf8PathBuf>,
}
impl FeedbackBookKeeper {
    /// Send diagnostics for any warnings and remove any diagnostics for files
    /// that have compiled without warnings.
    ///
    pub fn response(&mut self, compilation: Compilation, warnings: Vec<Warning>) -> Feedback {
        let mut feedback = Feedback::default();
        if let Compilation::Yes(compiled_modules) = compilation {
            // Any existing diagnostics for files that have been compiled are no
            // longer valid so we set an empty vector of diagnostics for the files
            // to erase their diagnostics.
            for path in compiled_modules {
                let has_existing_diagnostics = self.files_with_warnings.remove(&path);
                if has_existing_diagnostics {
                    feedback.unset_existing_diagnostics(path);
                }
            }
            // Compilation was attempted and there is no error (which there is not
            // in this function) then it means that compilation has succeeded, so
            // there should be no error diagnostics.
            // We don't limit this to files that have been compiled as a previous
            // cached version could be used instead of a recompile.
            self.unset_errors(&mut feedback);
        }
        for warning in warnings {
            self.insert_warning(&mut feedback, warning);
        }
        feedback
    }

    /// Clear all error diagnostics currently tracked, marking their files as
    /// error-free on the client.
    fn unset_errors(&mut self, feedback: &mut Feedback) {
        // TODO: avoid clobbering warnings. They should be preserved rather than
        // removed with the errors here. We will need to store the warnings and
        // re-send them.
        for path in self.files_with_errors.drain() {
            feedback.unset_existing_diagnostics(path);
        }
    }

    /// Compilation failed, boo!
    ///
    /// Send diagnostics for any warnings and remove any diagnostics for files
    /// that have compiled without warnings, AND ALSO send diagnostics for the
    /// error that caused compilation to fail.
    ///
    pub fn build_with_error(
        &mut self,
        error: Error,
        compilation: Compilation,
        warnings: Vec<Warning>,
    ) -> Feedback {
        let diagnostics = error.to_diagnostics();
        let mut feedback = self.response(compilation, warnings);
        // A new error means that any existing errors are no longer valid. Unset them.
        self.unset_errors(&mut feedback);
        for diagnostic in diagnostics {
            match diagnostic.location.as_ref().map(|l| l.path.clone()) {
                Some(path) => {
                    // Remember which file has the error so it can be unset later.
                    _ = self.files_with_errors.insert(path.clone());
                    feedback.append_diagnostic(path, diagnostic);
                }
                None => {
                    // Errors with no location become plain messages.
                    feedback.append_message(diagnostic);
                }
            }
        }
        feedback
    }

    /// Report an error that occurred outside of a compilation attempt.
    pub fn error(&mut self, error: Error) -> Feedback {
        self.build_with_error(error, Compilation::No, vec![])
    }

    /// Record a warning's file and add its diagnostic to the feedback.
    /// Warnings without a location are silently dropped.
    fn insert_warning(&mut self, feedback: &mut Feedback, warning: Warning) {
        let diagnostic = warning.to_diagnostic();
        if let Some(path) = diagnostic.location.as_ref().map(|l| l.path.clone()) {
            _ = self.files_with_warnings.insert(path.clone());
            feedback.append_diagnostic(path, diagnostic);
        }
    }
}
// Tests for the diagnostic bookkeeping above. Many use a shared pattern:
// build warnings/errors, feed them through a FeedbackBookKeeper, and assert
// the exact Feedback that would be sent to the client.
#[cfg(test)]
mod tests {
    use std::assert_eq;

    use super::*;
    use gleam_core::{
        ast::SrcSpan,
        diagnostic::Level,
        parse::error::{ParseError, ParseErrorType},
        type_,
    };

    #[test]
    fn feedback() {
        let mut book_keeper = FeedbackBookKeeper::default();
        let file1 = Utf8PathBuf::from("src/file1.gleam");
        let file2 = Utf8PathBuf::from("src/file2.gleam");
        let file3 = Utf8PathBuf::from("src/file3.gleam");
        let warning1 = Warning::Type {
            path: file1.clone(),
            src: "src".into(),
            warning: type_::Warning::NoFieldsRecordUpdate {
                location: SrcSpan::new(1, 2),
            },
        };
        let warning2 = Warning::Type {
            path: file2.clone(),
            src: "src".into(),
            warning: type_::Warning::NoFieldsRecordUpdate {
                location: SrcSpan::new(1, 2),
            },
        };
        let feedback = book_keeper.response(
            Compilation::Yes(vec![file1.clone()]),
            vec![warning1.clone(), warning1.clone(), warning2.clone()],
        );
        assert_eq!(
            Feedback {
                diagnostics: HashMap::from([
                    (
                        file1.clone(),
                        vec![warning1.to_diagnostic(), warning1.to_diagnostic(),]
                    ),
                    (file2.clone(), vec![warning2.to_diagnostic(),])
                ]),
                messages: vec![],
            },
            feedback
        );
        let feedback = book_keeper.response(
            Compilation::Yes(vec![file1.clone(), file2.clone(), file3]),
            vec![],
        );
        assert_eq!(
            Feedback {
                diagnostics: HashMap::from([
                    // File 1 and 2 had diagnostics before so they have been unset
                    (file1, vec![]),
                    (file2, vec![]),
                    // File 3 had no diagnostics so does not need to to be unset
                ]),
                messages: vec![],
            },
            feedback
        );
    }

    #[test]
    fn locationless_error() {
        // The failed method sets an additional messages for errors without a
        // location.
        let mut book_keeper = FeedbackBookKeeper::default();
        let file1 = Utf8PathBuf::from("src/file1.gleam");
        let warning1 = Warning::Type {
            path: file1.clone(),
            src: "src".into(),
            warning: type_::Warning::NoFieldsRecordUpdate {
                location: SrcSpan::new(1, 2),
            },
        };
        let locationless_error = Error::Gzip("Hello!".into());
        let feedback = book_keeper.build_with_error(
            locationless_error.clone(),
            Compilation::Yes(vec![]),
            vec![warning1.clone()],
        );
        assert_eq!(
            Feedback {
                diagnostics: HashMap::from([(file1, vec![warning1.to_diagnostic()])]),
                messages: locationless_error.to_diagnostics(),
            },
            feedback
        );
    }

    #[test]
    fn error() {
        // The failed method sets an additional diagnostic if the error has a
        // location.
        let mut book_keeper = FeedbackBookKeeper::default();
        let file1 = Utf8PathBuf::from("src/file1.gleam");
        let file3 = Utf8PathBuf::from("src/file2.gleam");
        let warning1 = Warning::Type {
            path: file1.clone(),
            src: "src".into(),
            warning: type_::Warning::NoFieldsRecordUpdate {
                location: SrcSpan::new(1, 2),
            },
        };
        let error = Error::Parse {
            path: file3.clone(),
            src: "blah".into(),
            error: Box::new(ParseError {
                error: ParseErrorType::ConcatPatternVariableLeftHandSide,
                location: SrcSpan::new(1, 4),
            }),
        };
        let feedback = book_keeper.build_with_error(
            error.clone(),
            Compilation::Yes(vec![]),
            vec![warning1.clone()],
        );
        assert_eq!(
            Feedback {
                diagnostics: HashMap::from([
                    (file1, vec![warning1.to_diagnostic()]),
                    (file3.clone(), error.to_diagnostics()),
                ]),
                messages: vec![],
            },
            feedback
        );
        // The error diagnostic should be removed if the file compiles later.
        let feedback = book_keeper.response(Compilation::Yes(vec![file3.clone()]), vec![]);
        assert_eq!(
            Feedback {
                diagnostics: HashMap::from([(file3, vec![])]),
                messages: vec![],
            },
            feedback
        );
    }

    // https://github.com/gleam-lang/gleam/issues/2093
    #[test]
    fn successful_compilation_removes_error_diagnostic() {
        // It is possible for a compile error to be fixed but the module that
        // had the error to not actually be recompiled.
        //
        // 1. File is OK
        // 2. File is edited to an invalid state
        // 3. A compile error is emitted
        // 4. File is edited back to the earlier valid state
        // 5. File is not recompiled as the cache from step 1 is still valid
        //
        // Because of this the compiled files iterator does not contain the
        // file, so we need to make sure that the error is removed through other
        // means, such as tracking which files have errors and removing them all
        // when a successful compilation occurs.
        let mut book_keeper = FeedbackBookKeeper::default();
        let file1 = Utf8PathBuf::from("src/file1.gleam");
        let file2 = Utf8PathBuf::from("src/file2.gleam");
        let error = Error::Parse {
            path: file1.clone(),
            src: "blah".into(),
            error: Box::new(ParseError {
                error: ParseErrorType::ConcatPatternVariableLeftHandSide,
                location: SrcSpan::new(1, 4),
            }),
        };
        let feedback =
            book_keeper.build_with_error(error.clone(), Compilation::Yes(vec![]), vec![]);
        assert_eq!(
            Feedback {
                diagnostics: HashMap::from([(file1.clone(), error.to_diagnostics())]),
                messages: vec![],
            },
            feedback
        );
        // The error diagnostic should be removed on a successful compilation,
        // even though the file is not in the compiled files iterator.
        let feedback = book_keeper.response(Compilation::Yes(vec![file2]), vec![]);
        assert_eq!(
            Feedback {
                diagnostics: HashMap::from([(file1, vec![])]),
                messages: vec![],
            },
            feedback
        );
    }

    // https://github.com/gleam-lang/gleam/issues/2122
    #[test]
    fn second_failure_unsets_previous_error() {
        let mut book_keeper = FeedbackBookKeeper::default();
        let file1 = Utf8PathBuf::from("src/file1.gleam");
        let file2 = Utf8PathBuf::from("src/file2.gleam");
        let error = |file: &camino::Utf8Path| Error::Parse {
            path: file.to_path_buf(),
            src: "blah".into(),
            error: Box::new(ParseError {
                error: ParseErrorType::ConcatPatternVariableLeftHandSide,
                location: SrcSpan::new(1, 4),
            }),
        };
        let feedback =
            book_keeper.build_with_error(error(&file1), Compilation::Yes(vec![]), vec![]);
        assert_eq!(
            Feedback {
                diagnostics: HashMap::from([(file1.clone(), error(&file1).to_diagnostics())]),
                messages: vec![],
            },
            feedback
        );
        let feedback =
            book_keeper.build_with_error(error(&file2), Compilation::Yes(vec![]), vec![]);
        assert_eq!(
            Feedback {
                diagnostics: HashMap::from([
                    // Unset the previous error
                    (file1, vec![]),
                    // Set the new one
                    (file2.clone(), error(&file2).to_diagnostics()),
                ]),
                messages: vec![],
            },
            feedback
        );
    }

    // https://github.com/gleam-lang/gleam/issues/2105
    #[test]
    fn successful_non_compilation_does_not_remove_error_diagnostic() {
        let mut book_keeper = FeedbackBookKeeper::default();
        let file1 = Utf8PathBuf::from("src/file1.gleam");
        let error = Error::Parse {
            path: file1.clone(),
            src: "blah".into(),
            error: Box::new(ParseError {
                error: ParseErrorType::ConcatPatternVariableLeftHandSide,
                location: SrcSpan::new(1, 4),
            }),
        };
        let feedback =
            book_keeper.build_with_error(error.clone(), Compilation::Yes(vec![]), vec![]);
        assert_eq!(
            Feedback {
                diagnostics: HashMap::from([(file1, error.to_diagnostics())]),
                messages: vec![],
            },
            feedback
        );
        // The error diagnostic should not be removed, nothing has been
        // successfully compiled.
        let feedback = book_keeper.response(Compilation::No, vec![]);
        assert_eq!(
            Feedback {
                diagnostics: HashMap::new(),
                messages: vec![],
            },
            feedback
        );
    }

    #[test]
    fn append_feedback_new_file() {
        let mut feedback = Feedback {
            diagnostics: HashMap::from([(
                Utf8PathBuf::from("src/file1.gleam"),
                vec![Diagnostic {
                    location: None,
                    hint: None,
                    text: "Error 1".to_string(),
                    title: "Error 1".to_string(),
                    level: Level::Error,
                }],
            )]),
            messages: vec![Diagnostic {
                location: None,
                hint: None,
                text: "Error 2".to_string(),
                title: "Error 2".to_string(),
                level: Level::Error,
            }],
        };
        feedback.append_feedback(Feedback {
            diagnostics: HashMap::from([(
                Utf8PathBuf::from("src/file2.gleam"),
                vec![Diagnostic {
                    location: None,
                    hint: None,
                    text: "Error 3".to_string(),
                    title: "Error 3".to_string(),
                    level: Level::Error,
                }],
            )]),
            messages: vec![],
        });
        assert_eq!(
            feedback,
            Feedback {
                diagnostics: HashMap::from([
                    (
                        Utf8PathBuf::from("src/file1.gleam"),
                        vec![Diagnostic {
                            location: None,
                            hint: None,
                            text: "Error 1".to_string(),
                            title: "Error 1".to_string(),
                            level: Level::Error,
                        }],
                    ),
                    (
                        Utf8PathBuf::from("src/file2.gleam"),
                        vec![Diagnostic {
                            location: None,
                            hint: None,
                            text: "Error 3".to_string(),
                            title: "Error 3".to_string(),
                            level: Level::Error,
                        }],
                    ),
                ]),
                messages: vec![Diagnostic {
                    location: None,
                    hint: None,
                    text: "Error 2".to_string(),
                    title: "Error 2".to_string(),
                    level: Level::Error,
                },],
            }
        );
    }

    #[test]
    fn append_feedback_same_file() {
        let mut feedback = Feedback {
            diagnostics: HashMap::from([(
                Utf8PathBuf::from("src/file1.gleam"),
                vec![Diagnostic {
                    location: None,
                    hint: None,
                    text: "Error 1".to_string(),
                    title: "Error 1".to_string(),
                    level: Level::Error,
                }],
            )]),
            messages: vec![Diagnostic {
                location: None,
                hint: None,
                text: "Error 2".to_string(),
                title: "Error 2".to_string(),
                level: Level::Error,
            }],
        };
        feedback.append_feedback(Feedback {
            diagnostics: HashMap::from([(
                Utf8PathBuf::from("src/file1.gleam"),
                vec![Diagnostic {
                    location: None,
                    hint: None,
                    text: "Error 3".to_string(),
                    title: "Error 3".to_string(),
                    level: Level::Error,
                }],
            )]),
            messages: vec![],
        });
        assert_eq!(
            feedback,
            Feedback {
                diagnostics: HashMap::from([(
                    Utf8PathBuf::from("src/file1.gleam"),
                    vec![Diagnostic {
                        location: None,
                        hint: None,
                        text: "Error 3".to_string(),
                        title: "Error 3".to_string(),
                        level: Level::Error,
                    }],
                ),]),
                messages: vec![Diagnostic {
                    location: None,
                    hint: None,
                    text: "Error 2".to_string(),
                    title: "Error 2".to_string(),
                    level: Level::Error,
                },],
            }
        );
    }

    #[test]
    fn append_feedback_new_message() {
        let mut feedback = Feedback {
            diagnostics: HashMap::from([(
                Utf8PathBuf::from("src/file1.gleam"),
                vec![Diagnostic {
                    location: None,
                    hint: None,
                    text: "Error 1".to_string(),
                    title: "Error 1".to_string(),
                    level: Level::Error,
                }],
            )]),
            messages: vec![Diagnostic {
                location: None,
                hint: None,
                text: "Error 2".to_string(),
                title: "Error 2".to_string(),
                level: Level::Error,
            }],
        };
        feedback.append_feedback(Feedback {
            diagnostics: HashMap::from([]),
            messages: vec![Diagnostic {
                location: None,
                hint: None,
                text: "Error 3".to_string(),
                title: "Error 3".to_string(),
                level: Level::Error,
            }],
        });
        assert_eq!(
            feedback,
            Feedback {
                diagnostics: HashMap::from([(
                    Utf8PathBuf::from("src/file1.gleam"),
                    vec![Diagnostic {
                        location: None,
                        hint: None,
                        text: "Error 1".to_string(),
                        title: "Error 1".to_string(),
                        level: Level::Error,
                    },],
                ),]),
                messages: vec![
                    Diagnostic {
                        location: None,
                        hint: None,
                        text: "Error 2".to_string(),
                        title: "Error 2".to_string(),
                        level: Level::Error,
                    },
                    Diagnostic {
                        location: None,
                        hint: None,
                        text: "Error 3".to_string(),
                        title: "Error 3".to_string(),
                        level: Level::Error,
                    }
                ],
            }
        );
    }

    #[test]
    fn append_feedback_new_file_blank() {
        let mut feedback = Feedback {
            diagnostics: HashMap::from([(
                Utf8PathBuf::from("src/file1.gleam"),
                vec![Diagnostic {
                    location: None,
                    hint: None,
                    text: "Error 1".to_string(),
                    title: "Error 1".to_string(),
                    level: Level::Error,
                }],
            )]),
            messages: vec![Diagnostic {
                location: None,
                hint: None,
                text: "Error 2".to_string(),
                title: "Error 2".to_string(),
                level: Level::Error,
            }],
        };
        feedback.append_feedback(Feedback {
            diagnostics: HashMap::from([(Utf8PathBuf::from("src/file2.gleam"), vec![])]),
            messages: vec![],
        });
        assert_eq!(
            feedback,
            Feedback {
                diagnostics: HashMap::from([
                    (
                        Utf8PathBuf::from("src/file1.gleam"),
                        vec![Diagnostic {
                            location: None,
                            hint: None,
                            text: "Error 1".to_string(),
                            title: "Error 1".to_string(),
                            level: Level::Error,
                        },],
                    ),
                    (Utf8PathBuf::from("src/file2.gleam"), vec![],),
                ]),
                messages: vec![Diagnostic {
                    location: None,
                    hint: None,
                    text: "Error 2".to_string(),
                    title: "Error 2".to_string(),
                    level: Level::Error,
                },],
            }
        );
    }

    #[test]
    fn append_feedback_existing_file_blank() {
        let mut feedback = Feedback {
            diagnostics: HashMap::from([(
                Utf8PathBuf::from("src/file1.gleam"),
                vec![Diagnostic {
                    location: None,
                    hint: None,
                    text: "Error 1".to_string(),
                    title: "Error 1".to_string(),
                    level: Level::Error,
                }],
            )]),
            messages: vec![Diagnostic {
                location: None,
                hint: None,
                text: "Error 2".to_string(),
                title: "Error 2".to_string(),
                level: Level::Error,
            }],
        };
        feedback.append_feedback(Feedback {
            diagnostics: HashMap::from([(Utf8PathBuf::from("src/file1.gleam"), vec![])]),
            messages: vec![],
        });
        assert_eq!(
            feedback,
            Feedback {
                diagnostics: HashMap::from([(Utf8PathBuf::from("src/file1.gleam"), vec![],),]),
                messages: vec![Diagnostic {
                    location: None,
                    hint: None,
                    text: "Error 2".to_string(),
                    title: "Error 2".to_string(),
                    level: Level::Error,
                },],
            }
        );
    }
}
| rust | Apache-2.0 | f424547f02e621f1c5f28749786e05eda7feb098 | 2026-01-04T15:40:22.554517Z | false |
gleam-lang/gleam | https://github.com/gleam-lang/gleam/blob/f424547f02e621f1c5f28749786e05eda7feb098/language-server/src/progress.rs | language-server/src/progress.rs | use debug_ignore::DebugIgnore;
use lsp_types::{
InitializeParams, NumberOrString, ProgressParams, ProgressParamsValue, WorkDoneProgress,
WorkDoneProgressBegin, WorkDoneProgressCreateParams, WorkDoneProgressEnd,
};
const DOWNLOADING_TOKEN: &str = "downloading-dependencies";
/// Reports the start and end of long-running language-server work
/// (compilation, dependency downloading) so the client can show progress.
pub trait ProgressReporter {
    fn compilation_started(&self);
    fn compilation_finished(&self);
    fn dependency_downloading_started(&self);
    fn dependency_downloading_finished(&self);
}
// Used to publish progress notifications to the client without waiting for
// the usual request-response loop of the language server.
#[derive(Debug, Clone)]
pub struct ConnectionProgressReporter<'a> {
    // Wrapped in DebugIgnore as the connection has no useful Debug output.
    connection: DebugIgnore<&'a lsp_server::Connection>,
}
impl<'a> ConnectionProgressReporter<'a> {
    pub fn new(
        connection: &'a lsp_server::Connection,
        // We don't actually need these but we take them anyway to ensure that
        // this object is only created after the server has been initialised.
        // If it was created before then the creation of the progress token
        // would fail.
        _initialise_params: &InitializeParams,
    ) -> Self {
        create_token(DOWNLOADING_TOKEN, connection);
        Self {
            connection: connection.into(),
        }
    }

    /// Send a `$/progress` notification for the given token directly over the
    /// connection, bypassing the request-response loop.
    fn send_notification(&self, token: &str, work_done: WorkDoneProgress) {
        let params = ProgressParams {
            token: NumberOrString::String(token.to_string()),
            value: ProgressParamsValue::WorkDone(work_done),
        };
        let notification = lsp_server::Notification {
            method: "$/progress".into(),
            params: serde_json::to_value(params).expect("ProgressParams json"),
        };
        self.connection
            .sender
            .send(lsp_server::Message::Notification(notification))
            .expect("send_work_done_notification send")
    }
}
impl ProgressReporter for ConnectionProgressReporter<'_> {
    fn compilation_started(&self) {
        // Do nothing. This is only used for tests currently.
        // In future we could make this emit a message to the client if compilation is taking a
        // long time.
    }

    fn compilation_finished(&self) {
        // Do nothing. This is only used for tests currently.
    }

    // Dependency downloading can be slow, so the client is shown a progress
    // indicator for its duration.
    fn dependency_downloading_started(&self) {
        let title = "Downloading Gleam dependencies";
        self.send_notification(DOWNLOADING_TOKEN, begin_message(title));
    }

    fn dependency_downloading_finished(&self) {
        self.send_notification(DOWNLOADING_TOKEN, end_message());
    }
}
/// Build the payload that marks a progress token as finished.
fn end_message() -> WorkDoneProgress {
    let end = WorkDoneProgressEnd { message: None };
    WorkDoneProgress::End(end)
}
/// Build the payload that starts a non-cancellable progress indicator with
/// the given title.
fn begin_message(title: &str) -> WorkDoneProgress {
    let begin = WorkDoneProgressBegin {
        title: String::from(title),
        cancellable: Some(false),
        message: None,
        percentage: None,
    };
    WorkDoneProgress::Begin(begin)
}
/// Ask the client to create a work-done progress token so that later
/// `$/progress` notifications using it are displayed.
fn create_token(token: &str, connection: &lsp_server::Connection) {
    let params = WorkDoneProgressCreateParams {
        token: NumberOrString::String(token.into()),
    };
    let request = lsp_server::Request {
        id: format!("create-token--{token}").into(),
        method: "window/workDoneProgress/create".into(),
        params: serde_json::to_value(params).expect("WorkDoneProgressCreateParams json"),
    };
    connection
        .sender
        .send(lsp_server::Message::Request(request))
        .expect("WorkDoneProgressCreate");
}
| rust | Apache-2.0 | f424547f02e621f1c5f28749786e05eda7feb098 | 2026-01-04T15:40:22.554517Z | false |
gleam-lang/gleam | https://github.com/gleam-lang/gleam/blob/f424547f02e621f1c5f28749786e05eda7feb098/language-server/src/edits.rs | language-server/src/edits.rs | use ecow::EcoString;
use lsp_types::{Position, Range, TextEdit};
use gleam_core::{
ast::{Import, SrcSpan, TypedDefinitions},
build::Module,
line_numbers::LineNumbers,
};
use super::src_span_to_lsp_range;
// Gets the position of the import statement if it's the first definition in the module.
pub fn position_of_first_definition_if_import(
    module: &Module,
    line_numbers: &LineNumbers,
) -> Option<Position> {
    let TypedDefinitions {
        imports,
        constants,
        custom_types,
        type_aliases,
        functions,
    } = &module.ast.definitions;
    // We first find the first import by position
    let first_import = imports.iter().min_by_key(|import| import.location)?;
    // Then we need to make sure it actually comes before any other definition.
    let import_is_first_definition = constants
        .iter()
        .map(|constant| constant.location)
        .chain(custom_types.iter().map(|custom_type| custom_type.location))
        .chain(type_aliases.iter().map(|type_alias| type_alias.location))
        .chain(functions.iter().map(|function| function.location))
        .all(|location| location >= first_import.location);
    if import_is_first_definition {
        Some(src_span_to_lsp_range(first_import.location, line_numbers).start)
    } else {
        None
    }
}
/// How many newlines should follow an inserted import statement.
pub enum Newlines {
    Single,
    Double,
}
// Returns how many newlines should be added after an import statement. By default `Newlines::Single`,
// but if there's not any import statement, it returns `Newlines::Double`.
//
// * ``import_location`` - The position of the first import statement in the source code.
pub fn add_newlines_after_import(
    import_location: Position,
    has_imports: bool,
    line_numbers: &LineNumbers,
    src: &str,
) -> Newlines {
    let import_start_cursor = line_numbers.byte_index(import_location);
    // `byte_index` returns a byte offset, so inspect the byte at that offset.
    // The previous `src.chars().nth(...)` counted characters instead, which
    // gave the wrong answer for source containing multi-byte characters
    // before this point.
    let is_new_line = src.as_bytes().get(import_start_cursor as usize) == Some(&b'\n');
    if !has_imports && !is_new_line {
        Newlines::Double
    } else {
        Newlines::Single
    }
}
/// Build the text edit that inserts an `import module_full_name` statement at
/// the given position, followed by the requested number of newlines.
pub fn get_import_edit(
    import_location: Position,
    module_full_name: &str,
    insert_newlines: &Newlines,
) -> TextEdit {
    let trailing = match insert_newlines {
        Newlines::Single => "\n",
        Newlines::Double => "\n\n",
    };
    TextEdit {
        range: Range {
            start: import_location,
            end: import_location,
        },
        new_text: format!("import {module_full_name}{trailing}"),
    }
}
/// Work out where and what to insert in order to add an unqualified `name` to
/// an existing import statement, returning the byte offset and the text.
pub fn insert_unqualified_import(
    import: &Import<EcoString>,
    code: &str,
    name: String,
) -> (u32, String) {
    let SrcSpan { start, end } = import.location;
    let import_code = code
        .get(start as usize..end as usize)
        .expect("Import location is invalid");
    // An import with a `}` already has an unqualified list to extend;
    // otherwise one must be created.
    if import_code.contains('}') {
        insert_into_braced_import(name, import.location, import_code)
    } else {
        insert_into_unbraced_import(name, import, import_code)
    }
}
// Handle inserting into an unbraced import
//
// Returns the byte offset at which to insert together with the text to
// insert, which adds a `.{name}` unqualified list to the import.
fn insert_into_unbraced_import(
    name: String,
    import: &Import<EcoString>,
    import_code: &str,
) -> (u32, String) {
    let location = import.location;
    if import.as_name.is_none() {
        // Case: import module
        (location.end, format!(".{{{name}}}"))
    } else {
        // Case: import module as alias
        // The unqualified list must go *before* the ` as alias` suffix, so
        // find the last non-whitespace character preceding ` as `.
        let as_pos = import_code
            .find(" as ")
            .expect("Expected ' as ' in import statement");
        let before_as_pos = import_code
            .get(..as_pos)
            .and_then(|s| s.rfind(|c: char| !c.is_whitespace()))
            .map(|pos| location.start as usize + pos + 1)
            .expect("Expected non-whitespace character before ' as '");
        (before_as_pos as u32, format!(".{{{name}}}"))
    }
}
// Handle inserting into a braced import
//
// Returns the byte offset at which to insert together with the text to
// insert, extending the existing `{...}` unqualified list.
fn insert_into_braced_import(name: String, location: SrcSpan, import_code: &str) -> (u32, String) {
    if let Some((pos, c)) = find_last_char_before_closing_brace(location, import_code) {
        // Case: import module.{Existing, } (as alias)
        if c == ',' {
            (pos as u32 + 1, format!(" {name}"))
        } else {
            // Case: import module.{Existing} (as alias)
            (pos as u32 + 1, format!(", {name}"))
        }
    } else {
        // Case: import module.{} (as alias)
        let left_brace_pos = import_code
            .find('{')
            .map(|pos| location.start as usize + pos)
            .expect("Expected '{' in import statement");
        (left_brace_pos as u32 + 1, name)
    }
}
/// Find the last non-whitespace character before the closing `}` of an
/// import's unqualified list, returning its absolute byte position (relative
/// to `location.start`) and the character. Returns `None` if the braces are
/// empty or there is no closing brace.
fn find_last_char_before_closing_brace(
    location: SrcSpan,
    import_code: &str,
) -> Option<(usize, char)> {
    let closing_brace_pos = import_code.rfind('}')?;
    let bytes = import_code.as_bytes();
    let mut pos = closing_brace_pos;
    // Scan backwards from the closing brace, skipping whitespace.
    while pos > 0 {
        pos -= 1;
        // NOTE(review): a single byte cast to char is only correct for ASCII;
        // callers compare the result against ASCII characters, but a
        // multi-byte character here would yield a garbled char — confirm
        // import text is restricted to ASCII at this point.
        let c = (*bytes.get(pos)?) as char;
        if c.is_whitespace() {
            continue;
        }
        if c == '{' {
            break;
        }
        return Some((location.start as usize + pos, c));
    }
    None
}
| rust | Apache-2.0 | f424547f02e621f1c5f28749786e05eda7feb098 | 2026-01-04T15:40:22.554517Z | false |
gleam-lang/gleam | https://github.com/gleam-lang/gleam/blob/f424547f02e621f1c5f28749786e05eda7feb098/language-server/src/files.rs | language-server/src/files.rs | use std::collections::HashSet;
use std::time::SystemTime;
use debug_ignore::DebugIgnore;
use gleam_core::{
Result,
error::Error,
io::{
BeamCompiler, Command, CommandExecutor, FileSystemReader, FileSystemWriter, ReadDir, Stdio,
WrappedReader, memory::InMemoryFileSystem,
},
};
use camino::{Utf8Path, Utf8PathBuf};
// A proxy intended for `LanguageServer` to use when files are modified in
// memory but not yet saved to disc by the client.
//
// Uses the `IO` for writing directly to disk, or `InMemoryFileSystem` to
// cache files that were not yet saved. Reading files will always first try the
// `InMemoryFileSystem` first and fallback to use the `ProjectIO` if the file
// was not found in the cache.
//
#[derive(Debug, Clone)]
pub struct FileSystemProxy<IO> {
    // The real filesystem (or other backing IO implementation).
    io: DebugIgnore<IO>,
    // Unsaved editor buffers, keyed by path.
    edit_cache: InMemoryFileSystem,
}
impl<IO> FileSystemProxy<IO>
where
    IO: FileSystemWriter + FileSystemReader + CommandExecutor,
{
    pub fn new(io: IO) -> Self {
        Self {
            io: io.into(),
            edit_cache: InMemoryFileSystem::new(),
        }
    }

    /// The wrapped IO implementation, bypassing the in-memory cache.
    pub fn inner(&self) -> &IO {
        &self.io
    }

    /// Store unsaved editor content for `path` in the in-memory cache,
    /// stamping it with the current time so it reads as freshly modified.
    pub fn write_mem_cache(&mut self, path: &Utf8Path, content: &str) -> Result<()> {
        let write_result = self.edit_cache.write(path, content);
        self.edit_cache
            .try_set_modification_time(path, SystemTime::now())?;
        write_result
    }

    /// Remove `path` (file or directory) from the in-memory cache, e.g. when
    /// the client saves or closes the document.
    pub fn delete_mem_cache(&self, path: &Utf8Path) -> Result<()> {
        if self.edit_cache.is_directory(path) {
            self.edit_cache.delete_directory(path)
        } else {
            self.edit_cache.delete_file(path)
        }
    }
}
// All write operations goes to disk (for mem-cache use the dedicated `_mem_cache` methods)
impl<IO> FileSystemWriter for FileSystemProxy<IO>
where
    IO: FileSystemWriter,
{
    // Every method below forwards straight to the wrapped IO; the in-memory
    // edit cache is never written through this trait.
    fn mkdir(&self, path: &Utf8Path) -> Result<()> {
        self.io.mkdir(path)
    }
    fn write(&self, path: &Utf8Path, content: &str) -> Result<()> {
        self.io.write(path, content)
    }
    fn write_bytes(&self, path: &Utf8Path, content: &[u8]) -> Result<()> {
        self.io.write_bytes(path, content)
    }
    fn delete_directory(&self, path: &Utf8Path) -> Result<()> {
        self.io.delete_directory(path)
    }
    fn copy(&self, from: &Utf8Path, to: &Utf8Path) -> Result<()> {
        self.io.copy(from, to)
    }
    fn copy_dir(&self, from: &Utf8Path, to: &Utf8Path) -> Result<()> {
        self.io.copy_dir(from, to)
    }
    fn hardlink(&self, from: &Utf8Path, to: &Utf8Path) -> Result<()> {
        self.io.hardlink(from, to)
    }
    fn symlink_dir(&self, from: &Utf8Path, to: &Utf8Path) -> Result<()> {
        self.io.symlink_dir(from, to)
    }
    fn delete_file(&self, path: &Utf8Path) -> Result<()> {
        self.io.delete_file(path)
    }
    // NOTE(review): `exists` checks only the disk, unlike the reader trait's
    // `is_file`/`is_directory` which also consult the edit cache — confirm
    // this asymmetry is intentional.
    fn exists(&self, path: &Utf8Path) -> bool {
        self.io.exists(path)
    }
}
impl<IO> FileSystemReader for FileSystemProxy<IO>
where
    IO: FileSystemReader,
{
    fn read_dir(&self, path: &Utf8Path) -> Result<ReadDir> {
        self.io.read_dir(path)
    }

    /// Prefers the unsaved in-memory copy of the file, falling back to disk
    /// when there is no cached edit for this path.
    fn read(&self, path: &Utf8Path) -> Result<String> {
        self.edit_cache.read(path).or_else(|_| self.io.read(path))
    }

    /// Same cache-first policy as `read`, for raw bytes.
    fn read_bytes(&self, path: &Utf8Path) -> Result<Vec<u8>> {
        self.edit_cache
            .read_bytes(path)
            .or_else(|_| self.io.read_bytes(path))
    }

    fn reader(&self, path: &Utf8Path) -> Result<WrappedReader> {
        self.io.reader(path)
    }

    /// A path counts as a file if either the edit cache or the disk has it.
    fn is_file(&self, path: &Utf8Path) -> bool {
        self.edit_cache.is_file(path) || self.io.is_file(path)
    }

    /// A path counts as a directory if either the edit cache or the disk
    /// has it.
    fn is_directory(&self, path: &Utf8Path) -> bool {
        self.edit_cache.is_directory(path) || self.io.is_directory(path)
    }

    /// The cache's timestamp (set when the editor buffer last changed) wins
    /// over the on-disk one.
    fn modification_time(&self, path: &Utf8Path) -> Result<SystemTime> {
        self.edit_cache
            .modification_time(path)
            .or_else(|_| self.io.modification_time(path))
    }

    fn canonicalise(&self, path: &Utf8Path) -> Result<Utf8PathBuf, Error> {
        self.io.canonicalise(path)
    }
}
impl<IO> CommandExecutor for FileSystemProxy<IO>
where
    IO: CommandExecutor,
{
    // Deliberately refuses to run commands: even though the wrapped IO could
    // execute them, the language server must never spawn subprocesses.
    fn exec(&self, _command: Command) -> Result<i32> {
        panic!("The language server is not permitted to create subprocesses")
    }
}
impl<IO> BeamCompiler for FileSystemProxy<IO>
where
    IO: BeamCompiler,
{
    // Deliberately refuses to compile BEAM modules: doing so would spawn an
    // Erlang compiler subprocess, which the language server must never do.
    fn compile_beam(
        &self,
        _out: &Utf8Path,
        _lib: &Utf8Path,
        _modules: &HashSet<Utf8PathBuf>,
        _stdio: Stdio,
    ) -> Result<Vec<String>, Error> {
        panic!("The language server is not permitted to create subprocesses")
    }
}
| rust | Apache-2.0 | f424547f02e621f1c5f28749786e05eda7feb098 | 2026-01-04T15:40:22.554517Z | false |
use super::{
DownloadDependencies, MakeLocker,
engine::{self, LanguageServerEngine},
feedback::{Feedback, FeedbackBookKeeper},
files::FileSystemProxy,
messages::{Message, MessageBuffer, Next, Notification, Request},
progress::ConnectionProgressReporter,
router::Router,
src_span_to_lsp_range,
};
use camino::{Utf8Path, Utf8PathBuf};
use debug_ignore::DebugIgnore;
use gleam_core::{
Result,
diagnostic::{Diagnostic, ExtraLabel, Level},
io::{BeamCompiler, CommandExecutor, FileSystemReader, FileSystemWriter},
line_numbers::LineNumbers,
};
use lsp_server::ResponseError;
use lsp_types::{
self as lsp, HoverProviderCapability, InitializeParams, Position, PublishDiagnosticsParams,
Range, RenameOptions, TextEdit, Url,
};
use serde_json::Value as Json;
use std::collections::{HashMap, HashSet};
/// This class is responsible for handling the language server protocol and
/// delegating the work to the engine.
///
/// - Configuring watching of the `gleam.toml` file.
/// - Decoding requests.
/// - Encoding responses.
/// - Sending diagnostics and messages to the client.
/// - Tracking the state of diagnostics and messages.
/// - Performing the initialisation handshake.
///
#[derive(Debug)]
pub struct LanguageServer<'a, IO> {
    // Client capabilities etc. received during the initialisation handshake.
    initialise_params: InitializeParams,
    // Channel pair to the client; `DebugIgnore` keeps it out of `Debug` output.
    connection: DebugIgnore<&'a lsp_server::Connection>,
    // Diagnostic/message bookkeeping for files that belong to no known project.
    outside_of_project_feedback: FeedbackBookKeeper,
    // Maps file paths to per-project language server engines.
    router: Router<IO, ConnectionProgressReporter<'a>>,
    // Projects with changed files, recompiled on the next compile request.
    changed_projects: HashSet<Utf8PathBuf>,
    // File system that overlays unsaved editor buffers on top of disk.
    io: FileSystemProxy<IO>,
}
impl<'a, IO> LanguageServer<'a, IO>
where
    IO: FileSystemReader
        + FileSystemWriter
        + BeamCompiler
        + CommandExecutor
        + DownloadDependencies
        + MakeLocker
        + Clone,
{
    /// Performs the LSP initialisation handshake with the client and builds
    /// the server with a fresh router and in-memory file overlay.
    pub fn new(connection: &'a lsp_server::Connection, io: IO) -> Result<Self> {
        let initialise_params = initialisation_handshake(connection);
        let reporter = ConnectionProgressReporter::new(connection, &initialise_params);
        let io = FileSystemProxy::new(io);
        let router = Router::new(reporter, io.clone());
        Ok(Self {
            connection: connection.into(),
            initialise_params,
            changed_projects: HashSet::new(),
            outside_of_project_feedback: FeedbackBookKeeper::default(),
            router,
            io,
        })
    }

    /// The server's main loop: registers the `gleam.toml` watcher, then
    /// receives and handles batches of messages until the client stops it.
    pub fn run(&mut self) -> Result<()> {
        self.start_watching_gleam_toml();
        let mut buffer = MessageBuffer::new();
        loop {
            match buffer.receive(*self.connection) {
                Next::Stop => break,
                Next::MorePlease => (),
                Next::Handle(messages) => {
                    for message in messages {
                        self.handle_message(message);
                    }
                }
            }
        }
        Ok(())
    }

    /// Dispatches one buffered message to the request or notification handler.
    fn handle_message(&mut self, message: Message) {
        match message {
            Message::Request(id, request) => self.handle_request(id, request),
            Message::Notification(notification) => self.handle_notification(notification),
        }
    }

    /// Handles a request, publishes any feedback (diagnostics/pop-ups)
    /// produced along the way, then sends the response to the client.
    fn handle_request(&mut self, id: lsp_server::RequestId, request: Request) {
        let (outcome, feedback) = match request {
            Request::Format(param) => self.format(param),
            Request::Hover(param) => self.hover(param),
            Request::GoToDefinition(param) => self.goto_definition(param),
            Request::Completion(param) => self.completion(param),
            Request::CodeAction(param) => self.code_action(param),
            Request::SignatureHelp(param) => self.signature_help(param),
            Request::DocumentSymbol(param) => self.document_symbol(param),
            Request::PrepareRename(param) => self.prepare_rename(param),
            Request::Rename(param) => self.rename(param),
            Request::GoToTypeDefinition(param) => self.goto_type_definition(param),
            Request::FindReferences(param) => self.find_references(param),
        };
        // Feedback is published before the response so diagnostics are
        // already visible when the client applies the result.
        self.publish_feedback(feedback);
        let response = match outcome {
            Ok(payload) => lsp_server::Response {
                id,
                error: None,
                result: Some(payload),
            },
            Err(error) => lsp_server::Response {
                id,
                error: Some(error),
                result: None,
            },
        };
        self.connection
            .sender
            .send(lsp_server::Message::Response(response))
            .expect("channel send LSP response")
    }

    /// Handles a notification and publishes any resulting feedback.
    fn handle_notification(&mut self, notification: Notification) {
        let feedback = match notification {
            Notification::CompilePlease => self.compile_please(),
            Notification::SourceFileMatchesDisc { path } => self.discard_in_memory_cache(path),
            Notification::SourceFileChangedInMemory { path, text } => {
                self.cache_file_in_memory(path, text)
            }
            Notification::ConfigFileChanged { path } => self.watched_files_changed(path),
        };
        self.publish_feedback(feedback);
    }

    /// Sends accumulated diagnostics and pop-up messages to the client.
    fn publish_feedback(&self, feedback: Feedback) {
        self.publish_diagnostics(feedback.diagnostics);
        self.publish_messages(feedback.messages);
    }

    /// Sends one `textDocument/publishDiagnostics` notification per file.
    fn publish_diagnostics(&self, diagnostics: HashMap<Utf8PathBuf, Vec<Diagnostic>>) {
        for (path, diagnostics) in diagnostics {
            // One compiler diagnostic may expand to several LSP diagnostics
            // (e.g. a warning plus a hint), hence `flat_map`.
            let diagnostics = diagnostics
                .into_iter()
                .flat_map(diagnostic_to_lsp)
                .collect::<Vec<_>>();
            let uri = path_to_uri(path);

            // Publish the diagnostics
            let diagnostic_params = PublishDiagnosticsParams {
                uri,
                diagnostics,
                version: None,
            };
            let notification = lsp_server::Notification {
                method: "textDocument/publishDiagnostics".into(),
                params: serde_json::to_value(diagnostic_params)
                    .expect("textDocument/publishDiagnostics to json"),
            };
            self.connection
                .sender
                .send(lsp_server::Message::Notification(notification))
                .expect("send textDocument/publishDiagnostics");
        }
    }

    /// Asks the client to watch `gleam.toml` files so the server learns when
    /// project configuration changes and a rebuild is needed. Logs and does
    /// nothing if the client cannot dynamically register file watchers.
    fn start_watching_gleam_toml(&mut self) {
        let supports_watch_files = self
            .initialise_params
            .capabilities
            .workspace
            .as_ref()
            .and_then(|w| w.did_change_watched_files)
            .map(|wf| wf.dynamic_registration.unwrap_or(false))
            .unwrap_or(false);
        if !supports_watch_files {
            tracing::warn!("lsp_client_cannot_watch_gleam_toml");
            return;
        }
        // Register gleam.toml as a watched file so we get a notification when
        // it changes and thus know that we need to rebuild the entire project.
        let watch_config = lsp::Registration {
            id: "watch-gleam-toml".into(),
            method: "workspace/didChangeWatchedFiles".into(),
            register_options: Some(
                serde_json::value::to_value(lsp::DidChangeWatchedFilesRegistrationOptions {
                    watchers: vec![lsp::FileSystemWatcher {
                        glob_pattern: "**/gleam.toml".to_string().into(),
                        kind: Some(lsp::WatchKind::Change),
                    }],
                })
                .expect("workspace/didChangeWatchedFiles to json"),
            ),
        };
        // NOTE(review): the server→client request id is hard-coded to 1; if
        // more server-initiated requests are ever added this could collide —
        // confirm.
        let request = lsp_server::Request {
            id: 1.into(),
            method: "client/registerCapability".into(),
            params: serde_json::value::to_value(lsp::RegistrationParams {
                registrations: vec![watch_config],
            })
            .expect("client/registerCapability to json"),
        };
        self.connection
            .sender
            .send(lsp_server::Message::Request(request))
            .expect("send client/registerCapability");
    }

    /// Shows each message to the user via a `window/showMessage` pop-up.
    fn publish_messages(&self, messages: Vec<Diagnostic>) {
        for message in messages {
            let params = lsp::ShowMessageParams {
                typ: match message.level {
                    Level::Error => lsp::MessageType::ERROR,
                    Level::Warning => lsp::MessageType::WARNING,
                },
                message: message.text,
            };
            let notification = lsp_server::Notification {
                method: "window/showMessage".into(),
                params: serde_json::to_value(params).expect("window/showMessage to json"),
            };
            self.connection
                .sender
                .send(lsp_server::Message::Notification(notification))
                .expect("send window/showMessage");
        }
    }

    /// Routes `path` to its project engine, runs `handler` on it, and
    /// serialises the result to JSON. Infallible-handler convenience wrapper
    /// around `fallible_respond_with_engine`.
    fn respond_with_engine<T, Handler>(
        &mut self,
        path: Utf8PathBuf,
        handler: Handler,
    ) -> (Result<Json, ResponseError>, Feedback)
    where
        T: serde::Serialize,
        Handler: FnOnce(
            &mut LanguageServerEngine<IO, ConnectionProgressReporter<'a>>,
        ) -> engine::Response<T>,
    {
        self.fallible_respond_with_engine(path, |engine| {
            let response = handler(engine);
            engine::Response {
                result: response.result.map(Ok),
                warnings: response.warnings,
                compilation: response.compilation,
            }
        })
    }

    /// Routes `path` to its project engine, runs `handler`, and converts the
    /// outcome into an LSP JSON response plus client feedback.
    ///
    /// Outcomes:
    /// - `Ok(Ok(value))`: serialise `value` as the response payload.
    /// - `Ok(Err(error))`: an LSP-level error response.
    /// - `Err(e)`: a compiler error; surfaced as feedback, null response.
    /// - no project for `path` / routing error: null response.
    fn fallible_respond_with_engine<T, Handler>(
        &mut self,
        path: Utf8PathBuf,
        handler: Handler,
    ) -> (Result<Json, ResponseError>, Feedback)
    where
        T: serde::Serialize,
        Handler: FnOnce(
            &mut LanguageServerEngine<IO, ConnectionProgressReporter<'a>>,
        ) -> engine::Response<Result<T, ResponseError>>,
    {
        match self.router.project_for_path(path) {
            Ok(Some(project)) => {
                let engine::Response {
                    result,
                    warnings,
                    compilation,
                } = handler(&mut project.engine);
                match result {
                    Ok(Ok(value)) => {
                        let feedback = project.feedback.response(compilation, warnings);
                        let json = serde_json::to_value(value).expect("response to json");
                        (Ok(json), feedback)
                    }
                    Ok(Err(error)) => {
                        let feedback = project.feedback.response(compilation, warnings);
                        (Err(error), feedback)
                    }
                    Err(e) => {
                        let feedback = project.feedback.build_with_error(e, compilation, warnings);
                        (Ok(Json::Null), feedback)
                    }
                }
            }
            // The path belongs to no known project: nothing to respond with.
            Ok(None) => (Ok(Json::Null), Feedback::default()),
            Err(error) => (
                Ok(Json::Null),
                self.outside_of_project_feedback.error(error),
            ),
        }
    }

    /// Builds a null response and reports `error` as feedback against the
    /// project owning `path`, or the catch-all bookkeeper if there is none.
    fn path_error_response(
        &mut self,
        path: Utf8PathBuf,
        error: gleam_core::Error,
    ) -> (Result<Json, ResponseError>, Feedback) {
        let feedback = match self.router.project_for_path(path) {
            Ok(Some(project)) => project.feedback.error(error),
            Ok(None) | Err(_) => self.outside_of_project_feedback.error(error),
        };
        (Ok(Json::Null), feedback)
    }

    /// Formats the document, replying with one `TextEdit` that replaces the
    /// full text range.
    fn format(
        &mut self,
        params: lsp::DocumentFormattingParams,
    ) -> (Result<Json, ResponseError>, Feedback) {
        let path = super::path(&params.text_document.uri);
        let mut new_text = String::new();
        let src = match self.io.read(&path) {
            Ok(src) => src.into(),
            Err(error) => return self.path_error_response(path, error),
        };
        if let Err(error) = gleam_core::format::pretty(&mut new_text, &src, &path) {
            return self.path_error_response(path, error);
        }
        let line_count = src.lines().count() as u32;
        let edit = TextEdit {
            // (0,0)..(line_count,0) covers the whole document; clients clamp
            // the end position to the actual document length.
            range: Range::new(Position::new(0, 0), Position::new(line_count, 0)),
            new_text,
        };
        let json = serde_json::to_value(vec![edit]).expect("to JSON value");
        (Ok(json), Feedback::default())
    }

    /// Delegates `textDocument/hover` to the owning project's engine.
    fn hover(&mut self, params: lsp::HoverParams) -> (Result<Json, ResponseError>, Feedback) {
        let path = super::path(&params.text_document_position_params.text_document.uri);
        self.respond_with_engine(path, |engine| engine.hover(params))
    }

    /// Delegates `textDocument/definition` to the owning project's engine.
    fn goto_definition(
        &mut self,
        params: lsp::GotoDefinitionParams,
    ) -> (Result<Json, ResponseError>, Feedback) {
        let path = super::path(&params.text_document_position_params.text_document.uri);
        self.respond_with_engine(path, |engine| engine.goto_definition(params))
    }

    /// Delegates `textDocument/typeDefinition` to the owning project's engine.
    fn goto_type_definition(
        &mut self,
        params: lsp_types::GotoDefinitionParams,
    ) -> (Result<Json, ResponseError>, Feedback) {
        let path = super::path(&params.text_document_position_params.text_document.uri);
        self.respond_with_engine(path, |engine| engine.goto_type_definition(params))
    }

    /// Delegates `textDocument/completion`, also handing the engine the
    /// current (possibly unsaved) source text.
    fn completion(
        &mut self,
        params: lsp::CompletionParams,
    ) -> (Result<Json, ResponseError>, Feedback) {
        let path = super::path(&params.text_document_position.text_document.uri);
        let src = match self.io.read(&path) {
            Ok(src) => src.into(),
            Err(error) => return self.path_error_response(path, error),
        };
        self.respond_with_engine(path, |engine| {
            engine.completion(params.text_document_position, src)
        })
    }

    /// Delegates `textDocument/signatureHelp` to the owning project's engine.
    fn signature_help(
        &mut self,
        params: lsp_types::SignatureHelpParams,
    ) -> (Result<Json, ResponseError>, Feedback) {
        let path = super::path(&params.text_document_position_params.text_document.uri);
        self.respond_with_engine(path, |engine| engine.signature_help(params))
    }

    /// Delegates `textDocument/codeAction` to the owning project's engine.
    fn code_action(
        &mut self,
        params: lsp::CodeActionParams,
    ) -> (Result<Json, ResponseError>, Feedback) {
        let path = super::path(&params.text_document.uri);
        self.respond_with_engine(path, |engine| engine.code_actions(params))
    }

    /// Delegates `textDocument/documentSymbol` to the owning project's engine.
    fn document_symbol(
        &mut self,
        params: lsp::DocumentSymbolParams,
    ) -> (Result<Json, ResponseError>, Feedback) {
        let path = super::path(&params.text_document.uri);
        self.respond_with_engine(path, |engine| engine.document_symbol(params))
    }

    /// Delegates `textDocument/prepareRename` to the owning project's engine.
    fn prepare_rename(
        &mut self,
        params: lsp::TextDocumentPositionParams,
    ) -> (Result<Json, ResponseError>, Feedback) {
        let path = super::path(&params.text_document.uri);
        self.respond_with_engine(path, |engine| engine.prepare_rename(params))
    }

    /// Delegates `textDocument/rename`; uses the fallible variant because
    /// rename can produce an LSP-level error response.
    fn rename(&mut self, params: lsp::RenameParams) -> (Result<Json, ResponseError>, Feedback) {
        let path = super::path(&params.text_document_position.text_document.uri);
        self.fallible_respond_with_engine(
            path,
            |engine: &mut LanguageServerEngine<IO, ConnectionProgressReporter<'a>>| {
                engine.rename(params)
            },
        )
    }

    /// Delegates `textDocument/references` to the owning project's engine.
    fn find_references(
        &mut self,
        params: lsp_types::ReferenceParams,
    ) -> (Result<Json, ResponseError>, Feedback) {
        let path = super::path(&params.text_document_position.text_document.uri);
        self.respond_with_engine(path, |engine| engine.find_references(params))
    }

    /// Records the unsaved editor text for `path` in the in-memory overlay
    /// and marks its project as needing recompilation.
    fn cache_file_in_memory(&mut self, path: Utf8PathBuf, text: String) -> Feedback {
        self.project_changed(&path);
        if let Err(error) = self.io.write_mem_cache(&path, &text) {
            return self.outside_of_project_feedback.error(error);
        }
        Feedback::none()
    }

    /// Drops the in-memory copy of `path` (the saved file on disc is now
    /// authoritative) and marks its project as changed.
    fn discard_in_memory_cache(&mut self, path: Utf8PathBuf) -> Feedback {
        self.project_changed(&path);
        if let Err(error) = self.io.delete_mem_cache(&path) {
            return self.outside_of_project_feedback.error(error);
        }
        Feedback::none()
    }

    /// A watched config file (gleam.toml) changed: throw away the project's
    /// engine so it is rebuilt from the new configuration on next use.
    fn watched_files_changed(&mut self, path: Utf8PathBuf) -> Feedback {
        self.router.delete_engine_for_path(&path);
        Feedback::none()
    }

    /// Recompiles every project that changed since the last compile, merging
    /// the feedback from each.
    fn compile_please(&mut self) -> Feedback {
        let mut accumulator = Feedback::none();
        // `take` clears the changed set so the next edit starts a fresh batch.
        let projects = std::mem::take(&mut self.changed_projects);
        for path in projects {
            let (_, feedback) = self.respond_with_engine(path, |this| this.compile_please());
            accumulator.append_feedback(feedback);
        }
        accumulator
    }

    /// Marks the project containing `path` (if any) as changed so the next
    /// compile request rebuilds it.
    fn project_changed(&mut self, path: &Utf8Path) {
        let project_path = self.router.project_path(path);
        if let Some(project_path) = project_path {
            _ = self.changed_projects.insert(project_path);
        }
    }
}
/// Performs the LSP `initialize` handshake: advertises this server's
/// capabilities to the client and returns the client's `InitializeParams`
/// (client capabilities, workspace information, etc.).
fn initialisation_handshake(connection: &lsp_server::Connection) -> InitializeParams {
    let server_capabilities = lsp::ServerCapabilities {
        // Full-document sync: the client resends the whole text on change,
        // and save notifications do not need to include the text.
        text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
            lsp::TextDocumentSyncOptions {
                open_close: Some(true),
                change: Some(lsp::TextDocumentSyncKind::FULL),
                will_save: None,
                will_save_wait_until: None,
                save: Some(lsp::TextDocumentSyncSaveOptions::SaveOptions(
                    lsp::SaveOptions {
                        include_text: Some(false),
                    },
                )),
            },
        )),
        selection_range_provider: None,
        hover_provider: Some(HoverProviderCapability::Simple(true)),
        // Completion is re-triggered after `.` for module/field access.
        completion_provider: Some(lsp::CompletionOptions {
            resolve_provider: None,
            trigger_characters: Some(vec![".".into()]),
            all_commit_characters: None,
            work_done_progress_options: lsp::WorkDoneProgressOptions {
                work_done_progress: None,
            },
            completion_item: None,
        }),
        // Signature help re-triggers while typing call arguments and labels.
        signature_help_provider: Some(lsp::SignatureHelpOptions {
            trigger_characters: Some(vec!["(".into(), ",".into(), ":".into()]),
            retrigger_characters: None,
            work_done_progress_options: lsp::WorkDoneProgressOptions {
                work_done_progress: None,
            },
        }),
        definition_provider: Some(lsp::OneOf::Left(true)),
        type_definition_provider: Some(lsp::TypeDefinitionProviderCapability::Simple(true)),
        implementation_provider: None,
        references_provider: Some(lsp::OneOf::Left(true)),
        document_highlight_provider: None,
        document_symbol_provider: Some(lsp::OneOf::Left(true)),
        workspace_symbol_provider: None,
        code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
        code_lens_provider: None,
        document_formatting_provider: Some(lsp::OneOf::Left(true)),
        document_range_formatting_provider: None,
        document_on_type_formatting_provider: None,
        // Rename supports the `prepareRename` pre-flight check.
        rename_provider: Some(lsp::OneOf::Right(RenameOptions {
            prepare_provider: Some(true),
            work_done_progress_options: lsp::WorkDoneProgressOptions {
                work_done_progress: None,
            },
        })),
        document_link_provider: None,
        color_provider: None,
        folding_range_provider: None,
        declaration_provider: None,
        execute_command_provider: None,
        workspace: None,
        call_hierarchy_provider: None,
        semantic_tokens_provider: None,
        moniker_provider: None,
        linked_editing_range_provider: None,
        experimental: None,
        position_encoding: None,
        inline_value_provider: None,
        inlay_hint_provider: None,
        diagnostic_provider: None,
    };
    let server_capabilities_json =
        serde_json::to_value(server_capabilities).expect("server_capabilities_serde");
    let initialise_params_json = connection
        .initialize(server_capabilities_json)
        .expect("LSP initialize");
    let initialise_params: InitializeParams =
        serde_json::from_value(initialise_params_json).expect("LSP InitializeParams from json");
    initialise_params
}
/// Converts one compiler diagnostic into its LSP representation: the main
/// diagnostic, plus a second HINT-severity diagnostic at the same location
/// when the compiler attached a hint.
fn diagnostic_to_lsp(diagnostic: Diagnostic) -> Vec<lsp::Diagnostic> {
    let severity = match diagnostic.level {
        Level::Error => lsp::DiagnosticSeverity::ERROR,
        Level::Warning => lsp::DiagnosticSeverity::WARNING,
    };
    let hint = diagnostic.hint;

    // Assemble the message: title, then the primary label (with a full stop
    // appended if it lacks terminal punctuation), then the body text.
    let mut message = diagnostic.title;
    let label_text = diagnostic
        .location
        .as_ref()
        .and_then(|location| location.label.text.as_deref());
    if let Some(label) = label_text {
        message.push_str("\n\n");
        message.push_str(label);
        let already_punctuated = label.ends_with(['.', '?']);
        if !already_punctuated {
            message.push('.');
        }
    }
    if !diagnostic.text.is_empty() {
        message.push_str("\n\n");
        message.push_str(&diagnostic.text);
    }

    // TODO: Redesign the diagnostic type so that we can be sure there is always
    // a location. Locationless diagnostics would be handled separately.
    let location = diagnostic
        .location
        .expect("Diagnostic given to LSP without location");
    let line_numbers = LineNumbers::new(&location.src);
    let path = path_to_uri(location.path);
    let range = src_span_to_lsp_range(location.label.span, &line_numbers);

    let main = lsp::Diagnostic {
        range,
        severity: Some(severity),
        code: None,
        code_description: None,
        source: None,
        message,
        related_information: related_information(
            &hint,
            &location.extra_labels,
            &path,
            &line_numbers,
            range,
        ),
        tags: None,
        data: None,
    };

    let Some(hint) = hint else {
        return vec![main];
    };
    let hint_diagnostic = lsp::Diagnostic {
        severity: Some(lsp::DiagnosticSeverity::HINT),
        message: hint,
        // Some editors require this kind of "link" to group diagnostics.
        // For example, in Zed "go to next diagnostic" would move you from
        // the warning to the hint in the same location without this.
        related_information: Some(vec![lsp::DiagnosticRelatedInformation {
            location: lsp::Location { uri: path, range },
            message: String::new(),
        }]),
        ..main.clone()
    };
    vec![main, hint_diagnostic]
}
/// Builds the `relatedInformation` list for a diagnostic: the hint (if any)
/// followed by one entry per extra label. Returns `None` when there is
/// nothing to relate.
fn related_information(
    hint: &Option<String>,
    extra_labels: &[ExtraLabel],
    path: &Url,
    line_numbers: &LineNumbers,
    range: Range,
) -> Option<Vec<lsp::DiagnosticRelatedInformation>> {
    let mut related_info = Vec::with_capacity(extra_labels.len() + 1);

    // The hint is included as a dedicated diagnostic _and_ the related information
    // to maximize compatibility
    if let Some(hint) = hint {
        related_info.push(lsp::DiagnosticRelatedInformation {
            message: hint.clone(),
            location: lsp::Location {
                uri: path.clone(),
                range,
            },
        });
    }

    for extra in extra_labels {
        let message = extra.label.text.clone().unwrap_or_default();
        // Labels carrying their own source info point into another file and
        // need line numbers computed for that file; otherwise the label is
        // in the same file as the main diagnostic.
        let location = if let Some((src, path)) = &extra.src_info {
            let line_numbers = LineNumbers::new(src);
            lsp::Location {
                uri: path_to_uri(path.clone()),
                range: src_span_to_lsp_range(extra.label.span, &line_numbers),
            }
        } else {
            lsp::Location {
                uri: path.clone(),
                range: src_span_to_lsp_range(extra.label.span, line_numbers),
            }
        };
        related_info.push(lsp::DiagnosticRelatedInformation { location, message });
    }

    (!related_info.is_empty()).then_some(related_info)
}
/// Builds a `file://` URL for the given file system path.
fn path_to_uri(path: Utf8PathBuf) -> Url {
    let url_string = format!("file://{}", path.as_os_str().to_string_lossy());
    Url::parse(&url_string).expect("path_to_uri URL parse")
}
| rust | Apache-2.0 | f424547f02e621f1c5f28749786e05eda7feb098 | 2026-01-04T15:40:22.554517Z | false |
use std::{collections::HashMap, sync::Arc};
use ecow::EcoString;
use itertools::Itertools;
use lsp_types::{
CompletionItem, CompletionItemKind, CompletionItemLabelDetails, CompletionTextEdit,
Documentation, MarkupContent, MarkupKind, Position, Range, TextDocumentPositionParams,
TextEdit,
};
use strum::IntoEnumIterator;
use vec1::Vec1;
use gleam_core::{
Result,
ast::{
self, Arg, CallArg, Function, FunctionLiteralKind, Pattern, Publicity, TypedExpr,
visit::Visit,
},
build::{Module, Origin},
line_numbers::LineNumbers,
type_::{
self, FieldMap, ModuleInterface, PRELUDE_MODULE_NAME, PreludeType, RecordAccessor, Type,
TypeConstructor, ValueConstructorVariant, collapse_links, error::VariableOrigin,
pretty::Printer,
},
};
use super::{
compiler::LspProjectCompiler,
edits::{
Newlines, add_newlines_after_import, get_import_edit,
position_of_first_definition_if_import,
},
files::FileSystemProxy,
};
// Represents the kind/specificity of completion that is being requested.
#[derive(Copy, Clone)]
enum CompletionKind {
    // A label for a function or type definition
    Label,
    // A field of a record
    FieldAccessor,
    // Values or types defined in the current module
    LocallyDefined,
    // Values or types defined in an already imported module
    ImportedModule,
    // Types or values defined in the prelude
    Prelude,
    // Types defined in a module that has not been imported
    ImportableModule,
}

// How well a completion's type lines up with the type expected at the cursor.
#[derive(Copy, Clone)]
enum TypeMatch {
    Matching,
    Incompatible,
    Unknown,
}

// Builds the `sortText` for a completion item. A numeric prefix orders more
// specific completion kinds before less specific ones, and completions whose
// type matches the expected type gain an extra leading `0` so they sort
// ahead of everything else.
fn sort_text(kind: CompletionKind, label: &str, type_match: TypeMatch) -> String {
    let priority: u8 = match kind {
        CompletionKind::Label => 0,
        CompletionKind::FieldAccessor => 1,
        CompletionKind::LocallyDefined => 2,
        CompletionKind::ImportedModule => 3,
        CompletionKind::Prelude => 4,
        CompletionKind::ImportableModule => 5,
    };
    // We want to prioritise type which match what is expected in the completion
    // as those are more likely to be what the user wants.
    let matching_prefix = match type_match {
        TypeMatch::Matching => "0",
        TypeMatch::Incompatible | TypeMatch::Unknown => "",
    };
    format!("{matching_prefix}{priority}_{label}")
}
// The form in which a type completion is needed in context.
// Mainly used to determine if the "type" keyword should be appended to the completion
enum TypeCompletionForm {
    // The type completion is for an unqualified import.
    UnqualifiedImport,
    // The type completion is for an unqualified import within braces.
    UnqualifiedImportWithinBraces,
    // Any other context, e.g. a type annotation.
    Default,
}
/// Computes completion suggestions for a single cursor position within one
/// module's source buffer.
pub struct Completer<'a, IO> {
    /// The direct buffer source code
    src: &'a EcoString,
    /// The line number information of the buffer source code
    pub src_line_numbers: LineNumbers,
    /// The current cursor position within the buffer source code
    cursor_position: &'a Position,
    /// A reference to the lsp compiler for getting module information
    compiler: &'a LspProjectCompiler<FileSystemProxy<IO>>,
    /// A reference to the current module the completion is for
    module: &'a Module,
    /// The line number information of the latest compiled module.
    /// This is not necessarily the same as src_line_numbers if the module
    /// is in a non-compiling state
    pub module_line_numbers: LineNumbers,
    /// The expected type of the value we are completing. `None` if we are
    /// completing a type annotation or label, where this information is not
    /// applicable.
    pub expected_type: Option<Arc<Type>>,
}
impl<'a, IO> Completer<'a, IO> {
pub fn new(
src: &'a EcoString,
params: &'a TextDocumentPositionParams,
compiler: &'a LspProjectCompiler<FileSystemProxy<IO>>,
module: &'a Module,
) -> Self {
Completer {
src,
src_line_numbers: LineNumbers::new(src.as_str()),
cursor_position: ¶ms.position,
compiler,
module,
module_line_numbers: LineNumbers::new(&module.code),
expected_type: None,
}
}
    // Gets the current range around the cursor to place a completion
    // and the phrase surrounding the cursor to use for completion.
    // This method takes in a helper to determine what qualifies as
    // a phrase depending on context.
    fn get_phrase_surrounding_for_completion(
        &'a self,
        valid_phrase_char: &impl Fn(char) -> bool,
    ) -> (Range, String) {
        // Byte offset of the cursor within the buffer text.
        let cursor = self.src_line_numbers.byte_index(*self.cursor_position);

        // Get part of phrase prior to cursor
        let before = self
            .src
            .get(..cursor as usize)
            .and_then(|line| line.rsplit_once(valid_phrase_char).map(|r| r.1))
            .unwrap_or("");
        // Get part of phrase following cursor
        let after = self
            .src
            .get(cursor as usize..)
            .and_then(|line| line.split_once(valid_phrase_char).map(|r| r.0))
            .unwrap_or("");
        (
            Range {
                start: Position {
                    line: self.cursor_position.line,
                    // NOTE(review): `before.len()` is a byte count while LSP
                    // `character` is a column offset; these only agree while
                    // the phrase and its line are ASCII — confirm. Also
                    // assumes the predicate treats '\n' as a boundary (both
                    // callers' predicates do), keeping `before` on the
                    // cursor's line.
                    character: self.cursor_position.character - before.len() as u32,
                },
                end: Position {
                    line: self.cursor_position.line,
                    character: self.cursor_position.character,
                },
            },
            format!("{before}{after}"),
        )
    }
// Gets the current range around the cursor to place a completion
// and any part of the phrase preceeding a dot if a module is being selected from.
// A continuous phrase in this case is a name or typename that may have a dot in it.
// This is used to match the exact location to fill in the completion.
fn get_phrase_surrounding_completion(&'a self) -> (Range, Option<String>) {
let valid_phrase_char = |c: char| {
// Checks if a character is not a valid name/upname character or a dot.
!c.is_ascii_alphanumeric() && c != '.' && c != '_'
};
let (range, word) = self.get_phrase_surrounding_for_completion(&valid_phrase_char);
(range, word.split_once('.').map(|c| String::from(c.0)))
}
// Gets the current range around the cursor to place a completion.
// For unqualified imports we special case the word being completed to allow for whitespace but not dots.
// This is to allow `type MyType` to be treated as 1 "phrase" for the sake of completion.
fn get_phrase_surrounding_completion_for_import(&'a self) -> Range {
let valid_phrase_char = |c: char| {
// Checks if a character is not a valid name/upname character or whitespace.
// The newline character is not included as well.
!c.is_ascii_alphanumeric() && c != '_' && c != ' ' && c != '\t'
};
let (range, _) = self.get_phrase_surrounding_for_completion(&valid_phrase_char);
range
}
    /// Checks if the line being editted is an import line and provides completions if it is.
    /// If the line includes a dot then it provides unqualified import completions.
    /// Otherwise it provides direct module import completions.
    pub fn import_completions(&'a self) -> Option<Result<Option<Vec<CompletionItem>>>> {
        // Byte offsets of the start of the cursor's line and of the next line.
        let start_of_line = self.src_line_numbers.byte_index(Position {
            line: self.cursor_position.line,
            character: 0,
        });
        let end_of_line = self.src_line_numbers.byte_index(Position {
            line: self.cursor_position.line + 1,
            character: 0,
        });

        // Drop all lines except the line the cursor is on
        let src = self.src.get(start_of_line as usize..end_of_line as usize)?;

        // If this isn't an import line then we don't offer import completions
        if !src.trim_start().starts_with("import") {
            return None;
        }

        // Check if we are completing an unqualified import
        if let Some(dot_index) = src.find('.') {
            // Find the module that is being imported from.
            // The `6..` skips the leading `import` keyword itself.
            let importing_module_name = src.get(6..dot_index)?.trim();
            let importing_module: &ModuleInterface =
                self.compiler.get_module_interface(importing_module_name)?;

            // Whether the unqualified list after the dot is brace-delimited,
            // e.g. `import gleam/io.{`.
            let within_braces = match src.get(dot_index + 1..) {
                Some(x) => x.trim_start().starts_with('{'),
                None => false,
            };

            Some(Ok(Some(self.unqualified_completions_from_module(
                importing_module,
                within_braces,
            ))))
        } else {
            // Find where to start and end the import completion.
            // `end_of_line - 1` is the byte just before the next line starts.
            let start = self.src_line_numbers.line_and_column_number(start_of_line);
            let end = self
                .src_line_numbers
                .line_and_column_number(end_of_line - 1);
            // Convert to 0-indexed LSP positions (hence the `- 1` on the
            // lines); `+ 6` skips past the `import` keyword.
            // NOTE(review): this assumes `import` sits at the start of the
            // line — confirm behaviour for indented import statements.
            let start = Position::new(start.line - 1, start.column + 6);
            let end = Position::new(end.line - 1, end.column - 1);
            let completions = self.complete_modules_for_import(start, end);
            Some(Ok(Some(completions)))
        }
    }
    /// Gets the completes for unqualified imports from a module.
    /// Suggests only the public types and values of `module_being_imported_from`
    /// that have not already been imported unqualified by the current module.
    pub fn unqualified_completions_from_module(
        &'a self,
        module_being_imported_from: &'a ModuleInterface,
        within_braces: bool,
    ) -> Vec<CompletionItem> {
        let insert_range = self.get_phrase_surrounding_completion_for_import();
        let mut completions = vec![];

        // Find values and type that have already previously been imported
        let mut already_imported_types = std::collections::HashSet::new();
        let mut already_imported_values = std::collections::HashSet::new();

        // Search the ast for import statements
        for import in &self.module.ast.definitions.imports {
            // Find the import that matches the module being imported from
            if import.module == module_being_imported_from.name {
                // Add the values and types that have already been imported
                for unqualified in &import.unqualified_types {
                    let _ = already_imported_types.insert(&unqualified.name);
                }
                for unqualified in &import.unqualified_values {
                    let _ = already_imported_values.insert(&unqualified.name);
                }
            }
        }

        // Get completable types
        for (name, type_) in &module_being_imported_from.types {
            // Skip types that should not be suggested
            if !self.is_suggestable_import(
                &type_.publicity,
                module_being_imported_from.package.as_str(),
            ) {
                continue;
            }
            // Skip type that are already imported
            if already_imported_types.contains(name) {
                continue;
            }
            completions.push(type_completion(
                None,
                name,
                type_,
                insert_range,
                // Braces already typed by the user change how the completion
                // text is rendered.
                if within_braces {
                    TypeCompletionForm::UnqualifiedImportWithinBraces
                } else {
                    TypeCompletionForm::UnqualifiedImport
                },
                CompletionKind::ImportedModule,
            ));
        }

        // Get completable values
        for (name, value) in &module_being_imported_from.values {
            // Skip values that should not be suggested
            if !self.is_suggestable_import(
                &value.publicity,
                module_being_imported_from.package.as_str(),
            ) {
                continue;
            }
            // Skip values that are already imported
            if already_imported_values.contains(name) {
                continue;
            }
            completions.push(self.value_completion(
                None,
                &module_being_imported_from.name,
                name,
                value,
                insert_range,
                CompletionKind::ImportedModule,
            ));
        }
        completions
    }
// Get all the modules that can be imported that have not already been imported.
fn completable_modules_for_import(&self) -> Vec<(&EcoString, &ModuleInterface)> {
    // Packages we may suggest imports from: all direct dependencies.
    let mut direct_dep_packages: std::collections::HashSet<&EcoString> =
        std::collections::HashSet::from_iter(
            self.compiler.project_compiler.config.dependencies.keys(),
        );
    if !self.module.origin.is_src() {
        // Outside of src/ (dev and test modules) we can also import direct
        // dev dependencies
        direct_dep_packages.extend(
            self.compiler
                .project_compiler
                .config
                .dev_dependencies
                .keys(),
        )
    }
    // Modules this module already imports; they must not be suggested again.
    let already_imported: std::collections::HashSet<EcoString> =
        std::collections::HashSet::from_iter(
            self.module.dependencies.iter().map(|d| d.0.clone()),
        );
    self.compiler
        .project_compiler
        .get_importable_modules()
        .iter()
        //
        // You cannot import yourself
        .filter(|(name, _)| *name != &self.module.name)
        //
        // Different origin directories will get different import completions
        .filter(|(_, module)| match self.module.origin {
            // src/ can import from src/
            Origin::Src => module.origin.is_src(),
            // dev/ can import from src/ or dev/
            Origin::Dev => !module.origin.is_test(),
            // Test can import from anywhere
            Origin::Test => true,
        })
        //
        // It is possible to import internal modules from other packages,
        // but it's not recommended so we don't include them in completions
        .filter(|(_, module)| module.package == self.root_package_name() || !module.is_internal)
        //
        // You cannot import a module twice
        .filter(|(name, _)| !already_imported.contains(*name))
        //
        // It is possible to import modules from dependencies of dependencies
        // but it's not recommended so we don't include them in completions
        .filter(|(_, module)| {
            let is_root_or_prelude =
                module.package == self.root_package_name() || module.package.is_empty();
            is_root_or_prelude || direct_dep_packages.contains(&module.package)
        })
        .collect()
}
/// Builds one MODULE completion item for every importable module that has
/// not been imported yet, each editing the `start..end` range to hold the
/// module's full name.
fn complete_modules_for_import(
    &'a self,
    start: Position,
    end: Position,
) -> Vec<CompletionItem> {
    let importable = self.completable_modules_for_import();
    let mut items = Vec::with_capacity(importable.len());
    for (name, _interface) in &importable {
        let new_text = name.to_string();
        let edit = TextEdit {
            range: Range { start, end },
            new_text: new_text.clone(),
        };
        items.push(CompletionItem {
            label: new_text,
            kind: Some(CompletionItemKind::MODULE),
            text_edit: Some(CompletionTextEdit::Edit(edit)),
            ..Default::default()
        });
    }
    items
}
// NOTE: completion_types and completion_values are really similar
// but just different enough that an abstraction would
// be really hard to understand or use a lot of trait magic.
// For now I've left it as is but might be worth revisiting.
/// Provides completions for when the context being edited is a type.
pub fn completion_types(&'a self) -> Vec<CompletionItem> {
    let surrounding_completion = self.get_phrase_surrounding_completion();
    let mut completions = vec![];
    let (insert_range, module_select) = surrounding_completion;
    // Module and prelude types
    // Do not complete direct module types if the user has already started typing a module select.
    // e.x. when the user has typed mymodule.| we know local module types and prelude types are no
    // longer relevant.
    if module_select.is_none() {
        // Types defined in the module currently being edited.
        for (name, type_) in &self.module.ast.type_info.types {
            completions.push(type_completion(
                None,
                name,
                type_,
                insert_range,
                TypeCompletionForm::Default,
                CompletionKind::LocallyDefined,
            ));
        }
        // Built-in prelude types.
        for type_ in PreludeType::iter() {
            let label: String = type_.name().into();
            let sort_text = Some(sort_text(
                CompletionKind::Prelude,
                &label,
                TypeMatch::Unknown,
            ));
            completions.push(CompletionItem {
                label,
                detail: Some("Type".into()),
                kind: Some(CompletionItemKind::CLASS),
                sort_text,
                ..Default::default()
            });
        }
    }
    // Imported modules
    for import in &self.module.ast.definitions.imports {
        // The module may not be known yet if it has not been compiled
        // in this editor session.
        let Some(module) = self.compiler.get_module_interface(&import.module) else {
            continue;
        };
        // Qualified types
        for (name, type_) in &module.types {
            if !self.is_suggestable_import(&type_.publicity, module.package.as_str()) {
                continue;
            }
            if let Some(module) = import.used_name() {
                // If the user has already started a module select then don't show irrelevant modules.
                // e.x. when the user has typed mymodule.| we should only show items from mymodule.
                if let Some(input_mod_name) = &module_select
                    && &module != input_mod_name
                {
                    continue;
                }
                completions.push(type_completion(
                    Some(&module),
                    name,
                    type_,
                    insert_range,
                    TypeCompletionForm::Default,
                    CompletionKind::ImportedModule,
                ));
            }
        }
        // Unqualified types
        // Do not complete unqualified types if the user has already started typing a module select.
        // e.x. when the user has typed mymodule.| we know unqualified module types are no longer relevant.
        if module_select.is_none() {
            for unqualified in &import.unqualified_types {
                if let Some(type_) = module.get_public_type(&unqualified.name) {
                    completions.push(type_completion(
                        None,
                        unqualified.used_name(),
                        type_,
                        insert_range,
                        TypeCompletionForm::Default,
                        CompletionKind::ImportedModule,
                    ))
                }
            }
        }
    }
    // Importable modules
    // Modules that are not imported yet: picking one of their types also
    // inserts the missing `import` statement via an extra edit.
    let first_import_pos =
        position_of_first_definition_if_import(self.module, &self.src_line_numbers);
    let first_is_import = first_import_pos.is_some();
    let import_location = first_import_pos.unwrap_or_default();
    let after_import_newlines = add_newlines_after_import(
        import_location,
        first_is_import,
        &self.src_line_numbers,
        self.src,
    );
    for (module_full_name, module) in self.completable_modules_for_import() {
        // Do not try to import the prelude.
        if module_full_name == "gleam" {
            continue;
        }
        // The module is referred to by the last segment of its full name.
        let qualifier = module_full_name
            .split('/')
            .next_back()
            .unwrap_or(module_full_name);
        // If the user has already started a module select then don't show irrelevant modules.
        // e.x. when the user has typed mymodule.| we should only show items from mymodule.
        if let Some(input_mod_name) = &module_select
            && qualifier != input_mod_name
        {
            continue;
        }
        // Qualified types
        for (name, type_) in &module.types {
            if !self.is_suggestable_import(&type_.publicity, module.package.as_str()) {
                continue;
            }
            let mut completion = type_completion(
                Some(qualifier),
                name,
                type_,
                insert_range,
                TypeCompletionForm::Default,
                CompletionKind::ImportableModule,
            );
            add_import_to_completion(
                &mut completion,
                import_location,
                module_full_name,
                &after_import_newlines,
            );
            completions.push(completion);
        }
    }
    completions
}
/// Provides completions for when the context being edited is a value.
pub fn completion_values(&'a self) -> Vec<CompletionItem> {
    let surrounding_completion = self.get_phrase_surrounding_completion();
    let mut completions = vec![];
    let mod_name = self.module.name.as_str();
    let (insert_range, module_select) = surrounding_completion;
    // Module and prelude values
    // Do not complete direct module values if the user has already started typing a module select.
    // e.x. when the user has typed mymodule.| we know local module and prelude values are no longer
    // relevant.
    if module_select.is_none() {
        let cursor = self.src_line_numbers.byte_index(*self.cursor_position);
        // Find the function that the cursor is in and push completions for
        // its arguments and local variables.
        if let Some(function) = self
            .module
            .ast
            .definitions
            .functions
            .iter()
            .filter(|function| function.full_location().contains(cursor))
            .peekable()
            .peek()
        {
            completions.extend(
                LocalCompletion::new(
                    mod_name,
                    insert_range,
                    cursor,
                    self.expected_type.clone(),
                )
                .fn_completions(function),
            );
        }
        for (name, value) in &self.module.ast.type_info.values {
            // Here we do not check for the internal attribute: we always want
            // to show autocompletions for values defined in the same module,
            // even if those are internal.
            completions.push(self.value_completion(
                None,
                mod_name,
                name,
                value,
                insert_range,
                CompletionKind::LocallyDefined,
            ));
        }
        // Helper that pushes a completion for a value from the prelude,
        // ranked by how well its type matches the expected type.
        let mut push_prelude_completion = |label: &str, kind, type_: Arc<Type>| {
            let label = label.to_string();
            let sort_text = Some(sort_text(
                CompletionKind::Prelude,
                &label,
                match_type(&self.expected_type, &type_),
            ));
            completions.push(CompletionItem {
                label,
                detail: Some(PRELUDE_MODULE_NAME.into()),
                kind: Some(kind),
                sort_text,
                ..Default::default()
            });
        };
        // Only Bool, Nil and Result contribute value completions here; the
        // remaining prelude types match the empty arm below.
        for type_ in PreludeType::iter() {
            match type_ {
                PreludeType::Bool => {
                    push_prelude_completion(
                        "True",
                        CompletionItemKind::ENUM_MEMBER,
                        type_::bool(),
                    );
                    push_prelude_completion(
                        "False",
                        CompletionItemKind::ENUM_MEMBER,
                        type_::bool(),
                    );
                }
                PreludeType::Nil => {
                    push_prelude_completion(
                        "Nil",
                        CompletionItemKind::ENUM_MEMBER,
                        type_::nil(),
                    );
                }
                PreludeType::Result => {
                    push_prelude_completion(
                        "Ok",
                        CompletionItemKind::CONSTRUCTOR,
                        type_::result(type_::unbound_var(0), type_::unbound_var(0)),
                    );
                    push_prelude_completion(
                        "Error",
                        CompletionItemKind::CONSTRUCTOR,
                        type_::result(type_::unbound_var(0), type_::unbound_var(0)),
                    );
                }
                PreludeType::BitArray
                | PreludeType::Float
                | PreludeType::Int
                | PreludeType::List
                | PreludeType::String
                | PreludeType::UtfCodepoint => {}
            }
        }
    }
    // Imported modules
    for import in &self.module.ast.definitions.imports {
        // The module may not be known yet if it has not been compiled
        // in this editor session.
        let Some(module) = self.compiler.get_module_interface(&import.module) else {
            continue;
        };
        // Qualified values
        for (name, value) in &module.values {
            if !self.is_suggestable_import(&value.publicity, module.package.as_str()) {
                continue;
            }
            if let Some(module) = import.used_name() {
                // If the user has already started a module select then don't show irrelevant modules.
                // e.x. when the user has typed mymodule.| we should only show items from mymodule.
                if let Some(input_mod_name) = &module_select
                    && &module != input_mod_name
                {
                    continue;
                }
                completions.push(self.value_completion(
                    Some(&module),
                    mod_name,
                    name,
                    value,
                    insert_range,
                    CompletionKind::ImportedModule,
                ));
            }
        }
        // Unqualified values
        // Do not complete unqualified values if the user has already started typing a module select.
        // e.x. when the user has typed mymodule.| we know unqualified module values are no longer relevant.
        if module_select.is_none() {
            for unqualified in &import.unqualified_values {
                if let Some(value) = module.get_public_value(&unqualified.name) {
                    let name = unqualified.used_name();
                    completions.push(self.value_completion(
                        None,
                        mod_name,
                        name,
                        value,
                        insert_range,
                        CompletionKind::ImportedModule,
                    ))
                }
            }
        }
    }
    // Importable modules
    // Modules that are not imported yet: picking one of their values also
    // inserts the missing `import` statement via an extra edit.
    let first_import_pos =
        position_of_first_definition_if_import(self.module, &self.src_line_numbers);
    let first_is_import = first_import_pos.is_some();
    let import_location = first_import_pos.unwrap_or_default();
    let after_import_newlines = add_newlines_after_import(
        import_location,
        first_is_import,
        &self.src_line_numbers,
        self.src,
    );
    for (module_full_name, module) in self.completable_modules_for_import() {
        // Do not try to import the prelude.
        if module_full_name == "gleam" {
            continue;
        }
        // The module is referred to by the last segment of its full name.
        let qualifier = module_full_name
            .split('/')
            .next_back()
            .unwrap_or(module_full_name);
        // If the user has already started a module select then don't show irrelevant modules.
        // e.x. when the user has typed mymodule.| we should only show items from mymodule.
        if let Some(input_mod_name) = &module_select
            && qualifier != input_mod_name
        {
            continue;
        }
        // Qualified values
        for (name, value) in &module.values {
            if !self.is_suggestable_import(&value.publicity, module.package.as_str()) {
                continue;
            }
            let mut completion = self.value_completion(
                Some(qualifier),
                module_full_name,
                name,
                value,
                insert_range,
                CompletionKind::ImportableModule,
            );
            add_import_to_completion(
                &mut completion,
                import_location,
                module_full_name,
                &after_import_newlines,
            );
            completions.push(completion);
        }
    }
    completions
}
// Looks up the type accessors for the given type.
//
// Returns `None` for function, variable, and tuple types — only named types
// can have record accessors.
fn type_accessors_from_modules(
    &'a self,
    importable_modules: &'a im::HashMap<EcoString, ModuleInterface>,
    type_: Arc<Type>,
) -> Option<&'a HashMap<EcoString, RecordAccessor>> {
    // Follow type links so we look at the concrete type.
    let type_ = collapse_links(type_);
    match type_.as_ref() {
        Type::Named {
            name,
            module,
            inferred_variant,
            ..
        } => importable_modules
            .get(module)
            .and_then(|i| i.accessors.get(name))
            // Accessors must be importable, unless the type is defined in
            // the current module.
            .filter(|a| a.publicity.is_importable() || module == &self.module.name)
            .map(|a| a.accessors_for_variant(*inferred_variant)),
        Type::Fn { .. } | Type::Var { .. } | Type::Tuple { .. } => None,
    }
}
/// Provides completions for field accessors when the context being edited
/// is a custom type instance
pub fn completion_field_accessors(&'a self, type_: Arc<Type>) -> Vec<CompletionItem> {
    self.type_accessors_from_modules(
        self.compiler.project_compiler.get_importable_modules(),
        type_,
    )
    .map(|accessors| {
        accessors
            .values()
            .map(|accessor| self.field_completion(&accessor.label, accessor.type_.clone()))
            .collect_vec()
    })
    // No accessors for this type: no completions.
    .unwrap_or_default()
}
fn callable_field_map(
&'a self,
expr: &'a TypedExpr,
importable_modules: &'a im::HashMap<EcoString, ModuleInterface>,
) -> Option<&'a FieldMap> {
match expr {
TypedExpr::Var { constructor, .. } => constructor.field_map(),
TypedExpr::ModuleSelect {
module_name, label, ..
} => importable_modules
.get(module_name)
.and_then(|i| i.values.get(label))
.and_then(|a| a.field_map()),
TypedExpr::Int { .. }
| TypedExpr::Float { .. }
| TypedExpr::String { .. }
| TypedExpr::Block { .. }
| TypedExpr::Pipeline { .. }
| TypedExpr::Fn { .. }
| TypedExpr::List { .. }
| rust | Apache-2.0 | f424547f02e621f1c5f28749786e05eda7feb098 | 2026-01-04T15:40:22.554517Z | true |
// ---- gleam-lang/gleam: language-server/src/signature_help.rs ----
use std::{
collections::{HashMap, HashSet},
sync::Arc,
};
use ecow::EcoString;
use lsp_types::{
Documentation, MarkupContent, MarkupKind, ParameterInformation, ParameterLabel, SignatureHelp,
SignatureInformation,
};
use gleam_core::{
ast::{CallArg, ImplicitCallArgOrigin, TypedExpr},
type_::{FieldMap, ModuleValueConstructor, Type, pretty::Printer},
};
/// Computes the signature help (if any) for the given expression. Only
/// function-call expressions produce signature help; everything else yields
/// `None`.
pub fn for_expression(expr: &TypedExpr) -> Option<SignatureHelp> {
    // If we're inside a function call we can provide signature help,
    // otherwise we don't want anything to pop up.
    let TypedExpr::Call { fun, arguments, .. } = expr else {
        return None;
    };
    match fun.as_ref() {
        // If the thing being called is a local variable then we want to
        // use its name as the function name to be used in the signature
        // help.
        TypedExpr::Var {
            constructor, name, ..
        } => signature_help(name.clone(), fun, arguments, constructor.field_map()),
        // If we're making a qualified call to another module's function
        // then we want to show its type, documentation and the exact name
        // being used (that is "<module_name>.<function_name>").
        //
        // eg. list.map(|)
        //     ^ When the cursor is here we are going to show
        //       "list.map(List(a), with: fn(a) -> b) -> List(b)"
        //       as the help signature.
        //
        TypedExpr::ModuleSelect {
            module_alias,
            label,
            constructor,
            ..
        } => {
            // Constants carry no field map; records and functions do.
            let field_map = match constructor {
                ModuleValueConstructor::Constant { .. } => None,
                ModuleValueConstructor::Record { field_map, .. }
                | ModuleValueConstructor::Fn { field_map, .. } => field_map.into(),
            };
            let name = format!("{module_alias}.{label}").into();
            signature_help(name, fun, arguments, field_map)
        }
        // If the function being called is an invalid node we don't want to
        // provide any hint, otherwise one might be under the impression that
        // that function actually exists somewhere.
        //
        TypedExpr::Invalid { .. } => None,
        // In all other cases we can't figure out a good name to show in the
        // signature help so we use an anonymous `fn` as the name to be
        // shown.
        //
        // eg. fn(a){a}(|)
        //     ^ When the cursor is here we are going to show
        //       "fn(a: a) -> a" as the help signature.
        //
        TypedExpr::Int { .. }
        | TypedExpr::Float { .. }
        | TypedExpr::String { .. }
        | TypedExpr::Block { .. }
        | TypedExpr::Pipeline { .. }
        | TypedExpr::Fn { .. }
        | TypedExpr::List { .. }
        | TypedExpr::Call { .. }
        | TypedExpr::BinOp { .. }
        | TypedExpr::Case { .. }
        | TypedExpr::RecordAccess { .. }
        | TypedExpr::PositionalAccess { .. }
        | TypedExpr::Tuple { .. }
        | TypedExpr::TupleIndex { .. }
        | TypedExpr::Todo { .. }
        | TypedExpr::Panic { .. }
        | TypedExpr::Echo { .. }
        | TypedExpr::BitArray { .. }
        | TypedExpr::RecordUpdate { .. }
        | TypedExpr::NegateBool { .. }
        | TypedExpr::NegateInt { .. } => signature_help("fn".into(), fun, arguments, None),
    }
}
/// Show the signature help of a function with the given name.
/// Besides the function's typed expression `fun`, this function needs a bit of
/// additional data to properly display a useful help signature:
///
/// - `fun_name` is used as the display name of the function in the help
///   signature.
/// - `supplied_arguments` are arguments being passed to the function call, those
///   might not be of the correct arity or have wrong types but are used to
///   deduce which argument should be highlighted next in the help signature.
/// - `field_map` is the function's field map (if any) that will be used to
///   display labels and understand which labelled argument should be
///   highlighted next in the help signature.
///
fn signature_help(
    fun_name: EcoString,
    fun: &TypedExpr,
    supplied_arguments: &[CallArg<TypedExpr>],
    field_map: Option<&FieldMap>,
) -> Option<SignatureHelp> {
    // Bail out if the thing being called doesn't have a function type.
    let (arguments, return_) = fun.type_().fn_types()?;
    // If the function has no arguments, we don't want to show any help.
    let arity = arguments.len() as u32;
    if arity == 0 {
        return None;
    }
    // Invert the field map so we can go from an argument's index to its label.
    let index_to_label = match field_map {
        Some(field_map) => field_map
            .fields
            .iter()
            .map(|(name, index)| (*index, name))
            .collect(),
        None => HashMap::new(),
    };
    let printer = Printer::new();
    let (label, parameters) =
        print_signature_help(printer, fun_name, arguments, return_, &index_to_label);
    let active_parameter = active_parameter_index(arity, supplied_arguments, index_to_label)
        // If we don't want to highlight any arg in the suggestion we have to
        // explicitly provide an out of bound index.
        .or(Some(arity));
    Some(SignatureHelp {
        signatures: vec![SignatureInformation {
            label,
            documentation: fun.get_documentation().map(|d| {
                Documentation::MarkupContent(MarkupContent {
                    kind: MarkupKind::Markdown,
                    value: d.into(),
                })
            }),
            parameters: Some(parameters),
            // The active parameter is set on the outer `SignatureHelp`
            // below, not on the individual signature.
            active_parameter: None,
        }],
        active_signature: Some(0),
        active_parameter,
    })
}
/// Picks the index of the argument that should be highlighted next in the
/// help signature, given the arguments supplied to the call so far. Returns
/// `None` when no argument can be sensibly highlighted (for example when
/// arguments were passed in an invalid order).
fn active_parameter_index(
    arity: u32,
    supplied_arguments: &[CallArg<TypedExpr>],
    mut index_to_label: HashMap<u32, &EcoString>,
) -> Option<u32> {
    let mut is_use_call = false;
    let mut found_labelled_argument = false;
    let mut used_labels = HashSet::new();
    let mut supplied_unlabelled_arguments = 0;
    let unlabelled_arguments = arity - index_to_label.len() as u32;
    for (i, arg) in supplied_arguments.iter().enumerate() {
        // If there's an unlabelled argument after a labelled one, we can't
        // figure out what to suggest since arguments were passed in a wrong
        // order.
        if found_labelled_argument && arg.label.is_none() && !arg.is_implicit() {
            return None;
        }
        // Once we reach to an implicit use argument (be it the callback or the
        // missing implicitly inserted ones) we can break since those must be
        // the last arguments of the function and are not explicitly supplied by
        // the programmer.
        if let Some(ImplicitCallArgOrigin::Use | ImplicitCallArgOrigin::IncorrectArityUse) =
            arg.implicit
        {
            is_use_call = true;
            break;
        }
        match &arg.label {
            Some(label) => {
                found_labelled_argument = true;
                let _ = used_labels.insert(label);
            }
            // If the argument is unlabelled we just remove the label
            // corresponding to it from the field map since it has already been
            // passed as an unlabelled argument.
            None => {
                supplied_unlabelled_arguments += 1;
                let _ = index_to_label.remove(&(i as u32));
            }
        }
    }
    let active_index = if supplied_unlabelled_arguments < unlabelled_arguments {
        if found_labelled_argument {
            // If I have supplied some labelled args but I haven't supplied all
            // unlabelled args before a labelled one then we can't safely
            // suggest anything as the next argument.
            None
        } else {
            // If I haven't supplied enough unlabelled arguments then I have to
            // set the next one as active (be it labelled or not).
            Some(supplied_unlabelled_arguments)
        }
    } else {
        // If I have supplied all the unlabelled arguments (and we could have
        // also supplied some labelled ones as unlabelled!) then we pick the
        // leftmost labelled argument that hasn't been supplied yet.
        index_to_label
            .into_iter()
            .filter(|(_index, label)| !used_labels.contains(label))
            .map(|(index, _label)| index)
            .min()
            .or(Some(supplied_arguments.len() as u32))
    };
    // If we're showing hints for a use call and we end up deciding that the
    // only index we can suggest is the one of the use callback then we do not
    // highlight it or it would lead people into believing they can manually
    // pass that argument in.
    if is_use_call && active_index == Some(arity - 1) {
        None
    } else {
        active_index
    }
}
/// Renders the help signature as `name(arg, label: arg, ...) -> return`,
/// while simultaneously recording, for every argument, the byte offsets of
/// its span in the rendered string as a `ParameterInformation`. The LSP
/// client uses those offsets to highlight the active parameter.
///
fn print_signature_help(
    mut printer: Printer,
    function_name: EcoString,
    arguments: Vec<Arc<Type>>,
    return_: Arc<Type>,
    index_to_label: &HashMap<u32, &EcoString>,
) -> (String, Vec<ParameterInformation>) {
    let mut rendered = format!("{function_name}(");
    let mut parameters = Vec::with_capacity(arguments.len());
    for (index, argument) in arguments.iter().enumerate() {
        // Write the separator *before* the argument (except for the first
        // one) so that the recorded offsets cover only the argument itself.
        if index > 0 {
            rendered.push_str(", ");
        }
        let offset_start = rendered.len() as u32;
        if let Some(label) = index_to_label.get(&(index as u32)) {
            rendered.push_str(label);
            rendered.push_str(": ");
        }
        rendered.push_str(&printer.pretty_print(argument, 0));
        let offset_end = rendered.len() as u32;
        parameters.push(ParameterInformation {
            label: ParameterLabel::LabelOffsets([offset_start, offset_end]),
            documentation: None,
        });
    }
    rendered.push_str(") -> ");
    rendered.push_str(&printer.pretty_print(&return_, 0));
    (rendered, parameters)
}
// ---- end of language-server/src/signature_help.rs ----
// ---- gleam-lang/gleam: language-server/src/code_action.rs ----
use std::{collections::HashSet, iter, sync::Arc};
use ecow::{EcoString, eco_format};
use gleam_core::{
Error, STDLIB_PACKAGE_NAME,
analyse::Inferred,
ast::{
self, ArgNames, AssignName, AssignmentKind, BitArraySegmentTruncation, BoundVariable,
BoundVariableName, CallArg, CustomType, FunctionLiteralKind, ImplicitCallArgOrigin, Import,
InvalidExpression, PIPE_PRECEDENCE, Pattern, PatternUnusedArguments,
PipelineAssignmentKind, Publicity, RecordConstructor, SrcSpan, TodoKind, TypedArg,
TypedAssignment, TypedClauseGuard, TypedDefinitions, TypedExpr, TypedModuleConstant,
TypedPattern, TypedPipelineAssignment, TypedRecordConstructor, TypedStatement,
TypedTailPattern, TypedUse, visit::Visit as _,
},
build::{Located, Module},
config::PackageConfig,
exhaustiveness::CompiledCase,
line_numbers::LineNumbers,
parse::{extra::ModuleExtra, lexer::str_to_keyword},
strings::to_snake_case,
type_::{
self, FieldMap, ModuleValueConstructor, Type, TypeVar, TypedCallArg, ValueConstructor,
error::{ModuleSuggestion, VariableDeclaration, VariableOrigin},
printer::Printer,
},
};
use im::HashMap;
use itertools::Itertools;
use lsp_types::{CodeAction, CodeActionKind, CodeActionParams, Position, Range, TextEdit, Url};
use vec1::{Vec1, vec1};
use super::{
TextEdits,
compiler::LspProjectCompiler,
edits,
edits::{add_newlines_after_import, get_import_edit, position_of_first_definition_if_import},
engine::{overlaps, within},
files::FileSystemProxy,
reference::{FindVariableReferences, VariableReferenceKind},
src_span_to_lsp_range, url_from_path,
};
/// Incrementally assembles an LSP [`CodeAction`]; finished actions are
/// appended to a reply list with [`CodeActionBuilder::push_to`].
#[derive(Debug)]
pub struct CodeActionBuilder {
    // The action under construction.
    action: CodeAction,
}
impl CodeActionBuilder {
    /// Begins building an action with the given human-readable title; all
    /// other fields start out unset.
    pub fn new(title: &str) -> Self {
        let action = CodeAction {
            title: title.to_string(),
            kind: None,
            diagnostics: None,
            edit: None,
            command: None,
            is_preferred: None,
            disabled: None,
            data: None,
        };
        Self { action }
    }

    /// Sets the kind of this action (quick fix, refactor, ...).
    pub fn kind(mut self, kind: CodeActionKind) -> Self {
        self.action.kind = Some(kind);
        self
    }

    /// Records the text edits to apply to the document at `uri`. Edits
    /// previously recorded for other documents are kept; edits for the same
    /// document are replaced.
    pub fn changes(mut self, uri: Url, edits: Vec<TextEdit>) -> Self {
        let edit = self.action.edit.get_or_insert_with(Default::default);
        let changes = edit.changes.get_or_insert_with(Default::default);
        _ = changes.insert(uri, edits);
        self
    }

    /// Marks whether this is the action an editor should pick when asked to
    /// auto-fix.
    pub fn preferred(mut self, is_preferred: bool) -> Self {
        self.action.is_preferred = Some(is_preferred);
        self
    }

    /// Finishes building and appends the action to `actions`.
    pub fn push_to(self, actions: &mut Vec<CodeAction>) {
        actions.push(self.action);
    }
}
/// A small helper function to get the indentation (number of leading spaces)
/// of the given 0-indexed `line` in `code`.
///
/// # Panics
///
/// Panics if `line` is not a valid line index for `line_numbers`.
fn count_indentation(code: &str, line_numbers: &LineNumbers, line: u32) -> usize {
    let line_start = *line_numbers
        .line_starts
        .get(line as usize)
        .expect("Line number should be valid");
    // Count characters up to the first non-space one; this replaces the
    // previous manual `while chars.next() == Some(' ')` counting loop.
    code[line_start as usize..]
        .chars()
        .take_while(|character| *character == ' ')
        .count()
}
/// Code action to remove literal tuples in case subjects, essentially making
/// the elements of the tuples into the case's subjects.
///
/// The code action is only available for the i'th subject if:
/// - it is a non-empty tuple, and
/// - the i'th pattern (including alternative patterns) is a literal tuple for all clauses.
///
/// # Basic example:
///
/// The following case expression:
///
/// ```gleam
/// case #(1, 2) {
/// #(a, b) -> 0
/// }
/// ```
///
/// Becomes:
///
/// ```gleam
/// case 1, 2 {
/// a, b -> 0
/// }
/// ```
///
/// # Another example:
///
/// The following case expression does not produce any code action
///
/// ```gleam
/// case #(1, 2) {
/// a -> 0 // <- the pattern is not a tuple
/// }
/// ```
pub struct RedundantTupleInCaseSubject<'a> {
    // Accumulates the text edits that strip the tuple syntax.
    edits: TextEdits<'a>,
    // Full module source, used to locate individual tokens (`#`, `(`, `,`, `)`).
    code: &'a EcoString,
    // Extra parse information, used to skip tokens that sit inside comments.
    extra: &'a ModuleExtra,
    // The code-action request, including the cursor range.
    params: &'a CodeActionParams,
    module: &'a ast::TypedModule,
    // Set when the cursor overlaps one of the rewritten case expressions.
    hovered: bool,
}
impl<'ast> ast::visit::Visit<'ast> for RedundantTupleInCaseSubject<'_> {
    // For each case expression, rewrite every subject that is a literal
    // non-empty tuple (and whose patterns allow it) into bare subjects.
    fn visit_typed_expr_case(
        &mut self,
        location: &'ast SrcSpan,
        type_: &'ast Arc<Type>,
        subjects: &'ast [TypedExpr],
        clauses: &'ast [ast::TypedClause],
        compiled_case: &'ast CompiledCase,
    ) {
        for (subject_idx, subject) in subjects.iter().enumerate() {
            let TypedExpr::Tuple {
                location, elements, ..
            } = subject
            else {
                continue;
            };
            // Ignore empty tuple
            if elements.is_empty() {
                continue;
            }
            // We cannot rewrite clauses whose i-th pattern is not a discard or
            // tuples.
            let all_ith_patterns_are_tuples_or_discards = clauses
                .iter()
                .map(|clause| clause.pattern.get(subject_idx))
                .all(|pattern| {
                    matches!(
                        pattern,
                        Some(Pattern::Tuple { .. } | Pattern::Discard { .. })
                    )
                });
            if !all_ith_patterns_are_tuples_or_discards {
                continue;
            }
            // Strip the tuple syntax from the subject and from the matching
            // position of every clause's pattern.
            self.delete_tuple_tokens(*location, elements.last().map(|element| element.location()));
            for clause in clauses {
                match clause.pattern.get(subject_idx) {
                    Some(Pattern::Tuple { location, elements }) => self.delete_tuple_tokens(
                        *location,
                        elements.last().map(|element| element.location()),
                    ),
                    Some(Pattern::Discard { location, .. }) => {
                        self.discard_tuple_items(*location, elements.len())
                    }
                    _ => panic!("safe: we've just checked all patterns must be discards/tuples"),
                }
            }
            // Remember whether the cursor is over this rewritten subject.
            let range = self.edits.src_span_to_lsp_range(*location);
            self.hovered = self.hovered || overlaps(self.params.range, range);
        }
        ast::visit::visit_typed_expr_case(self, location, type_, subjects, clauses, compiled_case)
    }
}
impl<'a> RedundantTupleInCaseSubject<'a> {
    pub fn new(
        module: &'a Module,
        line_numbers: &'a LineNumbers,
        params: &'a CodeActionParams,
    ) -> Self {
        Self {
            edits: TextEdits::new(line_numbers),
            code: &module.code,
            extra: &module.extra,
            params,
            module: &module.ast,
            hovered: false,
        }
    }
    /// Walks the module and returns the "Remove redundant tuples" action if
    /// the cursor is over a rewritable case expression.
    pub fn code_actions(mut self) -> Vec<CodeAction> {
        self.visit_typed_module(self.module);
        if !self.hovered {
            return vec![];
        }
        // Apply the collected edits in source order.
        self.edits.edits.sort_by_key(|edit| edit.range.start);
        let mut actions = vec![];
        CodeActionBuilder::new("Remove redundant tuples")
            .kind(CodeActionKind::REFACTOR_REWRITE)
            .changes(self.params.text_document.uri.clone(), self.edits.edits)
            .preferred(true)
            .push_to(&mut actions);
        actions
    }
    // Deletes the `#`, `(`, `)` and any trailing `,` tokens of a tuple at
    // `location`, leaving its elements as a bare comma-separated sequence.
    fn delete_tuple_tokens(&mut self, location: SrcSpan, last_elem_location: Option<SrcSpan>) {
        let tuple_code = self
            .code
            .get(location.start as usize..location.end as usize)
            .expect("valid span");
        // Delete `#`
        self.edits
            .delete(SrcSpan::new(location.start, location.start + 1));
        // Delete `(`
        let (lparen_offset, _) = tuple_code
            .match_indices('(')
            // Ignore in comments
            .find(|(i, _)| !self.extra.is_within_comment(location.start + *i as u32))
            .expect("`(` not found in tuple");
        self.edits.delete(SrcSpan::new(
            location.start + lparen_offset as u32,
            location.start + lparen_offset as u32 + 1,
        ));
        // Delete trailing `,` (if applicable)
        if let Some(last_elem_location) = last_elem_location {
            // Get the code after the last element until the tuple's `)`
            let code_after_last_elem = self
                .code
                .get(last_elem_location.end as usize..location.end as usize)
                .expect("valid span");
            if let Some((trailing_comma_offset, _)) = code_after_last_elem
                .rmatch_indices(',')
                // Ignore in comments
                .find(|(i, _)| {
                    !self
                        .extra
                        .is_within_comment(last_elem_location.end + *i as u32)
                })
            {
                self.edits.delete(SrcSpan::new(
                    last_elem_location.end + trailing_comma_offset as u32,
                    last_elem_location.end + trailing_comma_offset as u32 + 1,
                ));
            }
        }
        // Delete )
        self.edits
            .delete(SrcSpan::new(location.end - 1, location.end));
    }
    fn discard_tuple_items(&mut self, discard_location: SrcSpan, tuple_items: usize) {
        // Replace the old discard with multiple discard, one for each of the
        // tuple items.
        self.edits.replace(
            discard_location,
            itertools::intersperse(iter::repeat_n("_", tuple_items), ", ").collect(),
        )
    }
}
/// Builder for code action to convert `let assert` into a case expression.
///
pub struct LetAssertToCase<'a> {
    // The module being analysed; its source text is reused verbatim when
    // building the replacement case expression.
    module: &'a Module,
    // The code-action request, including the cursor range.
    params: &'a CodeActionParams,
    // The actions produced so far.
    actions: Vec<CodeAction>,
    edits: TextEdits<'a>,
}
impl<'ast> ast::visit::Visit<'ast> for LetAssertToCase<'_> {
    // Offers a "Convert to case" action for every `let assert` under the cursor.
    fn visit_typed_assignment(&mut self, assignment: &'ast TypedAssignment) {
        let assignment_range = self.edits.src_span_to_lsp_range(assignment.location);
        let assignment_start_range = self.edits.src_span_to_lsp_range(SrcSpan {
            start: assignment.location.start,
            end: assignment.value.location().start,
        });
        self.visit_typed_expr(&assignment.value);
        // Only offer the code action if the cursor is over the statement and
        // to prevent weird behaviour when `let assert` statements are nested,
        // we only check for the code action between the `let` and `=`.
        if !(within(self.params.range, assignment_range)
            && overlaps(self.params.range, assignment_start_range))
        {
            return;
        }
        // This pattern only applies to `let assert`
        let AssignmentKind::Assert { message, .. } = &assignment.kind else {
            return;
        };
        // Get the source code for the tested expression
        let location = assignment.value.location();
        let expr = self
            .module
            .code
            .get(location.start as usize..location.end as usize)
            .expect("Location must be valid");
        // Get the source code for the pattern
        let pattern_location = assignment.pattern.location();
        let pattern = self
            .module
            .code
            .get(pattern_location.start as usize..pattern_location.end as usize)
            .expect("Location must be valid");
        // Get the source code of the `as` message, if one was given
        let message = message.as_ref().map(|message| {
            let location = message.location();
            self.module
                .code
                .get(location.start as usize..location.end as usize)
                .expect("Location must be valid")
        });
        let range = self.edits.src_span_to_lsp_range(assignment.location);
        // Figure out which variables are assigned in the pattern
        let variables = PatternVariableFinder::find_variables_in_pattern(&assignment.pattern);
        // One variable binds directly; several bind through a tuple.
        let assigned = match variables.len() {
            0 => "_",
            1 => variables.first().expect("Variables is length one"),
            _ => &format!("#({})", variables.join(", ")),
        };
        let mut new_text = format!("let {assigned} = ");
        let panic_message = if let Some(message) = message {
            &format!("panic as {message}")
        } else {
            "panic"
        };
        let clauses = vec![
            // The existing pattern
            CaseClause {
                pattern,
                // `_` is not a valid expression, so if we are not
                // binding any variables in the pattern, we simply return Nil.
                expression: if assigned == "_" { "Nil" } else { assigned },
            },
            // A catch-all clause that panics, mirroring `let assert`
            CaseClause {
                pattern: "_",
                expression: panic_message,
            },
        ];
        print_case_expression(range.start.character as usize, expr, clauses, &mut new_text);
        let uri = &self.params.text_document.uri;
        CodeActionBuilder::new("Convert to case")
            .kind(CodeActionKind::REFACTOR_REWRITE)
            .changes(uri.clone(), vec![TextEdit { range, new_text }])
            .preferred(false)
            .push_to(&mut self.actions);
    }
}
impl<'a> LetAssertToCase<'a> {
    /// Creates a builder for the given module and code action request.
    pub fn new(
        module: &'a Module,
        line_numbers: &'a LineNumbers,
        params: &'a CodeActionParams,
    ) -> Self {
        let edits = TextEdits::new(line_numbers);
        Self {
            module,
            params,
            actions: vec![],
            edits,
        }
    }

    /// Walks the module's typed AST and returns any code actions produced.
    pub fn code_actions(mut self) -> Vec<CodeAction> {
        let module = self.module;
        self.visit_typed_module(&module.ast);
        self.actions
    }
}
/// AST visitor that collects the names of all variables bound by a pattern.
struct PatternVariableFinder {
    // Names of the variables bound so far, in the order they were found.
    pattern_variables: Vec<EcoString>,
}
impl PatternVariableFinder {
fn new() -> Self {
Self {
pattern_variables: Vec::new(),
}
}
fn find_variables_in_pattern(pattern: &TypedPattern) -> Vec<EcoString> {
let mut finder = Self::new();
finder.visit_typed_pattern(pattern);
finder.pattern_variables
}
}
impl<'ast> ast::visit::Visit<'ast> for PatternVariableFinder {
    /// A plain variable pattern binds exactly one name.
    fn visit_typed_pattern_variable(
        &mut self,
        _location: &'ast SrcSpan,
        name: &'ast EcoString,
        _type: &'ast Arc<Type>,
        _origin: &'ast VariableOrigin,
    ) {
        self.pattern_variables.push(name.clone());
    }
    /// An `as` pattern binds its alias, then we keep visiting the inner
    /// pattern via the default traversal so nested bindings are found too.
    fn visit_typed_pattern_assign(
        &mut self,
        location: &'ast SrcSpan,
        name: &'ast EcoString,
        pattern: &'ast TypedPattern,
    ) {
        self.pattern_variables.push(name.clone());
        ast::visit::visit_typed_pattern_assign(self, location, name, pattern);
    }
    /// A string prefix pattern (`"a" as x <> rest`) can bind a name on the
    /// left (the prefix alias) and/or on the right (the remainder).
    fn visit_typed_pattern_string_prefix(
        &mut self,
        _location: &'ast SrcSpan,
        _left_location: &'ast SrcSpan,
        left_side_assignment: &'ast Option<(EcoString, SrcSpan)>,
        _right_location: &'ast SrcSpan,
        _left_side_string: &'ast EcoString,
        right_side_assignment: &'ast AssignName,
    ) {
        if let Some((name, _)) = left_side_assignment {
            self.pattern_variables.push(name.clone());
        }
        if let AssignName::Variable(name) = right_side_assignment {
            self.pattern_variables.push(name.clone());
        }
    }
}
/// Offers a quickfix that rewrites an inexhaustive `let` assignment into a
/// `case` expression, adding a `todo` clause for every missing pattern
/// reported by the type checker.
pub fn code_action_inexhaustive_let_to_case(
    module: &Module,
    line_numbers: &LineNumbers,
    params: &CodeActionParams,
    error: &Option<Error>,
    actions: &mut Vec<CodeAction>,
) {
    let Some(Error::Type { errors, .. }) = error else {
        return;
    };
    // Collect the location and missing patterns of every inexhaustive `let`.
    let inexhaustive_assignments = errors
        .iter()
        .filter_map(|error| {
            if let type_::Error::InexhaustiveLetAssignment { location, missing } = error {
                Some((*location, missing))
            } else {
                None
            }
        })
        .collect_vec();
    if inexhaustive_assignments.is_empty() {
        return;
    }
    for (location, missing) in inexhaustive_assignments {
        let mut text_edits = TextEdits::new(line_numbers);
        let range = text_edits.src_span_to_lsp_range(location);
        if !overlaps(params.range, range) {
            // This assignment isn't under the cursor, but a later one in the
            // list still might be, so keep scanning instead of returning from
            // the whole function. (Previously this was `return`, which
            // silently skipped any remaining inexhaustive assignments.)
            continue;
        }
        let Some(Located::Statement(TypedStatement::Assignment(assignment))) =
            module.find_node(location.start)
        else {
            continue;
        };
        // Only plain `let` assignments qualify; `let assert` has its own
        // dedicated code action.
        let TypedAssignment {
            value,
            pattern,
            kind: AssignmentKind::Let,
            location,
            compiled_case: _,
            annotation: _,
        } = assignment.as_ref()
        else {
            continue;
        };
        // Get the source code for the tested expression
        let value_location = value.location();
        let expr = module
            .code
            .get(value_location.start as usize..value_location.end as usize)
            .expect("Location must be valid");
        // Get the source code for the pattern
        let pattern_location = pattern.location();
        let pattern_code = module
            .code
            .get(pattern_location.start as usize..pattern_location.end as usize)
            .expect("Location must be valid");
        let range = text_edits.src_span_to_lsp_range(*location);
        // Figure out which variables are assigned in the pattern
        let variables = PatternVariableFinder::find_variables_in_pattern(pattern);
        let assigned = match variables.len() {
            0 => "_",
            1 => variables.first().expect("Variables is length one"),
            _ => &format!("#({})", variables.join(", ")),
        };
        let mut new_text = format!("let {assigned} = ");
        print_case_expression(
            range.start.character as usize,
            expr,
            // First the existing pattern, then a `todo` clause for each
            // pattern the exhaustiveness checker reported as missing.
            iter::once(CaseClause {
                pattern: pattern_code,
                expression: if assigned == "_" { "Nil" } else { assigned },
            })
            .chain(missing.iter().map(|pattern| CaseClause {
                pattern,
                expression: "todo",
            }))
            .collect(),
            &mut new_text,
        );
        let uri = &params.text_document.uri;
        text_edits.replace(*location, new_text);
        CodeActionBuilder::new("Convert to case")
            .kind(CodeActionKind::QUICKFIX)
            .changes(uri.clone(), text_edits.edits)
            .preferred(true)
            .push_to(actions);
    }
}
/// One clause of a generated `case` expression: the pattern to match and the
/// expression to evaluate when it matches, both as already-rendered source.
struct CaseClause<'a> {
    pattern: &'a str,
    expression: &'a str,
}

/// Appends a formatted `case` expression over `subject` to `buffer`, with the
/// closing brace aligned at `indent_size` columns.
fn print_case_expression(
    indent_size: usize,
    subject: &str,
    clauses: Vec<CaseClause<'_>>,
    buffer: &mut String,
) {
    let base_indent = " ".repeat(indent_size);
    // Opening: `case <subject> {`
    buffer.push_str("case ");
    buffer.push_str(subject);
    buffer.push_str(" {");
    // Each clause goes on its own line, one level deeper than the `case`.
    for CaseClause {
        pattern,
        expression,
    } in &clauses
    {
        buffer.push('\n');
        buffer.push_str(&base_indent);
        buffer.push_str("  ");
        buffer.push_str(pattern);
        buffer.push_str(" -> ");
        buffer.push_str(expression);
    }
    // With no clauses the braces stay on one line (`case x {}`); otherwise
    // the closing brace gets its own line at the base indentation.
    match clauses.is_empty() {
        true => buffer.push('}'),
        false => {
            buffer.push('\n');
            buffer.push_str(&base_indent);
            buffer.push('}');
        }
    }
}
/// Builder for code action to apply the label shorthand syntax on arguments
/// where the label has the same name as the variable.
///
pub struct UseLabelShorthandSyntax<'a> {
    /// The module the code action request was issued against.
    module: &'a Module,
    /// The LSP request parameters, including the cursor position/selection.
    params: &'a CodeActionParams,
    /// Collects the deletions that turn `label: label` into `label:`.
    edits: TextEdits<'a>,
}
impl<'a> UseLabelShorthandSyntax<'a> {
    /// Creates a builder for the given module and code action request.
    pub fn new(
        module: &'a Module,
        line_numbers: &'a LineNumbers,
        params: &'a CodeActionParams,
    ) -> Self {
        let edits = TextEdits::new(line_numbers);
        Self {
            module,
            params,
            edits,
        }
    }

    /// Walks the module and, if any shorthand opportunity was found, returns
    /// a single refactoring action containing all the collected edits.
    pub fn code_actions(mut self) -> Vec<CodeAction> {
        let module = self.module;
        self.visit_typed_module(&module.ast);
        match self.edits.edits.is_empty() {
            true => vec![],
            false => {
                let mut action = Vec::with_capacity(1);
                CodeActionBuilder::new("Use label shorthand syntax")
                    .kind(CodeActionKind::REFACTOR)
                    .changes(self.params.text_document.uri.clone(), self.edits.edits)
                    .preferred(false)
                    .push_to(&mut action);
                action
            }
        }
    }
}
impl<'ast> ast::visit::Visit<'ast> for UseLabelShorthandSyntax<'_> {
    /// Deletes the redundant value of a call argument written `label: label`
    /// when the argument is under the cursor, leaving the shorthand `label:`.
    fn visit_typed_call_arg(&mut self, arg: &'ast TypedCallArg) {
        let arg_range = self.edits.src_span_to_lsp_range(arg.location);
        let is_selected = overlaps(arg_range, self.params.range);
        if is_selected && !arg.uses_label_shorthand() {
            if let CallArg {
                label: Some(label),
                value: TypedExpr::Var { name, location, .. },
                ..
            } = arg
            {
                if label == name {
                    self.edits.delete(*location)
                }
            }
        }
        ast::visit::visit_typed_call_arg(self, arg)
    }

    /// Same as above, but for labelled arguments inside patterns.
    fn visit_typed_pattern_call_arg(&mut self, arg: &'ast CallArg<TypedPattern>) {
        let arg_range = self.edits.src_span_to_lsp_range(arg.location);
        let is_selected = overlaps(arg_range, self.params.range);
        if is_selected && !arg.uses_label_shorthand() {
            if let CallArg {
                label: Some(label),
                value: TypedPattern::Variable { name, location, .. },
                ..
            } = arg
            {
                if label == name {
                    self.edits.delete(*location)
                }
            }
        }
        ast::visit::visit_typed_pattern_call_arg(self, arg)
    }
}
/// Builder for code action to apply the fill in the missing labelled arguments
/// of the selected function call.
///
pub struct FillInMissingLabelledArgs<'a> {
    /// The module the code action request was issued against.
    module: &'a Module,
    /// The LSP request parameters, including the cursor position/selection.
    params: &'a CodeActionParams,
    /// Accumulates the insertion that fills in the missing labels.
    edits: TextEdits<'a>,
    /// When visiting the callback of a `use` expression, the location of the
    /// function on its right-hand side; used instead of the call's own span.
    use_right_hand_side_location: Option<SrcSpan>,
    /// The innermost call (value or pattern) containing the cursor, if any.
    selected_call: Option<SelectedCall<'a>>,
}
/// The call the cursor is currently inside, with everything needed to work
/// out which labelled arguments are missing.
struct SelectedCall<'a> {
    // Span of the call expression (or of the `use` right-hand side).
    location: SrcSpan,
    // Field map of the called function/constructor, mapping labels to
    // argument positions.
    field_map: &'a FieldMap,
    // The arguments already written, with their values erased.
    arguments: Vec<CallArg<()>>,
    // Whether the call is a value expression or a pattern.
    kind: SelectedCallKind,
}
/// Distinguishes a call in expression position from a constructor pattern,
/// since the filled-in labels differ (`label: todo` vs just `label:`).
enum SelectedCallKind {
    Value,
    Pattern,
}
impl<'a> FillInMissingLabelledArgs<'a> {
    /// Creates a builder for the given module and code action request.
    pub fn new(
        module: &'a Module,
        line_numbers: &'a LineNumbers,
        params: &'a CodeActionParams,
    ) -> Self {
        Self {
            module,
            params,
            edits: TextEdits::new(line_numbers),
            use_right_hand_side_location: None,
            selected_call: None,
        }
    }
    /// Walks the module looking for a call under the cursor and, if it has
    /// missing labelled arguments, returns a "Fill labels" quickfix.
    pub fn code_actions(mut self) -> Vec<CodeAction> {
        self.visit_typed_module(&self.module.ast);
        if let Some(SelectedCall {
            location: call_location,
            field_map,
            arguments,
            kind,
        }) = self.selected_call
        {
            let is_use_call = arguments.iter().any(|arg| arg.is_use_implicit_callback());
            let missing_labels = field_map.missing_labels(&arguments);
            // If we're applying the code action to a use call, then we know
            // that the last missing argument is going to be implicitly inserted
            // by the compiler, so in that case we don't want to also add that
            // last label to the completions.
            let missing_labels = missing_labels.iter().peekable();
            let mut missing_labels = if is_use_call {
                missing_labels.dropping_back(1)
            } else {
                missing_labels
            };
            // If we couldn't find any missing label to insert we just return.
            if missing_labels.peek().is_none() {
                return vec![];
            }
            // A pattern could have been written with no parentheses at all!
            // So we need to check for the last character to see if parentheses
            // are there or not before filling the arguments in
            let has_parentheses = ")"
                == code_at(
                    self.module,
                    SrcSpan::new(call_location.end - 1, call_location.end),
                );
            let label_insertion_start = if has_parentheses {
                // If it ends with a parentheses we'll need to start inserting
                // right before the closing one...
                call_location.end - 1
            } else {
                // ...otherwise we just append the result
                call_location.end
            };
            // Now we need to figure out if there's a comma at the end of the
            // arguments list:
            //
            //     call(one, |)
            //             ^ Cursor here, with a comma behind
            //
            //     call(one|)
            //             ^ Cursor here, no comma behind, we'll have to add one!
            //
            let has_comma_after_last_argument =
                if let Some(last_arg) = arguments.iter().rfind(|arg| !arg.is_implicit()) {
                    self.module
                        .code
                        .get(last_arg.location.end as usize..=label_insertion_start as usize)
                        .is_some_and(|text| text.contains(','))
                } else {
                    false
                };
            // In expression position each label gets a `todo` placeholder
            // value; in pattern position the bare shorthand `label:` is valid.
            let format_label = match kind {
                SelectedCallKind::Value => |label| format!("{label}: todo"),
                SelectedCallKind::Pattern => |label| format!("{label}:"),
            };
            let labels_list = missing_labels.map(format_label).join(", ");
            let has_no_explicit_arguments = arguments
                .iter()
                .filter(|arg| !arg.is_implicit())
                .peekable()
                .peek()
                .is_none();
            // Prepend a separating comma unless the argument list is empty or
            // already ends with a trailing comma.
            let labels_list = if has_no_explicit_arguments || has_comma_after_last_argument {
                labels_list
            } else {
                format!(", {labels_list}")
            };
            let edit = if has_parentheses {
                labels_list
            } else {
                // If the variant whose arguments we're filling in was written
                // with no parentheses we need to add those as well to make it a
                // valid constructor.
                format!("({labels_list})")
            };
            self.edits.insert(label_insertion_start, edit);
            let mut action = Vec::with_capacity(1);
            CodeActionBuilder::new("Fill labels")
                .kind(CodeActionKind::QUICKFIX)
                .changes(self.params.text_document.uri.clone(), self.edits.edits)
                .preferred(true)
                .push_to(&mut action);
            return action;
        }
        vec![]
    }
    /// Copies an argument's metadata, discarding its value; the field map
    /// only needs labels and positions to compute which labels are missing.
    fn empty_argument<A>(argument: &CallArg<A>) -> CallArg<()> {
        CallArg {
            label: argument.label.clone(),
            location: argument.location,
            value: (),
            implicit: argument.implicit,
        }
    }
}
impl<'ast> ast::visit::Visit<'ast> for FillInMissingLabelledArgs<'ast> {
    fn visit_typed_use(&mut self, use_: &'ast TypedUse) {
        // If we're adding labels to a use call the correct location of the
        // function we need to add labels to is `use_right_hand_side_location`.
        // So we store it for when we're typing the use call.
        // The previous value is saved and restored so nested `use`s behave.
        let previous = self.use_right_hand_side_location;
        self.use_right_hand_side_location = Some(use_.right_hand_side_location);
        ast::visit::visit_typed_use(self, use_);
        self.use_right_hand_side_location = previous;
    }
    /// Records a call expression containing the cursor as the selected call.
    fn visit_typed_expr_call(
        &mut self,
        location: &'ast SrcSpan,
        type_: &'ast Arc<Type>,
        fun: &'ast TypedExpr,
        arguments: &'ast [TypedCallArg],
    ) {
        let call_range = self.edits.src_span_to_lsp_range(*location);
        if !within(self.params.range, call_range) {
            return;
        }
        if let Some(field_map) = fun.field_map() {
            // For `use` calls the insertion point is the right-hand side of
            // the `use`, not the call expression itself.
            let location = self.use_right_hand_side_location.unwrap_or(*location);
            self.selected_call = Some(SelectedCall {
                location,
                field_map,
                arguments: arguments.iter().map(Self::empty_argument).collect(),
                kind: SelectedCallKind::Value,
            })
        }
        // We only want to take into account the innermost function call
        // containing the current selection so we can't stop at the first call
        // we find (the outermost one) and have to keep traversing it in case
        // we're inside a nested call.
        let previous = self.use_right_hand_side_location;
        self.use_right_hand_side_location = None;
        ast::visit::visit_typed_expr_call(self, location, type_, fun, arguments);
        self.use_right_hand_side_location = previous;
    }
    /// Records a constructor pattern containing the cursor as the selected
    /// call; nested patterns visited afterwards may overwrite it, so the
    /// innermost one wins here too.
    fn visit_typed_pattern_constructor(
        &mut self,
        location: &'ast SrcSpan,
        name_location: &'ast SrcSpan,
        name: &'ast EcoString,
        arguments: &'ast Vec<CallArg<TypedPattern>>,
        module: &'ast Option<(EcoString, SrcSpan)>,
        constructor: &'ast Inferred<type_::PatternConstructor>,
        spread: &'ast Option<SrcSpan>,
        type_: &'ast Arc<Type>,
    ) {
        let call_range = self.edits.src_span_to_lsp_range(*location);
        if !within(self.params.range, call_range) {
            return;
        }
        if let Some(field_map) = constructor.field_map() {
            self.selected_call = Some(SelectedCall {
                location: *location,
                field_map,
                arguments: arguments.iter().map(Self::empty_argument).collect(),
                kind: SelectedCallKind::Pattern,
            })
        }
        ast::visit::visit_typed_pattern_constructor(
            self,
            location,
            name_location,
            name,
            arguments,
            module,
            constructor,
            spread,
            type_,
        );
    }
}
/// An unknown-module error site together with the fixes we can offer for it.
struct MissingImport {
    // Span of the unknown module name in the source.
    location: SrcSpan,
    suggestions: Vec<ImportSuggestion>,
}
/// A single candidate fix for an unknown module name.
struct ImportSuggestion {
    // The name to replace with, if the user made a typo
    name: EcoString,
    // The optional module to import, if suggesting an importable module
    import: Option<EcoString>,
}
/// Offers quickfixes for `UnknownModule` errors under the cursor: either
/// import a module with a matching name, or replace the written name with a
/// similarly named module (a likely typo fix).
pub fn code_action_import_module(
    module: &Module,
    line_numbers: &LineNumbers,
    params: &CodeActionParams,
    error: &Option<Error>,
    actions: &mut Vec<CodeAction>,
) {
    let uri = &params.text_document.uri;
    let Some(Error::Type { errors, .. }) = error else {
        return;
    };
    // Gather every unknown-module error together with its suggested fixes.
    let missing_imports = errors
        .into_iter()
        .filter_map(|e| {
            if let type_::Error::UnknownModule {
                location,
                suggestions,
                ..
            } = e
            {
                suggest_imports(*location, suggestions)
            } else {
                None
            }
        })
        .collect_vec();
    if missing_imports.is_empty() {
        return;
    }
    // New import statements are inserted where the module's first import is,
    // or at the very top if the module has no imports yet.
    let first_import_pos = position_of_first_definition_if_import(module, line_numbers);
    let first_is_import = first_import_pos.is_some();
    let import_location = first_import_pos.unwrap_or_default();
    let after_import_newlines =
        add_newlines_after_import(import_location, first_is_import, line_numbers, &module.code);
    for missing_import in missing_imports {
        let range = src_span_to_lsp_range(missing_import.location, line_numbers);
        // Only offer fixes for the error the cursor is on.
        if !overlaps(params.range, range) {
            continue;
        }
        for suggestion in missing_import.suggestions {
            // Replace the written module name with the suggested one.
            let mut edits = vec![TextEdit {
                range,
                new_text: suggestion.name.to_string(),
            }];
            // When the suggestion is an importable module, also insert the
            // corresponding import statement.
            if let Some(import) = &suggestion.import {
                edits.push(get_import_edit(
                    import_location,
                    import,
                    &after_import_newlines,
                ))
            };
            let title = match &suggestion.import {
                Some(import) => &format!("Import `{import}`"),
                _ => &format!("Did you mean `{}`", suggestion.name),
            };
            CodeActionBuilder::new(title)
                .kind(CodeActionKind::QUICKFIX)
                .changes(uri.clone(), edits)
                .preferred(true)
                .push_to(actions);
        }
    }
use debug_ignore::DebugIgnore;
use ecow::EcoString;
use itertools::Itertools;
use gleam_core::{
Error, Result, Warning,
analyse::TargetSupport,
build::{self, Mode, Module, NullTelemetry, Outcome, ProjectCompiler},
config::PackageConfig,
io::{BeamCompiler, CommandExecutor, FileSystemReader, FileSystemWriter, Stdio},
line_numbers::LineNumbers,
manifest::Manifest,
paths::ProjectPaths,
type_::ModuleInterface,
warning::VectorWarningEmitterIO,
};
use std::{collections::HashMap, rc::Rc};
use camino::Utf8PathBuf;
use super::{LockGuard, Locker};
/// A wrapper around the project compiler which makes it possible to repeatedly
/// recompile the top level package, reusing the information about the already
/// compiled dependency packages.
///
#[derive(Debug)]
pub struct LspProjectCompiler<IO> {
    pub project_compiler: ProjectCompiler<IO>,
    /// Information on compiled modules.
    pub modules: HashMap<EcoString, Module>,
    /// Per-module source path and line-number data, keyed by module name.
    pub sources: HashMap<EcoString, ModuleSourceInformation>,
    /// The storage for the warning emitter.
    pub warnings: Rc<VectorWarningEmitterIO>,
    /// A lock to ensure that multiple instances of the LSP don't try and use
    /// build directory at the same time.
    pub locker: DebugIgnore<Box<dyn Locker>>,
}
impl<IO> LspProjectCompiler<IO>
where
    IO: CommandExecutor + FileSystemWriter + FileSystemReader + BeamCompiler + Clone,
{
    /// Creates a compiler for the project described by `config`/`manifest`,
    /// clearing the top-level package's build cache so the LSP always gets a
    /// fresh, fully-typed compile on its first run.
    pub fn new(
        manifest: Manifest,
        config: PackageConfig,
        paths: ProjectPaths,
        io: IO,
        locker: Box<dyn Locker>,
    ) -> Result<Self> {
        let target = config.target;
        let name = config.name.clone();
        let warnings = Rc::new(VectorWarningEmitterIO::default());
        // The build caches do not contain all the information we need in the
        // LSP (e.g. the typed AST) so delete the caches for the top level
        // package before we run for the first time.
        {
            let _guard: LockGuard = locker.lock_for_build()?;
            let path = paths.build_directory_for_package(Mode::Lsp, target, &name);
            io.delete_directory(&path)?;
        }
        let options = build::Options {
            warnings_as_errors: false,
            mode: Mode::Lsp,
            target: None,
            codegen: build::Codegen::None,
            compile: build::Compile::All,
            root_target_support: TargetSupport::Enforced,
            no_print_progress: false,
        };
        let mut project_compiler = ProjectCompiler::new(
            config,
            options,
            manifest.packages,
            &NullTelemetry,
            warnings.clone(),
            paths,
            io,
        );
        // To avoid the Erlang compiler printing to stdout (and thus
        // violating LSP which is currently using stdout) we silence it.
        project_compiler.subprocess_stdio = Stdio::Null;
        Ok(Self {
            locker: locker.into(),
            warnings,
            project_compiler,
            modules: HashMap::new(),
            sources: HashMap::new(),
        })
    }
    /// Compiles the project, returning the paths of every module that was
    /// (re)compiled so the caller knows which diagnostics to reset.
    pub fn compile(&mut self) -> Outcome<Vec<Utf8PathBuf>, Error> {
        // Lock the build directory to ensure we are the only one compiling
        let _lock_guard: LockGuard = match self.locker.lock_for_build() {
            Ok(it) => it,
            Err(err) => return err.into(),
        };
        // Verify that the build directory was created using the same version of
        // Gleam as we are running. If it is not then we discard the build
        // directory as the cache files may be in a different format.
        if let Err(e) = self.project_compiler.check_gleam_version() {
            return e.into();
        }
        self.project_compiler.reset_state_for_new_compile_run();
        let compiled_dependencies = match self.project_compiler.compile_dependencies() {
            Ok(it) => it,
            Err(err) => return err.into(),
        };
        // Store the compiled dependency module information
        for module in &compiled_dependencies {
            let path = module.input_path.as_os_str().to_string_lossy().to_string();
            // strip canonicalised windows prefix
            #[cfg(target_family = "windows")]
            let path = path
                .strip_prefix(r"\\?\")
                .map(|s| s.to_string())
                .unwrap_or(path);
            let line_numbers = LineNumbers::new(&module.code);
            let source = ModuleSourceInformation { path, line_numbers };
            _ = self.sources.insert(module.name.clone(), source);
        }
        // Since cached modules are not recompiled we need to manually add them
        for (name, module) in self.project_compiler.get_importable_modules() {
            // If we already have the source for an importable module it means
            // that we already have all the information we are adding here, so
            // we can skip past it to avoid doing extra work for no gain.
            if self.sources.contains_key(name) || name == "gleam" {
                continue;
            }
            // Create the source information
            let path = module.src_path.to_string();
            // strip canonicalised windows prefix
            #[cfg(target_family = "windows")]
            let path = path
                .strip_prefix(r"\\?\")
                .map(|s| s.to_string())
                .unwrap_or(path);
            let line_numbers = module.line_numbers.clone();
            let source = ModuleSourceInformation { path, line_numbers };
            _ = self.sources.insert(name.clone(), source);
        }
        // Warnings from dependencies are not fixable by the programmer so
        // we don't bother them with diagnostics for them.
        let _ = self.take_warnings();
        // Compile the root package, that is, the one that the programmer is
        // working in.
        let (modules, error) = match self.project_compiler.compile_root_package() {
            Outcome::Ok(package) => (package.modules, None),
            Outcome::PartialFailure(package, error) => (package.modules, Some(error)),
            Outcome::TotalFailure(error) => (vec![], Some(error)),
        };
        // Record the compiled dependency modules
        let mut compiled_modules = compiled_dependencies
            .into_iter()
            .map(|m| m.input_path)
            .collect_vec();
        // Store the compiled module information
        for module in modules {
            let path = module.input_path.as_os_str().to_string_lossy().to_string();
            let line_numbers = LineNumbers::new(&module.code);
            let source = ModuleSourceInformation { path, line_numbers };
            // Record that this one has been compiled. This is returned by this
            // function and is used to determine what diagnostics to reset.
            compiled_modules.push(module.input_path.clone());
            // Register information for the LS to use
            _ = self.sources.insert(module.name.clone(), source);
            _ = self.modules.insert(module.name.clone(), module);
        }
        match error {
            None => Outcome::Ok(compiled_modules),
            Some(error) => Outcome::PartialFailure(compiled_modules, error),
        }
    }
}
impl<IO> LspProjectCompiler<IO> {
    /// Drains and returns all warnings emitted since the last call.
    pub fn take_warnings(&mut self) -> Vec<Warning> {
        self.warnings.take()
    }

    /// Looks up the type interface of a module by name, if it is importable.
    pub fn get_module_interface(&self, name: &str) -> Option<&ModuleInterface> {
        let importable = self.project_compiler.get_importable_modules();
        importable.get(name)
    }

    /// Looks up the source path and line-number data for a module by name.
    pub fn get_source(&self, module: &str) -> Option<&ModuleSourceInformation> {
        self.sources.get(module)
    }
}
/// Per-module data the language server needs when answering requests about
/// a module's source file.
#[derive(Debug)]
pub struct ModuleSourceInformation {
    /// The path to the source file from within the project root
    pub path: String,
    /// Useful for converting from Gleam's byte index offsets to the LSP line
    /// and column number positions.
    pub line_numbers: LineNumbers,
}
| rust | Apache-2.0 | f424547f02e621f1c5f28749786e05eda7feb098 | 2026-01-04T15:40:22.554517Z | false |
use std::collections::HashMap;
use lsp_types::{
PartialResultParams, Position, Range, ReferenceContext, ReferenceParams,
TextDocumentPositionParams, WorkDoneProgressParams,
};
use super::{TestProject, find_position_of};
/// Runs a find-references request at `position` and groups the resulting
/// ranges by module name. Returns `None` when the engine finds nothing.
fn find_references(
    tester: &TestProject<'_>,
    position: Position,
) -> Option<HashMap<String, Vec<Range>>> {
    let locations = tester.at(position, |engine, params, _| {
        let reference_params = ReferenceParams {
            text_document_position: TextDocumentPositionParams {
                text_document: params.text_document,
                position,
            },
            work_done_progress_params: WorkDoneProgressParams::default(),
            partial_result_params: PartialResultParams::default(),
            context: ReferenceContext {
                include_declaration: true,
            },
        };
        engine.find_references(reference_params).result.unwrap()
    })?;

    // Group the located ranges by the module they belong to.
    let mut references = HashMap::<String, Vec<Range>>::new();
    for location in locations {
        let module_name = tester
            .module_name_from_url(&location.uri)
            .expect("Valid uri");
        references.entry(module_name).or_default().push(location.range);
    }
    Some(references)
}
/// Renders `code` with an underline line beneath every referenced span:
/// `↑` marks `position` itself and `▔` marks each referenced character.
fn show_references(code: &str, position: Option<Position>, ranges: &[Range]) -> String {
    let mut output = String::new();
    for (row, line) in code.lines().enumerate() {
        let row = row as u32;
        // Only single-line ranges are rendered; take the first one that both
        // starts and ends on this line.
        let range = ranges
            .iter()
            .find(|range| range.start.line == row && range.end.line == row);
        let mut markers = String::new();
        let mut has_markers = false;
        if let Some(Range { start, end }) = range {
            for (column, _) in line.chars().enumerate() {
                let here = Position::new(row, column as u32);
                if Some(here) == position {
                    has_markers = true;
                    markers.push('↑');
                } else if *start <= here && here < *end {
                    has_markers = true;
                    markers.push('▔');
                } else {
                    markers.push(' ');
                }
            }
        }
        output.push_str(line);
        // Only emit the marker line when it contains at least one marker.
        if has_markers {
            output.push('\n');
            output.push_str(&markers);
        }
        output.push('\n');
    }
    output
}
/// Snapshot-asserts the references found at `$position`. The first two arms
/// build a `TestProject` (optionally with an extra module) and delegate to
/// the last arm, which renders every module with its references underlined.
macro_rules! assert_references {
    ($code:literal, $position:expr $(,)?) => {
        assert_references!(TestProject::for_source($code), $position);
    };
    (($module_name:literal, $module_src:literal), $code:literal, $position:expr $(,)?) => {
        assert_references!(
            TestProject::for_source($code).add_module($module_name, $module_src),
            $position
        );
    };
    ($project:expr, $position:expr $(,)?) => {
        let project = $project;
        let src = project.src;
        let position = $position.find_position(src);
        let result = find_references(&project, position).expect("References not found");
        let mut output = String::new();
        // Render each extra module first, then the main `app` module with the
        // cursor position marked.
        for (name, src) in project.root_package_modules.iter() {
            output.push_str(&format!(
                "-- {name}.gleam\n{}\n\n",
                show_references(src, None, result.get(*name).unwrap_or(&Vec::new()))
            ));
        }
        output.push_str(&format!(
            "-- app.gleam\n{}",
            show_references(
                src,
                Some(position),
                result.get("app").unwrap_or(&Vec::new())
            )
        ));
        insta::assert_snapshot!(insta::internals::AutoName, output, src);
    };
}
/// Asserts that the engine reports no references at `$position` (i.e. the
/// request returns `None`, as for keywords and other non-referenceable spans).
macro_rules! assert_no_references {
    ($code:literal, $position:expr $(,)?) => {
        let project = TestProject::for_source($code);
        assert_no_references!(&project, $position);
    };
    ($project:expr, $position:expr $(,)?) => {
        let src = $project.src;
        let position = $position.find_position(src);
        let result = find_references($project, position);
        assert_eq!(result, None);
    };
}
// ---- References for local variables ----
#[test]
fn references_for_local_variable() {
    assert_references!(
        "
pub fn main() {
  let wibble = 10
  let wobble = wibble + 1
  wibble + wobble
}
",
        find_position_of("wibble").nth_occurrence(2),
    );
}
#[test]
fn references_for_local_variable_from_definition() {
    assert_references!(
        "
pub fn main() {
  let wibble = 10
  let wobble = wibble + 1
  wibble + wobble
}
",
        find_position_of("wibble"),
    );
}
// ---- References for functions (private and public, from definition and use) ----
#[test]
fn references_for_private_function() {
    assert_references!(
        "
fn wibble() {
  wibble()
}
pub fn main() {
  let _ = wibble()
  wibble() + 4
}
fn wobble() {
  wibble() || wobble()
}
",
        find_position_of("wibble"),
    );
}
#[test]
fn references_for_private_function_from_reference() {
    assert_references!(
        "
fn wibble() {
  wibble()
}
pub fn main() {
  let _ = wibble()
  wibble() + 4
}
fn wobble() {
  wibble() || wobble()
}
",
        find_position_of("wibble").nth_occurrence(2),
    );
}
#[test]
fn references_for_public_function() {
    assert_references!(
        (
            "mod",
            "
import app.{wibble}
fn wobble() {
  app.wibble()
}
fn other() {
  wibble()
}
"
        ),
        "
pub fn wibble() {
  wibble()
}
",
        find_position_of("wibble").nth_occurrence(2),
    );
}
#[test]
fn references_for_function_from_qualified_reference() {
    assert_references!(
        (
            "mod",
            "
pub fn wibble() {
  wibble()
}
"
        ),
        "
import mod
pub fn main() {
  let value = mod.wibble()
  mod.wibble()
  value
}
",
        find_position_of("wibble"),
    );
}
#[test]
fn references_for_function_from_unqualified_reference() {
    assert_references!(
        (
            "mod",
            "
pub fn wibble() {
  wibble()
}
"
        ),
        "
import mod.{wibble}
pub fn main() {
  let value = wibble()
  mod.wibble()
  value
}
",
        find_position_of("wibble()"),
    );
}
// ---- References for constants ----
#[test]
fn references_for_private_constant() {
    assert_references!(
        "
const wibble = 10
pub fn main() {
  let _ = wibble
  wibble + 4
}
fn wobble() {
  wibble + wobble()
}
",
        find_position_of("wibble"),
    );
}
#[test]
fn references_for_private_constant_from_reference() {
    assert_references!(
        "
const wibble = 10
pub fn main() {
  let _ = wibble
  wibble + 4
}
fn wobble() {
  wibble + wobble()
}
",
        find_position_of("wibble").nth_occurrence(2),
    );
}
#[test]
fn references_for_public_constant() {
    assert_references!(
        (
            "mod",
            "
import app.{wibble}
fn wobble() {
  app.wibble
}
fn other() {
  wibble
}
"
        ),
        "
pub const wibble = 10
pub fn main() {
  wibble
}
",
        find_position_of("wibble").nth_occurrence(2),
    );
}
#[test]
fn references_for_constant_from_qualified_reference() {
    assert_references!(
        (
            "mod",
            "
pub const wibble = 10
fn wobble() {
  wibble
}
"
        ),
        "
import mod
pub fn main() {
  let value = mod.wibble
  mod.wibble + value
}
",
        find_position_of("wibble"),
    );
}
#[test]
fn references_for_constant_from_unqualified_reference() {
    assert_references!(
        (
            "mod",
            "
pub const wibble = 10
fn wobble() {
  wibble
}
"
        ),
        "
import mod.{wibble}
pub fn main() {
  let value = mod.wibble
  wibble + value
}
",
        find_position_of("wibble +"),
    );
}
// ---- References for type variants (constructors) ----
#[test]
fn references_for_private_type_variant() {
    assert_references!(
        "
type Wibble { Wibble }
fn main() {
  let _ = Wibble
  Wibble
}
fn wobble() {
  Wibble
  wobble()
}
",
        find_position_of("Wibble }"),
    );
}
#[test]
fn references_for_private_type_variant_from_reference() {
    assert_references!(
        "
type Wibble { Wibble }
fn main() {
  let _ = Wibble
  Wibble
}
fn wobble() {
  Wibble
  wobble()
}
",
        find_position_of(" = Wibble").under_char('W'),
    );
}
#[test]
fn references_for_public_type_variant() {
    assert_references!(
        (
            "mod",
            "
import app.{Wibble}
fn wobble() {
  app.Wibble
}
fn other() {
  Wibble
}
"
        ),
        "
pub type Wibble { Wibble }
pub fn main() {
  Wibble
}
",
        find_position_of("Wibble }"),
    );
}
#[test]
fn references_for_type_variant_from_qualified_reference() {
    assert_references!(
        (
            "mod",
            "
pub type Wibble { Wibble }
fn wobble() {
  Wibble
}
"
        ),
        "
import mod
pub fn main() {
  let value = mod.Wibble
  mod.Wibble
  value
}
",
        find_position_of("Wibble"),
    );
}
#[test]
fn references_for_type_variant_from_unqualified_reference() {
    assert_references!(
        (
            "mod",
            "
pub type Wibble { Wibble }
fn wobble() {
  Wibble
}
"
        ),
        "
import mod.{Wibble}
pub fn main() {
  let value = mod.Wibble
  Wibble
}
",
        find_position_of("Wibble").nth_occurrence(3),
    );
}
// ---- Positions with no references (keywords) ----
#[test]
fn no_references_for_keyword() {
    assert_no_references!(
        "
pub fn wibble() {
  todo
}
",
        find_position_of("fn")
    );
}
// ---- References through import aliases ----
#[test]
fn references_for_aliased_value() {
    assert_references!(
        (
            "mod",
            "
import app.{Wibble as Wobble}
fn wobble() {
  Wobble
}
"
        ),
        "
pub type Wibble { Wibble }
pub fn main() {
  Wibble
}
",
        find_position_of("Wibble").nth_occurrence(2),
    );
}
#[test]
fn references_for_aliased_const() {
    assert_references!(
        (
            "mod",
            "
import app.{wibble as other}
fn wobble() {
  other
}
"
        ),
        "
pub const wibble = 123
pub fn main() {
  wibble
}
",
        find_position_of("wibble").nth_occurrence(2),
    );
}
#[test]
fn references_for_aliased_function() {
    assert_references!(
        (
            "mod",
            "
import app.{wibble as other}
fn wobble() {
  other()
}
"
        ),
        "
pub fn wibble() {
  123
}
pub fn main() {
  wibble()
}
",
        find_position_of("wibble").nth_occurrence(2),
    );
}
// ---- References for types ----
#[test]
fn references_for_private_type() {
    assert_references!(
        "
type Wibble { Wibble }
fn main() -> Wibble {
  todo
}
fn wobble(w: Wibble) {
  todo
}
",
        find_position_of("Wibble"),
    );
}
#[test]
fn references_for_private_type_from_reference() {
    assert_references!(
        "
type Wibble { Wibble }
fn main() -> Wibble {
  todo
}
fn wobble(w: Wibble) {
  todo
}
",
        find_position_of("-> Wibble").under_char('W'),
    );
}
#[test]
fn references_for_public_type() {
    assert_references!(
        (
            "mod",
            "
import app.{type Wibble}
fn wobble() -> Wibble {
  todo
}
fn other(w: app.Wibble) {
  todo
}
"
        ),
        "
pub type Wibble { Wibble }
pub fn main() -> Wibble {
  todo
}
",
        find_position_of("Wibble"),
    );
}
#[test]
fn references_for_type_from_qualified_reference() {
    assert_references!(
        (
            "mod",
            "
pub type Wibble { Wibble }
fn wobble() -> Wibble {
  todo
}
"
        ),
        "
import mod
pub fn main() -> mod.Wibble {
  let _: mod.Wibble = todo
}
",
        find_position_of("Wibble"),
    );
}
#[test]
fn references_for_type_from_unqualified_reference() {
    assert_references!(
        (
            "mod",
            "
pub type Wibble { Wibble }
fn wobble() -> Wibble {
  todo
}
"
        ),
        "
import mod.{type Wibble}
pub fn main() -> Wibble {
  let _: mod.Wibble = todo
}
",
        find_position_of("Wibble").nth_occurrence(2),
    );
}
#[test]
fn references_for_aliased_type() {
    assert_references!(
        (
            "mod",
            "
import app.{type Wibble as Wobble}
fn wobble() -> Wobble {
  todo
}
fn other(w: app.Wibble) {
  todo
}
"
        ),
        "
pub type Wibble { Wibble }
pub fn main() -> Wibble {
  todo
}
",
        find_position_of("-> Wibble").under_char('W'),
    );
}
#[test]
fn references_for_type_from_let_annotation() {
    assert_references!(
        (
            "mod",
            "
pub type Wibble { Wibble }
fn wobble() -> Wibble {
  todo
}
"
        ),
        "
import mod.{type Wibble}
pub fn main() -> Wibble {
  let _: mod.Wibble = todo
}
",
        find_position_of("mod.Wibble").under_char('W'),
    );
}
| rust | Apache-2.0 | f424547f02e621f1c5f28749786e05eda7feb098 | 2026-01-04T15:40:22.554517Z | false |
gleam-lang/gleam | https://github.com/gleam-lang/gleam/blob/f424547f02e621f1c5f28749786e05eda7feb098/language-server/src/tests/router.rs | language-server/src/tests/router.rs | use std::time::SystemTime;
use gleam_core::{Error, io::FileSystemWriter, paths::ProjectPaths};
use crate::{files::FileSystemProxy, tests::Action};
use super::LanguageServerTestIO;
type Router = crate::router::Router<LanguageServerTestIO, LanguageServerTestIO>;
#[test]
fn recompile_after_no_changes_does_not_redownload_dependencies() {
    // Router rooted at a project with a valid gleam.toml
    // (written by `set_up_minimal_router`).
    let paths = ProjectPaths::new("/app".into());
    let (io, mut router) = set_up_minimal_router(&paths);
    assert_eq!(
        compile(&mut router, &paths),
        Ok(()),
        "Pre-condition: Initial compile should succeed"
    );
    {
        // The first compile is expected to fetch dependencies; record that,
        // then clear the action log so the next compile is observed in
        // isolation.
        let mut actions = io.actions.lock().unwrap();
        assert!(
            actions.contains(&Action::DownloadDependencies),
            "Expectation: Initial compile should download dependencies"
        );
        actions.clear();
    }
    assert_eq!(
        compile(&mut router, &paths),
        Ok(()),
        "Recompile should succeed"
    );
    {
        // Nothing on disk changed between the two compiles, so the dependency
        // download must be skipped the second time.
        let actions = io.actions.lock().unwrap();
        assert!(
            !actions.contains(&Action::DownloadDependencies),
            "Recompile should not re-download dependencies"
        );
    }
}
#[test]
fn deleting_build_dir_redownloads_dependencies() {
    let paths = ProjectPaths::new("/app".into());
    let (io, mut router) = set_up_minimal_router(&paths);
    // Compile once so dependencies get downloaded, then clear the action log
    // so we only observe what the *next* compile does.
    _ = compile(&mut router, &paths);
    io.actions.lock().unwrap().clear();
    // Removing the build directory discards the previously downloaded
    // dependencies, so the next compile has to fetch them again.
    io.delete_directory(&paths.build_directory()).unwrap();
    assert_eq!(
        compile(&mut router, &paths),
        Ok(()),
        "Compile after deleting build directory should succeed"
    );
    {
        let actions = io.actions.lock().unwrap();
        assert!(
            actions.contains(&Action::DownloadDependencies),
            "Compile after deleting build directory should re-download dependencies"
        );
    }
}
#[test]
fn changing_config_redownloads_dependencies() {
    let paths = ProjectPaths::new("/app".into());
    let (io, mut router) = set_up_minimal_router(&paths);
    // Compile once so dependencies get downloaded, then clear the action log.
    _ = compile(&mut router, &paths);
    io.actions.lock().unwrap().clear();
    // Rewrite gleam.toml with different content (a new package name)...
    let toml = r#"name = "wobble"
version = "1.0.0""#;
    io.write(&paths.root_config(), toml).unwrap();
    // ...and bump its modification time — presumably the signal the router
    // uses to detect a changed config (the test sets it explicitly).
    io.io
        .try_set_modification_time(&paths.root_config(), SystemTime::now())
        .unwrap();
    assert_eq!(
        compile(&mut router, &paths),
        Ok(()),
        "Compile after changing gleam.toml should succeed"
    );
    {
        let actions = io.actions.lock().unwrap();
        assert!(
            actions.contains(&Action::DownloadDependencies),
            "Compile after changing gleam.toml should re-download dependencies"
        );
    }
}
/// Looks up the project for `paths` in the router and asks its engine to
/// compile, returning the compilation result.
fn compile(router: &mut Router, paths: &ProjectPaths) -> Result<(), Error> {
    let project = router
        .project_for_path(paths.root().into())
        .unwrap()
        .unwrap();
    project.engine.compile_please().result
}
/// Builds a router backed by an in-memory test IO, after writing a minimal
/// valid gleam.toml into the project root.
fn set_up_minimal_router(paths: &ProjectPaths) -> (LanguageServerTestIO, Router) {
    let io = LanguageServerTestIO::new();
    let proxy = FileSystemProxy::new(io.clone());
    let router = Router::new(io.clone(), proxy);
    let toml = r#"name = "wibble"
version = "1.0.0""#;
    io.write(&paths.root_config(), toml).unwrap();
    (io, router)
}
| rust | Apache-2.0 | f424547f02e621f1c5f28749786e05eda7feb098 | 2026-01-04T15:40:22.554517Z | false |
gleam-lang/gleam | https://github.com/gleam-lang/gleam/blob/f424547f02e621f1c5f28749786e05eda7feb098/language-server/src/tests/completion.rs | language-server/src/tests/completion.rs | use insta::assert_debug_snapshot;
use itertools::Itertools;
use lsp_types::{CompletionItem, Position};
use super::*;
/// Renders `code` with a `|` marker inserted at the cursor `position`, so
/// snapshots show where completion was requested.
///
/// Note: the split uses byte offsets, which is fine for the ASCII test
/// sources used here.
pub fn show_complete(code: &str, position: Position) -> String {
    let mut output = String::new();
    for (row, line) in code.lines().enumerate() {
        if row as u32 == position.line {
            let column = position.character as usize;
            output.push_str(&line[..column]);
            output.push('|');
            output.push_str(&line[column..]);
        } else {
            output.push_str(line);
        }
        output.push('\n');
    }
    output
}
/// Applies the text edit of the completion labelled `value` to `src` and
/// returns the resulting source.
///
/// Panics if no completion with that label exists in `completions`.
fn apply_conversion(src: &str, completions: Vec<CompletionItem>, value: &str) -> String {
    // Compare `&str` directly instead of allocating a String per candidate,
    // and build the panic message lazily (clippy: expect_fun_call).
    let completion = completions
        .iter()
        .find(|c| c.label == value)
        .unwrap_or_else(|| panic!("no completion with value `{value}`"));

    let mut edits = vec![];
    if let Some(lsp_types::CompletionTextEdit::Edit(edit)) = &completion.text_edit {
        edits.push(edit.clone());
    }

    apply_code_edit(src, edits)
}
/// Snapshot-tests applying a single completion: renders the source with the
/// cursor marked, followed by the source after the completion labelled
/// `$name` has been applied via its text edit.
#[macro_export]
macro_rules! assert_apply_completion {
    ($project:expr, $name:literal, $position:expr) => {
        let src = $project.src;
        let completions = completion($project, $position);
        let output = format!(
            "{}\n\n----- After applying completion -----\n{}",
            show_complete(src, $position),
            apply_conversion(src, completions, $name)
        );
        insta::assert_snapshot!(insta::internals::AutoName, output, src);
    };
}
/// Snapshot-tests the completions offered for a project.
///
/// One-argument form: completions are computed with an empty prefix, placing
/// the cursor in a synthetic function prepended to the source (see
/// `completion_with_prefix`). Two-argument form: completions are computed at
/// the given cursor `$position`.
#[macro_export]
macro_rules! assert_completion {
    ($project:expr) => {
        let src = $project.src;
        let result = completion_with_prefix($project, "");
        let output = format!(
            "{}\n\n----- Completion content -----\n{}",
            show_complete(src, Position::new(0, 0)),
            format_completion_results(result)
        );
        insta::assert_snapshot!(insta::internals::AutoName, output, src);
    };
    ($project:expr, $position:expr) => {
        let src = $project.src;
        let result = completion($project, $position);
        let output = format!(
            "{}\n\n----- Completion content -----\n{}",
            show_complete(src, $position),
            format_completion_results(result)
        );
        insta::assert_snapshot!(insta::internals::AutoName, output, src);
    };
}
/// Snapshot-tests the completions offered when `$prefix` (e.g. existing
/// import lines) is prepended before the synthetic `typing_in_here` function
/// the cursor is placed in.
#[macro_export]
macro_rules! assert_completion_with_prefix {
    ($project:expr, $prefix:expr) => {
        let src = $project.src;
        let result = completion_with_prefix($project, $prefix);
        // The cursor sits on the line right after the prefix, inside the
        // synthetic function's body.
        let line = 1 + $prefix.lines().count();
        let output = format!(
            "{}\n\n----- Completion content -----\n{}",
            show_complete(src, Position::new(line as u32, 0)),
            format_completion_results(result)
        );
        insta::assert_snapshot!(insta::internals::AutoName, output, src);
    };
}
/// Renders completion items into a readable textual form for snapshot tests.
///
/// Fields the engine is never expected to populate are asserted to be `None`,
/// so a change in engine behaviour fails the test instead of silently being
/// dropped from the snapshot.
fn format_completion_results(completions: Vec<CompletionItem>) -> EcoString {
    use std::fmt::Write;

    let mut buffer: EcoString = "".into();
    for CompletionItem {
        label,
        label_details,
        kind,
        detail,
        documentation,
        deprecated,
        preselect,
        sort_text,
        filter_text,
        insert_text,
        insert_text_format,
        insert_text_mode,
        text_edit,
        additional_text_edits,
        command,
        commit_characters,
        data,
        tags,
    } in completions
    {
        // Fields no completion produced by the engine should ever set.
        assert!(deprecated.is_none());
        assert!(preselect.is_none());
        assert!(filter_text.is_none());
        assert!(insert_text.is_none());
        assert!(insert_text_format.is_none());
        assert!(insert_text_mode.is_none());
        assert!(command.is_none());
        assert!(commit_characters.is_none());
        assert!(data.is_none());
        assert!(tags.is_none());
        buffer.push_str(&label);
        if let Some(kind) = kind {
            write!(buffer, "\n kind: {kind:?}").unwrap();
        }
        if let Some(detail) = detail {
            write!(buffer, "\n detail: {detail}").unwrap();
        }
        if let Some(sort_text) = sort_text {
            write!(buffer, "\n sort: {sort_text}").unwrap();
        }
        if let Some(label_details) = label_details {
            assert!(label_details.detail.is_none());
            if let Some(desc) = label_details.description {
                write!(buffer, "\n desc: {desc}").unwrap();
            }
        }
        if let Some(documentation) = documentation {
            // Documentation must be markdown markup content; anything else is
            // a bug in the engine.
            let lsp_types::Documentation::MarkupContent(m) = documentation else {
                panic!("unexpected docs in test {documentation:?}");
            };
            match m.kind {
                lsp_types::MarkupKind::Markdown => (),
                lsp_types::MarkupKind::PlainText => {
                    panic!("unexpected docs markup kind {:?}", m.kind)
                }
            };
            write!(buffer, "\n docs: {:?}", m.value).unwrap();
        }
        // Renders one text edit as `[start-end]: new_text`.
        // Fix: this previously printed the range *start* twice (copy-paste
        // bug), so snapshots always showed `start-start`; it now prints the
        // range's end. Snapshots containing edits need regenerating.
        let edit = |buffer: &mut EcoString, e: lsp_types::TextEdit| {
            let a = e.range.start.line;
            let b = e.range.start.character;
            let c = e.range.end.line;
            let d = e.range.end.character;
            write!(buffer, "\n [{a}:{b}-{c}:{d}]: {:?}", e.new_text).unwrap();
        };
        if let Some(text_edit) = text_edit {
            let lsp_types::CompletionTextEdit::Edit(e) = text_edit else {
                panic!("unexpected text edit in test {text_edit:?}");
            };
            buffer.push_str("\n edits:");
            edit(&mut buffer, e);
        }
        for e in additional_text_edits.unwrap_or_default() {
            edit(&mut buffer, e);
        }
        buffer.push('\n');
    }
    buffer
}
/// Requests completions from the engine at `position` and returns them
/// sorted by label so snapshots are deterministic.
fn completion(tester: TestProject<'_>, position: Position) -> Vec<CompletionItem> {
    tester.at(position, |engine, param, src| {
        let mut items = engine
            .completion(param, src)
            .result
            .unwrap()
            .unwrap_or_default();
        items.sort_by(|a, b| a.label.cmp(&b.label));
        items
    })
}
/// Prepends `prefix` plus a synthetic `typing_in_here` function to the
/// project source, places the cursor inside that function's body, and
/// returns the resulting completions (minus the synthetic function itself).
fn completion_with_prefix(tester: TestProject<'_>, prefix: &str) -> Vec<CompletionItem> {
    let src = &format!("{}fn typing_in_here() {{\n 0\n}}\n {}", prefix, tester.src);
    let tester = TestProject { src, ..tester };
    // Put the cursor inside the "typing_in_here" fn body.
    let cursor_line = 1 + prefix.lines().count();
    let items = completion(tester, Position::new(cursor_line as u32, 0));
    items
        .into_iter()
        .filter(|item| item.label != "typing_in_here")
        .collect_vec()
}
#[test]
fn completions_for_outside_a_function() {
let code = "
pub fn main() {
0
}";
assert_completion!(TestProject::for_source(code), Position::new(0, 0));
}
#[test]
fn local_public_function() {
let code = "
pub fn main() {
0
}";
assert_completion!(TestProject::for_source(code));
}
#[test]
fn local_public_function_with_documentation() {
let code = "
/// Hello
pub fn main() {
0
}";
assert_completion!(TestProject::for_source(code));
}
#[test]
fn local_public_enum() {
let code = "
pub type Direction {
Left
Right
}
";
assert_completion!(TestProject::for_source(code));
}
#[test]
fn local_public_record() {
let code = "
pub type Box {
/// Hello
Box(Int, Int, Float)
}
";
assert_completion!(TestProject::for_source(code));
}
#[test]
fn local_public_enum_with_documentation() {
let code = "
pub type Direction {
/// Hello
Left
/// Goodbye
Right
}
";
assert_completion!(TestProject::for_source(code));
}
#[test]
fn local_public_record_with_documentation() {
let code = "
pub type Box {
Box(Int, Int, Float)
}
";
assert_completion!(TestProject::for_source(code));
}
#[test]
fn imported_module_function() {
let code = "
import dep
";
let dep = "
pub fn wobble() {
Nil
}
";
assert_completion!(TestProject::for_source(code).add_module("dep", dep));
}
#[test]
fn importable_module_function() {
let code = "
";
let dep = "
pub fn wobble() {
Nil
}
";
assert_completion!(TestProject::for_source(code).add_module("dep", dep));
}
#[test]
fn importable_module_function_with_existing_imports() {
let code = "
//// Some module comments
// Some other whitespace
import dep2
";
let dep = "
pub fn wobble() {
Nil
}
";
let dep2 = "
pub fn wobble() {
Nil
}
";
assert_completion!(
TestProject::for_source(code)
.add_module("dep", dep)
.add_module("dep2", dep2)
);
}
#[test]
fn importable_module_function_from_deep_module() {
let code = "
";
let dep = "
pub fn wobble() {
Nil
}
";
assert_completion!(TestProject::for_source(code).add_module("a/b/dep", dep));
}
#[test]
fn completions_for_type_import_completions_without_brackets() {
    // `src` is deliberately left syntactically incomplete (trailing dot), so
    // this test drives the engine by hand instead of going through the
    // `assert_completion!` macro, which assumes compilable source.
    let src = "import dep.";
    let dep = "
pub opaque type Wibble {
Wibble(wibble: String, wobble: Int)
}
";
    // Cursor immediately after the `.` in `import dep.`.
    let position = Position::new(0, 11);
    let tester = TestProject::for_source("import dep").add_module("dep", dep);
    let mut io = LanguageServerTestIO::new();
    let mut engine = tester.build_engine(&mut io);
    // pass a valid src to compile once
    _ = io.src_module("app", tester.src);
    let _ = engine.compile_please();
    // update src to the one we want to test
    _ = io.src_module("app", src);
    let param = tester.build_path(position);
    let response = engine.completion(param, src.into());
    let mut completions = response.result.unwrap().unwrap_or_default();
    completions.sort_by(|a, b| a.label.cmp(&b.label));
    let output = format!(
        "{}\n\n----- Completion content -----\n{}",
        show_complete(src, position),
        format_completion_results(completions)
    );
    insta::assert_snapshot!(insta::internals::AutoName, output, src);
}
#[test]
fn importable_adds_extra_new_line_if_no_imports() {
let dep = "pub fn wobble() {\nNil\n}";
let code = "";
assert_completion!(TestProject::for_source(code).add_module("dep", dep));
}
#[test]
fn importable_adds_extra_new_line_if_import_exists_below_other_definitions() {
let dep = "pub fn wobble() {\nNil\n}";
let code = "\nimport dep2\n"; // "code" goes after "fn typing_in_here() {}".
assert_completion!(
TestProject::for_source(code)
.add_module("dep", dep)
.add_module("dep2", "")
);
}
#[test]
fn importable_does_not_add_extra_new_line_if_imports_exist() {
let dep = "pub fn wobble() {\nNil\n}";
let prefix = "import wibble\n\n";
let code = "";
assert_completion_with_prefix!(
TestProject::for_source(code)
.add_module("dep", dep)
.add_module("wibble", ""),
prefix
);
}
#[test]
fn importable_does_not_add_extra_new_line_if_newline_exists() {
let dep = "pub fn wobble() {\nNil\n}";
let prefix = "\n";
let code = "";
assert_completion_with_prefix!(
TestProject::for_source(code)
.add_module("dep", dep)
.add_module("wibble", ""),
prefix
);
}
#[test]
fn imported_public_enum() {
let code = "
import dep
";
let dep = "
pub type Direction {
Left
Right
}
";
assert_completion!(TestProject::for_source(code).add_module("dep", dep));
}
#[test]
fn imported_public_record() {
let code = "
import dep
";
let dep = "
pub type Box {
Box(Int)
}
";
assert_completion!(TestProject::for_source(code).add_module("dep", dep));
}
#[test]
fn imported_unqualified_module_function() {
let code = "
import dep.{wobble}
";
let dep = "
pub fn wobble() {
Nil
}
";
assert_completion!(TestProject::for_source(code).add_module("dep", dep));
}
#[test]
fn imported_unqualified_public_enum() {
let code = "
import dep.{Left}
";
let dep = "
pub type Direction {
Left
Right
}
";
assert_completion!(TestProject::for_source(code).add_module("dep", dep));
}
#[test]
fn imported_unqualified_public_record() {
let code = "
import dep.{Box}
";
let dep = "
pub type Box {
Box(Int)
}
";
assert_completion!(TestProject::for_source(code).add_module("dep", dep));
}
#[test]
fn private_function() {
let code = "
fn private() {
1
}
";
let dep = "";
assert_completion!(TestProject::for_source(code).add_module("dep", dep));
}
#[test]
fn private_type() {
let code = "
type Wibble {
Wobble
}
";
let dep = "";
assert_completion!(TestProject::for_source(code).add_module("dep", dep));
}
#[test]
fn opaque_type() {
let code = "
pub opaque type Wibble {
Wobble
}
";
let dep = "";
assert_completion!(TestProject::for_source(code).add_module("dep", dep));
}
#[test]
fn private_function_in_dep() {
let code = "import dep";
let dep = "
fn private() {
1
}
";
assert_completion!(TestProject::for_source(code).add_module("dep", dep));
}
#[test]
fn private_type_in_dep() {
let code = "import dep";
let dep = "
type Wibble {
Wobble
}
";
assert_completion!(TestProject::for_source(code).add_module("dep", dep));
}
#[test]
fn in_custom_type_definition() {
let code = "import dep";
let dep = "
type Wibble {
Wobble
}
";
assert_completion!(TestProject::for_source(code).add_module("dep", dep));
}
#[test]
fn for_custom_type_definition() {
let code = "
pub type Wibble {
Wobble
}";
assert_completion!(TestProject::for_source(code), Position::new(2, 0));
}
#[test]
fn for_type_alias() {
let code = "
pub type Wibble = Result(
String,
String
)
";
assert_completion!(TestProject::for_source(code), Position::new(2, 0));
}
#[test]
fn for_function_arguments() {
let code = "
pub fn wibble(
_: String,
) -> Nil {
Nil
}
";
assert_completion!(TestProject::for_source(code), Position::new(2, 0));
}
#[test]
fn imported_type() {
let dep = "
pub type Zoo = List(String)
type Private = List(String)
";
let code = "import dep
pub fn wibble(
_: String,
) -> Nil {
Nil
}
";
assert_completion!(
TestProject::for_source(code).add_module("dep", dep),
Position::new(3, 0)
);
}
#[test]
fn imported_type_cursor_after_dot() {
let dep = "
pub type Zoo = List(String)
type Private = List(String)
";
let code = "import dep
pub fn wibble(
_: dep.Zoo,
) -> Nil {
Nil
}
";
assert_completion!(
TestProject::for_source(code).add_module("dep", dep),
Position::new(3, 12)
);
}
#[test]
fn imported_type_cursor_after_dot_other_matching_modules() {
let dep = "
pub type Zoo = List(String)
type Private = List(String)
";
let dep2 = "
pub type Zoo = List(String)
type Private = List(String)
";
let code = "import dep
import dep2
pub fn wibble(
_: dep.Zoo,
) -> Nil {
Nil
}
";
assert_completion!(
TestProject::for_source(code)
.add_module("dep", dep)
.add_module("dep2", dep2),
Position::new(4, 12)
);
}
#[test]
fn imported_type_cursor_after_dot_other_modules() {
let dep = "
pub type Zoo = List(String)
type Private = List(String)
";
let other = "
pub type Zoo = List(String)
type Private = List(String)
";
let code = "import dep
pub fn wibble(
_: dep.Zoo,
) -> Nil {
Nil
}
";
assert_completion!(
TestProject::for_source(code)
.add_module("dep", dep)
.add_module("other", other),
Position::new(3, 12)
);
}
#[test]
fn imported_type_cursor_mid_phrase_other_modules() {
let dep = "
pub type Zoo = List(String)
type Private = List(String)
";
let other = "
pub type Zoo = List(String)
type Private = List(String)
";
let code = "import dep
pub fn wibble(
_: dep.Zoo,
) -> Nil {
Nil
}
";
assert_completion!(
TestProject::for_source(code)
.add_module("dep", dep)
.add_module("other", other),
Position::new(3, 8)
);
}
#[test]
fn importable_type() {
let dep = "
pub type Zoo = List(String)
type Private = List(String)
";
let code = "
pub fn wibble(
_: String,
) -> Nil {
Nil
}
";
assert_completion!(
TestProject::for_source(code).add_module("dep", dep),
Position::new(3, 0)
);
}
#[test]
fn importable_type_with_existing_imports_at_top() {
let dep = "
pub type Zoo = List(String)
type Private = List(String)
";
let dep2 = "
pub type Zoo = List(String)
type Private = List(String)
";
let code = "import dep2
pub fn wibble(
_: String,
) -> Nil {
Nil
}
";
assert_completion!(
TestProject::for_source(code)
.add_module("dep", dep)
.add_module("dep2", dep2),
Position::new(3, 0)
);
}
#[test]
fn importable_type_with_existing_imports() {
let dep = "
pub type Zoo = List(String)
type Private = List(String)
";
let dep2 = "
pub type Zoo = List(String)
type Private = List(String)
";
let code = "
//// Some module comments
// Some other whitespace
import dep2
pub fn wibble(
_: String,
) -> Nil {
Nil
}
";
assert_completion!(
TestProject::for_source(code)
.add_module("dep", dep)
.add_module("dep2", dep2),
Position::new(7, 0)
);
}
#[test]
fn importable_type_from_deep_module() {
let dep = "
pub type Zoo = List(String)
type Private = List(String)
";
let code = "
pub fn wibble(
_: String,
) -> Nil {
Nil
}
";
assert_completion!(
TestProject::for_source(code).add_module("a/b/dep", dep),
Position::new(3, 0)
);
}
#[test]
fn unqualified_imported_type() {
let dep = "
pub type Zoo = List(String)
type Private = List(String)
";
let code = "import dep.{type Zoo}
pub fn wibble(
_: String,
) -> Nil {
Nil
}
";
assert_completion!(
TestProject::for_source(code).add_module("dep", dep),
Position::new(3, 0)
);
}
#[test]
fn local_private_type() {
let code = "
type Zoo = Int
pub fn wibble(
x: String,
) -> String {
\"ok\"
}
";
assert_completion!(TestProject::for_source(code), Position::new(4, 0));
}
#[test]
fn local_variable() {
let code = "
pub fn main(wibble: Int) {
let wobble = 1
w
let wabble = 2
}
";
assert_completion!(TestProject::for_source(code), Position::new(3, 3));
}
#[test]
fn local_variable_anonymous_function() {
let code = "
pub fn main() {
let add_one = fn(wibble: Int) { wibble + 1 }
add_one(1)
}
";
assert_completion!(TestProject::for_source(code), Position::new(2, 40));
}
#[test]
fn local_variable_nested_anonymous_function() {
let code = "
pub fn main() {
let add_one = fn(wibble: Int) {
let wabble = 1
let add_two = fn(wobble: Int) { wobble + 2 }
wibble + add_two(1)
}
add_one(1)
}
";
assert_completion!(TestProject::for_source(code), Position::new(4, 42));
}
#[test]
fn local_variable_ignore_anonymous_function_args() {
let code = "
pub fn main() {
let add_one = fn(wibble: Int) { wibble + 1 }
let wobble = 1
w
}
";
assert_completion!(TestProject::for_source(code), Position::new(4, 3));
}
#[test]
fn local_variable_ignore_anonymous_function_args_nested() {
let code = "
pub fn main() {
let add_one = fn(wibble: Int) {
let wabble = 1
let add_two = fn(wobble: Int) { wobble + 2 }
wibble + add_two(1)
}
add_one(1)
}
";
assert_completion!(TestProject::for_source(code), Position::new(5, 10));
}
#[test]
fn local_variable_ignore_anonymous_function_returned() {
let code = "
pub fn main() {
fn(wibble: Int) {
let wabble = 1
let add_two = fn(wobble: Int) { wobble + 2 }
wibble + add_two(1)
}
}
";
assert_completion!(TestProject::for_source(code), Position::new(5, 10));
}
#[test]
fn local_variable_case_expression() {
let code = "
pub fn main() {
case True {
True as wibble -> { todo }
False -> { todo }
}
}
";
assert_completion!(TestProject::for_source(code), Position::new(3, 25));
}
#[test]
fn local_variable_inside_nested_exprs() {
let code = r#"
type Wibble { Wobble(List(#(Bool))) }
fn wibble() {
Wobble([#(!{
let wibble = True
wibble
})])
todo
}
"#;
assert_completion!(TestProject::for_source(code), Position::new(5, 7));
}
#[test]
fn local_variable_pipe() {
let code = "
pub fn main() {
let add_one = fn(wibble: Int) { wibble + 1 }
let wobble = 1
wobble |> add_one
}
";
assert_completion!(TestProject::for_source(code), Position::new(4, 19));
}
#[test]
fn local_variable_pipe_with_args() {
let code = "
pub fn main() {
let add_one = fn(wibble: Int, wobble: Int) { wibble + wobble }
let wobble = 1
let wibble = 2
wobble |> add_one(1, wibble)
}
";
assert_completion!(TestProject::for_source(code), Position::new(5, 29));
}
#[test]
fn local_variable_function_call() {
let code = "
fn add_one(wibble: Int) -> Int {
wibble + 1
}
pub fn main() {
let wobble = 1
add_one(wobble)
}
";
assert_completion!(TestProject::for_source(code), Position::new(7, 16));
}
#[test]
fn local_variable_ignored() {
let code = "
fn wibble() {
let a = 1
let _b = 2
}
";
assert_completion!(TestProject::for_source(code), Position::new(4, 0));
}
#[test]
fn local_variable_as() {
let code = "
fn wibble() {
let b as c = 5
}
";
assert_completion!(TestProject::for_source(code), Position::new(3, 0));
}
#[test]
fn local_variable_tuple() {
let code = "
fn wibble() {
let assert #([d, e] as f, g) = #([0, 1], 2)
}
";
assert_completion!(TestProject::for_source(code), Position::new(3, 0));
}
#[test]
fn local_variable_bit_array() {
let code = "
fn wibble() {
let assert <<h:1>> as i = <<1:1>>
}
";
assert_completion!(TestProject::for_source(code), Position::new(3, 0));
}
#[test]
fn local_variable_string() {
let code = r#"
fn wibble() {
let assert "a" <> j = "ab"
}
"#;
assert_completion!(TestProject::for_source(code), Position::new(3, 0));
}
#[test]
fn local_variable_ignore_within_function() {
let code = "
fn main(a, b, z) {
Nil
}
";
assert_completion!(TestProject::for_source(code), Position::new(1, 14));
}
#[test]
fn internal_values_from_root_package_are_in_the_completions() {
let dep = r#"
@external(erlang, "rand", "uniform")
@internal pub fn random_float() -> Float
@internal pub fn main() { 0 }
@internal pub type Wibble { Wobble }
@internal pub const wibble = 1
"#;
assert_completion!(TestProject::for_source("import dep").add_module("dep", dep));
}
#[test]
fn internal_types_from_root_package_are_in_the_completions() {
let code = "import dep
pub fn wibble(
_: String,
) -> Nil {
Nil
}";
let dep = r#"
@internal pub type Alias = Int
@internal pub type AnotherType { Constructor }
"#;
assert_completion!(
TestProject::for_source(code).add_module("dep", dep),
Position::new(3, 0)
);
}
#[test]
fn internal_values_from_the_same_module_are_in_the_completions() {
let code = r#"
@external(erlang, "rand", "uniform")
@internal pub fn random_float() -> Float
@internal pub fn main() { 0 }
@internal pub type Wibble { Wobble }
@internal pub const wibble = 1
"#;
assert_completion!(TestProject::for_source(code));
}
#[test]
fn internal_types_from_the_same_module_are_in_the_completions() {
let code = "
@internal pub type Alias = Result(Int, String)
@internal pub type AnotherType {
Wibble
}
";
assert_completion!(TestProject::for_source(code), Position::new(3, 0));
}
#[test]
fn internal_types_from_a_dependency_are_ignored() {
let code = "import dep
pub fn wibble(
_: String,
) -> Nil {
Nil
}";
let dep = r#"
@internal pub type Alias = Int
@internal pub type AnotherType { Constructor }
"#;
assert_completion!(
TestProject::for_source(code).add_dep_module("dep", dep),
Position::new(3, 0)
);
}
#[test]
fn internal_values_from_a_dependency_are_ignored() {
let dep = r#"
@external(erlang, "rand", "uniform")
@internal pub fn random_float() -> Float
@internal pub fn main() { 0 }
@internal pub type Wibble { Wobble }
@internal pub const wibble = 1
"#;
assert_completion!(TestProject::for_source("import dep").add_dep_module("dep", dep));
}
#[test]
fn completions_for_an_import() {
let code = "import dep
pub fn main() {
0
}";
let dep = "";
assert_completion!(
TestProject::for_source(code).add_module("dep", dep),
Position::new(0, 10)
);
}
#[test]
fn completions_for_an_import_no_test() {
let code = "import gleam
pub fn main() {
0
}";
let test = "
import gleam
pub fn main() {
0
}
";
assert_completion!(
TestProject::for_source(code).add_test_module("my_tests", test),
Position::new(0, 10)
);
}
#[test]
fn completions_for_an_import_while_in_test() {
let code = "import gleam
pub fn main() {
0
}";
let test = "
import gleam
pub fn main() {
0
}
";
let test_helper = "
pub fn test_helper() {
0
}
";
let (mut engine, position_param) = TestProject::for_source(code)
.add_test_module("my_test", test)
.add_test_module("test_helper", test_helper)
.positioned_with_io_in_test(Position::new(0, 10), "my_test");
let response = engine.completion(position_param, code.into());
let mut completions = response.result.unwrap().unwrap_or_default();
completions.sort_by(|a, b| a.label.cmp(&b.label));
assert_debug_snapshot!(completions,);
}
#[test]
fn completions_for_an_import_while_in_dev() {
let code = "import gleam
pub fn main() {
0
}";
let dev_helper = "
pub fn dev_helper() {
0
}
";
let position = Position::new(0, 12);
let (mut engine, position_param) = TestProject::for_source(code)
.add_test_module("my_test", code)
.add_dev_module("my_dev_code", code)
.add_dev_module("dev_helper", dev_helper)
.positioned_with_io_in_dev(position, "my_dev_code");
let response = engine.completion(position_param, code.into());
let mut completions = response.result.unwrap().unwrap_or_default();
completions.sort_by(|a, b| a.label.cmp(&b.label));
let output = format!(
"{}\n\n----- Completion content -----\n{}",
show_complete(code, position),
format_completion_results(completions)
);
insta::assert_snapshot!(insta::internals::AutoName, output, code);
}
#[test]
fn completions_for_an_import_with_docs() {
let code = "import gleam
pub fn main() {
0
}";
let dep = "//// Some package
//// documentation!
pub fn main() { 1 }
";
assert_completion!(
TestProject::for_source(code).add_dep_module("dep", dep),
Position::new(0, 10)
);
}
#[test]
fn completions_for_an_import_from_dependency() {
let code = "import gleam
pub fn main() {
0
}";
let dep = "";
assert_completion!(
TestProject::for_source(code).add_hex_module("example_module", dep),
Position::new(0, 10)
);
}
#[test]
fn completions_for_an_import_not_from_indirect_dependency() {
let code = "import gleam
pub fn main() {
0
}";
let dep = "";
assert_completion!(
TestProject::for_source(code)
.add_hex_module("example_module", dep)
.add_indirect_hex_module("indirect_module", ""),
Position::new(0, 10)
);
}
#[test]
fn completions_for_an_import_not_from_dev_dependency() {
let code = "import gleam
pub fn main() {
0
}";
let dep = "";
assert_completion!(
TestProject::for_source(code)
.add_hex_module("example_module", dep)
.add_dev_hex_module("indirect_module", ""),
Position::new(0, 10)
);
}
#[test]
fn completions_for_an_import_not_from_dev_dependency_in_test() {
let code = "import gleam
pub fn main() {
0
}";
let test = "import gleam
pub fn main() {
0
}
";
let dep = "";
let (mut engine, position_param) = TestProject::for_source(code)
.add_test_module("my_test", test)
.add_hex_module("example_module", dep)
.add_dev_hex_module("indirect_module", "")
.positioned_with_io_in_test(Position::new(0, 10), "my_test");
let response = engine.completion(position_param, code.into());
let mut completions = response.result.unwrap().unwrap_or_default();
completions.sort_by(|a, b| a.label.cmp(&b.label));
assert_debug_snapshot!(completions,);
}
#[test]
fn completions_for_an_import_not_from_dev_dependency_in_dev() {
let code = "import gleam
pub fn main() {
0
}";
let dev = "import gleam
pub fn main() {
0
}
";
let dep = "";
let position = Position::new(0, 10);
let (mut engine, position_param) = TestProject::for_source(code)
.add_dev_module("my_dev_module", dev)
.add_hex_module("example_module", dep)
.add_dev_hex_module("indirect_module", "")
.positioned_with_io_in_dev(position, "my_dev_module");
let response = engine.completion(position_param, code.into());
let mut completions = response.result.unwrap().unwrap_or_default();
completions.sort_by(|a, b| a.label.cmp(&b.label));
let output = format!(
"{}\n\n----- Completion content -----\n{}",
show_complete(dev, position),
format_completion_results(completions)
);
insta::assert_snapshot!(insta::internals::AutoName, output, dev);
}
#[test]
fn completions_for_an_import_from_dependency_with_docs() {
let code = "//// Main package
//// documentation!
import gleam
pub fn main() {
0
}";
let dep = "//// Some package
//// documentation!
pub fn main() { 1 }
";
assert_completion!(
TestProject::for_source(code).add_hex_module("example_module", dep),
Position::new(3, 10)
);
}
#[test]
fn completions_for_an_import_start() {
let code = "import gleam
pub fn main() {
0
}";
let dep = "";
assert_completion!(
TestProject::for_source(code).add_dep_module("dep", dep),
Position::new(0, 0)
);
}
#[test]
fn completions_for_an_import_preceeding_whitespace() {
let code = " import gleam
pub fn main() {
0
}";
let dep = "";
assert_completion!(
TestProject::for_source(code).add_dep_module("dep", dep),
Position::new(0, 2)
);
}
#[test]
fn internal_modules_from_same_package_are_included() {
let code = "import gleam
pub fn main() {
0
}";
let internal_name = format!("{LSP_TEST_ROOT_PACKAGE_NAME}/internal");
assert_completion!(
TestProject::for_source(code)
// Not included
.add_dep_module("dep/internal", "")
// Included
.add_module(&internal_name, ""),
Position::new(0, 0)
);
}
#[test]
fn completions_for_an_unqualified_import() {
let code = "
import dep.{}
pub fn main() {
0
}";
let dep = "pub const wibble = \"wibble\"
const wobble = \"wobble\"
@internal
pub const wabble = \"wabble\"
pub fn myfun() {
0
}
pub type Wibble = String
";
assert_completion!(
TestProject::for_source(code).add_module("dep", dep),
Position::new(1, 12)
);
}
#[test]
fn completions_for_an_unqualified_import_on_new_line() {
let code = "
import dep.{
wibble,
}
pub fn main() {
0
}";
let dep = "pub const wibble = \"wibble\"
pub fn myfun() {
0
}
pub type Wibble = String
";
assert_completion!(
TestProject::for_source(code).add_module("dep", dep),
// putting cursor at beginning of line because some formatters
// remove the empty whitespace in the test code.
// Does also work with (3, 2) when empty spaces are not removed.
Position::new(3, 0)
);
}
#[test]
fn completions_for_an_unqualified_import_already_imported() {
let code = "
import dep.{wibble,wabble,type Wibble}
pub fn main() {
0
}";
let dep = "pub const wibble = \"wibble\"
const wobble = \"wobble\"
@internal
pub const wabble = \"wabble\"
pub fn myfun() {
0
}
pub type Wibble = String
";
assert_completion!(
TestProject::for_source(code).add_module("dep", dep),
Position::new(1, 12)
);
}
#[test]
fn completions_for_a_function_arg_annotation() {
let code = "
pub fn wibble(
_: String,
) -> Nil {
Nil
}
";
assert_completion!(TestProject::for_source(code), Position::new(2, 11));
}
#[test]
fn completions_for_a_function_return_annotation() {
let code = "
pub fn wibble(
_: String,
) -> Nil {
Nil
}
";
assert_completion!(TestProject::for_source(code), Position::new(3, 7));
}
#[test]
fn completions_for_a_var_annotation() {
let code = "
pub fn main() {
let wibble: Int = 7
}
";
assert_completion!(TestProject::for_source(code), Position::new(2, 16));
}
#[test]
fn completions_for_a_const_annotation() {
let code = "
const wibble: Int = 7
pub fn main() {
let wibble: Int = 7
}
";
assert_completion!(TestProject::for_source(code), Position::new(2, 16));
}
#[test]
| rust | Apache-2.0 | f424547f02e621f1c5f28749786e05eda7feb098 | 2026-01-04T15:40:22.554517Z | true |
gleam-lang/gleam | https://github.com/gleam-lang/gleam/blob/f424547f02e621f1c5f28749786e05eda7feb098/language-server/src/tests/rename.rs | language-server/src/tests/rename.rs | use std::collections::HashMap;
use lsp_types::{
Position, Range, RenameParams, TextDocumentPositionParams, Url, WorkDoneProgressParams,
};
use super::{TestProject, find_position_of, hover};
/// Returns the rename range and edit to apply if the rename is valid and can be
/// carried out.
/// However if the rename produces an error response from the language server,
/// the error message is returned.
fn rename(
    tester: &TestProject<'_>,
    new_name: &str,
    position: Position,
) -> Result<Option<(Range, lsp_types::WorkspaceEdit)>, String> {
    // First ask the engine whether anything at this position can be renamed.
    let prepared = tester.at(position, |engine, params, _| {
        let request = TextDocumentPositionParams {
            text_document: params.text_document,
            position,
        };
        engine.prepare_rename(request).result.unwrap()
    });

    // A missing or non-range response means the rename is not offered here.
    let range = match prepared {
        Some(lsp_types::PrepareRenameResponse::Range(range)) => range,
        _ => return Ok(None),
    };

    // Now request the actual workspace edit for the rename.
    let response = tester.at(position, |engine, params, _| {
        let request = RenameParams {
            text_document_position: TextDocumentPositionParams {
                text_document: params.text_document,
                position,
            },
            new_name: new_name.into(),
            work_done_progress_params: WorkDoneProgressParams::default(),
        };
        engine.rename(request).result.unwrap()
    });

    match response {
        Ok(edit) => Ok(edit.map(|edit| (range, edit))),
        Err(error) => Err(error.message),
    }
}
/// Perform a rename and apply the resulting workspace edit, returning the
/// rename range together with the updated source of every changed module.
///
/// Panics if the rename fails, produces no edit, or the edit has no changes.
fn apply_rename(
    tester: &TestProject<'_>,
    new_name: &str,
    position: Position,
) -> (Range, HashMap<String, String>) {
    let outcome = rename(tester, new_name, position);
    let (range, edit) = outcome.expect("Rename failed").expect("No rename produced");
    let changes = edit.changes.expect("No text edit found");
    let edited_modules = apply_code_edit(tester, changes);
    (range, edited_modules)
}
/// Apply each module's text edits to its source, producing a map from module
/// name to the edited source code.
fn apply_code_edit(
    tester: &TestProject<'_>,
    changes: HashMap<Url, Vec<lsp_types::TextEdit>>,
) -> HashMap<String, String> {
    changes
        .into_iter()
        .map(|(uri, edits)| {
            let module_name = tester.module_name_from_url(&uri).expect("Valid uri");
            let module_code = tester.src_from_module_url(&uri).expect("Module exists");
            (module_name, super::apply_code_edit(module_code, edits))
        })
        .collect()
}
/// Snapshot-test a successful rename: performs the rename at `$position` and
/// records every root-package module's source before and after the edit.
macro_rules! assert_rename {
    // Single-module project built from `$code`.
    ($code:literal, $new_name:literal, $position:expr $(,)?) => {
        assert_rename!(TestProject::for_source($code), $new_name, $position);
    };
    // Project with one extra root-package module.
    (($module_name:literal, $module_src:literal), $code:literal, $new_name:literal, $position:expr $(,)?) => {
        assert_rename!(
            TestProject::for_source($code).add_module($module_name, $module_src),
            $new_name,
            $position
        );
    };
    // Fully-built project: apply the rename and snapshot the before/after state.
    ($project:expr, $new_name:literal, $position:expr $(,)?) => {
        let project = $project;
        let src = project.src;
        let position = $position.find_position(src);
        let (range, result) = apply_rename(&project, $new_name, position);
        let mut output = String::from("----- BEFORE RENAME\n");
        for (name, src) in project.root_package_modules.iter() {
            output.push_str(&format!("-- {name}.gleam\n{src}\n\n"));
        }
        // Render the main module with the rename range underlined.
        output.push_str(&format!(
            "-- app.gleam\n{}\n\n----- AFTER RENAME\n",
            hover::show_hover(src, range, range.start)
        ));
        // Modules the edit did not touch fall back to their original source.
        for (name, src) in project.root_package_modules.iter() {
            output.push_str(&format!(
                "-- {name}.gleam\n{}\n\n",
                result
                    .get(*name)
                    .map(|string| string.as_str())
                    .unwrap_or(*src)
            ));
        }
        output.push_str(&format!(
            "-- app.gleam\n{}",
            result
                .get("app")
                .map(|string| string.as_str())
                .unwrap_or(src)
        ));
        insta::assert_snapshot!(insta::internals::AutoName, output, src);
    };
}
/// Assert that no rename is offered at `$position`: `rename` must return
/// `Ok(None)` (prepare-rename produced no renameable range).
macro_rules! assert_no_rename {
    ($code:literal, $new_name:literal, $position:expr $(,)?) => {
        let project = TestProject::for_source($code);
        assert_no_rename!(&project, $new_name, $position);
    };
    ($project:expr, $new_name:literal, $position:expr $(,)?) => {
        let src = $project.src;
        let position = $position.find_position(src);
        let result = rename($project, $new_name, position);
        assert_eq!(result, Ok(None));
    };
}
/// Assert that the rename request at `$position` produces an error response
/// from the language server, and snapshot the error message.
macro_rules! assert_rename_error {
    ($code:literal, $new_name:literal, $position:expr $(,)?) => {
        let project = TestProject::for_source($code);
        assert_rename_error!(&project, $new_name, $position);
    };
    ($project:expr, $new_name:literal, $position:expr $(,)?) => {
        let src = $project.src;
        let position = $position.find_position(src);
        let error = rename($project, $new_name, position).unwrap_err();
        let snapshot = format!("Error response message:\n\n{error}");
        insta::assert_snapshot!(insta::internals::AutoName, snapshot, src);
    };
}
#[test]
fn rename_local_variable() {
assert_rename!(
"
pub fn main() {
let wibble = 10
wibble
}
",
"wobble",
find_position_of("wibble").nth_occurrence(2),
);
}
#[test]
fn rename_shadowed_local_variable() {
assert_rename!(
"
pub fn main() {
let wibble = 10
let wibble = wibble / 2
wibble
}
",
"wobble",
find_position_of("wibble /"),
);
}
#[test]
fn rename_shadowing_local_variable() {
assert_rename!(
"
pub fn main() {
let wibble = 10
let wibble = wibble / 2
wibble
}
",
"wobble",
find_position_of("wibble").nth_occurrence(4),
);
}
#[test]
fn rename_local_variable_record_access() {
assert_rename!(
"
type Wibble {
Wibble(wibble: Int)
}
pub fn main() {
let wibble = Wibble(wibble: 1)
wibble.wibble
}
",
"wobble",
find_position_of("wibble."),
);
}
#[test]
fn rename_local_variable_guard_clause() {
assert_rename!(
"
pub fn main() {
let wibble = True
case Nil {
Nil if wibble -> todo
_ -> panic
}
wibble || False
}
",
"wobble",
find_position_of("wibble ||"),
);
}
#[test]
fn rename_local_variable_from_definition() {
assert_rename!(
"
pub fn main() {
let wibble = 10
let wobble = wibble + 1
wobble - wibble
}
",
"some_value",
find_position_of("wibble =")
);
}
#[test]
fn rename_local_variable_from_definition_nested_pattern() {
assert_rename!(
"
pub fn main() {
let assert Ok([_, wibble, ..]) = Error(12)
wibble
}
",
"second_element",
find_position_of("wibble,")
);
}
#[test]
fn rename_local_variable_assignment_pattern() {
assert_rename!(
"
pub fn main() {
let assert Error(12 as something) = Error(12)
something
}
",
"the_error",
find_position_of("something").nth_occurrence(2)
);
}
#[test]
fn rename_local_variable_from_definition_assignment_pattern() {
assert_rename!(
"
pub fn main() {
let assert Error(12 as something) = Error(12)
something
}
",
"the_error",
find_position_of("something)")
);
}
#[test]
fn rename_local_variable_argument() {
assert_rename!(
"
pub fn add(first_number: Int, x: Int) -> Int {
x + first_number
}
",
"second_number",
find_position_of("x +")
);
}
#[test]
fn rename_local_variable_argument_from_definition() {
assert_rename!(
"
pub fn wibble(wibble: Float) {
wibble /. 0.3
}
",
"wobble",
find_position_of("wibble:")
);
}
#[test]
fn rename_local_variable_label_shorthand() {
assert_rename!(
"
type Wibble {
Wibble(wibble: Int)
}
pub fn main() {
let Wibble(wibble:) = todo
wibble + 1
}
",
"wobble",
find_position_of("wibble +")
);
}
#[test]
fn rename_local_variable_label_shorthand_from_definition() {
assert_rename!(
"
type Wibble {
Wibble(wibble: Int)
}
pub fn main() {
let Wibble(wibble:) = todo
wibble + 1
}
",
"wobble",
find_position_of("wibble:)")
);
}
#[test]
fn rename_local_variable_from_label_shorthand() {
assert_rename!(
"
type Wibble {
Wibble(wibble: Int)
}
pub fn main() {
let wibble = todo
Wibble(wibble:)
}
",
"wobble",
find_position_of("wibble:)")
);
}
#[test]
fn rename_local_variable_in_bit_array_pattern() {
assert_rename!(
"
pub fn starts_with(bits: BitArray, prefix: BitArray) -> Bool {
let prefix_size = bit_size(prefix)
case bits {
<<pref:bits-size(prefix_size), _:bits>> if pref == prefix -> True
_ -> False
}
}
",
"size_of_prefix",
find_position_of("prefix_size =")
);
}
#[test]
fn rename_local_variable_from_bit_array_pattern() {
assert_rename!(
"
pub fn starts_with(bits: BitArray, prefix: BitArray) -> Bool {
let prefix_size = bit_size(prefix)
case bits {
<<pref:bits-size(prefix_size), _:bits>> if pref == prefix -> True
_ -> False
}
}
",
"size_of_prefix",
find_position_of("prefix_size)")
);
}
// A keyword (`fn`) is not a renameable symbol, so no rename is offered.
#[test]
fn no_rename_keyword() {
    assert_no_rename!(
        "
pub fn main() {}
",
        "wibble",
        find_position_of("fn"),
    );
}
#[test]
fn no_rename_invalid_name() {
assert_rename_error!(
"
pub fn main() {
let wibble = 10
wibble
}
",
"Not_AValid_Name",
find_position_of("wibble").nth_occurrence(2)
);
}
#[test]
fn rename_function_from_definition() {
assert_rename!(
(
"mod",
"
import app
fn wibble() {
app.something()
}
"
),
"
pub fn something() {
something()
}
fn something_else() {
something()
}
",
"some_function",
find_position_of("something")
);
}
#[test]
fn rename_function_from_reference() {
assert_rename!(
(
"mod",
"
import app
fn wibble() {
app.something()
}
"
),
"
pub fn something() {
something()
}
fn something_else() {
something()
}
",
"some_function",
find_position_of("something").nth_occurrence(2)
);
}
#[test]
fn rename_function_from_qualified_reference() {
assert_rename!(
(
"mod",
"
pub fn wibble() {
wibble()
}
"
),
"
import mod
pub fn main() {
mod.wibble()
}
",
"some_function",
find_position_of("wibble")
);
}
#[test]
fn rename_function_from_unqualified_reference() {
assert_rename!(
(
"mod",
"
pub fn wibble() {
wibble()
}
"
),
"
import mod.{wibble}
pub fn main() {
wibble()
mod.wibble()
}
",
"some_function",
find_position_of("wibble(")
);
}
#[test]
fn rename_aliased_function() {
assert_rename!(
(
"mod",
"
import app.{something as something_else}
fn wibble() {
something_else()
}
"
),
"
pub fn something() {
something()
}
fn something_else() {
something()
}
",
"some_function",
find_position_of("something")
);
}
#[test]
fn rename_function_shadowing_module() {
let src = "
import gleam/list
pub fn list() {
[]
}
pub fn main() {
list.map(todo, todo)
}
";
assert_rename!(
TestProject::for_source(src).add_hex_module("gleam/list", "pub fn map(_, _) {}"),
"empty_list",
find_position_of("list()")
);
}
#[test]
fn rename_function_shadowed_by_field_access() {
assert_rename!(
(
"mod",
"
import app
type App {
App(something: Int)
}
pub fn main() {
let app = App(10)
app.something
}
"
),
"
pub fn something() {
todo
}
",
"function",
find_position_of("something")
);
}
#[test]
fn no_rename_function_with_invalid_name() {
assert_rename_error!(
"
pub fn main() {
let wibble = 10
wibble
}
",
"Not_AValid_Name",
find_position_of("main")
);
}
#[test]
fn no_rename_function_from_other_package() {
let src = "
import wibble
pub fn main() {
wibble.wobble()
}
";
assert_no_rename!(
&TestProject::for_source(src).add_hex_module("wibble", "pub fn wobble() { todo }"),
"something",
find_position_of("wobble")
);
}
#[test]
fn rename_constant_from_definition() {
assert_rename!(
(
"mod",
"
import app
fn wibble() {
app.something
}
"
),
"
pub const something = 10
pub fn main() {
something + { 4 * something }
}
",
"ten",
find_position_of("something")
);
}
#[test]
fn rename_constant_from_reference() {
assert_rename!(
(
"mod",
"
import app
fn wibble() {
app.something
}
"
),
"
pub const something = 10
pub fn main() {
something + { 4 * something }
}
",
"ten",
find_position_of("something").nth_occurrence(2)
);
}
#[test]
fn rename_constant_from_qualified_reference() {
assert_rename!(
(
"mod",
"
pub const something = 10
fn wibble() {
something
}
"
),
"
import mod
pub fn main() {
mod.something
}
",
"ten",
find_position_of("something")
);
}
#[test]
fn rename_constant_from_unqualified_reference() {
assert_rename!(
(
"mod",
"
pub const something = 10
fn wibble() {
something
}
"
),
"
import mod.{something}
pub fn main() {
something + mod.something
}
",
"ten",
find_position_of("something +")
);
}
#[test]
fn rename_aliased_constant() {
assert_rename!(
(
"mod",
"
import app.{something as some_constant}
fn wibble() {
some_constant
}
"
),
"
pub const something = 10
pub fn main() {
something + { 4 * something }
}
",
"ten",
find_position_of("something")
);
}
#[test]
fn rename_constant_shadowing_module() {
let src = "
import gleam/list
const list = []
pub fn main() {
list.map(todo, todo)
}
";
assert_rename!(
TestProject::for_source(src).add_hex_module("gleam/list", "pub fn map(_, _) {}"),
"empty_list",
find_position_of("list =")
);
}
#[test]
fn rename_constant_shadowed_by_field_access() {
assert_rename!(
(
"mod",
"
import app
type App {
App(something: Int)
}
pub fn main() {
let app = App(10)
app.something
}
"
),
"
pub const something = 10
",
"constant",
find_position_of("something")
);
}
#[test]
fn no_rename_constant_with_invalid_name() {
assert_rename_error!(
"
const value = 10
",
"Ten",
find_position_of("value")
);
}
#[test]
fn no_rename_constant_from_other_package() {
let src = "
import wibble
pub fn main() {
wibble.wobble
}
";
assert_no_rename!(
&TestProject::for_source(src).add_hex_module("wibble", "pub const wobble = 2"),
"something",
find_position_of("wobble")
);
}
#[test]
fn rename_type_variant_from_definition() {
assert_rename!(
(
"mod",
"
import app
fn wibble() {
app.Constructor(4)
}
"
),
"
pub type Wibble {
Constructor(Int)
}
pub fn main() {
Constructor(10)
}
",
"Wibble",
find_position_of("Constructor(Int")
);
}
#[test]
fn rename_type_variant_from_reference() {
assert_rename!(
(
"mod",
"
import app
fn wibble() {
app.Constructor(4)
}
"
),
"
pub type Wibble {
Constructor(Int)
}
pub fn main() {
Constructor(10)
}
",
"Wibble",
find_position_of("Constructor(10")
);
}
#[test]
fn rename_type_variant_from_qualified_reference() {
assert_rename!(
(
"mod",
"
pub type Wibble {
Constructor(Int)
}
fn wibble() {
Constructor(42)
}
"
),
"
import mod
pub fn main() {
mod.Constructor
}
",
"Variant",
find_position_of("Constructor")
);
}
#[test]
fn rename_type_variant_from_unqualified_reference() {
assert_rename!(
(
"mod",
"
pub type Wibble {
Constructor(Int)
}
fn wibble() {
Constructor(81)
}
"
),
"
import mod.{Constructor}
pub fn main() {
#(Constructor(75), mod.Constructor(57))
}
",
"Number",
find_position_of("Constructor(75")
);
}
#[test]
fn rename_aliased_type_variant() {
assert_rename!(
(
"mod",
"
import app.{Constructor as ValueConstructor}
fn wibble() {
ValueConstructor(172)
}
"
),
"
pub type Wibble {
Constructor(Int)
}
pub fn main() {
Constructor(42)
}
",
"MakeAWibble",
find_position_of("Constructor")
);
}
#[test]
fn no_rename_type_variant_with_invalid_name() {
assert_rename_error!(
"
pub type Wibble {
Constructor(Int)
}
",
"name_in_snake_case",
find_position_of("Constructor")
);
}
#[test]
fn rename_custom_type_variant_pattern() {
assert_rename!(
"
pub type Type {
X
Y
}
pub fn main(t) {
case t {
X -> 0
Y -> 0
}
}
",
"Renamed",
find_position_of("X")
);
}
#[test]
fn rename_imported_custom_type_variant_pattern() {
assert_rename!(
(
"other",
"
import app
pub fn main(t) {
case t {
app.X -> 0
app.Y -> 0
}
}
"
),
"
pub type Type {
X
Y
}
",
"Renamed",
find_position_of("X")
);
}
#[test]
fn rename_imported_unqualified_custom_type_variant_pattern() {
assert_rename!(
(
"other",
"
import app.{X, Y}
pub fn main(t) {
case t {
X -> 0
Y -> 0
}
}
"
),
"
pub type Type {
X
Y
}
",
"Renamed",
find_position_of("X")
);
}
#[test]
fn rename_type_variant_pattern_with_arguments() {
assert_rename!(
"
pub type Wibble {
Wibble(Int)
Wobble(Float)
}
fn wibble() {
case Wibble(10) {
Wibble(20) -> todo
Wibble(_) -> panic
}
}
",
"Variant",
find_position_of("Wibble(10)")
);
}
#[test]
fn rename_type_variant_from_pattern() {
assert_rename!(
"
pub type Type {
X
Y
}
pub fn main(t) {
case t {
X -> 0
Y -> 0
}
}
",
"Renamed",
find_position_of("X ->")
);
}
#[test]
fn no_rename_type_variant_from_other_package() {
let src = "
import wibble
pub fn main() {
wibble.Wibble(10)
}
";
assert_no_rename!(
&TestProject::for_source(src).add_hex_module("wibble", "pub type Wibble { Wibble(Int) }"),
"Constructor",
find_position_of("Wibble")
);
}
#[test]
fn rename_value_in_nested_module() {
assert_rename!(
(
"sub/mod",
"
pub fn wibble() {
wibble()
}
"
),
"
import sub/mod
pub fn main() {
mod.wibble()
}
",
"some_function",
find_position_of("wibble")
);
}
#[test]
fn rename_value_in_aliased_module() {
assert_rename!(
(
"mod",
"
pub fn wibble() {
wibble()
}
"
),
"
import mod as the_module
pub fn main() {
the_module.wibble()
}
",
"some_function",
find_position_of("wibble")
);
}
#[test]
fn rename_aliased_value() {
assert_rename!(
(
"mod",
"
import app.{Wibble as Wobble}
fn wobble() {
Wobble
}
"
),
"
pub type Wibble { Wibble }
pub fn main() {
Wibble
}
",
"Wubble",
find_position_of("Wibble }")
);
}
#[test]
fn rename_type_from_definition() {
assert_rename!(
(
"mod",
"
import app
fn wibble() -> app.Wibble { todo }
"
),
"
pub type Wibble { Constructor }
pub fn main(w: Wibble) -> Wibble { todo }
",
"SomeType",
find_position_of("Wibble")
);
}
#[test]
fn rename_type_from_reference() {
assert_rename!(
(
"mod",
"
import app
fn wibble() -> app.Wibble { todo }
"
),
"
pub type Wibble { Constructor }
pub fn main(w: Wibble) -> Wibble { todo }
",
"SomeType",
find_position_of("Wibble").nth_occurrence(2)
);
}
#[test]
fn rename_type_from_qualified_reference() {
assert_rename!(
(
"mod",
"
pub type Wibble { Constructor }
fn wibble(w: Wibble) -> Wibble { todo }
"
),
"
import mod
pub fn main(w: mod.Wibble) -> mod.Wibble { todo }
",
"SomeType",
find_position_of("Wibble")
);
}
#[test]
fn rename_type_from_unqualified_reference() {
assert_rename!(
(
"mod",
"
pub type Wibble { Constructor }
fn wibble(w: Wibble) -> Wibble { todo }
"
),
"
import mod.{type Wibble}
pub fn main(w: Wibble) -> mod.Wibble { todo }
",
"SomeType",
find_position_of("Wibble)")
);
}
#[test]
fn rename_aliased_type() {
assert_rename!(
(
"mod",
"
import app.{type Wibble as Wobble}
fn wibble() -> Wobble { todo }
"
),
"
pub type Wibble { Constructor }
pub fn main(w: Wibble) -> Wibble { todo }
",
"SomeType",
find_position_of("Wibble")
);
}
#[test]
fn no_rename_type_with_invalid_name() {
assert_rename_error!(
"
type Wibble { Wobble }
",
"a_type_name",
find_position_of("Wibble")
);
}
#[test]
fn no_rename_type_from_other_package() {
let src = "
import wibble
pub fn main() -> wibble.Wibble { todo }
";
assert_no_rename!(
&TestProject::for_source(src).add_hex_module("wibble", "pub type Wibble { Wibble }"),
"SomeType",
find_position_of("Wibble")
);
}
// https://github.com/gleam-lang/gleam/issues/4372
#[test]
fn rename_type_referenced_in_variant_constructor_argument() {
assert_rename!(
(
"mod",
"
import app
pub type Wobble {
Wobble(w: app.Wibble)
}
"
),
"
pub type Wibble {
Wibble
}
pub fn main() {
let wibble = Wibble
}
",
"SomeType",
find_position_of("Wibble")
);
}
// https://github.com/gleam-lang/gleam/issues/4372
#[test]
fn rename_type_from_variant_constructor_argument() {
assert_rename!(
(
"mod",
"
pub type Wibble {
Wibble
}
pub fn main() {
let wibble = Wibble
}
"
),
"
import mod
pub type Wobble {
Wobble(w: mod.Wibble)
}
",
"SomeType",
find_position_of("Wibble")
);
}
// https://github.com/gleam-lang/gleam/issues/4553
#[test]
fn rename_local_variable_with_label_shorthand() {
assert_rename!(
"
pub type Wibble {
Wibble(first: Int, second: Int)
}
pub fn main() {
let second = 2
Wibble(first: 1, second:)
}
",
"something",
find_position_of("second =")
);
}
// https://github.com/gleam-lang/gleam/issues/4748
#[test]
fn rename_alternative_pattern() {
assert_rename!(
"
pub fn main(x) {
case x {
#(wibble, [wobble]) | #(wobble, [wibble, _]) | #(_, [wibble, wobble, ..]) ->
wibble + wobble
_ -> 0
}
}
",
"new_name",
find_position_of("wibble")
);
}
// https://github.com/gleam-lang/gleam/issues/5091
#[test]
fn rename_alternative_pattern_aliases() {
assert_rename!(
"
pub fn main(x) {
case x {
[] as list | [_] as list -> list
_ -> []
}
}
",
"new_name",
find_position_of("list")
);
}
#[test]
fn rename_alternative_pattern_aliases_from_alternative() {
assert_rename!(
"
pub fn main(x) {
case x {
[] as list | [_] as list -> list
_ -> []
}
}
",
"new_name",
find_position_of("list").nth_occurrence(2)
);
}
#[test]
fn rename_alternative_pattern_aliases_from_usage() {
assert_rename!(
"
pub fn main(x) {
case x {
[] as list | [_] as list -> list
_ -> []
}
}
",
"new_name",
find_position_of("list").nth_occurrence(3)
);
}
#[test]
fn rename_alternative_pattern_alias_and_variable_1() {
assert_rename!(
"
pub fn main(x) {
case x {
[] as list | [_, ..list] -> list
_ -> []
}
}
",
"new_name",
find_position_of("list").nth_occurrence(1)
);
}
#[test]
fn rename_alternative_pattern_alias_and_variable_2() {
assert_rename!(
"
pub fn main(x) {
case x {
[] as list | [_, ..list] -> list
_ -> []
}
}
",
"new_name",
find_position_of("list").nth_occurrence(2)
);
}
#[test]
fn rename_alternative_pattern_alias_and_variable_3() {
assert_rename!(
"
pub fn main(x) {
case x {
[_, ..list] | [] as list -> list
_ -> []
}
}
",
"new_name",
find_position_of("list").nth_occurrence(1)
);
}
#[test]
fn rename_alternative_pattern_alias_and_variable_4() {
assert_rename!(
"
pub fn main(x) {
case x {
[_, ..list] | [] as list -> list
_ -> []
}
}
",
"new_name",
find_position_of("list").nth_occurrence(2)
);
}
#[test]
fn rename_alternative_pattern_from_usage() {
assert_rename!(
"
pub fn main(x) {
case x {
#(wibble, [wobble]) | #(wobble, [wibble, _]) | #(_, [wibble, wobble, ..]) ->
wibble + wobble
_ -> 0
}
}
",
"new_name",
find_position_of("wibble +")
);
}
// https://github.com/gleam-lang/gleam/issues/4605
#[test]
fn rename_prelude_value() {
assert_rename!(
"
pub fn main() {
Ok(10)
}
",
"Success",
find_position_of("Ok")
);
}
#[test]
fn rename_prelude_type() {
assert_rename!(
"
pub fn main() -> Result(Int, Nil) {
Ok(10)
}
",
"SuccessOrFailure",
find_position_of("Result")
);
}
#[test]
fn rename_variable_with_alternative_pattern_with_same_name() {
assert_rename!(
"
pub fn main(x) {
let some_var = 10
case x {
#(some_var, []) | #(_, [some_var]) ->
some_var
_ -> 0
}
some_var
}
",
"new_name",
find_position_of("some_var")
);
}
#[test]
fn rename_prelude_value_with_prelude_already_imported() {
assert_rename!(
"
import gleam
pub fn main() {
Ok(gleam.Error(10))
}
",
"Success",
find_position_of("Ok")
);
}
#[test]
fn rename_prelude_value_with_prelude_import_with_empty_braces() {
assert_rename!(
"
import gleam.{}
pub fn main() {
Ok(gleam.Error(10))
}
",
"Success",
find_position_of("Ok")
);
}
#[test]
fn rename_prelude_value_with_other_prelude_value_imported() {
assert_rename!(
"
import gleam.{Error}
pub fn main() {
Ok(Error(10))
}
",
"Success",
find_position_of("Ok")
);
}
#[test]
fn rename_prelude_type_with_prelude_value_imported_with_trailing_comma() {
assert_rename!(
"
import gleam.{Error,}
pub fn main() -> Result(Int, Nil) {
Error(10)
}
",
"OkOrError",
find_position_of("Result")
);
}
#[test]
fn rename_prelude_value_with_other_module_imported() {
assert_rename!(
("something", "pub type Something"),
"
import something
pub fn main() {
Ok(10)
}
",
"Success",
find_position_of("Ok")
);
}
#[test]
fn rename_module_access_in_clause_guard() {
assert_rename!(
(
"wibble",
"
import app
pub fn main() {
case app.something {
thing if thing == app.something -> True
_ -> False
}
}
"
),
"
pub const something = 10
",
"new_name",
find_position_of("something")
);
}
#[test]
fn rename_variable_used_in_record_update() {
assert_rename!(
"
type Wibble {
Wibble(a: Int, b: Int, c: Int)
}
fn wibble(wibble: Wibble) {
Wibble(..wibble, c: 1)
}
",
"value",
find_position_of("wibble:")
);
}
// https://github.com/gleam-lang/gleam/issues/4941
// Renaming must also work for functions with an `@external` implementation
// and no Gleam body.
#[test]
fn rename_external_function() {
    assert_rename!(
        r#"
pub fn main() { wibble() }
@external(erlang, "a", "a")
fn wibble() -> Nil
"#,
        "new_name",
        find_position_of("wibble").nth_occurrence(2)
    );
}
#[test]
fn rename_external_javascript_function_with_pure_gleam_fallback() {
assert_rename!(
r#"
pub fn main() { wibble() }
@external(javascript, "a", "a")
fn wibble() -> Nil {
Nil
}
"#,
"new_name",
find_position_of("wibble").nth_occurrence(2)
);
}
| rust | Apache-2.0 | f424547f02e621f1c5f28749786e05eda7feb098 | 2026-01-04T15:40:22.554517Z | false |
gleam-lang/gleam | https://github.com/gleam-lang/gleam/blob/f424547f02e621f1c5f28749786e05eda7feb098/language-server/src/tests/hover.rs | language-server/src/tests/hover.rs | use lsp_types::{Hover, HoverParams, Position, Range};
use super::*;
/// Send a hover request to the language engine at `position` and return the
/// engine's (optional) hover result.
fn hover(tester: TestProject<'_>, position: Position) -> Option<Hover> {
    tester.at(position, |engine, param, _| {
        let request = HoverParams {
            text_document_position_params: param,
            work_done_progress_params: Default::default(),
        };
        engine.hover(request).result.unwrap()
    })
}
/// Render `code` with an underline (`▔`) beneath the hovered `range` and an
/// arrow (`↑`) marking the exact hover `position`.
///
/// When we display the hover range the end character is always excluded: the
/// range end is exclusive, so the character at `end` itself is not underlined.
/// Lines with nothing to mark are printed without an underline line beneath
/// them.
pub fn show_hover(code: &str, range: Range, position: Position) -> String {
    let Range { start, end } = range;
    let mut buffer: String = "".into();
    for (line_number, line) in code.lines().enumerate() {
        let mut underline: String = "".into();
        let mut underline_empty = true;
        for (column_number, _) in line.chars().enumerate() {
            let current_position = Position::new(line_number as u32, column_number as u32);
            if current_position == position {
                underline_empty = false;
                underline.push('↑');
            } else if start <= current_position && current_position < end {
                underline_empty = false;
                underline.push('▔');
            } else {
                underline.push(' ');
            }
        }
        buffer.push_str(line);
        if !underline_empty {
            buffer.push('\n');
            buffer.push_str(&underline);
        }
        buffer.push('\n');
    }
    buffer
}
/// Snapshot-test the hover response at `$position`: renders the source with
/// the hover range underlined and appends the hover contents.
#[macro_export]
macro_rules! assert_hover {
    ($code:literal, $position:expr $(,)?) => {
        let project = TestProject::for_source($code);
        assert_hover!(project, $position);
    };
    ($project:expr, $position:expr $(,)?) => {
        let src = $project.src;
        let position = $position.find_position(src);
        let result = hover($project, position).expect("no hover produced");
        let pretty_hover = show_hover(src, result.range.expect("hover with no range"), position);
        let output = format!(
            "{}\n\n----- Hover content -----\n{:#?}",
            pretty_hover, result.contents
        );
        insta::assert_snapshot!(insta::internals::AutoName, output, src);
    };
}
#[test]
fn hover_function_definition() {
assert_hover!(
"
fn add_2(x) {
x + 2
}
",
find_position_of("add_2")
);
}
#[test]
fn hover_local_function() {
assert_hover!(
"
fn my_fn() {
Nil
}
fn main() {
my_fn
}
",
find_position_of("my_fn").under_char('y').nth_occurrence(2)
);
}
// https://github.com/gleam-lang/gleam/issues/2654
#[test]
fn hover_local_function_in_pipe() {
assert_hover!(
"
fn add1(num: Int) -> Int {
num + 1
}
pub fn main() {
add1(1)
1
|> add1
|> add1
|> add1
}
",
find_position_of("add1")
.with_char_offset(1)
.nth_occurrence(2)
);
}
// https://github.com/gleam-lang/gleam/issues/2654
#[test]
fn hover_local_function_in_pipe_1() {
assert_hover!(
"
fn add1(num: Int) -> Int {
num + 1
}
pub fn main() {
add1(1)
1
|> add1
|> add1
|> add1
}
",
find_position_of("add1")
.with_char_offset(2)
.nth_occurrence(3)
);
}
// https://github.com/gleam-lang/gleam/issues/2654
#[test]
fn hover_local_function_in_pipe_2() {
assert_hover!(
"
fn add1(num: Int) -> Int {
num + 1
}
pub fn main() {
add1(1)
1
|> add1
|> add1
|> add1
}
",
find_position_of("add1")
.with_char_offset(2)
.nth_occurrence(4)
);
}
// https://github.com/gleam-lang/gleam/issues/2654
#[test]
fn hover_local_function_in_pipe_3() {
assert_hover!(
"
fn add1(num: Int) -> Int {
num + 1
}
pub fn main() {
add1(1)
1
|> add1
|> add1
|> add1
}
",
find_position_of("add1")
.with_char_offset(2)
.nth_occurrence(5)
);
}
#[test]
fn hover_imported_function() {
let code = "
import example_module
fn main() {
example_module.my_fn
}
";
assert_hover!(
TestProject::for_source(code).add_module("example_module", "pub fn my_fn() { Nil }"),
find_position_of("my_fn").under_char('_'),
);
}
#[test]
fn hover_external_imported_function() {
let code = "
import example_module
fn main() {
example_module.my_fn
}
";
assert_hover!(
TestProject::for_source(code).add_hex_module("example_module", "pub fn my_fn() { Nil }"),
find_position_of("my_fn").under_char('_'),
);
}
#[test]
fn hover_external_imported_unqualified_function() {
let code = "
import example_module.{my_fn}
fn main() {
my_fn
}
";
assert_hover!(
TestProject::for_source(code).add_hex_module("example_module", "pub fn my_fn() { Nil }"),
find_position_of("my_fn").under_char('f').nth_occurrence(2),
);
}
#[test]
fn hover_external_imported_function_renamed_module() {
let code = "
import example_module as renamed_module
fn main() {
renamed_module.my_fn
}
";
assert_hover!(
TestProject::for_source(code).add_hex_module("example_module", "pub fn my_fn() { Nil }"),
find_position_of("my_fn").under_char('f'),
);
}
#[test]
fn hover_external_unqualified_imported_function_renamed_module() {
let code = "
import example_module.{my_fn} as renamed_module
fn main() {
my_fn
}
";
assert_hover!(
TestProject::for_source(code).add_hex_module("example_module", "pub fn my_fn() { Nil }"),
find_position_of("my_fn").under_char('_').nth_occurrence(2),
);
}
#[test]
fn hover_external_imported_function_nested_module() {
// Example of HexDocs link with nested modules: https://hexdocs.pm/lustre/lustre/element/svg.html
let code = "
import my/nested/example_module
fn main() {
example_module.my_fn
}
";
assert_hover!(
TestProject::for_source(code)
.add_hex_module("my/nested/example_module", "pub fn my_fn() { Nil }"),
find_position_of("my_fn").under_char('f'),
);
}
#[test]
fn hover_external_imported_ffi_renamed_function() {
let code = r#"
import example_module
fn main() {
example_module.my_fn
}
"#;
let hex_module = r#"
@external(erlang, "my_mod_ffi", "renamed_fn")
pub fn my_fn() -> Nil
"#;
assert_hover!(
TestProject::for_source(code).add_hex_module("example_module", hex_module,),
find_position_of("my_fn").under_char('f'),
);
}
#[test]
fn hover_external_imported_constants() {
let code = "
import example_module
fn main() {
example_module.my_const
}
";
assert_hover!(
TestProject::for_source(code).add_hex_module("example_module", "pub const my_const = 42"),
find_position_of("my_const").under_char('_'),
);
}
#[test]
fn hover_external_imported_unqualified_constants() {
let code = "
import example_module.{my_const}
fn main() {
my_const
}
";
assert_hover!(
TestProject::for_source(code).add_hex_module("example_module", "pub const my_const = 42"),
find_position_of("my_const")
.under_char('c')
.nth_occurrence(2),
);
}
#[test]
fn hover_external_value_with_two_modules_same_name() {
let code = "
import a/example_module as _
import b/example_module
fn main() {
example_module.my_const
}
";
assert_hover!(
TestProject::for_source(code)
.add_hex_module("a/example_module", "pub const my_const = 42")
.add_hex_module("b/example_module", "pub const my_const = 42"),
find_position_of("my_const").under_char('c'),
);
}
#[test]
fn hover_external_function_with_another_value_same_name() {
let code = "
import a/example_module.{my_const as discarded}
import b/example_module.{my_const} as _
fn main() {
my_const
}
";
assert_hover!(
TestProject::for_source(code)
.add_hex_module("a/example_module", "pub const my_const = 42")
.add_hex_module("b/example_module", "pub const my_const = 42"),
find_position_of("my_const")
.under_char('o')
.nth_occurrence(3),
);
}
#[test]
fn hover_function_definition_with_docs() {
assert_hover!(
"
/// Exciting documentation
/// Maybe even multiple lines
fn append(x, y) {
x <> y
}
",
find_position_of("append")
);
}
#[test]
fn hover_function_argument() {
assert_hover!(
"
/// Exciting documentation
/// Maybe even multiple lines
fn append(x, y) {
x <> y
}
",
find_position_of("append(x, y)").under_char('x')
);
}
// Hovering a position that is not on any expression (here, inside the
// function body but not on a token) must produce no hover at all.
#[test]
fn hover_function_body() {
    let code = "
/// Exciting documentation
/// Maybe even multiple lines
fn append(x, y) {
x <> y
}
";
    assert_eq!(
        hover(TestProject::for_source(code), Position::new(4, 1)),
        None
    );
}
#[test]
fn hover_expressions_in_function_body() {
assert_hover!(
"
fn append(x, y) {
x <> y
}
",
find_position_of("x").nth_occurrence(2)
);
}
#[test]
fn hover_module_constant() {
assert_hover!(
"
/// Exciting documentation
/// Maybe even multiple lines
const one = 1
",
find_position_of("one")
);
}
#[test]
fn hover_variable_in_use_expression() {
assert_hover!(
"
fn b(fun: fn(Int) -> String) {
fun(42)
}
fn do_stuff() {
let c = \"done\"
use a <- b
c
}
",
find_position_of("use a").under_last_char()
);
}
#[test]
fn hover_variable_in_use_expression_1() {
assert_hover!(
"
fn b(fun: fn(Int) -> String) {
fun(42)
}
fn do_stuff() {
let c = \"done\"
use a <- b
c
}
",
find_position_of("b").nth_occurrence(2)
);
}
#[test]
fn hover_variable_in_use_expression_2() {
assert_hover!(
"
fn b(fun: fn(Int) -> String) {
fun(42)
}
fn do_stuff() {
let c = \"done\"
use a <- b
c
}
",
find_position_of("c").nth_occurrence(2)
);
}
#[test]
fn hover_function_arg_annotation_2() {
assert_hover!(
"
/// Exciting documentation
/// Maybe even multiple lines
fn append(x: String, y: String) -> String {
x <> y
}
",
find_position_of("String").under_char('n')
);
}
#[test]
fn hover_function_return_annotation() {
assert_hover!(
"
/// Exciting documentation
/// Maybe even multiple lines
fn append(x: String, y: String) -> String {
x <> y
}
",
find_position_of("String").under_char('n').nth_occurrence(3)
);
}
#[test]
fn hover_function_return_annotation_with_tuple() {
assert_hover!(
"
/// Exciting documentation
/// Maybe even multiple lines
fn append(x: String, y: String) -> #(String, String) {
#(x, y)
}
",
find_position_of("String").under_char('r').nth_occurrence(3)
);
}
#[test]
fn hover_module_constant_annotation() {
assert_hover!(
"
/// Exciting documentation
/// Maybe even multiple lines
const one: Int = 1
",
find_position_of("Int").under_last_char()
);
}
#[test]
fn hover_type_constructor_annotation() {
assert_hover!(
"
type Wibble {
Wibble(arg: String)
}
",
find_position_of("String").under_char('n')
);
}
#[test]
fn hover_type_alias_annotation() {
assert_hover!("type Wibble = Int", find_position_of("Int").under_char('n'));
}
#[test]
fn hover_assignment_annotation() {
assert_hover!(
"
fn wibble() {
let wobble: Int = 7
wobble
}
",
find_position_of("Int").under_last_char()
);
}
#[test]
fn hover_function_arg_annotation_with_documentation() {
assert_hover!(
"
/// Exciting documentation
/// Maybe even multiple lines
type Wibble {
Wibble(arg: String)
}
fn identity(x: Wibble) -> Wibble {
x
}
",
find_position_of("Wibble")
.under_last_char()
.nth_occurrence(3)
);
}
#[test]
fn hover_import_unqualified_value() {
let code = "
import example_module.{my_num}
fn main() {
my_num
}
";
assert_hover!(
TestProject::for_source(code).add_module(
"example_module",
"
/// Exciting documentation
/// Maybe even multiple lines
pub const my_num = 1"
),
find_position_of("my_num").under_char('n')
);
}
#[test]
fn hover_import_unqualified_value_from_hex() {
let code = "
import example_module.{my_num}
fn main() {
my_num
}
";
assert_hover!(
TestProject::for_source(code).add_hex_module(
"example_module",
"
/// Exciting documentation
/// Maybe even multiple lines
pub const my_num = 1"
),
find_position_of("my_num").under_char('n')
);
}
#[test]
fn hover_import_unqualified_type() {
let code = "
import example_module.{type MyType, MyType}
fn main() -> MyType {
MyType
}
";
assert_hover!(
TestProject::for_source(code).add_module(
"example_module",
"
/// Exciting documentation
/// Maybe even multiple lines
pub type MyType {
MyType
}"
),
find_position_of("MyType").under_last_char()
);
}
#[test]
fn hover_works_even_for_invalid_code() {
assert_hover!(
"
fn invalid() { 1 + Nil }
fn valid() { Nil }
",
find_position_of("fn valid").under_char('v')
);
}
#[test]
fn hover_for_pattern_spread_ignoring_all_fields() {
assert_hover!(
"
pub type Model {
Model(
Int,
Float,
label1: Int,
label2: String,
)
}
pub fn main() {
case todo {
Model(..) -> todo
}
}
",
find_position_of("..")
);
}
#[test]
fn hover_for_pattern_spread_ignoring_some_fields() {
assert_hover!(
"
pub type Model {
Model(
Int,
Float,
label1: Int,
label2: String,
)
}
pub fn main() {
case todo {
Model(_, label1: _, ..) -> todo
}
}
",
find_position_of("..").under_last_char()
);
}
#[test]
fn hover_for_pattern_spread_ignoring_all_positional_fields() {
assert_hover!(
"
pub type Model {
Model(
Int,
Float,
label1: Int,
label2: String,
)
}
pub fn main() {
case todo {
Model(_, _, _, ..) -> todo
}
}
",
find_position_of("..")
);
}
#[test]
fn hover_label_shorthand_in_call_arg() {
assert_hover!(
"
fn wibble(arg1 arg1: Int, arg2 arg2: Bool) { Nil }
fn main() {
let arg1 = 1
let arg2 = True
wibble(arg2:, arg1:)
}
",
find_position_of("arg2:").nth_occurrence(2)
);
}
#[test]
fn hover_label_shorthand_in_pattern_call_arg() {
assert_hover!(
"
pub type Wibble { Wibble(arg1: Int, arg2: Bool) }
pub fn main() {
case todo {
Wibble(arg2:, ..) -> todo
}
}
",
find_position_of("arg2:")
.nth_occurrence(2)
.under_last_char()
);
}
#[test]
fn hover_label_shorthand_in_pattern_call_arg_2() {
assert_hover!(
"
pub type Wibble { Wibble(arg1: Int, arg2: Bool) }
pub fn main() {
let Wibble(arg2:, ..) = todo
}
",
find_position_of("arg2:").nth_occurrence(2).under_char('r')
);
}
#[test]
fn hover_contextual_type() {
let code = "
import wibble/wobble
const value = wobble.Wobble
";
assert_hover!(
TestProject::for_source(code).add_hex_module("wibble/wobble", "pub type Wibble { Wobble }"),
find_position_of("value").under_char('v')
);
}
#[test]
fn hover_contextual_type_aliased_module() {
let code = "
import wibble/wobble as wubble
const value = wubble.Wobble
";
assert_hover!(
TestProject::for_source(code).add_hex_module("wibble/wobble", "pub type Wibble { Wobble }"),
find_position_of("value").under_char('v')
);
}
#[test]
fn hover_contextual_type_unqualified() {
let code = "
import wibble/wobble.{type Wibble}
const value = wobble.Wobble
";
assert_hover!(
TestProject::for_source(code).add_hex_module("wibble/wobble", "pub type Wibble { Wobble }"),
find_position_of("value").under_char('v')
);
}
#[test]
fn hover_contextual_type_unqualified_aliased() {
let code = "
import wibble/wobble.{type Wibble as Wobble}
const value = wobble.Wobble
";
assert_hover!(
TestProject::for_source(code).add_hex_module("wibble/wobble", "pub type Wibble { Wobble }"),
find_position_of("value").under_char('v')
);
}
#[test]
fn hover_contextual_type_aliased() {
let code = "
import wibble/wobble
type Local = wobble.Wibble
const value = wobble.Wobble
";
assert_hover!(
TestProject::for_source(code).add_hex_module("wibble/wobble", "pub type Wibble { Wobble }"),
find_position_of("value").under_char('v')
);
}
#[test]
fn hover_contextual_type_function() {
let code = "
import wibble/wobble
type MyInt = Int
fn func(value: wobble.Wibble) -> MyInt { 1 }
";
assert_hover!(
TestProject::for_source(code).add_hex_module("wibble/wobble", "pub type Wibble { Wobble }"),
find_position_of("func").under_char('f')
);
}
#[test]
fn hover_contextual_type_unqualified_import() {
let code = "
import wibble/wobble.{type Wibble as Wobble, Wobble}
";
assert_hover!(
TestProject::for_source(code).add_hex_module("wibble/wobble", "pub type Wibble { Wobble }"),
find_position_of("Wobble}").under_char('W')
);
}
#[test]
fn hover_contextual_type_pattern() {
let code = "
import wibble/wobble.{Wibble, Wobble, Wubble}
pub fn cycle(wibble: wobble.Wibble) {
case wibble {
Wibble -> Wobble
Wobble -> Wubble
Wubble -> Wibble
}
}
";
assert_hover!(
TestProject::for_source(code)
.add_hex_module("wibble/wobble", "pub type Wibble { Wibble Wobble Wubble }"),
find_position_of("Wubble ->").under_char('u')
);
}
#[test]
fn hover_contextual_type_pattern_spread() {
let code = "
import wibble/wobble.{type Wibble as Wobble}
type Thing {
Thing(id: Int, value: Wobble)
}
pub fn main(thing: Thing) {
case thing {
Thing(id: 0, ..) -> 12
_ -> 14
}
}
";
assert_hover!(
TestProject::for_source(code).add_hex_module("wibble/wobble", "pub type Wibble { Wibble }"),
find_position_of("..").under_char('.')
);
}
#[test]
fn hover_contextual_type_expression() {
let code = "
import wibble/wobble
pub fn main() {
let wibble = wobble.Wibble
}
";
assert_hover!(
TestProject::for_source(code).add_hex_module("wibble/wobble", "pub type Wibble { Wibble }"),
find_position_of(".Wibble").under_char('l')
);
}
#[test]
fn hover_contextual_type_arg() {
let code = "
import wibble/wobble
fn do_things(wibble: wobble.Wibble) { wibble }
";
assert_hover!(
TestProject::for_source(code).add_hex_module("wibble/wobble", "pub type Wibble { Wibble }"),
find_position_of("wibble:").under_char('w')
);
}
#[test]
fn hover_print_type_variable_names() {
let code = "
fn main(value: Result(ok, error)) {
let v = value
v
}
";
assert_hover!(
TestProject::for_source(code),
find_position_of("let v").under_char('v')
);
}
#[test]
fn hover_print_unbound_type_variable_names() {
let code = "
fn make_ok(value: some_type) {
let result = Ok(value)
result
}
";
assert_hover!(
TestProject::for_source(code),
find_position_of("result =").under_char('s')
);
}
#[test]
fn hover_print_unbound_type_variable_name_without_conflicts() {
let code = "
fn make_ok(value: a) {
let result = Ok(value)
result
}
";
assert_hover!(
TestProject::for_source(code),
find_position_of("result =").under_char('s')
);
}
#[test]
fn hover_print_imported_alias() {
let code = "
import aliases.{type Aliased}
const thing: Aliased = 10
";
assert_hover!(
TestProject::for_source(code).add_hex_module("aliases", "pub type Aliased = Int"),
find_position_of("thing").under_char('g')
);
}
#[test]
fn hover_prelude_type() {
let code = "
const number = 100
";
assert_hover!(
TestProject::for_source(code),
find_position_of("number").under_char('b')
);
}
#[test]
fn hover_shadowed_prelude_type() {
let code = "
type Int { Int }
const number = 100
";
assert_hover!(
TestProject::for_source(code),
find_position_of("number").under_char('b')
);
}
#[test]
fn hover_shadowed_prelude_type_imported() {
let code = "
import numbers.{type Int}
const number = 100
";
assert_hover!(
TestProject::for_source(code).add_hex_module("numbers", "pub type Int"),
find_position_of("number =").under_char('b')
);
}
#[test]
fn hover_contextual_type_annotation() {
let code = "
import wibble/wobble
fn make_wibble() -> wobble.Wibble { wobble.Wibble }
";
assert_hover!(
TestProject::for_source(code).add_hex_module("wibble/wobble", "pub type Wibble { Wibble }"),
find_position_of("-> wobble.Wibble").under_char('i')
);
}
#[test]
fn hover_contextual_type_annotation_prelude() {
let code = "
fn add_one(a: Int) -> Int {
a + 1
}
";
assert_hover!(
TestProject::for_source(code).add_hex_module("wibble/wobble", "pub type Wibble { Wibble }"),
find_position_of("-> Int").under_char('I')
);
}
#[test]
fn hover_contextual_type_annotation_unqualified() {
let code = "
import wibble/wobble.{type Wibble}
fn main(wibble: Wibble) {
wibble
}
";
assert_hover!(
TestProject::for_source(code).add_hex_module("wibble/wobble", "pub type Wibble { Wibble }"),
find_position_of(": Wibble").under_char('W')
);
}
#[test]
fn hover_contextual_type_annotation_unqualified_aliased() {
let code = "
import wibble/wobble.{type Wibble as Wubble}
fn main(wibble: Wubble) {
wibble
}
";
assert_hover!(
TestProject::for_source(code).add_hex_module("wibble/wobble", "pub type Wibble { Wibble }"),
find_position_of(": Wubble").under_char('W')
);
}
#[test]
fn hover_contextual_type_annotation_aliased_module() {
let code = "
import wibble/wobble as wubble
fn main(wibble: wubble.Wibble) {
wibble
}
";
assert_hover!(
TestProject::for_source(code).add_hex_module("wibble/wobble", "pub type Wibble { Wibble }"),
find_position_of(": wubble.Wibble").under_char('W')
);
}
#[test]
fn hover_contextual_type_annotation_aliased() {
let code = "
import wibble/wobble
type Wubble = wobble.Wibble
fn main(wibble: Wubble) {
wibble
}
";
assert_hover!(
TestProject::for_source(code).add_hex_module("wibble/wobble", "pub type Wibble { Wibble }"),
find_position_of(": Wubble").under_char('e')
);
}
#[test]
fn hover_print_underlying_for_alias_with_parameters() {
let code = "
type LocalResult = Result(String, Int)
fn do_thing() -> LocalResult {
Error(1)
}
";
assert_hover!(
TestProject::for_source(code),
find_position_of("do_thing").under_char('d')
);
}
#[test]
fn hover_print_alias_when_parameters_match() {
let code = "
type MyResult(a, b) = Result(a, b)
fn do_thing() -> MyResult(Int, Int) {
Error(1)
}
";
assert_hover!(
TestProject::for_source(code),
find_position_of("do_thing").under_char('d')
);
}
#[test]
fn hover_print_underlying_for_imported_alias() {
let code = "
import alias.{type A}
fn wibble() -> Result(Int, String) {
todo
}
";
assert_hover!(
TestProject::for_source(code).add_hex_module("alias", "pub type A = Result(Int, String)"),
find_position_of("wibble").under_char('l')
);
}
#[test]
fn hover_print_aliased_imported_generic_type() {
let code = "
import gleam/option.{type Option as Maybe}
const none: Maybe(Int) = option.None
";
assert_hover!(
TestProject::for_source(code)
.add_hex_module("gleam/option", "pub type Option(a) { None Some(a) }"),
find_position_of("none").under_char('e')
);
}
#[test]
fn hover_print_qualified_prelude_type_when_shadowed_by_alias() {
let code = "
type Result = #(Bool, String)
const ok = Ok(10)
";
assert_hover!(
TestProject::for_source(code),
find_position_of("ok").under_char('k')
);
}
#[test]
fn hover_print_qualified_prelude_type_when_shadowed_by_imported_alias() {
let code = "
import alias.{type Bool}
const value = True
";
assert_hover!(
TestProject::for_source(code).add_hex_module("alias", "pub type Bool = #(Int, Int)"),
find_position_of("value").under_char('v')
);
}
// https://github.com/gleam-lang/gleam/issues/3761
#[test]
fn hover_over_block_in_list_spread() {
let code = "
pub fn main() {
[1, 2, ..{
let x = 1
[x]
}]
}
";
assert_hover!(TestProject::for_source(code), find_position_of("x"));
}
// https://github.com/gleam-lang/gleam/issues/3758
#[test]
fn hover_for_anonymous_function_annotation() {
let code = "
/// An example type.
pub type Wibble
pub fn main() {
fn(w: Wibble) { todo }
}
";
assert_hover!(
TestProject::for_source(code),
find_position_of("w: Wibble").under_char('b')
);
}
#[test]
fn hover_for_label_in_pattern() {
let code = "
type Wibble {
Wibble(wibble: Int, wobble: Int)
}
pub fn main() {
let Wibble(wibble: _, wobble: _) = todo
todo
}
";
assert_hover!(
TestProject::for_source(code),
find_position_of("wibble: _").under_char('l')
);
}
#[test]
fn hover_for_label_in_expression() {
let code = "
fn add(wibble a, wobble b) {
a + b
}
pub fn main() {
add(wibble: 1, wobble: 2)
}
";
assert_hover!(
TestProject::for_source(code),
find_position_of("wibble:").under_char('i')
);
}
#[test]
fn hover_for_pattern_in_use() {
let code = "
type Wibble {
Wibble(Int, Float)
}
pub fn main() {
use Wibble(int, float) <- todo
todo
}
";
assert_hover!(
TestProject::for_source(code),
find_position_of("int").under_char('i')
);
}
#[test]
fn hover_for_annotation_in_use() {
let code = "
pub fn main() {
use something: Int <- todo
todo
}
";
assert_hover!(
TestProject::for_source(code),
find_position_of("Int").under_char('n')
);
}
#[test]
fn hover_on_pipe_with_invalid_step() {
assert_hover!(
"
pub fn main() {
[1, 2, 3]
|> map(wibble)
|> filter(fn(value) { value })
}
fn map(list: List(a), fun: fn(a) -> b) -> List(b) {}
fn filter(list: List(a), fun: fn(a) -> Bool) -> List(a) {}
",
find_position_of("[")
);
}
#[test]
fn hover_on_pipe_with_invalid_step_1() {
assert_hover!(
"
pub fn main() {
[1, 2, 3]
|> map(wibble)
|> filter(fn(value) { value })
}
fn map(list: List(a), fun: fn(a) -> b) -> List(b) {}
fn filter(list: List(a), fun: fn(a) -> Bool) -> List(a) {}
",
find_position_of("1")
);
}
#[test]
fn hover_on_pipe_with_invalid_step_2() {
assert_hover!(
"
pub fn main() {
[1, 2, 3]
|> map(wibble)
|> filter(fn(value) { value })
}
fn map(list: List(a), fun: fn(a) -> b) -> List(b) {}
fn filter(list: List(a), fun: fn(a) -> Bool) -> List(a) {}
",
find_position_of("map")
);
}
#[test]
fn hover_on_pipe_with_invalid_step_3() {
assert_hover!(
"
pub fn main() {
[1, 2, 3]
|> map(wibble)
|> filter(fn(value) { value })
}
fn map(list: List(a), fun: fn(a) -> b) -> List(b) {}
fn filter(list: List(a), fun: fn(a) -> Bool) -> List(a) {}
",
find_position_of("wibble")
);
}
#[test]
fn hover_on_pipe_with_invalid_step_4() {
assert_hover!(
"
pub fn main() {
[1, 2, 3]
|> map(wibble)
|> filter(fn(value) { value })
}
fn map(list: List(a), fun: fn(a) -> b) -> List(b) {}
fn filter(list: List(a), fun: fn(a) -> Bool) -> List(a) {}
",
find_position_of("filter")
);
}
#[test]
fn hover_on_pipe_with_invalid_step_5() {
assert_hover!(
"
pub fn main() {
[1, 2, 3]
|> map(wibble)
|> filter(fn(value) { value })
}
fn map(list: List(a), fun: fn(a) -> b) -> List(b) { todo }
fn filter(list: List(a), fun: fn(a) -> Bool) -> List(a) { todo }
",
find_position_of("fn(value)")
);
}
#[test]
fn hover_on_pipe_with_invalid_step_6() {
assert_hover!(
"
pub fn main() {
[1, 2, 3]
|> wibble
|> filter(fn(value) { value })
}
fn filter(list: List(a), fun: fn(a) -> Bool) -> List(a) { todo }
",
find_position_of("wibble")
);
}
#[test]
fn hover_on_pipe_with_invalid_step_8() {
assert_hover!(
"
pub fn main() {
[1, 2, 3]
|> wibble
|> filter(fn(value) { value })
}
fn filter(list: List(a), fun: fn(a) -> Bool) -> List(a) { todo }
",
find_position_of("fn(value)")
);
}
#[test]
fn hover_over_module_name() {
let src = "
import wibble
pub fn main() {
wibble.wibble()
}
";
assert_hover!(
TestProject::for_source(src).add_hex_module(
"wibble",
"
//// This is the wibble module.
//// Here is some documentation about it.
//// This module does stuff
pub fn wibble() {
todo
}
"
),
find_position_of("wibble.")
);
}
#[test]
fn hover_over_module_with_path() {
let src = "
import wibble/wobble
pub fn main() {
wobble.wibble()
}
";
assert_hover!(
TestProject::for_source(src).add_hex_module(
"wibble/wobble",
"
//// The module documentation
pub fn wibble() {
todo
}
"
),
find_position_of("wobble.")
);
}
#[test]
fn hover_over_module_name_in_annotation() {
let src = "
import wibble
pub fn main(w: wibble.Wibble) {
todo
}
";
assert_hover!(
TestProject::for_source(src).add_hex_module(
"wibble",
"
//// This is the wibble module.
//// Here is some documentation about it.
//// This module does stuff
pub type Wibble
"
),
find_position_of("wibble.")
);
}
#[test]
fn hover_over_imported_module() {
let src = "
import wibble
";
assert_hover!(
TestProject::for_source(src).add_hex_module(
"wibble",
"
//// This is the wibble module.
//// Here is some documentation about it.
//// This module does stuff
"
),
find_position_of("wibble")
);
}
#[test]
fn no_hexdocs_link_when_hovering_over_local_module() {
let src = "
import wibble
";
assert_hover!(
TestProject::for_source(src).add_module(
"wibble",
"
//// This is the wibble module.
//// Here is some documentation about it.
//// This module does stuff
"
),
find_position_of("wibble")
);
}
#[test]
fn hover_for_constant_int() {
assert_hover!(
"
const ten = 10
",
find_position_of("10")
);
}
#[test]
fn hover_for_constant_float() {
assert_hover!(
"
const pi = 3.14
",
find_position_of("3.14")
);
}
#[test]
fn hover_for_constant_string() {
assert_hover!(
r#"
const message = "Hello!"
"#,
find_position_of("!")
);
}
#[test]
fn hover_for_constant_other_constant() {
assert_hover!(
"
const constant1 = 10
const constant2 = constant1
",
find_position_of("= constant1").under_char('s')
);
}
#[test]
fn hover_for_constant_record() {
assert_hover!(
"
type Wibble {
Wibble(Int)
}
const w = Wibble(10)
",
find_position_of("Wibble(10)").under_char('i')
);
}
#[test]
fn hover_for_constant_tuple() {
assert_hover!(
"
const tuple = #(1, 3.5, False)
",
find_position_of("#(")
);
}
#[test]
fn hover_for_constant_tuple_element() {
assert_hover!(
"
const tuple = #(1, 3.5, False)
",
find_position_of("False")
);
}
#[test]
fn hover_for_constant_list() {
assert_hover!(
"
const numbers = [2, 4, 6, 8]
",
find_position_of("[")
);
}
#[test]
fn hover_for_constant_list_element() {
assert_hover!(
"
const numbers = [2, 4, 6, 8]
",
find_position_of("4")
);
}
#[test]
fn hover_for_constant_string_concatenation() {
assert_hover!(
r#"
const name = "Bob"
const message = "Hello " <> name
"#,
find_position_of("<>")
);
}
#[test]
fn hover_for_constant_string_concatenation_side() {
assert_hover!(
r#"
const name = "Bob"
const message = "Hello " <> name
"#,
find_position_of("<> name").under_char('n')
);
}
#[test]
fn hover_for_constant_bit_array() {
assert_hover!(
"
const bits = <<1:2, 3:4>>
",
find_position_of(",")
);
}
#[test]
fn hover_for_constant_bit_array_segment() {
assert_hover!(
"
const bits = <<1:2, 3:4>>
",
find_position_of("1")
);
}
#[test]
fn hover_for_constant_bit_array_segment_option() {
assert_hover!(
"
const bits = <<1:size(2), 3:4>>
",
find_position_of("2")
);
}
#[test]
fn hover_for_nested_constant() {
assert_hover!(
"
type Wibble {
Wibble
Wobble(BitArray)
}
const value = #(1, 2, [Wibble, Wobble(<<1, 2, 3>>), Wibble])
",
find_position_of("3")
);
}
#[test]
fn record_field_documentation() {
assert_hover!(
"
pub type Wibble {
Wibble(
| rust | Apache-2.0 | f424547f02e621f1c5f28749786e05eda7feb098 | 2026-01-04T15:40:22.554517Z | true |
gleam-lang/gleam | https://github.com/gleam-lang/gleam/blob/f424547f02e621f1c5f28749786e05eda7feb098/language-server/src/tests/document_symbols.rs | language-server/src/tests/document_symbols.rs | use insta::assert_debug_snapshot;
use lsp_types::{DocumentSymbol, DocumentSymbolParams};
use super::*;
/// Asks the language-server engine for the document symbols of the test
/// project's main module. The cursor position is irrelevant for this
/// request, so the default (zero) position is used.
fn doc_symbols(tester: TestProject<'_>) -> Vec<DocumentSymbol> {
    tester.at(Position::default(), |engine, param, _| {
        let request = DocumentSymbolParams {
            text_document: param.text_document,
            work_done_progress_params: Default::default(),
            partial_result_params: Default::default(),
        };
        engine
            .document_symbol(request)
            .result
            .unwrap()
    })
}
// A custom type with no constructors still gets a document symbol.
#[test]
fn doc_symbols_type_no_constructors() {
    let code = "
pub type A";
    assert_debug_snapshot!(doc_symbols(TestProject::for_source(code)))
}

// The symbol's full range should start at the doc comment preceding the
// type, not at the `pub type` keyword itself.
#[test]
fn doc_symbols_type_no_constructors_starting_at_documentation() {
    let code = "
/// My type
pub type A";
    assert_debug_snapshot!(doc_symbols(TestProject::for_source(code)))
}

// An empty `///` doc line, preceded by unrelated code, must still anchor
// the start of the symbol's range correctly.
#[test]
fn doc_symbols_type_no_constructors_starting_at_empty_doc() {
    let code = "
// Some prior code...
///
pub type A";
    assert_debug_snapshot!(doc_symbols(TestProject::for_source(code)))
}

// Argument-less constructors (documented or not) appear as child symbols
// of their type.
#[test]
fn doc_symbols_type_constructor_no_args() {
    let code = "
pub type B {
C
D
/// E
E
}";
    assert_debug_snapshot!(doc_symbols(TestProject::for_source(code)))
}
#[test]
fn doc_symbols_type_constructor_pos_args() {
let code = "
pub type B {
C(Int)
/// D
D(List(Int))
/// E
E(
Result(Int, Bool)
)
}";
assert_debug_snapshot!(doc_symbols(TestProject::for_source(code)))
}
#[test]
fn doc_symbols_type_constructor_labeled_args() {
let code = "
pub type B {
C(argc: Int)
/// D
D(argd: List(Int))
/// E
E(
/// Arg
arge: Result(Int, Bool)
)
}";
assert_debug_snapshot!(doc_symbols(TestProject::for_source(code)))
}
#[test]
fn doc_symbols_type_constructor_pos_and_labeled_args() {
let code = "
pub type B {
C(Int, argc: Int)
/// D
D(Int, argd: List(Int))
/// E
E(
Int,
/// Arg
arge: Result(Int, Bool)
)
}";
assert_debug_snapshot!(doc_symbols(TestProject::for_source(code)))
}
#[test]
fn doc_symbols_type_alias() {
let code = "
/// DOC
pub type FFF = Int
pub type FFFF = List(Int)";
assert_debug_snapshot!(doc_symbols(TestProject::for_source(code)))
}
#[test]
fn doc_symbols_function() {
let code = "
/// DOC
pub fn super_func(a: Int) -> List(Int) {
[a + 5]
}
pub fn super_func2(a: Int) -> List(Int) {
[a + 5]
}";
assert_debug_snapshot!(doc_symbols(TestProject::for_source(code)))
}
#[test]
fn doc_symbols_constant() {
let code = "
/// DOC
pub const my_const = 5
pub const my_const2 = [25]";
assert_debug_snapshot!(doc_symbols(TestProject::for_source(code)))
}
| rust | Apache-2.0 | f424547f02e621f1c5f28749786e05eda7feb098 | 2026-01-04T15:40:22.554517Z | false |
gleam-lang/gleam | https://github.com/gleam-lang/gleam/blob/f424547f02e621f1c5f28749786e05eda7feb098/language-server/src/tests/compilation.rs | language-server/src/tests/compilation.rs | use crate::engine::Compilation;
use super::*;
// Happy path: a fresh project with no modules compiles successfully and the
// test IO records the expected dependency-download / lock / compile action
// sequence.
#[test]
fn compile_please() {
    let io = LanguageServerTestIO::new();
    let mut engine = setup_engine(&io);
    let response = engine.compile_please();
    assert!(response.result.is_ok());
    assert!(response.warnings.is_empty());
    // Compilation ran, but no source files needed (re)compiling.
    assert_eq!(response.compilation, Compilation::Yes(vec![]));
    // Drop the engine so the IO recorder can be consumed.
    drop(engine);
    let actions = io.into_actions();
    assert_eq!(
        actions,
        vec![
            // new
            Action::DependencyDownloadingStarted,
            Action::DownloadDependencies,
            Action::DependencyDownloadingFinished,
            Action::LockBuild,
            Action::UnlockBuild,
            // compile_please
            Action::CompilationStarted,
            Action::LockBuild,
            Action::UnlockBuild,
            Action::CompilationFinished,
        ]
    )
}
#[test]
fn compile_error_in_src() {
let io = LanguageServerTestIO::new();
let mut engine = setup_engine(&io);
_ = io.src_module("app/error", "pub type Error {");
let response = engine.compile_please();
assert!(response.result.is_err());
assert!(response.warnings.is_empty());
assert_eq!(response.compilation, Compilation::Yes(vec![]));
drop(engine);
let actions = io.into_actions();
assert_eq!(
actions,
vec![
// new
Action::DependencyDownloadingStarted,
Action::DownloadDependencies,
Action::DependencyDownloadingFinished,
Action::LockBuild,
Action::UnlockBuild,
// compile_please
Action::CompilationStarted,
Action::LockBuild,
Action::UnlockBuild,
Action::CompilationFinished,
]
)
}
#[test]
fn compile_error_in_test() {
let io = LanguageServerTestIO::new();
let mut engine = setup_engine(&io);
_ = io.test_module("app/error", "pub type Error {");
let response = engine.compile_please();
assert!(response.result.is_err());
assert!(response.warnings.is_empty());
assert_eq!(response.compilation, Compilation::Yes(vec![]));
drop(engine);
let actions = io.into_actions();
assert_eq!(
actions,
vec![
// new
Action::DependencyDownloadingStarted,
Action::DownloadDependencies,
Action::DependencyDownloadingFinished,
Action::LockBuild,
Action::UnlockBuild,
// compile_please
Action::CompilationStarted,
Action::LockBuild,
Action::UnlockBuild,
Action::CompilationFinished,
]
)
}
#[test]
fn compile_error_in_dev() {
let io = LanguageServerTestIO::new();
let mut engine = setup_engine(&io);
_ = io.dev_module("app/error", "pub type Error {");
let response = engine.compile_please();
assert!(response.result.is_err());
assert!(response.warnings.is_empty());
assert_eq!(response.compilation, Compilation::Yes(vec![]));
drop(engine);
let actions = io.into_actions();
assert_eq!(
actions,
vec![
// new
Action::DependencyDownloadingStarted,
Action::DownloadDependencies,
Action::DependencyDownloadingFinished,
Action::LockBuild,
Action::UnlockBuild,
// compile_please
Action::CompilationStarted,
Action::LockBuild,
Action::UnlockBuild,
Action::CompilationFinished,
]
)
}
// Incremental compilation for project source: a module is recompiled only
// when its source has changed since the previous `compile_please` call.
#[test]
fn compile_recompile() {
    let io = LanguageServerTestIO::new();
    let mut engine = setup_engine(&io);
    let path = io.src_module("app", "pub fn main() { 0 }");
    // The first time it compiles.
    let response = engine.compile_please();
    assert!(response.result.is_ok());
    assert!(response.warnings.is_empty());
    assert_eq!(response.compilation, Compilation::Yes(vec![path.clone()]));
    // The source file has been updated, so the file is compiled again.
    _ = io.src_module("app", "pub fn main() { 1 }");
    let response = engine.compile_please();
    assert!(response.result.is_ok());
    assert!(response.warnings.is_empty());
    assert_eq!(response.compilation, Compilation::Yes(vec![path]));
    // This time it does not compile the module again, instead using the
    // cache from the previous run.
    let response = engine.compile_please();
    assert_eq!(response.result, Ok(()));
    assert!(response.warnings.is_empty());
    assert_eq!(response.compilation, Compilation::Yes(vec![]));
    drop(engine);
    let actions = io.into_actions();
    // Three compile_please calls -> three compile action groups, regardless
    // of whether any module actually needed recompiling.
    assert_eq!(
        actions,
        vec![
            // new
            Action::DependencyDownloadingStarted,
            Action::DownloadDependencies,
            Action::DependencyDownloadingFinished,
            Action::LockBuild,
            Action::UnlockBuild,
            // compile_please
            Action::CompilationStarted,
            Action::LockBuild,
            Action::UnlockBuild,
            Action::CompilationFinished,
            // compile_please
            Action::CompilationStarted,
            Action::LockBuild,
            Action::UnlockBuild,
            Action::CompilationFinished,
            // compile_please
            Action::CompilationStarted,
            Action::LockBuild,
            Action::UnlockBuild,
            Action::CompilationFinished,
        ]
    )
}
// Same as `compile_recompile`, but for a module living in a path
// dependency rather than in the project's own `src` directory.
#[test]
fn dep_compile_recompile() {
    let io = LanguageServerTestIO::new();
    let mut engine = setup_engine(&io);
    add_path_dep(&mut engine, "mydep");
    let path = io.path_dep_module("mydep", "moddy", "pub fn main() { 0 }");
    // The first time it compiles.
    let response = engine.compile_please();
    assert!(response.result.is_ok());
    assert!(response.warnings.is_empty());
    assert_eq!(response.compilation, Compilation::Yes(vec![path.clone()]));
    // The dependency package must have been registered with the compiler.
    assert!(!engine.compiler.project_compiler.packages.is_empty());
    // The source file has been updated, so the file is compiled again.
    _ = io.path_dep_module("mydep", "moddy", "pub fn main() { 1 }");
    let response = engine.compile_please();
    assert!(response.result.is_ok());
    assert!(response.warnings.is_empty());
    assert_eq!(response.compilation, Compilation::Yes(vec![path]));
    // This time it does not compile the module again, instead using the
    // cache from the previous run.
    let response = engine.compile_please();
    assert!(response.result.is_ok());
    assert!(response.warnings.is_empty());
    assert_eq!(response.compilation, Compilation::Yes(vec![]));
    drop(engine);
    let actions = io.into_actions();
    assert_eq!(
        actions,
        vec![
            // new
            Action::DependencyDownloadingStarted,
            Action::DownloadDependencies,
            Action::DependencyDownloadingFinished,
            Action::LockBuild,
            Action::UnlockBuild,
            // compile_please
            Action::CompilationStarted,
            Action::LockBuild,
            Action::UnlockBuild,
            Action::CompilationFinished,
            // compile_please
            Action::CompilationStarted,
            Action::LockBuild,
            Action::UnlockBuild,
            Action::CompilationFinished,
            // compile_please
            Action::CompilationStarted,
            Action::LockBuild,
            Action::UnlockBuild,
            Action::CompilationFinished,
        ]
    )
}
| rust | Apache-2.0 | f424547f02e621f1c5f28749786e05eda7feb098 | 2026-01-04T15:40:22.554517Z | false |
gleam-lang/gleam | https://github.com/gleam-lang/gleam/blob/f424547f02e621f1c5f28749786e05eda7feb098/language-server/src/tests/definition.rs | language-server/src/tests/definition.rs | use lsp_types::{
GotoDefinitionParams, Location, Position, Range, Url, request::GotoTypeDefinitionParams,
};
use super::*;
/// Performs a go-to-definition request at `position` in the project's main
/// module, returning the target location (or `None` when there is nothing
/// to jump to).
fn definition(tester: &TestProject<'_>, position: Position) -> Option<Location> {
    tester.at(position, |engine, text_document_position_params, _| {
        engine
            .goto_definition(GotoDefinitionParams {
                text_document_position_params,
                work_done_progress_params: Default::default(),
                partial_result_params: Default::default(),
            })
            .result
            .unwrap()
    })
}
/// Renders a go-to-definition jump as a snapshot-friendly string showing
/// both the cursor in the source and the destination it resolves to.
/// Panics when the engine reports no definition at the found position.
fn pretty_definition(project: TestProject<'_>, position_finder: PositionFinder) -> String {
    let position = position_finder.find_position(project.src);
    let location = definition(&project, position).expect("a location to jump to");
    jump_locations_to_string(project, position, vec![location])
}
/// Performs a goto-type-definition request at `position` in the project's
/// main module. May return several locations, e.g. one per type mentioned
/// in a tuple annotation.
fn type_definition(tester: &TestProject<'_>, position: Position) -> Vec<Location> {
    tester.at(position, |engine, text_document_position_params, _| {
        engine
            .goto_type_definition(GotoTypeDefinitionParams {
                text_document_position_params,
                work_done_progress_params: Default::default(),
                partial_result_params: Default::default(),
            })
            .result
            .unwrap()
    })
}
/// Renders the goto-type-definition result(s) at the found position as a
/// snapshot string, prefixed with a header naming the operation so type
/// jumps are distinguishable from plain definition jumps in snapshots.
fn pretty_type_definition(project: TestProject<'_>, position_finder: PositionFinder) -> String {
    let position = position_finder.find_position(project.src);
    // Unlike plain go-to-definition this may yield multiple destinations.
    let location = type_definition(&project, position);
    format!(
        "Jumping to type definition\n\n{}",
        jump_locations_to_string(project, position, location)
    )
}
/// Renders a jump — from a cursor position in the main module to one or
/// more destination locations — as a single human-readable string used by
/// the goto snapshot tests.
fn jump_locations_to_string(
    project: TestProject<'_>,
    original_position: Position,
    locations: Vec<Location>,
) -> String {
    // Show the cursor (a zero-width range) in the source the jump starts from.
    let src = hover::show_hover(
        project.src,
        Range {
            start: original_position,
            end: original_position,
        },
        original_position,
    );
    let destinations = locations
        .iter()
        .map(|location| {
            let pretty_destination = location
                .uri
                .path_segments()
                .expect("a location to jump to")
                // To make snapshots the same both on windows and unix systems we need
                // to discard windows' `C:` path segment at the beginning of a uri.
                .skip_while(|segment| *segment == "C:")
                .join("/");
            // Highlight the jumped-to range inside the destination module's source.
            let destination_code = hover::show_hover(
                project
                    .src_from_module_url(&location.uri)
                    .expect("a module to jump to"),
                location.range,
                location.range.start,
            );
            format!(
                "----- Jumped to `{pretty_destination}`
{destination_code}"
            )
        })
        .join("\n\n");
    format!(
        "----- Jumping from `src/app.gleam`
{src}
{destinations}",
    )
}
/// Snapshot-asserts the go-to-definition result at the given position.
/// Accepts either a plain source literal (wrapped in a fresh `TestProject`
/// by the first arm) or a pre-built project expression.
#[macro_export]
macro_rules! assert_goto {
    ($src:literal, $position:expr) => {
        let project = TestProject::for_source($src);
        assert_goto!(project, $position);
    };
    ($project:expr, $position:expr) => {
        let output = pretty_definition($project, $position);
        insta::assert_snapshot!(insta::internals::AutoName, output);
    };
}
#[macro_export]
macro_rules! assert_goto_type {
($src:literal, $position:expr) => {
let project = TestProject::for_source($src);
assert_goto_type!(project, $position);
};
($project:expr, $position:expr) => {
let output = pretty_type_definition($project, $position);
insta::assert_snapshot!(insta::internals::AutoName, output);
};
}
#[test]
fn goto_type_definition_in_same_file() {
assert_goto_type!(
"
pub type Wibble {
Wibble
}
pub fn main() {
let x = Wibble
x
}",
find_position_of("x").nth_occurrence(2)
);
}
#[test]
fn goto_type_definition_in_different_file_of_same_project() {
let src = "
import wibble.{type Wibble}
pub fn main() {
use_wibble(todo)
}
pub fn use_wibble(wibble: Wibble) { todo }
";
assert_goto_type!(
TestProject::for_source(src).add_module("wibble", "pub type Wibble"),
find_position_of("todo")
);
}
#[test]
fn goto_type_definition_in_different_file_of_dependency() {
let src = "
import wibble.{type Wibble}
pub fn main() {
use_wibble(todo)
}
pub fn use_wibble(wibble: Wibble) { todo }
";
assert_goto_type!(
TestProject::for_source(src).add_dep_module("wibble", "pub type Wibble"),
find_position_of("todo")
);
}
#[test]
fn goto_type_definition_can_jump_to_multiple_types() {
let src = "
import wibble.{type Wibble, Wibble}
import box.{Box}
pub fn main() {
let a = Box(Wibble)
}
";
assert_goto_type!(
TestProject::for_source(src)
.add_dep_module("wibble", "pub type Wibble { Wibble }")
.add_dep_module("box", "pub type Box(a) { Box(a) }"),
find_position_of("let a")
);
}
#[test]
fn goto_type_definition_can_jump_to_all_types_in_a_tuple() {
let src = "
import wibble.{type Wibble}
import wobble.{type Wobble}
import box.{type Box}
pub fn main() {
let a: #(Box(Wibble), Wobble) = todo
}
";
assert_goto_type!(
TestProject::for_source(src)
.add_dep_module("wibble", "pub type Wibble { Wibble }")
.add_dep_module("wobble", "pub type Wobble { Wobble }")
.add_dep_module("box", "pub type Box(a) { Box(a) }"),
find_position_of("let a")
);
}
#[test]
fn goto_type_definition_can_jump_to_all_types_in_a_function_type() {
let src = "
import wibble.{type Wibble}
import wobble.{type Wobble}
import box.{type Box}
pub fn main() {
let a = fn(wibble: Wibble) { box.Box(wobble.Wobble) }
}
";
assert_goto_type!(
TestProject::for_source(src)
.add_dep_module("wibble", "pub type Wibble { Wibble }")
.add_dep_module("wobble", "pub type Wobble { Wobble }")
.add_dep_module("box", "pub type Box(a) { Box(a) }"),
find_position_of("let a")
);
}
#[test]
fn goto_definition_local_variable() {
assert_goto!(
"
pub fn main() {
let x = 1
x
}",
find_position_of("x").nth_occurrence(2)
);
}
#[test]
fn goto_definition_record_update() {
assert_goto!(
"
pub type Wibble { Wibble(one: Int, two: Int) }
pub fn main() {
Wibble(..todo, one: 1)
}",
find_position_of("Wibble").nth_occurrence(3)
);
}
#[test]
fn goto_definition_same_module_constants() {
assert_goto!(
"
const x = 1
pub fn main() {
x
}",
find_position_of("x").nth_occurrence(2)
);
}
#[test]
fn goto_definition_same_module_functions() {
assert_goto!(
"
fn add_2(x) {
x + 2
}
pub fn main() {
add_2(1)
}",
find_position_of("add_2(1)")
);
}
#[test]
fn goto_definition_same_module_records() {
assert_goto!(
"
pub type Rec {
Var1(Int)
Var2(Int, Int)
}
pub fn main() {
let a = Var1(1)
let b = Var2(2, 3)
}",
find_position_of("Var1(1)")
);
}
#[test]
fn goto_definition_imported_module_constants() {
let code = "
import example_module
fn main() {
example_module.my_num
}
";
assert_goto!(
TestProject::for_source(code).add_module("example_module", "pub const my_num = 1"),
find_position_of("my_num")
);
}
#[test]
fn goto_definition_unqualified_imported_module_constants() {
let code = "
import example_module.{my_num}
fn main() {
my_num
}
";
assert_goto!(
TestProject::for_source(code).add_module("example_module", "pub const my_num = 1"),
find_position_of("my_num").nth_occurrence(2)
);
}
#[test]
fn goto_definition_module_function_calls() {
let code = "
import example_module
fn main() {
example_module.my_fn
}
";
assert_goto!(
TestProject::for_source(code).add_module("example_module", "pub fn my_fn() { Nil }"),
find_position_of("my_fn")
);
}
#[test]
fn goto_definition_imported_module_records() {
let dep_src = "
pub type Rec {
Var1(Int)
Var2(Int, Int)
}";
let code = "
import example_module
fn main() {
example_module.Var1(1)
}
";
assert_goto!(
TestProject::for_source(code).add_module("example_module", dep_src),
find_position_of("Var1(1)")
);
}
#[test]
fn goto_definition_unqualified_imported_module_records() {
let dep_src = "
pub type Rec {
Var1(Int)
Var2(Int, Int)
}";
let code = "
import example_module.{Var1}
fn main() {
Var1(1)
}
";
assert_goto!(
TestProject::for_source(code).add_module("example_module", dep_src),
find_position_of("Var1(1)").under_char('a')
);
}
#[test]
fn goto_definition_external_module_constants() {
let code = "
import example_module
fn main() {
example_module.my_num
}
";
assert_goto!(
TestProject::for_source(code).add_hex_module("example_module", "pub const my_num = 1"),
find_position_of("my_num").under_char('u')
);
}
#[test]
fn goto_definition_external_module_function_calls() {
let code = "
import example_module
fn main() {
example_module.my_fn
}
";
assert_goto!(
TestProject::for_source(code).add_hex_module("example_module", "pub fn my_fn() { Nil }"),
find_position_of("my_fn")
);
}
#[test]
fn goto_definition_external_module_function_calls_with_multiple_compiles() {
let dep = "pub fn my_fn() { Nil }";
let code = "
import example_module
fn main() {
example_module.my_fn
}
";
let (mut engine, position_param) = TestProject::for_source(code)
.add_hex_module("example_module", dep)
.positioned_with_io(Position::new(3, 20));
let params = GotoDefinitionParams {
text_document_position_params: position_param.clone(),
work_done_progress_params: Default::default(),
partial_result_params: Default::default(),
};
let response = engine.goto_definition(params.clone());
let response = response.result.unwrap();
assert_eq!(
response,
Some(Location {
uri: Url::from_file_path(Utf8PathBuf::from(if cfg!(target_family = "windows") {
r"\\?\C:\build\packages\hex\src\example_module.gleam"
} else {
"/build/packages/hex/src/example_module.gleam"
}))
.unwrap(),
range: Range {
start: Position {
line: 0,
character: 0
},
end: Position {
line: 0,
character: 14
}
}
})
);
engine.compiler.sources.clear();
let response = engine.compile_please();
assert!(response.result.is_ok());
let response = engine.goto_definition(params.clone());
let response = response.result.unwrap();
assert_eq!(
response,
Some(Location {
uri: Url::from_file_path(Utf8PathBuf::from(if cfg!(target_family = "windows") {
r"\\?\C:\build\packages\hex\src\example_module.gleam"
} else {
"/build/packages/hex/src/example_module.gleam"
}))
.unwrap(),
range: Range {
start: Position {
line: 0,
character: 0
},
end: Position {
line: 0,
character: 14
}
}
})
)
}
#[test]
fn goto_definition_path_module_function_calls_with_multiple_compiles() {
let dep = "pub fn my_fn() { Nil }";
let code = "
import example_module
fn main() {
example_module.my_fn
}
";
let (mut engine, position_param) = TestProject::for_source(code)
.add_dep_module("example_module", dep)
.positioned_with_io(Position::new(3, 20));
let params = GotoDefinitionParams {
text_document_position_params: position_param.clone(),
work_done_progress_params: Default::default(),
partial_result_params: Default::default(),
};
let response = engine.goto_definition(params.clone());
let response = response.result.unwrap();
assert_eq!(
response,
Some(Location {
uri: Url::from_file_path(Utf8PathBuf::from(if cfg!(target_family = "windows") {
r"\\?\C:\dep\src\example_module.gleam"
} else {
"/dep/src/example_module.gleam"
}))
.unwrap(),
range: Range {
start: Position {
line: 0,
character: 0
},
end: Position {
line: 0,
character: 14
}
}
})
);
engine.compiler.sources.clear();
let response = engine.compile_please();
assert!(response.result.is_ok());
let response = engine.goto_definition(params.clone());
let response = response.result.unwrap();
assert_eq!(
response,
Some(Location {
uri: Url::from_file_path(Utf8PathBuf::from(if cfg!(target_family = "windows") {
r"\\?\C:\dep\src\example_module.gleam"
} else {
"/dep/src/example_module.gleam"
}))
.unwrap(),
range: Range {
start: Position {
line: 0,
character: 0
},
end: Position {
line: 0,
character: 14
}
}
})
)
}
#[test]
fn goto_definition_external_module_records() {
let hex_src = "
pub type Rec {
Var1(Int)
Var2(Int, Int)
}
";
let code = "
import example_module
fn main() {
example_module.Var1(1)
}
";
assert_goto!(
TestProject::for_source(code).add_hex_module("example_module", hex_src),
find_position_of("Var1(1)").under_char('r')
);
}
#[test]
fn goto_definition_path_module_function_calls() {
let code = "
import example_module
fn main() {
example_module.my_fn
}
";
assert_goto!(
TestProject::for_source(code).add_dep_module("example_module", "pub fn my_fn() { Nil }"),
find_position_of("my_fn").under_char('y')
);
}
#[test]
fn goto_definition_type() {
assert_goto!(
"
pub type Rec {
Var1(Int)
Var2(Int, Int)
}
pub fn make_var() -> Rec {
Var1(1)
}",
find_position_of("Rec").nth_occurrence(2)
);
}
#[test]
fn goto_definition_type_in_module() {
let hex_src = "
pub type Rec {
Var1(Int)
Var2(Int, Int)
}
";
let code = "
import example_module
fn make_var() -> example_module.Rec {
example_module.Var1(1)
}
";
assert_goto!(
TestProject::for_source(code).add_hex_module("example_module", hex_src),
find_position_of("Rec")
);
}
#[test]
fn goto_definition_type_in_path_dep() {
let dep = "
pub type Rec {
Var1(Int)
Var2(Int, Int)
}
";
let code = "
import example_module
fn make_var() -> example_module.Rec {
example_module.Var1(1)
}
";
assert_goto!(
TestProject::for_source(code).add_dep_module("example_module", dep),
find_position_of("Rec")
);
}
#[test]
fn goto_definition_deep_type_in_module() {
let hex_src = "
pub type Wobble {
Wobble(Int)
}
pub type Wibble(a) {
Wibble(a)
}
pub type Wabble(a) {
Wabble(a)
}
";
let code = "
import example_module
fn make_var() -> example_module.Wabble(example_module.Wibble(example_module.Wobble)) {
example_module.Wabble(example_module.Wibble(example_module.Wobble(1)))
}
";
assert_goto!(
TestProject::for_source(code).add_hex_module("example_module", hex_src),
find_position_of("Wobble").under_char('o')
);
}
#[test]
fn goto_definition_import() {
let code = "
import example_module
fn main() {
example_module.my_num
}
";
assert_goto!(
TestProject::for_source(code).add_module("example_module", "pub const my_num = 1"),
find_position_of("example_module").under_char('p')
);
}
#[test]
fn goto_definition_import_aliased() {
let code = "
import example_module as example
fn main() {
example.my_num
}
";
assert_goto!(
TestProject::for_source(code).add_module("example_module", "pub const my_num = 1"),
find_position_of("example")
.nth_occurrence(2)
.under_char('x')
);
}
#[test]
fn goto_definition_import_unqualified_value() {
let code = "
import example_module.{my_num}
fn main() {
my_num
}
";
assert_goto!(
TestProject::for_source(code).add_module("example_module", "pub const my_num = 1"),
find_position_of("my_num").under_char('_')
);
}
#[test]
fn goto_definition_unqualified_function() {
let code = "
import wibble.{wobble}
fn main() {
wobble()
}
";
assert_goto!(
TestProject::for_source(code).add_module("wibble", "pub fn wobble() {}"),
find_position_of("wobble").nth_occurrence(2).under_char('o')
);
}
#[test]
fn goto_definition_import_unqualified_type() {
let code = "
import example_module.{type MyType}
fn main() -> MyType {
0
}
";
assert_goto!(
TestProject::for_source(code).add_module("example_module", "pub type MyType = Int"),
find_position_of("MyType").under_char('T')
);
}
// https://github.com/gleam-lang/gleam/issues/3610
#[test]
fn goto_definition_of_external_function_in_same_module() {
let code = "
@external(erlang, \"wibble\", \"wobble\")
fn external_function() -> Nil
fn main() {
external_function()
}
";
assert_goto!(
TestProject::for_source(code),
find_position_of("external_function")
.nth_occurrence(2)
.under_char('l')
);
}
// https://github.com/gleam-lang/gleam/issues/3758
#[test]
fn goto_definition_from_anonymous_function() {
let code = "
pub type Wibble
pub fn main() {
fn(w: Wibble) { todo }
}
";
assert_goto!(
TestProject::for_source(code),
find_position_of("w: Wibble").under_char('i')
);
}
#[test]
fn goto_definition_module() {
let code = "
import wibble
pub fn main() {
wibble.wibble()
}
";
assert_goto!(
TestProject::for_source(code).add_module("wibble", "pub fn wibble() {}"),
find_position_of("wibble.").under_char('i')
);
}
#[test]
fn goto_definition_constant() {
assert_goto!(
"
const value = 25
const my_constant = value
",
find_position_of("= value").under_char('a')
);
}
#[test]
fn goto_definition_constant_record() {
assert_goto!(
"
type Wibble {
Wibble(Int)
}
const wibble = Wibble(10)
",
find_position_of("Wibble(10)").under_char('l')
);
}
#[test]
fn goto_definition_imported_constant() {
let src = "
import wibble
const my_constant = wibble.value
";
assert_goto!(
TestProject::for_source(src).add_hex_module("wibble", "pub const value = 10"),
find_position_of("= wibble").under_char('w')
);
}
#[test]
fn goto_definition_constant_imported_record() {
let src = "
import wibble
const my_constant = wibble.Wibble(10)
";
assert_goto!(
TestProject::for_source(src).add_hex_module("wibble", "pub type Wibble { Wibble(Int) }"),
find_position_of("= wibble").under_char('w')
);
}
#[test]
fn goto_definition_from_alternative_pattern() {
assert_goto!(
"
type Wibble {
Wibble
Wobble
}
fn warble(wibble: Wibble) {
case wibble {
Wibble | Wobble -> 0
}
}
",
find_position_of("Wobble ->")
);
}
| rust | Apache-2.0 | f424547f02e621f1c5f28749786e05eda7feb098 | 2026-01-04T15:40:22.554517Z | false |
gleam-lang/gleam | https://github.com/gleam-lang/gleam/blob/f424547f02e621f1c5f28749786e05eda7feb098/language-server/src/tests/action.rs | language-server/src/tests/action.rs | use itertools::Itertools;
use lsp_types::{
CodeActionContext, CodeActionParams, PartialResultParams, Position, Range, Url,
WorkDoneProgressParams,
};
use super::*;
fn code_actions(tester: &TestProject<'_>, range: Range) -> Option<Vec<lsp_types::CodeAction>> {
let position = Position {
line: 0,
character: 0,
};
tester.at(position, |engine, params, _| {
let params = CodeActionParams {
text_document: params.text_document,
range,
context: CodeActionContext::default(),
work_done_progress_params: WorkDoneProgressParams::default(),
partial_result_params: PartialResultParams::default(),
};
engine.code_actions(params).result.unwrap()
})
}
fn actions_with_title(
titles: Vec<&str>,
tester: &TestProject<'_>,
range: Range,
) -> Vec<lsp_types::CodeAction> {
code_actions(tester, range)
.into_iter()
.flatten()
.filter(|action| titles.contains(&action.title.as_str()))
.collect_vec()
}
fn owned_actions_with_title(
titles: Vec<&str>,
tester: TestProject<'_>,
range: Range,
) -> Vec<lsp_types::CodeAction> {
actions_with_title(titles, &tester, range)
}
fn apply_code_action(title: &str, tester: TestProject<'_>, range: Range) -> String {
let titles = vec![title];
let changes = actions_with_title(titles, &tester, range)
.pop()
.expect("No action with the given title")
.edit
.expect("No workspace edit found")
.changes
.expect("No text edit found");
apply_code_edit(tester, changes)
}
fn apply_code_edit(
tester: TestProject<'_>,
changes: HashMap<Url, Vec<lsp_types::TextEdit>>,
) -> String {
let mut changed_files: HashMap<Url, String> = HashMap::new();
for (uri, change) in changes {
let code = match changed_files.get(&uri) {
Some(code) => code,
None => tester
.src_from_module_url(&uri)
.expect(&format!("no src for url {:?}", uri)),
};
let code = super::apply_code_edit(code, change);
let _ = changed_files.insert(uri, code);
}
show_code_edits(tester, changed_files)
}
fn show_code_edits(tester: TestProject<'_>, changed_files: HashMap<Url, String>) -> String {
let format_code = |url: &Url, code: &String| {
format!(
"// --- Edits applied to module '{}'\n{}",
tester.module_name_from_url(url).expect("a module"),
code
)
};
// If the file that changed is the main one we just show its code.
if changed_files.len() == 1 {
let mut changed = changed_files.iter().peekable();
let (url, code) = changed.peek().unwrap();
if tester.module_name_from_url(url) == Some("app".into()) {
code.to_string()
} else {
format_code(url, code)
}
} else {
// If more than a single file changed we want to add the name of the
// file before each!
changed_files
.iter()
.map(|(url, code)| format_code(url, code))
.join("\n")
}
}
const REMOVE_UNUSED_IMPORTS: &str = "Remove unused imports";
const REMOVE_REDUNDANT_TUPLES: &str = "Remove redundant tuples";
const CONVERT_TO_CASE: &str = "Convert to case";
const USE_LABEL_SHORTHAND_SYNTAX: &str = "Use label shorthand syntax";
const FILL_LABELS: &str = "Fill labels";
const ASSIGN_UNUSED_RESULT: &str = "Assign unused Result value to `_`";
const ADD_MISSING_PATTERNS: &str = "Add missing patterns";
const ADD_ANNOTATION: &str = "Add type annotation";
const ADD_ANNOTATIONS: &str = "Add type annotations";
const ANNOTATE_TOP_LEVEL_DEFINITIONS: &str = "Annotate all top level definitions";
const CONVERT_FROM_USE: &str = "Convert from `use`";
const CONVERT_TO_USE: &str = "Convert to `use`";
const EXTRACT_VARIABLE: &str = "Extract variable";
const EXTRACT_CONSTANT: &str = "Extract constant";
const EXPAND_FUNCTION_CAPTURE: &str = "Expand function capture";
const GENERATE_DYNAMIC_DECODER: &str = "Generate dynamic decoder";
const GENERATE_TO_JSON_FUNCTION: &str = "Generate to-JSON function";
const PATTERN_MATCH_ON_ARGUMENT: &str = "Pattern match on argument";
const PATTERN_MATCH_ON_VARIABLE: &str = "Pattern match on variable";
const GENERATE_FUNCTION: &str = "Generate function";
const CONVERT_TO_FUNCTION_CALL: &str = "Convert to function call";
const INLINE_VARIABLE: &str = "Inline variable";
const CONVERT_TO_PIPE: &str = "Convert to pipe";
const INTERPOLATE_STRING: &str = "Interpolate string";
const FILL_UNUSED_FIELDS: &str = "Fill unused fields";
const REMOVE_ALL_ECHOS_FROM_THIS_MODULE: &str = "Remove all `echo`s from this module";
const WRAP_IN_BLOCK: &str = "Wrap in block";
const GENERATE_VARIANT: &str = "Generate variant";
const REMOVE_BLOCK: &str = "Remove block";
const REMOVE_OPAQUE_FROM_PRIVATE_TYPE: &str = "Remove opaque from private type";
const COLLAPSE_NESTED_CASE: &str = "Collapse nested case";
const REMOVE_UNREACHABLE_CLAUSES: &str = "Remove unreachable clauses";
const ADD_OMITTED_LABELS: &str = "Add omitted labels";
const EXTRACT_FUNCTION: &str = "Extract function";
const MERGE_CASE_BRANCHES: &str = "Merge case branches";
macro_rules! assert_code_action {
($title:expr, $code:literal, $range:expr $(,)?) => {
let project = TestProject::for_source($code);
assert_code_action!($title, project, $range);
};
($title:expr, $project:expr, $range:expr $(,)?) => {
let src = $project.src;
let range = $range.find_range(src);
let result = apply_code_action($title, $project, range);
let output = format!(
"----- BEFORE ACTION\n{}\n\n----- AFTER ACTION\n{}",
hover::show_hover(src, range, range.end),
result
);
insta::assert_snapshot!(insta::internals::AutoName, output, src);
};
}
macro_rules! assert_no_code_actions {
($title:ident $(| $titles:ident)*, $code:literal, $range:expr $(,)?) => {
let project = TestProject::for_source($code);
assert_no_code_actions!($title $(| $titles)*, project, $range);
};
($title:ident $(| $titles:ident)*, $project:expr, $range:expr $(,)?) => {
let src = $project.src;
let range = $range.find_range(src);
let all_titles = vec![$title $(, $titles)*];
let expected: Vec<lsp_types::CodeAction> = vec![];
let result = owned_actions_with_title(all_titles, $project, range);
assert_eq!(expected, result);
};
}
#[test]
fn fix_truncated_segment_1() {
let name = "Replace with `1`";
assert_code_action!(
name,
r#"
pub fn main() {
<<1, 257, 259:size(1)>>
}"#,
find_position_of("257").to_selection()
);
}
#[test]
fn fix_truncated_segment_2() {
let name = "Replace with `0`";
assert_code_action!(
name,
r#"
pub fn main() {
<<1, 1024:size(10)>>
}"#,
find_position_of("size").to_selection()
);
}
#[test]
fn generate_variant_with_fields_in_same_module() {
assert_code_action!(
GENERATE_VARIANT,
r#"
pub type Wibble {
Wibble
}
pub fn main() -> Wibble {
Wobble(1)
}"#,
find_position_of("Wobble").to_selection()
);
}
#[test]
fn generate_variant_with_no_fields_in_same_module() {
assert_code_action!(
GENERATE_VARIANT,
r#"
pub type Wibble {
Wibble
}
pub fn main() -> Wibble {
Wobble
}"#,
find_position_of("Wobble").to_selection()
);
}
#[test]
fn generate_variant_with_labels_in_same_module() {
assert_code_action!(
GENERATE_VARIANT,
r#"
pub type Wibble {
Wibble
}
pub fn main() -> Wibble {
Wobble("hello", label: 1)
}"#,
find_position_of("Wobble").to_selection()
);
}
#[test]
fn generate_variant_from_pattern_with_fields() {
assert_code_action!(
GENERATE_VARIANT,
r#"
pub type Wibble {
Wibble
}
pub fn new() { Wibble }
pub fn main() -> Wibble {
let assert Wobble(1) = new()
}
"#,
find_position_of("Wobble").to_selection()
);
}
#[test]
fn generate_variant_from_pattern_with_labelled_fields() {
assert_code_action!(
GENERATE_VARIANT,
r#"
pub type Wibble {
Wibble
}
pub fn new() { Wibble }
pub fn main() -> Wibble {
let assert Wobble("hello", label: 1) = new()
}
"#,
find_position_of("Wobble").to_selection()
);
}
#[test]
fn generate_variant_from_pattern_with_no_fields() {
assert_code_action!(
GENERATE_VARIANT,
r#"
pub type Wibble {
Wibble
}
pub fn new() { Wibble }
pub fn main() -> Wibble {
let assert Wobble = new()
}
"#,
find_position_of("Wobble").to_selection()
);
}
#[test]
fn generate_unqualified_variant_in_other_module() {
let src = r#"
import other
pub fn main() -> other.Wibble {
let assert Wobble = new()
}
pub fn new() -> other.Wibble { todo }
"#;
assert_code_action!(
GENERATE_VARIANT,
TestProject::for_source(src).add_module("other", "pub type Wibble"),
find_position_of("Wobble").to_selection()
);
}
#[test]
fn generate_qualified_variant_in_other_module() {
let src = r#"
import other
pub fn main() -> other.Wibble {
let assert other.Wobble = new()
}
pub fn new() -> other.Wibble { todo }
"#;
assert_code_action!(
GENERATE_VARIANT,
TestProject::for_source(src).add_module("other", "pub type Wibble"),
find_position_of("Wobble").to_selection()
);
}
#[test]
fn do_not_generate_variant_if_one_with_the_same_name_exists() {
assert_no_code_actions!(
GENERATE_VARIANT,
r#"
pub fn main() -> Wibble {
let assert Wobble = new()
}
pub type Wibble {
Wobble(n: Int)
}
pub fn new() -> Wibble { todo }
"#,
find_position_of("Wobble").to_selection()
);
}
#[test]
fn do_not_generate_variant_if_one_with_the_same_name_exists_in_other_module() {
let src = r#"
import other.{type Wibble}
pub fn main() -> Wibble {
let assert Wobble = new()
}
pub fn new() -> Wibble { todo }
"#;
assert_no_code_actions!(
GENERATE_VARIANT,
TestProject::for_source(src).add_module("other", "pub type Wibble { Wobble(String) }"),
find_position_of("Wobble").to_selection()
);
}
#[test]
fn do_not_generate_qualified_variant_if_one_with_the_same_name_exists_in_other_module() {
let src = r#"
import other.{type Wibble}
pub fn main() -> Wibble {
let assert other.Wobble = new()
}
pub fn new() -> Wibble { todo }
"#;
assert_no_code_actions!(
GENERATE_VARIANT,
TestProject::for_source(src).add_module("other", "pub type Wibble { Wobble(String) }"),
find_position_of("Wobble").to_selection()
);
}
#[test]
fn fill_unused_fields_with_ignored_labelled_fields() {
assert_code_action!(
FILL_UNUSED_FIELDS,
r#"
pub type Wibble { Wibble(Int, label1: String, label2: Int) }
pub fn main() {
let Wibble(_, ..) = todo
}"#,
find_position_of("..").to_selection()
);
}
#[test]
fn fill_unused_fields_with_ignored_positional_fields() {
assert_code_action!(
FILL_UNUSED_FIELDS,
r#"
pub type Wibble { Wibble(Int, label1: String, label2: Int) }
pub fn main() {
let Wibble(label1:, label2:, ..) = todo
}"#,
find_position_of("..").to_selection()
);
}
#[test]
fn fill_unused_fields_with_all_positional_fields() {
assert_code_action!(
FILL_UNUSED_FIELDS,
r#"
pub type Wibble { Wibble(Int, String) }
pub fn main() {
let Wibble(..) = todo
}"#,
find_position_of("..").to_selection()
);
}
#[test]
fn fill_unused_fields_with_ignored_mixed_fields() {
assert_code_action!(
FILL_UNUSED_FIELDS,
r#"
pub type Wibble { Wibble(Int, String, label1: String, label2: Int) }
pub fn main() {
let Wibble(_, label2:, ..) = todo
}"#,
find_position_of("..").to_selection()
);
}
#[test]
fn fill_unused_fields_with_all_ignored_fields() {
assert_code_action!(
FILL_UNUSED_FIELDS,
r#"
pub type Wibble { Wibble(Int, label1: String, label2: Int) }
pub fn main() {
let Wibble(..) = todo
}"#,
find_position_of("..").to_selection()
);
}
#[test]
fn fill_unused_fields_with_ignored_fields_never_calls_a_positional_arg_as_a_labelled_one() {
assert_code_action!(
FILL_UNUSED_FIELDS,
r#"
pub type Wibble { Wibble(Int, int: Int) }
pub fn main() {
let Wibble(..) = todo
}"#,
find_position_of("..").to_selection()
);
}
#[test]
fn remove_echo() {
assert_code_action!(
REMOVE_ALL_ECHOS_FROM_THIS_MODULE,
"pub fn main() {
echo 1 + 2
}",
find_position_of("echo").to_selection()
);
}
#[test]
fn remove_echo_with_message() {
assert_code_action!(
REMOVE_ALL_ECHOS_FROM_THIS_MODULE,
r#"pub fn main() {
echo 1 + 2 as "message"
}"#,
find_position_of("echo").to_selection()
);
}
#[test]
fn remove_echo_with_message_and_comment() {
assert_code_action!(
REMOVE_ALL_ECHOS_FROM_THIS_MODULE,
r#"pub fn main() {
echo 1 + 2
// Hello!
as "message"
}"#,
find_position_of("echo").to_selection()
);
}
#[test]
fn remove_echo_with_message_and_comment_2() {
assert_code_action!(
REMOVE_ALL_ECHOS_FROM_THIS_MODULE,
r#"pub fn main() {
echo 1 + 2 as
// Hello!
"message"
}"#,
find_position_of("echo").to_selection()
);
}
#[test]
fn remove_echo_with_message_and_comment_3() {
assert_code_action!(
REMOVE_ALL_ECHOS_FROM_THIS_MODULE,
r#"pub fn main() {
echo 1 + 2 as
// Hello!
"message"
Nil
}"#,
find_position_of("echo").to_selection()
);
}
#[test]
fn remove_echo_selecting_expression() {
assert_code_action!(
REMOVE_ALL_ECHOS_FROM_THIS_MODULE,
"pub fn main() {
echo 1 + 2
}",
find_position_of("1").select_until(find_position_of("2"))
);
}
#[test]
fn remove_echo_selecting_message() {
assert_code_action!(
REMOVE_ALL_ECHOS_FROM_THIS_MODULE,
r#"pub fn main() {
echo 1 + 2 as "message"
}"#,
find_position_of("message").to_selection()
);
}
#[test]
fn remove_echo_as_function_arg() {
assert_code_action!(
REMOVE_ALL_ECHOS_FROM_THIS_MODULE,
"pub fn main() {
wibble([], echo 1 + 2)
}",
find_position_of("1").to_selection()
);
}
#[test]
fn remove_echo_in_pipeline_step() {
assert_code_action!(
REMOVE_ALL_ECHOS_FROM_THIS_MODULE,
"pub fn main() {
[1, 2, 3]
|> echo
|> wibble
}",
find_position_of("echo").to_selection()
);
}
#[test]
fn remove_echo_in_pipeline_step_with_message() {
assert_code_action!(
REMOVE_ALL_ECHOS_FROM_THIS_MODULE,
r#"pub fn main() {
[1, 2, 3]
|> echo as message
|> wibble
}"#,
find_position_of("echo").to_selection()
);
}
#[test]
fn remove_echo_in_single_line_pipeline_step() {
assert_code_action!(
REMOVE_ALL_ECHOS_FROM_THIS_MODULE,
"pub fn main() {
[1, 2, 3] |> echo |> wibble
}",
find_position_of("echo").to_selection()
);
}
#[test]
fn remove_echo_in_single_line_pipeline_step_with_message() {
assert_code_action!(
REMOVE_ALL_ECHOS_FROM_THIS_MODULE,
r#"pub fn main() {
[1, 2, 3] |> echo as "message" |> wibble
}"#,
find_position_of("echo").to_selection()
);
}
#[test]
fn remove_echo_last_in_long_pipeline_step() {
assert_code_action!(
REMOVE_ALL_ECHOS_FROM_THIS_MODULE,
"pub fn main() {
[1, 2, 3]
|> wibble
|> echo
}",
find_position_of("echo").to_selection()
);
}
#[test]
fn remove_echo_last_in_long_pipeline_step_with_message() {
assert_code_action!(
REMOVE_ALL_ECHOS_FROM_THIS_MODULE,
r#"pub fn main() {
[1, 2, 3]
|> wibble
|> echo as "message"
}"#,
find_position_of("echo").to_selection()
);
}
#[test]
fn remove_echo_last_in_short_pipeline_step() {
assert_code_action!(
REMOVE_ALL_ECHOS_FROM_THIS_MODULE,
"pub fn main() {
[1, 2, 3]
|> echo
}",
find_position_of("echo").to_selection()
);
}
#[test]
fn remove_echo_last_in_short_pipeline_step_with_message() {
assert_code_action!(
REMOVE_ALL_ECHOS_FROM_THIS_MODULE,
r#"pub fn main() {
[1, 2, 3]
|> echo as "message"
}"#,
find_position_of("echo").to_selection()
);
}
#[test]
fn remove_echo_before_pipeline() {
assert_code_action!(
REMOVE_ALL_ECHOS_FROM_THIS_MODULE,
"pub fn main() {
echo [1, 2, 3] |> wibble
}",
find_position_of("echo").to_selection()
);
}
#[test]
fn remove_echo_before_pipeline_selecting_step() {
assert_code_action!(
REMOVE_ALL_ECHOS_FROM_THIS_MODULE,
"pub fn main() {
echo [1, 2, 3] |> wibble
}",
find_position_of("wibble").to_selection()
);
}
#[test]
fn remove_echo_removes_all_echos() {
assert_code_action!(
REMOVE_ALL_ECHOS_FROM_THIS_MODULE,
"pub fn main() {
echo wibble(echo 1, 2)
}",
find_position_of("echo").nth_occurrence(2).to_selection()
);
}
#[test]
fn remove_echo_removes_all_echos_1() {
assert_code_action!(
REMOVE_ALL_ECHOS_FROM_THIS_MODULE,
"pub fn main() {
echo 1 |> echo |> echo |> wibble |> echo
echo wibble(echo 1, echo 2)
echo 1
}",
find_position_of("echo").nth_occurrence(2).to_selection()
);
}
#[test]
fn remove_echo_removes_entire_echo_statement_used_with_literals() {
assert_code_action!(
REMOVE_ALL_ECHOS_FROM_THIS_MODULE,
"pub fn main() {
echo 1
Nil
}",
find_position_of("echo").to_selection()
);
}
#[test]
fn remove_echo_removes_entire_echo_statement_used_with_literals_and_message() {
assert_code_action!(
REMOVE_ALL_ECHOS_FROM_THIS_MODULE,
r#"pub fn main() {
echo 1 as "message"
Nil
}"#,
find_position_of("echo").to_selection()
);
}
#[test]
fn remove_echo_removes_entire_echo_statement_used_with_a_var() {
assert_code_action!(
REMOVE_ALL_ECHOS_FROM_THIS_MODULE,
"pub fn main() {
let a = 1
echo a
Nil
}",
find_position_of("echo").to_selection()
);
}
#[test]
fn remove_echo_removes_multiple_entire_echo_statement_used_with_literals() {
assert_code_action!(
REMOVE_ALL_ECHOS_FROM_THIS_MODULE,
r#"pub fn main() {
echo 1
echo "wibble"
Nil
}"#,
find_position_of("echo").to_selection()
);
}
#[test]
fn remove_echo_removes_multiple_entire_echo_statement_used_with_literals_but_stops_at_comments() {
assert_code_action!(
REMOVE_ALL_ECHOS_FROM_THIS_MODULE,
r#"pub fn main() {
echo 1
// Oh no I hope I'm not deleted by the code action!!
Nil
}"#,
find_position_of("echo").to_selection()
);
}
#[test]
fn remove_echo_removes_entire_echo_statement_used_with_literals_in_a_fn() {
assert_code_action!(
REMOVE_ALL_ECHOS_FROM_THIS_MODULE,
"pub fn main() {
fn() {
echo 1
Nil
}
}",
find_position_of("echo").to_selection()
);
}
#[test]
fn remove_echo_removes_multiple_entire_echo_statement_used_with_literals_in_a_fn() {
assert_code_action!(
REMOVE_ALL_ECHOS_FROM_THIS_MODULE,
r#"pub fn main() {
fn() {
echo 1
echo "wibble"
Nil
}
}"#,
find_position_of("echo").to_selection()
);
}
#[test]
fn remove_echo_removes_does_not_remove_entire_echo_statement_if_its_the_return() {
assert_code_action!(
REMOVE_ALL_ECHOS_FROM_THIS_MODULE,
"pub fn main() {
echo 1
}",
find_position_of("echo").to_selection()
);
}
#[test]
fn remove_echo_with_message_removes_does_not_remove_entire_echo_statement_if_its_the_return() {
assert_code_action!(
REMOVE_ALL_ECHOS_FROM_THIS_MODULE,
r#"pub fn main() {
echo 1 as "message"
}"#,
find_position_of("echo").to_selection()
);
}
#[test]
fn remove_echo_removes_does_not_remove_entire_echo_statement_if_its_the_return_of_a_fn() {
assert_code_action!(
REMOVE_ALL_ECHOS_FROM_THIS_MODULE,
r#"pub fn main() {
fn() {
echo 1
}
}"#,
find_position_of("echo").to_selection()
);
}
#[test]
// Tests for the "interpolate string" code action: splitting a string at the
// cursor, or turning a selected word into an interpolated value.
fn split_string() {
    assert_code_action!(
        INTERPOLATE_STRING,
        r#"pub fn main() {
"wibble wobble woo"
}"#,
        find_position_of("wobble").to_selection()
    );
}

#[test]
fn no_split_string_right_at_the_start() {
    // Cursor immediately after the opening quote: nothing to split.
    assert_no_code_actions!(
        INTERPOLATE_STRING,
        r#"pub fn main() {
"wibble wobble woo"
}"#,
        find_position_of("wibble").to_selection()
    );
}

#[test]
fn no_split_string_right_at_the_end() {
    // Cursor just before the closing quote: nothing to split.
    assert_no_code_actions!(
        INTERPOLATE_STRING,
        r#"pub fn main() {
"wibble wobble woo"
}"#,
        find_position_of("\"").nth_occurrence(2).to_selection()
    );
}

#[test]
fn no_split_string_before_the_start() {
    // Cursor on the opening quote itself, outside the string contents.
    assert_no_code_actions!(
        INTERPOLATE_STRING,
        r#"pub fn main() {
"wibble wobble woo"
}"#,
        find_position_of("\"").to_selection()
    );
}

#[test]
fn no_split_string_after_the_end() {
    // Cursor after the closing quote, outside the string contents.
    assert_no_code_actions!(
        INTERPOLATE_STRING,
        r#"pub fn main() {
"wibble wobble woo"//we need this comment so we can put the cursor _after_ the closing quote
}"#,
        find_position_of("\"/").under_last_char().to_selection()
    );
}

#[test]
fn interpolate_string_inside_string() {
    // A word is selected, so it becomes the interpolated value.
    assert_code_action!(
        INTERPOLATE_STRING,
        r#"pub fn main() {
"wibble wobble woo"
}"#,
        find_position_of("wobble").select_until(find_position_of("wobble ").under_last_char()),
    );
}

#[test]
fn fallback_to_split_string_when_selecting_invalid_name() {
    // The selection spans text that is not a valid gleam name, so the action
    // falls back to plain string splitting.
    assert_code_action!(
        INTERPOLATE_STRING,
        r#"pub fn main() {
"wibble wobble woo woo"
}"#,
        find_position_of("wobble").select_until(find_position_of("woo ").under_last_char()),
    );
}

#[test]
fn splitting_string_as_first_pipeline_step_inserts_brackets() {
    // The split result needs brackets so the pipeline still parses.
    assert_code_action!(
        INTERPOLATE_STRING,
        r#"pub fn main() {
"wibble wobble" |> io.println
}"#,
        find_position_of(" wobble").to_selection(),
    );
}

#[test]
fn interpolating_string_as_first_pipeline_step_inserts_brackets() {
    assert_code_action!(
        INTERPOLATE_STRING,
        r#"pub fn main() {
"wibble wobble woo" |> io.println
}"#,
        find_position_of("wobble ").select_until(find_position_of("wobble ").under_last_char()),
    );
}
#[test]
// Tests for the "remove unused imports" code action over hex modules.
fn test_remove_unused_simple() {
    // `list` (with comment and alias) and `option` are unused; `result` is used.
    let src = "
// test
import // comment
list as lispy
import result
import option
pub fn main() {
result.is_ok
}
";
    assert_code_action!(
        REMOVE_UNUSED_IMPORTS,
        TestProject::for_source(src)
            .add_hex_module("list", "")
            .add_hex_module("result", "")
            .add_hex_module("option", ""),
        find_position_of("// test").select_until(find_position_of("option")),
    );
}

#[test]
fn test_remove_unused_start_of_file() {
    // The unused import is the very first line of the file.
    let src = "import option
import result
pub fn main() {
result.is_ok
}
";
    assert_code_action!(
        REMOVE_UNUSED_IMPORTS,
        TestProject::for_source(src)
            .add_hex_module("option", "")
            .add_hex_module("result", ""),
        find_position_of("import").select_until(find_position_of("pub")),
    );
}

#[test]
fn test_remove_unused_alias() {
    // The module alias `res` is unused but the unqualified `is_ok` is used.
    let src = "
// test
import result.{is_ok} as res
import option
pub fn main() {
is_ok
}
";
    assert_code_action!(
        REMOVE_UNUSED_IMPORTS,
        TestProject::for_source(src)
            .add_hex_module("result", "pub fn is_ok() {}")
            .add_hex_module("option", ""),
        find_position_of("// test").select_until(find_position_of("pub")),
    );
}

#[test]
fn test_remove_unused_value() {
    // The unqualified `is_ok` import is unused; only the qualified form is used.
    let src = "
// test
import result.{is_ok}
import option
pub fn main() {
result.is_ok
}
";
    assert_code_action!(
        REMOVE_UNUSED_IMPORTS,
        TestProject::for_source(src)
            .add_hex_module("result", "pub fn is_ok() {}")
            .add_hex_module("option", ""),
        find_position_of("// test").select_until(find_position_of("pub")),
    );
}

#[test]
fn test_remove_aliased_unused_value() {
    // An aliased unqualified import (`is_ok as ok`) that is never used.
    let src = "
// test
import result.{is_ok as ok}
import option
pub fn main() {
result.is_ok
}
";
    assert_code_action!(
        REMOVE_UNUSED_IMPORTS,
        TestProject::for_source(src)
            .add_hex_module("result", "pub fn is_ok() {}")
            .add_hex_module("option", ""),
        find_position_of("// test").select_until(find_position_of("pub")),
    );
}

#[test]
fn test_remove_multiple_unused_values() {
    // Mixed used/unused values and types in one import list; only the unused
    // entries should be removed.
    let src = "
// test
import result.{type Unused, used, unused, unused_again, type Used, used_again}
pub fn main(x: Used) {
#(used, used_again)
}
";
    assert_code_action!(
        REMOVE_UNUSED_IMPORTS,
        TestProject::for_source(src).add_hex_module(
            "result",
            "
pub const used = 1
pub const unused = 2
pub const unused_again = 3
pub const used_again = 4
pub type Unused
pub type Used
"
        ),
        find_position_of("// test").select_until(find_position_of("pub")),
    );
}

#[test]
fn test_remove_multiple_unused_values_2() {
    // Variant with an unused value at the end of the import list.
    let src = "
// test
import result.{type Unused, used, unused, type Used, unused_again}
pub fn main(x: Used) {
used
}
";
    assert_code_action!(
        REMOVE_UNUSED_IMPORTS,
        TestProject::for_source(src).add_hex_module(
            "result",
            "
pub const used = 1
pub const unused = 2
pub const unused_again = 3
pub type Unused
pub type Used
"
        ),
        find_position_of("// test").select_until(find_position_of("pub")),
    );
}

#[test]
fn test_remove_entire_unused_import() {
    // Nothing from the module is used, so the whole import line is removed.
    let src = "
// test
import result.{unused, unused_again}
pub fn main() {
todo
}
";
    assert_code_action!(
        REMOVE_UNUSED_IMPORTS,
        TestProject::for_source(src).add_hex_module(
            "result",
            "
pub const used = 1
pub const unused = 2
pub const unused_again = 3
pub type Unused
pub type Used
"
        ),
        find_position_of("// test").select_until(find_position_of("pub")),
    );
}
#[test]
// Tests for the "remove redundant tuples" code action: case expressions whose
// subjects and patterns are all single-use tuple wrappers.
fn test_remove_redundant_tuple_in_case_subject_simple() {
    assert_code_action!(
        REMOVE_REDUNDANT_TUPLES,
        "pub fn main() {
case #(1) { #(a) -> 0 }
case #(1, 2) { #(a, b) -> 0 }
}",
        find_position_of("case").select_until(find_position_of("#(1, 2)").under_last_char())
    );
}

#[test]
fn test_remove_redundant_tuple_with_catch_all_pattern() {
    // A trailing `_` catch-all does not prevent removal.
    assert_code_action!(
        REMOVE_REDUNDANT_TUPLES,
        "pub fn main() {
case #(1, 2) {
#(1, 2) -> 0
_ -> 1
}
}",
        find_position_of("case").select_until(find_position_of("#(1, 2)").under_last_char())
    );
}

#[test]
fn test_remove_multiple_redundant_tuple_with_catch_all_pattern() {
    // Multiple subjects, each combination of tuple/catch-all patterns.
    assert_code_action!(
        REMOVE_REDUNDANT_TUPLES,
        "pub fn main() {
case #(1, 2), #(3, 4) {
#(2, 2), #(2, 2) -> 0
#(1, 2), _ -> 0
_, #(1, 2) -> 0
_, _ -> 1
}
}",
        find_position_of("case").select_until(find_position_of("#(3, 4)"))
    );
}

#[test]
fn test_remove_redundant_tuple_in_case_subject_nested() {
    // A redundant tuple inside another case subject is also rewritten.
    assert_code_action!(
        REMOVE_REDUNDANT_TUPLES,
        "pub fn main() {
case #(case #(0) { #(a) -> 0 }) { #(b) -> 0 }
}",
        find_position_of("case").select_until(find_position_of("#(b)"))
    );
}

#[test]
fn test_remove_redundant_tuple_in_case_retain_extras() {
    // Comments attached to tuple elements must be preserved by the rewrite.
    assert_code_action!(
        REMOVE_REDUNDANT_TUPLES,
        "
pub fn main() {
case
#(
// first comment
1,
// second comment
2,
3 // third comment before comma
,
// fourth comment after comma
)
{
#(
// first comment
a,
// second comment
b,
c // third comment before comma
,
// fourth comment after comma
) -> 0
}
}
",
        find_position_of("#").select_until(find_position_of("// first"))
    );
}

#[test]
fn test_remove_redundant_tuple_in_case_subject_ignore_empty_tuple() {
    // Empty tuples are not considered redundant: no action offered.
    assert_no_code_actions!(
        REMOVE_REDUNDANT_TUPLES,
        "
pub fn main() {
case #() { #() -> 0 }
}
",
        find_position_of("case").select_until(find_position_of("0"))
    );
}

#[test]
fn test_remove_redundant_tuple_in_case_subject_only_safe_remove() {
    // Only subjects whose every clause uses a tuple pattern may be unwrapped.
    assert_code_action!(
        REMOVE_REDUNDANT_TUPLES,
        "
pub fn main() {
case #(0), #(1) {
#(1), #(b) -> 0
a, #(0) -> 1 // The first of this clause is not a tuple
#(a), #(b) -> 2
}
}
",
        find_position_of("#(0)").select_until(find_position_of("#(1)"))
    );
}
#[test]
// Tests for the "rename to <valid_name>" code action that fixes names not
// matching gleam's naming conventions (snake_case values, UpperCamelCase types).
fn rename_invalid_const() {
    assert_code_action!(
        "Rename to my_invalid_constant",
        "const myInvalid_Constant = 42",
        find_position_of("_Constant").to_selection(),
    );
}

#[test]
fn rename_invalid_parameter() {
    assert_code_action!(
        "Rename to num_a",
        "fn add(numA: Int, num_b: Int) { numA + num_b }",
        find_position_of("numA").to_selection()
    );
}

#[test]
fn rename_invalid_parameter_name2() {
    // Labelled parameter: only the internal name is invalid.
    assert_code_action!(
        "Rename to param_name",
        "fn pass(label paramName: Bool) { paramName }",
        find_position_of("paramName").to_selection()
    );
}

#[test]
fn rename_invalid_parameter_name3() {
    // Invalid parameter of an anonymous function bound to a variable.
    assert_code_action!(
        "Rename to num_a",
        "pub fn main() {
let add = fn(numA: Int, num_b: Int) { numA + num_b }
}",
        find_position_of("let add").select_until(find_position_of("num_b"))
    );
}

#[test]
fn rename_invalid_parameter_discard() {
    // Discard parameters keep their leading underscore when renamed.
    assert_code_action!(
        "Rename to _ignore_me",
        "fn ignore(_ignoreMe: Bool) { 98 }",
        find_position_of("ignore").select_until(find_position_of("98"))
    );
}

#[test]
fn rename_invalid_parameter_discard_name2() {
    assert_code_action!(
        "Rename to _ignore_me",
        "fn ignore(labelled_discard _ignoreMe: Bool) { 98 }",
        find_position_of("ignore").select_until(find_position_of("98"))
    );
}

#[test]
fn rename_invalid_parameter_discard_name3() {
    assert_code_action!(
        "Rename to _ignore_me",
        "pub fn main() {
let ignore = fn(_ignoreMe: Bool) { 98 }
}",
        find_position_of("ignore").select_until(find_position_of("98"))
    );
}

#[test]
fn rename_invalid_parameter_label() {
    // The label itself (not the parameter name) is invalid.
    assert_code_action!(
        "Rename to this_is_a_label",
        "fn func(thisIsALabel param: Int) { param }",
        find_position_of("thisIs").select_until(find_position_of("Int"))
    );
}

#[test]
fn rename_invalid_parameter_label2() {
    assert_code_action!(
        "Rename to this_is_a_label",
        "fn ignore(thisIsALabel _ignore: Int) { 25 }",
        find_position_of("thisIs").under_char('i').to_selection()
    );
}

#[test]
fn rename_invalid_constructor() {
    // Constructors must be UpperCamelCase without underscores.
    assert_code_action!(
        "Rename to TheConstructor",
        "type MyType { The_Constructor(Int) }",
        find_position_of("The_").under_char('h').to_selection(),
    );
}

#[test]
fn rename_invalid_constructor_arg() {
    assert_code_action!(
        "Rename to inner_int",
        "type IntWrapper { IntWrapper(innerInt: Int) }",
        find_position_of("IntWrapper")
            .nth_occurrence(2)
            .select_until(find_position_of(": Int"))
    );
}

#[test]
fn rename_invalid_custom_type() {
    assert_code_action!(
        "Rename to BoxedValue",
        "type Boxed_value { Box(Int) }",
        find_position_of("Box").select_until(find_position_of("_value"))
    );
}

#[test]
fn rename_invalid_type_alias() {
    assert_code_action!(
        "Rename to FancyBool",
        "type Fancy_Bool = Bool",
        find_position_of("Fancy")
            .under_char('a')
            .select_until(find_position_of("="))
    );
}

#[test]
fn rename_invalid_function() {
    assert_code_action!(
        "Rename to do_stuff",
        "fn doStuff() {}",
        find_position_of("fn").select_until(find_position_of("{}"))
    );
}

#[test]
fn rename_invalid_variable() {
    assert_code_action!(
        "Rename to the_answer",
        "pub fn main() {
let theAnswer = 42
}",
        find_position_of("theAnswer").select_until(find_position_of("Answer"))
    );
}

#[test]
fn rename_invalid_variable_discard() {
    assert_code_action!(
        "Rename to _boring_number",
        "pub fn main() {
let _boringNumber = 72
}",
        find_position_of("let").select_until(find_position_of("72"))
    );
}

#[test]
fn rename_invalid_use() {
    // Invalid variable bound by a `use` expression.
    assert_code_action!(
        "Rename to use_var",
        "fn use_test(f) { f(Nil) }
pub fn main() {use useVar <- use_test()}",
        find_position_of("use")
            .nth_occurrence(2)
            .select_until(find_position_of("use_test()"))
    );
}
#[test]
| rust | Apache-2.0 | f424547f02e621f1c5f28749786e05eda7feb098 | 2026-01-04T15:40:22.554517Z | true |
gleam-lang/gleam | https://github.com/gleam-lang/gleam/blob/f424547f02e621f1c5f28749786e05eda7feb098/language-server/src/tests/signature_help.rs | language-server/src/tests/signature_help.rs | use super::*;
use lsp_types::{
ParameterInformation, ParameterLabel, SignatureHelp, SignatureHelpParams, SignatureInformation,
};
/// Ask the language server engine for signature help at `position` in the
/// given test project, returning the raw LSP payload (`None` when the engine
/// has no help to offer). Panics if the engine reports an error.
fn signature_help(tester: TestProject<'_>, position: Position) -> Option<SignatureHelp> {
    tester.at(position, |engine, param, _| {
        engine
            .signature_help(SignatureHelpParams {
                context: None,
                text_document_position_params: param,
                work_done_progress_params: Default::default(),
            })
            .result
            .unwrap()
    })
}
/// Render an LSP `SignatureHelp` response as a human-readable snapshot string:
/// the active signature's label, with the active parameter underlined using
/// `▔` characters, followed by the signature's documentation (or a
/// "No documentation" placeholder).
fn pretty_signature_help(signature_help: SignatureHelp) -> String {
    let SignatureHelp {
        signatures,
        active_signature,
        active_parameter,
    } = signature_help;
    // Pick the signature the server marked as active; the test fails loudly
    // if the server produced no active signature.
    let SignatureInformation {
        label,
        documentation,
        parameters,
        active_parameter: _,
    } = signatures
        .get(active_signature.expect("an active signature") as usize)
        .expect("an active signature");
    let parameters = parameters
        .as_ref()
        .expect("no signature help for function with no parameters");
    let documentation = match documentation {
        Some(d) => format!("Documentation:\n{d:#?}"),
        None => "No documentation".to_string(),
    };
    let label = match active_parameter {
        None => label.to_string(),
        Some(i) => match parameters.get(i as usize) {
            None => label.to_string(),
            // Only label offsets are supported: underline the [start, end)
            // byte range of the active parameter beneath the label line.
            Some(ParameterInformation {
                label: ParameterLabel::LabelOffsets([start, end]),
                ..
            }) => {
                let spaces = " ".repeat(*start as usize);
                let underlined = "▔".repeat((end - start) as usize);
                format!("{label}\n{spaces}{underlined}")
            }
            Some(_) => panic!("unexpected response"),
        },
    };
    format!("{label}\n\n{documentation}")
}
#[macro_export]
// Assert that signature help is produced at `$position` and snapshot both the
// hover rendering of the source and the pretty-printed signature help.
macro_rules! assert_signature_help {
    // Convenience form: wrap a source literal in a single-module project.
    ($code:literal, $position:expr $(,)?) => {
        let project = TestProject::for_source($code);
        assert_signature_help!(project, $position);
    };
    ($project:expr, $position:expr $(,)?) => {
        let src = $project.src;
        let position = $position.find_position(src);
        let result = signature_help($project, position).expect("no signature help produced");
        // The range passed here is deliberately inverted/empty; only
        // `position` matters for rendering the cursor marker.
        let pretty_hover = hover::show_hover(
            src,
            lsp_types::Range {
                start: Position {
                    character: 1,
                    line: 1,
                },
                end: Position {
                    character: 0,
                    line: 0,
                },
            },
            position,
        );
        let output = format!(
            "{}\n\n----- Signature help -----\n{}",
            pretty_hover,
            pretty_signature_help(result)
        );
        insta::assert_snapshot!(insta::internals::AutoName, output, src);
    };
}
#[macro_export]
// Assert that NO signature help is produced at `$position`.
macro_rules! assert_no_signature_help {
    ($code:literal, $position:expr $(,)?) => {
        let project = TestProject::for_source($code);
        assert_no_signature_help!(project, $position);
    };
    ($project:expr, $position:expr $(,)?) => {
        let src = $project.src;
        let position = $position.find_position(src);
        let result = signature_help($project, position);
        match result {
            Some(_) => panic!("Expected no signature help"),
            None => (),
        }
    };
}
#[test]
// Snapshot tests for signature help: local variables, module functions,
// constants referencing functions, labels, pipelines, and `use` expressions.
pub fn help_for_calling_local_variable_first_arg() {
    assert_signature_help!(
        r#"
pub fn main() {
let wibble = fn(a: Int, b: String) { 1.0 }
wibble()
}
"#,
        find_position_of("wibble()").under_last_char()
    );
}

#[test]
pub fn help_for_calling_local_variable_last_arg() {
    // Cursor after the first argument's comma: second parameter is active.
    assert_signature_help!(
        r#"
pub fn main() {
let wibble = fn(a: Int, b: String) { 1.0 }
wibble(1,)
}
"#,
        find_position_of("wibble(1,)").under_last_char()
    );
}

#[test]
pub fn help_for_calling_local_variable_with_module_function() {
    // A local fn shadowing nothing; a module fn with the same shape exists.
    assert_signature_help!(
        r#"
pub fn wibble(a: Int, b: String) { 1.0 }
pub fn main() {
let wobble = fn(a: Int, b: String) { 1.0 }
wobble(1,)
}
"#,
        find_position_of("wobble(1,)").under_last_char()
    );
}

#[test]
pub fn help_for_calling_module_function() {
    assert_signature_help!(
        r#"
pub fn wibble(a: Int, b: String) { 1.0 }
pub fn main() {
wibble()
}
"#,
        find_position_of("wibble()").under_last_char()
    );
}

#[test]
pub fn help_for_calling_module_constant_referencing_function() {
    // Signature help follows a constant to the function it references.
    assert_signature_help!(
        r#"
pub fn wibble(a: Int, b: String) { 1.0 }
const wobble = wibble
pub fn main() {
wobble()
}
"#,
        find_position_of("wobble()").under_last_char()
    );
}

#[test]
pub fn help_for_calling_local_variable_referencing_constant_referencing_function() {
    // Two levels of indirection: local variable -> constant -> function.
    assert_signature_help!(
        r#"
pub fn wibble(a: Int, b: String) { 1.0 }
const wobble = wibble
pub fn main() {
let woo = wobble
woo()
}
"#,
        find_position_of("woo()").under_last_char()
    );
}

#[test]
pub fn help_still_shows_up_even_if_an_argument_has_the_wrong_type() {
    assert_signature_help!(
        r#"
pub fn wibble(a: Int, b: String) { 1.0 }
pub fn main() {
wibble("wrong",)
}
"#,
        find_position_of("wibble(\"wrong\",)").under_last_char()
    );
}

#[test]
pub fn help_shows_documentation_for_local_function() {
    assert_signature_help!(
        r#"
/// Some doc!
pub fn wibble(a: Int, b: String) { 1.0 }
pub fn main() {
wibble()
}
"#,
        find_position_of("wibble()").under_last_char()
    );
}

#[test]
pub fn help_shows_documentation_for_imported_function() {
    let code = r#"
import example
pub fn main() {
example.example_fn()
}
"#;
    assert_signature_help!(
        TestProject::for_source(code).add_module(
            "example",
            "/// Some doc!
pub fn example_fn(a: Int, b: String) { Nil }"
        ),
        find_position_of("example_fn()").under_last_char()
    );
}

#[test]
pub fn help_for_unqualified_call() {
    let code = r#"
import example.{example_fn}
pub fn main() {
example_fn()
}
"#;
    assert_signature_help!(
        TestProject::for_source(code)
            .add_module("example", "pub fn example_fn(a: Int, b: String) { Nil }"),
        find_position_of("example_fn()").under_last_char()
    );
}

#[test]
pub fn help_for_aliased_unqualified_call() {
    let code = r#"
import example.{example_fn as wibble}
pub fn main() {
wibble()
}
"#;
    assert_signature_help!(
        TestProject::for_source(code)
            .add_module("example", "pub fn example_fn(a: Int, b: String) { Nil }"),
        find_position_of("wibble()").under_last_char()
    );
}

#[test]
pub fn help_for_qualified_call() {
    let code = r#"
import example
pub fn main() {
example.example_fn()
}
"#;
    assert_signature_help!(
        TestProject::for_source(code)
            .add_module("example", "pub fn example_fn(a: Int, b: String) { Nil }"),
        find_position_of("example_fn()").under_last_char()
    );
}

#[test]
pub fn help_for_aliased_qualified_call() {
    let code = r#"
import example as wibble
pub fn main() {
wibble.example_fn()
}
"#;
    assert_signature_help!(
        TestProject::for_source(code)
            .add_module("example", "pub fn example_fn(a: Int, b: String) { Nil }"),
        find_position_of("example_fn()").under_last_char()
    );
}

#[test]
pub fn help_shows_labels() {
    assert_signature_help!(
        r#"
pub fn wibble(a: Int, b b: Int, c c: String) { 1.0 }
pub fn main() {
wibble()
}
"#,
        find_position_of("wibble()").under_last_char()
    );
}

#[test]
pub fn help_shows_labelled_argument_after_all_unlabelled() {
    assert_signature_help!(
        r#"
pub fn wibble(a: Int, b b: Int, c c: String) { 1.0 }
pub fn main() {
wibble(1,)
}
"#,
        find_position_of("wibble(1,)").under_last_char()
    );
}

#[test]
pub fn help_shows_first_missing_labelled_argument_if_out_of_order() {
    // `c` is already supplied, so the first missing label (`a`) is active.
    assert_signature_help!(
        r#"
pub fn wibble(a a: Int, b b: Int, c c: String) { 1.0 }
pub fn main() {
wibble(c: "c",)
}
"#,
        find_position_of("wibble(c: \"c\",)").under_last_char()
    );
}

#[test]
pub fn help_for_piped_imported_function_starts_from_second_argument() {
    // The pipe supplies the first argument, so help starts at the second.
    let code = r#"
import example
pub fn main() {
1 |> example.example_fn()
}
"#;
    assert_signature_help!(
        TestProject::for_source(code)
            .add_module("example", "pub fn example_fn(a: Int, b: String) { Nil }"),
        find_position_of("example_fn()").under_last_char()
    );
}

#[test]
pub fn help_for_piped_function_starts_from_second_argument() {
    assert_signature_help!(
        r#"
pub fn wibble(a a: Int, b b: Int, c c: String) { 1.0 }
pub fn main() {
1 |> wibble()
}
"#,
        find_position_of("wibble()").under_last_char()
    );
}

#[test]
pub fn help_for_use_function_call_starts_from_first_argument() {
    // `use` supplies the trailing callback, so help starts at the first arg.
    assert_signature_help!(
        r#"
pub fn wibble(a: Int, b: Int, c: fn() -> Int) { 1.0 }
pub fn main() {
use <- wibble()
}
"#,
        find_position_of("wibble()").under_last_char()
    );
}

#[test]
pub fn help_for_use_function_call_uses_precise_types_when_missing_some_arguments() {
    // Type variables should be specialised from the arguments already given.
    assert_signature_help!(
        r#"
pub fn guard(a: Bool, b: a, c: fn() -> a) { 1.0 }
pub fn main() {
use <- guard(True,)
}
"#,
        find_position_of("guard(True,)").under_last_char()
    );
}

#[test]
pub fn help_for_use_function_shows_next_unlabelled_argument() {
    assert_signature_help!(
        r#"
pub fn guard(a a: Bool, b b: a, c c: fn() -> a) { 1.0 }
pub fn main() {
use <- guard(b: 1,)
}
"#,
        find_position_of("guard(b: 1,)").under_last_char()
    );
}

#[test]
pub fn help_does_not_come_up_for_function_that_does_not_exist() {
    assert_no_signature_help!(
        r#"
pub fn main() {
use <- to_be_or_not_to_be()
}
"#,
        find_position_of("to_be_or_not_to_be()").under_last_char()
    );
}

#[test]
// Regression introduced by 4112682cdb5d5b0bb6d1defc6cde849b6a6f65ab.
pub fn help_with_labelled_constructor() {
    assert_signature_help!(
        r#"
pub type Pokemon {
Pokemon(name: String, types: List(String), moves: List(String))
}
pub fn main() {
Pokemon(name: "Jirachi",)
}
"#,
        find_position_of(r#"Pokemon(name: "Jirachi",)"#).under_last_char()
    );
}
| rust | Apache-2.0 | f424547f02e621f1c5f28749786e05eda7feb098 | 2026-01-04T15:40:22.554517Z | false |
gleam-lang/gleam | https://github.com/gleam-lang/gleam/blob/f424547f02e621f1c5f28749786e05eda7feb098/compiler-wasm/src/lib.rs | compiler-wasm/src/lib.rs | #[cfg(test)]
mod tests;
mod wasm_filesystem;
use camino::Utf8PathBuf;
use gleam_core::{
Error,
analyse::TargetSupport,
build::{
Mode, NullTelemetry, PackageCompiler, StaleTracker, Target, TargetCodegenConfiguration,
},
config::PackageConfig,
io::{FileSystemReader, FileSystemWriter},
uid::UniqueIdGenerator,
warning::{VectorWarningEmitterIO, WarningEmitter},
};
use hexpm::version::Version;
use im::HashMap;
use std::{cell::RefCell, collections::HashSet, rc::Rc};
use wasm_filesystem::WasmFileSystem;
use wasm_bindgen::prelude::*;
// A single in-browser compilation project: a virtual file system plus the
// warnings collected during its last compilation. `Clone` is cheap because
// both fields are internally reference-counted.
#[derive(Debug, Clone, Default)]
struct Project {
    fs: WasmFileSystem,
    warnings: VectorWarningEmitterIO,
}

// Global registry of projects, keyed by the caller-chosen `project_id`.
// `thread_local!` is sufficient because wasm runs single-threaded.
thread_local! {
    static PROJECTS: RefCell<HashMap<usize, Project>> = RefCell::new(HashMap::new());
}

/// You should call this once to ensure that if the compiler crashes it gets
/// reported in JavaScript.
///
#[cfg(target_arch = "wasm32")]
#[wasm_bindgen]
pub fn initialise_panic_hook(debug: bool) {
    console_error_panic_hook::set_once();
    if debug {
        // Ignore failure: a global default may already have been installed.
        let _ = tracing_wasm::try_set_as_global_default();
    }
}

/// Reset the virtual file system to an empty state.
///
#[wasm_bindgen]
pub fn reset_filesystem(project_id: usize) {
    let fs = get_filesystem(project_id);
    fs.reset();
}

/// Delete project, freeing any memory associated with it.
///
#[wasm_bindgen]
pub fn delete_project(project_id: usize) {
    PROJECTS.with(|lock| {
        _ = lock.borrow_mut().remove(&project_id);
    })
}

// Fetch the project for `project_id`, creating an empty one on first use.
// Returns a clone; mutation happens through the shared interior state.
fn get_project(project_id: usize) -> Project {
    PROJECTS.with(|lock| lock.borrow_mut().entry(project_id).or_default().clone())
}

// Convenience accessor for a project's virtual file system.
fn get_filesystem(project_id: usize) -> WasmFileSystem {
    get_project(project_id).fs
}

// Convenience accessor for a project's warning collector.
fn get_warnings(project_id: usize) -> VectorWarningEmitterIO {
    get_project(project_id).warnings
}
/// Write a Gleam module to the `/src` directory of the virtual file system.
///
/// `module_name` may contain `/` separators (e.g. `one/two`); the file is
/// stored at `/src/<module_name>.gleam`.
#[wasm_bindgen]
pub fn write_module(project_id: usize, module_name: &str, code: &str) {
    let fs = get_filesystem(project_id);
    let path = format!("/src/{module_name}.gleam");
    fs.write(&Utf8PathBuf::from(path), code)
        .expect("writing file")
}

/// Write a file to the virtual file system.
///
#[wasm_bindgen]
pub fn write_file(project_id: usize, path: &str, content: &str) {
    let fs = get_filesystem(project_id);
    fs.write(&Utf8PathBuf::from(path), content)
        .expect("writing file")
}

/// Write a non-text file to the virtual file system.
///
#[wasm_bindgen]
pub fn write_file_bytes(project_id: usize, path: &str, content: &[u8]) {
    let fs = get_filesystem(project_id);
    fs.write_bytes(&Utf8PathBuf::from(path), content)
        .expect("writing file")
}

/// Read a file from the virtual file system.
///
/// Returns `None` when the file does not exist.
#[wasm_bindgen]
pub fn read_file_bytes(project_id: usize, path: &str) -> Option<Vec<u8>> {
    let fs = get_filesystem(project_id);
    fs.read_bytes(&Utf8PathBuf::from(path)).ok()
}
/// Run the package compiler for the given project, targeting `erlang`
/// (`erl`/`erlang`) or `javascript` (`js`/`javascript`, case-insensitive).
/// On failure the compiler error is returned pretty-printed as a string.
///
#[wasm_bindgen]
pub fn compile_package(project_id: usize, target: &str) -> Result<(), String> {
    let target = match target.to_lowercase().as_str() {
        "erl" | "erlang" => Target::Erlang,
        "js" | "javascript" => Target::JavaScript,
        _ => {
            return Err(format!(
                "Unknown target `{target}`, expected `erlang` or `javascript`"
            ));
        }
    };
    do_compile_package(get_project(project_id), target).map_err(|e| e.pretty_string())
}
/// Get the compiled JavaScript output for a given module.
///
/// You need to call `compile_package` before calling this function.
///
#[wasm_bindgen]
pub fn read_compiled_javascript(project_id: usize, module_name: &str) -> Option<String> {
    let fs = get_filesystem(project_id);
    let path = format!("/build/{module_name}.mjs");
    fs.read(&Utf8PathBuf::from(path)).ok()
}

/// Get the compiled Erlang output for a given module.
///
/// You need to call `compile_package` before calling this function.
///
#[wasm_bindgen]
pub fn read_compiled_erlang(project_id: usize, module_name: &str) -> Option<String> {
    let fs = get_filesystem(project_id);
    // Erlang flattens the module hierarchy: `one/two` becomes `one@two.erl`.
    let path = format!(
        "/build/_gleam_artefacts/{}.erl",
        module_name.replace('/', "@")
    );
    fs.read(&Utf8PathBuf::from(path)).ok()
}
/// Clear any stored warnings. This is performed automatically when before compilation.
///
#[wasm_bindgen]
pub fn reset_warnings(project_id: usize) {
    get_warnings(project_id).reset();
}

/// Pop the latest warning from the compiler.
///
/// Returns `None` once all stored warnings have been consumed.
#[wasm_bindgen]
pub fn pop_warning(project_id: usize) -> Option<String> {
    get_warnings(project_id).pop().map(|w| w.to_pretty_string())
}
// Compile the project's `/src` tree as a standalone package named "library",
// writing output under `/build` with dependencies looked up in `/lib`.
// Warnings go to the project's collector; the first error aborts compilation.
fn do_compile_package(project: Project, target: Target) -> Result<(), Error> {
    let ids = UniqueIdGenerator::new();
    let mut type_manifests = im::HashMap::new();
    let mut defined_modules = im::HashMap::new();
    #[allow(clippy::arc_with_non_send_sync)]
    let warning_emitter = WarningEmitter::new(Rc::new(project.warnings));
    // Fixed placeholder package identity: the playground always compiles a
    // single anonymous package.
    let config = PackageConfig {
        name: "library".into(),
        version: Version::new(1, 0, 0),
        target,
        ..Default::default()
    };
    let target = match target {
        Target::Erlang => TargetCodegenConfiguration::Erlang { app_file: None },
        Target::JavaScript => TargetCodegenConfiguration::JavaScript {
            emit_typescript_definitions: false,
            prelude_location: Utf8PathBuf::from("./gleam_prelude.mjs"),
        },
    };
    tracing::info!("Compiling package");
    let lib = Utf8PathBuf::from("/lib");
    let out = Utf8PathBuf::from("/build");
    let package = Utf8PathBuf::from("/");
    let mut compiler = PackageCompiler::new(
        &config,
        Mode::Dev,
        &package,
        &out,
        &lib,
        &target,
        ids,
        project.fs,
    );
    // No entrypoint or metadata: callers read individual compiled modules
    // back out of the virtual file system instead.
    compiler.write_entrypoint = false;
    compiler.write_metadata = false;
    compiler.compile_beam_bytecode = true;
    compiler.target_support = TargetSupport::Enforced;
    compiler
        .compile(
            &warning_emitter,
            &mut type_manifests,
            &mut defined_modules,
            &mut StaleTracker::default(),
            &mut HashSet::new(),
            &NullTelemetry,
        )
        .into_result()
        .map(|_| ())
}
| rust | Apache-2.0 | f424547f02e621f1c5f28749786e05eda7feb098 | 2026-01-04T15:40:22.554517Z | false |
gleam-lang/gleam | https://github.com/gleam-lang/gleam/blob/f424547f02e621f1c5f28749786e05eda7feb098/compiler-wasm/src/tests.rs | compiler-wasm/src/tests.rs | use super::*;
use wasm_bindgen_test::wasm_bindgen_test;
#[wasm_bindgen_test]
// All tests share project id 0 and reset the file system up front, so they
// are independent even though the PROJECTS registry is global.
fn test_reset_filesystem() {
    reset_filesystem(0);
    assert_eq!(read_file_bytes(0, "hello"), None);
    write_file_bytes(0, "hello", vec![1, 2, 3].as_slice());
    assert_eq!(read_file_bytes(0, "hello"), Some(vec![1, 2, 3]));
    reset_filesystem(0);
    assert_eq!(read_file_bytes(0, "hello"), None);
}

#[wasm_bindgen_test]
fn test_write_module() {
    reset_filesystem(0);
    assert_eq!(read_file_bytes(0, "/src/some/module.gleam"), None);
    write_module(0, "some/module", "const x = 1");
    // The bytes are the UTF-8 encoding of "const x = 1".
    assert_eq!(
        read_file_bytes(0, "/src/some/module.gleam"),
        Some(vec![99, 111, 110, 115, 116, 32, 120, 32, 61, 32, 49]),
    );
    reset_filesystem(0);
    assert_eq!(read_file_bytes(0, "/src/some/module.gleam"), None);
}

#[wasm_bindgen_test]
fn test_compile_package_bad_target() {
    reset_filesystem(0);
    assert!(compile_package(0, "ruby").is_err());
}

#[wasm_bindgen_test]
fn test_compile_package_empty() {
    // Compiling a package with no modules at all succeeds.
    reset_filesystem(0);
    assert!(compile_package(0, "javascript").is_ok());
}

#[wasm_bindgen_test]
fn test_compile_package_js() {
    reset_filesystem(0);
    write_module(0, "one/two", "pub const x = 1");
    write_module(0, "up/down", "import one/two pub fn go() { two.x }");
    assert!(compile_package(0, "javascript").is_ok());
    assert_eq!(
        read_compiled_javascript(0, "one/two"),
        Some("export const x = 1;\n".into())
    );
    assert_eq!(
        read_compiled_javascript(0, "up/down"),
        Some(
            r#"import * as $two from "../one/two.mjs";
export function go() {
return $two.x;
}
"#
            .into()
        )
    );
    // And now an error!
    write_module(0, "up/down", "import one/two/three");
    assert!(compile_package(0, "javascript").is_err());
    // Let's fix that.
    write_module(0, "up/down", "pub const y = 1");
    assert!(compile_package(0, "javascript").is_ok());
    assert_eq!(
        read_compiled_javascript(0, "up/down"),
        Some("export const y = 1;\n".into())
    );
}

#[wasm_bindgen_test]
fn test_compile_package_js_unsupported_feature() {
    // Native bit-array options are an Erlang-only feature, so the JavaScript
    // build must fail with a target-support error.
    reset_filesystem(0);
    write_module(
        0,
        "one",
        r#"
fn wibble() { <<0:16-native>> }
pub fn main() { wibble() }
"#,
    );
    assert!(
        compile_package(0, "javascript")
            .unwrap_err()
            .contains("The javascript target does not support")
    );
}

#[wasm_bindgen_test]
fn test_warnings() {
    // An unused private constant produces exactly one warning, which is
    // consumed by `pop_warning`.
    reset_filesystem(0);
    write_module(0, "one", "const x = 1");
    assert!(pop_warning(0).is_none());
    assert!(compile_package(0, "javascript").is_ok());
    assert!(pop_warning(0).is_some());
    assert!(pop_warning(0).is_none());
}
| rust | Apache-2.0 | f424547f02e621f1c5f28749786e05eda7feb098 | 2026-01-04T15:40:22.554517Z | false |
gleam-lang/gleam | https://github.com/gleam-lang/gleam/blob/f424547f02e621f1c5f28749786e05eda7feb098/compiler-wasm/src/wasm_filesystem.rs | compiler-wasm/src/wasm_filesystem.rs | use camino::{Utf8Path, Utf8PathBuf};
use gleam_core::{
Error, Result,
io::{
BeamCompiler, Command, CommandExecutor, FileSystemReader, FileSystemWriter, ReadDir, Stdio,
WrappedReader, memory::InMemoryFileSystem,
},
};
use std::collections::HashSet;
/// A virtual file system for the wasm build of the compiler: a thin wrapper
/// around `InMemoryFileSystem` that also stubs out command execution and BEAM
/// compilation (neither is possible in the browser).
#[derive(Clone, Debug, Default)]
pub struct WasmFileSystem {
    imfs: InMemoryFileSystem,
}

impl WasmFileSystem {
    /// Remove every file and directory, returning to an empty state.
    pub fn reset(&self) {
        self.imfs.reset();
    }
}
// External commands cannot be run in the browser; pretend they exited with 0.
impl CommandExecutor for WasmFileSystem {
    fn exec(&self, _command: Command) -> Result<i32, Error> {
        Ok(0) // Always succeed.
    }
}

// BEAM bytecode cannot be produced in the browser; report no modules compiled.
impl BeamCompiler for WasmFileSystem {
    fn compile_beam(
        &self,
        _out: &Utf8Path,
        _lib: &Utf8Path,
        _modules: &HashSet<Utf8PathBuf>,
        _stdio: Stdio,
    ) -> Result<Vec<String>, Error> {
        Ok(Vec::new()) // Always succeed.
    }
}
// Write operations delegate to the in-memory file system. Copying and
// linking are no-ops: the wasm build never needs to duplicate files.
impl FileSystemWriter for WasmFileSystem {
    fn delete_directory(&self, path: &Utf8Path) -> Result<(), Error> {
        tracing::trace!("delete {:?}", path);
        self.imfs.delete_directory(path)
    }
    // No-op: file copying is not required in the browser build.
    fn copy(&self, _from: &Utf8Path, _to: &Utf8Path) -> Result<(), Error> {
        Ok(())
    }
    fn copy_dir(&self, _: &Utf8Path, _: &Utf8Path) -> Result<(), Error> {
        Ok(())
    }
    fn mkdir(&self, path: &Utf8Path) -> Result<(), Error> {
        tracing::trace!("mkdir {:?}", path);
        self.imfs.mkdir(path)
    }
    // No-op: links are meaningless in the in-memory file system.
    fn hardlink(&self, _: &Utf8Path, _: &Utf8Path) -> Result<(), Error> {
        Ok(())
    }
    fn symlink_dir(&self, _: &Utf8Path, _: &Utf8Path) -> Result<(), Error> {
        Ok(())
    }
    fn delete_file(&self, path: &Utf8Path) -> Result<(), Error> {
        tracing::trace!("delete file {:?}", path);
        self.imfs.delete_file(path)
    }
    fn write(&self, path: &Utf8Path, content: &str) -> Result<(), Error> {
        tracing::trace!("write {:?}", path);
        self.imfs.write(path, content)
    }
    fn write_bytes(&self, path: &Utf8Path, content: &[u8]) -> Result<(), Error> {
        tracing::trace!("write_bytes {:?}", path);
        self.imfs.write_bytes(path, content)
    }
    fn exists(&self, path: &Utf8Path) -> bool {
        self.imfs.exists(path)
    }
}
// Read operations delegate to the in-memory file system, tracing each call.
impl FileSystemReader for WasmFileSystem {
    fn read(&self, path: &Utf8Path) -> Result<String, Error> {
        tracing::trace!("read {:?}", path);
        self.imfs.read(path)
    }
    fn is_file(&self, path: &Utf8Path) -> bool {
        // Was `tracing::info!`, unlike every other method here — almost
        // certainly leftover debug logging; downgraded to `trace!` for
        // consistency and to keep the default log output quiet.
        tracing::trace!("is_file {:?}", path);
        self.imfs.is_file(path)
    }
    fn is_directory(&self, path: &Utf8Path) -> bool {
        tracing::trace!("is_directory {:?}", path);
        self.imfs.is_directory(path)
    }
    fn reader(&self, path: &Utf8Path) -> Result<WrappedReader, Error> {
        tracing::trace!("reader {:?}", path);
        self.imfs.reader(path)
    }
    fn read_dir(&self, path: &Utf8Path) -> Result<ReadDir> {
        tracing::trace!("read_dir {:?}", path);
        self.imfs.read_dir(path)
    }
    fn modification_time(&self, path: &Utf8Path) -> Result<std::time::SystemTime, Error> {
        self.imfs.modification_time(path)
    }
    fn read_bytes(&self, path: &Utf8Path) -> Result<Vec<u8>, Error> {
        self.imfs.read_bytes(path)
    }
    fn canonicalise(&self, path: &Utf8Path) -> Result<Utf8PathBuf, Error> {
        self.imfs.canonicalise(path)
    }
}
| rust | Apache-2.0 | f424547f02e621f1c5f28749786e05eda7feb098 | 2026-01-04T15:40:22.554517Z | false |
gleam-lang/gleam | https://github.com/gleam-lang/gleam/blob/f424547f02e621f1c5f28749786e05eda7feb098/test-helpers-rs/src/lib.rs | test-helpers-rs/src/lib.rs | use camino::{Utf8Path, Utf8PathBuf};
use gleam_core::{
io::{Content, FileSystemWriter, memory::InMemoryFileSystem},
version::COMPILER_VERSION,
};
use itertools::Itertools;
use regex::Regex;
use std::{collections::HashMap, fmt::Write, sync::LazyLock};
/// The result of compiling a test project: every produced file plus the
/// warnings emitted, ready to be rendered into an insta snapshot.
#[derive(Debug)]
pub struct TestCompileOutput {
    pub files: HashMap<Utf8PathBuf, Content>,
    pub warnings: Vec<gleam_core::Warning>,
}

impl TestCompileOutput {
    /// Render all output files and warnings as one deterministic snapshot
    /// string: files sorted by path, each under a `//// <path>` header, with
    /// platform- and version-specific details normalised away.
    pub fn as_overview_text(&self) -> String {
        let mut buffer = String::new();
        for (path, content) in self.files.iter().sorted_by(|a, b| a.0.cmp(b.0)) {
            // Strip everything up to and including the `cases` test directory
            // and normalise Windows separators so snapshots are portable.
            let normalised_path = if path.as_str().contains("cases") {
                path.as_str()
                    .split("cases")
                    .skip(1)
                    .collect::<String>()
                    .as_str()
                    .replace('\\', "/")
                    .split('/')
                    .skip(1)
                    .join("/")
            } else {
                path.as_str().replace('\\', "/")
            };
            buffer.push_str("//// ");
            buffer.push_str(&normalised_path);
            buffer.push('\n');
            let extension = path.extension();
            match content {
                // Binary artefacts and generated entrypoints are unstable, so
                // only a placeholder is recorded.
                _ if extension == Some("cache") => buffer.push_str("<.cache binary>"),
                Content::Binary(data) => write!(buffer, "<{} byte binary>", data.len()).unwrap(),
                Content::Text(_) if normalised_path.ends_with("@@main.erl") => {
                    write!(buffer, "<erlang entrypoint>").unwrap()
                }
                Content::Text(text) => {
                    let format_path = |caps: &regex::Captures| {
                        caps.get(1)
                            .expect("file path")
                            .as_str()
                            .replace("\\\\", "/")
                    };
                    // Normalise `-file(...)` and `-define(FILEPATH, ...)`
                    // attributes in generated Erlang, and hide the compiler
                    // version string so snapshots survive releases.
                    let text = FILE_LINE_REGEX.replace_all(text, |caps: &regex::Captures| {
                        let path = format_path(caps);
                        let line_number = caps.get(2).expect("line number").as_str();
                        format!("-file(\"{path}\", {line_number}).")
                    });
                    let text = FILEPATH_MACRO_REGEX
                        .replace_all(text.to_string().as_str(), |caps: &regex::Captures| {
                            let path = format_path(caps);
                            format!("-define(FILEPATH, \"{path}\").")
                        })
                        .replace(COMPILER_VERSION, "<gleam compiler version string>");
                    buffer.push_str(&text)
                }
            };
            buffer.push('\n');
            buffer.push('\n');
        }
        // Warnings are sorted so snapshot ordering is stable.
        for warning in self.warnings.iter().map(|w| w.to_pretty_string()).sorted() {
            write!(buffer, "//// Warning\n{}", normalise_diagnostic(&warning)).unwrap();
            buffer.push('\n');
            buffer.push('\n');
        }
        buffer
    }
}
/// Copy every regular file under `path` (following symlinks) into a fresh
/// `InMemoryFileSystem`, keyed by its path relative to `path`.
pub fn to_in_memory_filesystem(path: &Utf8Path) -> InMemoryFileSystem {
    let fs = InMemoryFileSystem::new();
    for entry in walkdir::WalkDir::new(path).follow_links(true) {
        // Unreadable entries are skipped rather than failing the copy.
        let Ok(entry) = entry else { continue };
        if !entry.file_type().is_file() {
            continue;
        }
        let fullpath = entry.into_path();
        let content = std::fs::read(&fullpath).unwrap();
        let relative = fullpath.strip_prefix(path).unwrap();
        fs.write_bytes(Utf8Path::from_path(relative).unwrap(), &content)
            .unwrap();
    }
    fs
}
// Matches generated Erlang `-file("path", line).` annotations, capturing
// the path and the line number for normalisation.
static FILE_LINE_REGEX: LazyLock<Regex> =
    LazyLock::new(|| Regex::new(r#"-file\("([^"]+)", (\d+)\)\."#).expect("Invalid regex"));
// Matches the generated `-define(FILEPATH, "path").` macro, capturing the path.
static FILEPATH_MACRO_REGEX: LazyLock<Regex> =
    LazyLock::new(|| Regex::new(r#"-define\(FILEPATH, "([^"]+)"\)\."#).expect("Invalid regex"));
pub fn normalise_diagnostic(text: &str) -> String {
// There is an extra ^ on Windows in some error messages' code
// snippets.
// I've not managed to determine why this is yet (it is especially
// tricky without a Windows computer) so for now we just squash them
// in these cross-platform tests.
Regex::new(r"\^+")
.expect("^ sequence regex")
.replace_all(text, "^")
.replace('\\', "/")
}
| rust | Apache-2.0 | f424547f02e621f1c5f28749786e05eda7feb098 | 2026-01-04T15:40:22.554517Z | false |
gleam-lang/gleam | https://github.com/gleam-lang/gleam/blob/f424547f02e621f1c5f28749786e05eda7feb098/compiler-cli/src/config.rs | compiler-cli/src/config.rs | use camino::Utf8PathBuf;
use gleam_core::{
config::PackageConfig,
error::{Error, FileIoAction, FileKind},
manifest::{Manifest, ManifestPackage, ManifestPackageSource},
paths::ProjectPaths,
};
/// Where a module's package configuration was found.
#[derive(Debug, Clone, Copy)]
pub enum PackageKind {
    /// The module belongs to one of the project's dependency packages.
    Dependency,
    /// The module belongs to the root project itself.
    Root,
}
/// Get the config for a dependency module. Return the config for the current
/// project if a dependency doesn't have a config file.
///
/// `mod_path` is the slash-separated module name (without the `.gleam`
/// extension); each Gleam-built dependency in the manifest is searched for
/// a matching `src/<mod_path>.gleam` file.
pub fn find_package_config_for_module(
    mod_path: &str,
    manifest: &Manifest,
    project_paths: &ProjectPaths,
) -> Result<(PackageConfig, PackageKind), Error> {
    for package in &manifest.packages {
        // Not a Gleam package
        if !package.build_tools.contains(&"gleam".into()) {
            continue;
        }
        let root = package_root(package, project_paths);
        let mut module_path = root.join("src").join(mod_path);
        _ = module_path.set_extension("gleam");
        // This package doesn't have the module we're looking for
        if !module_path.is_file() {
            continue;
        }
        let configuration = read(root.join("gleam.toml"))?;
        return Ok((configuration, PackageKind::Dependency));
    }
    // No dependency owns the module: treat it as part of the root project.
    Ok((root_config(project_paths)?, PackageKind::Root))
}
/// The directory a manifest package's sources live in: the local path for
/// path dependencies, otherwise the build packages directory.
fn package_root(package: &ManifestPackage, project_paths: &ProjectPaths) -> Utf8PathBuf {
    if let ManifestPackageSource::Local { path } = &package.source {
        project_paths.root().join(path)
    } else {
        // Hex and git packages are both downloaded into build/packages/.
        project_paths.build_packages_package(&package.name)
    }
}
/// Read and parse the root project's `gleam.toml`.
pub fn root_config(paths: &ProjectPaths) -> Result<PackageConfig, Error> {
    read(paths.root_config())
}
/// Read and parse the `gleam.toml` at `config_path`, then verify that this
/// compiler version satisfies the package's declared Gleam version
/// constraint. Parse failures are reported as `FileIoAction::Parse` errors.
pub fn read(config_path: Utf8PathBuf) -> Result<PackageConfig, Error> {
    let toml = crate::fs::read(&config_path)?;
    let config: PackageConfig = toml::from_str(&toml).map_err(|e| Error::FileIo {
        action: FileIoAction::Parse,
        kind: FileKind::File,
        path: config_path,
        err: Some(e.to_string()),
    })?;
    config.check_gleam_compatibility()?;
    Ok(config)
}
pub fn ensure_config_exists(paths: &ProjectPaths) -> Result<(), Error> {
let path = paths.root_config();
if !path.is_file() {
return Err(Error::FileIo {
action: FileIoAction::Read,
kind: FileKind::File,
path,
err: Some("File not found".into()),
});
}
Ok(())
}
#[cfg(test)]
mod tests {
    use super::*;
    use gleam_core::manifest::Base16Checksum;
    // Hex-sourced packages resolve to build/packages/<name>.
    #[test]
    fn package_root_hex() {
        let paths = ProjectPaths::new(Utf8PathBuf::from("/app"));
        let package = ManifestPackage {
            name: "the_package".into(),
            version: hexpm::version::Version::new(1, 0, 0),
            build_tools: vec!["gleam".into()],
            otp_app: None,
            requirements: vec![],
            source: ManifestPackageSource::Hex {
                outer_checksum: Base16Checksum(vec![]),
            },
        };
        assert_eq!(
            package_root(&package, &paths),
            Utf8PathBuf::from("/app/build/packages/the_package")
        );
    }
    // Git-sourced packages resolve to the same place as Hex ones.
    #[test]
    fn package_root_git() {
        let paths = ProjectPaths::new(Utf8PathBuf::from("/app"));
        let package = ManifestPackage {
            name: "the_package".into(),
            version: hexpm::version::Version::new(1, 0, 0),
            build_tools: vec!["gleam".into()],
            otp_app: None,
            requirements: vec![],
            source: ManifestPackageSource::Git {
                repo: "repo".into(),
                commit: "commit".into(),
            },
        };
        assert_eq!(
            package_root(&package, &paths),
            Utf8PathBuf::from("/app/build/packages/the_package")
        );
    }
    // Path dependencies resolve relative to the project root, without
    // normalisation of `..` segments.
    #[test]
    fn package_root_local() {
        let paths = ProjectPaths::new(Utf8PathBuf::from("/app"));
        let package = ManifestPackage {
            name: "the_package".into(),
            version: hexpm::version::Version::new(1, 0, 0),
            build_tools: vec!["gleam".into()],
            otp_app: None,
            requirements: vec![],
            source: ManifestPackageSource::Local {
                path: Utf8PathBuf::from("../wibble"),
            },
        };
        assert_eq!(
            package_root(&package, &paths),
            Utf8PathBuf::from("/app/../wibble")
        );
    }
}
| rust | Apache-2.0 | f424547f02e621f1c5f28749786e05eda7feb098 | 2026-01-04T15:40:22.554517Z | false |
gleam-lang/gleam | https://github.com/gleam-lang/gleam/blob/f424547f02e621f1c5f28749786e05eda7feb098/compiler-cli/src/publish.rs | compiler-cli/src/publish.rs | use camino::{Utf8Path, Utf8PathBuf};
use ecow::EcoString;
use flate2::{Compression, write::GzEncoder};
use gleam_core::{
Error, Result,
analyse::TargetSupport,
ast::{CallArg, Statement, TypedExpr, TypedFunction},
build::{Codegen, Compile, Mode, Options, Package, Target},
config::{GleamVersion, PackageConfig, SpdxLicense},
docs::{Dependency, DependencyKind, DocContext},
error::{SmallVersion, wrap},
hex,
manifest::ManifestPackageSource,
paths::{self, ProjectPaths},
requirement::Requirement,
type_,
};
use hexpm::version::{Range, Version};
use itertools::Itertools;
use sha2::Digest;
use std::{collections::HashMap, io::Write, path::PathBuf, time::Instant};
use crate::{build, cli, docs, fs, http::HttpClient};
// Publishing a `gleam_*`-prefixed package is reserved for the core team;
// this shared password acts as the (deliberately public-spirited) gate.
const CORE_TEAM_PUBLISH_PASSWORD: &str = "Trans rights are human rights";
/// Publish the current package and its HTML documentation to Hex.
///
/// Runs a series of pre-publish checks — several prompt the user unless
/// `i_am_sure` is set — then builds the package tarball, builds the docs,
/// uploads both, and finally reminds the user to push a git tag.
/// `replace` allows overwriting an already-published release.
pub fn command(paths: &ProjectPaths, replace: bool, i_am_sure: bool) -> Result<()> {
    let mut config = crate::config::root_config(paths)?;
    // Cheap checks first, before any build work is done.
    let should_publish = check_for_gleam_prefix(&config)?
        && check_for_version_zero(&config)?
        && check_repo_url(&config, i_am_sure)?;
    if !should_publish {
        println!("Not publishing.");
        return Ok(());
    }
    let Tarball {
        mut compile_result,
        cached_modules,
        data: package_tarball,
        src_files_added,
        generated_files_added,
        dependencies,
    } = do_build_hex_tarball(paths, &mut config)?;
    // Checks that need the compiled package.
    check_for_name_squatting(&compile_result)?;
    check_for_multiple_top_level_modules(&compile_result, i_am_sure)?;
    check_for_default_main(&compile_result)?;
    // Build HTML documentation
    let docs_tarball = fs::create_tar_archive(docs::build_documentation(
        paths,
        &config,
        dependencies,
        &mut compile_result,
        DocContext::HexPublish,
        &cached_modules,
    )?)?;
    // Ask user if this is correct
    if !generated_files_added.is_empty() {
        println!("\nGenerated files:");
        for file in generated_files_added.iter().sorted() {
            println!("  - {}", file.0);
        }
    }
    println!("\nSource files:");
    for file in src_files_added.iter().sorted() {
        println!("  - {file}");
    }
    println!("\nName: {}", config.name);
    println!("Version: {}", config.version);
    let should_publish = i_am_sure || cli::confirm("\nDo you wish to publish this package?")?;
    if !should_publish {
        println!("Not publishing.");
        return Ok(());
    }
    // Uploads are async so a runtime is needed; authentication may create
    // and store a new API key on first use.
    let runtime = tokio::runtime::Runtime::new().expect("Unable to start Tokio async runtime");
    let hex_config = hexpm::Config::new();
    let api_key =
        crate::hex::HexAuthentication::new(&runtime, hex_config.clone()).get_or_create_api_key()?;
    let start = Instant::now();
    cli::print_publishing(&config.name, &config.version);
    runtime.block_on(hex::publish_package(
        package_tarball,
        config.version.to_string(),
        &api_key,
        &hex_config,
        replace,
        &HttpClient::new(),
    ))?;
    cli::print_publishing_documentation();
    runtime.block_on(hex::publish_documentation(
        &config.name,
        &config.version,
        docs_tarball,
        &api_key,
        &hex_config,
        &HttpClient::new(),
    ))?;
    cli::print_published(start.elapsed());
    println!(
        "\nView your package at https://hex.pm/packages/{}",
        &config.name
    );
    // Prompt the user to make a git tag if they have not.
    // NOTE(review): `.git` is resolved relative to the current working
    // directory rather than `paths.root()` — confirm this is intended for
    // invocations from a subdirectory.
    let has_repo = config.repository.is_some();
    let git = PathBuf::from(".git");
    let tag_name = config.tag_for_version(&config.version);
    let git_tag = git.join("refs").join("tags").join(&tag_name);
    if has_repo && git.exists() && !git_tag.exists() {
        println!(
            "
Please push a git tag for this release so source code links in the
HTML documentation will work:
git tag {tag_name}
git push origin {tag_name}
"
        )
    }
    Ok(())
}
/// Reject publishing of placeholder packages used to squat on Hex names.
///
/// A package is considered squatting when it has at most one module with no
/// more than one dependency and a couple of definitions, and its `main`
/// (if any) starts with a bare `println`. Richer packages pass through.
fn check_for_name_squatting(package: &Package) -> Result<(), Error> {
    if package.modules.len() > 1 {
        return Ok(());
    }
    // No modules at all: certainly a placeholder.
    let Some(module) = package.modules.first() else {
        return Err(Error::HexPackageSquatting);
    };
    if module.dependencies.len() > 1 {
        return Ok(());
    }
    if module.ast.definitions_len() > 2 {
        return Ok(());
    }
    let Some(main) = module
        .ast
        .definitions
        .functions
        .iter()
        .find_map(|function| function.main_function())
    else {
        return Ok(());
    };
    // A `main` that leads with a println matches the scaffolded default.
    if let Some(first) = &main.body.first()
        && first.is_println()
    {
        return Err(Error::HexPackageSquatting);
    }
    Ok(())
}
/// Checks if publishing packages contain default main functions.
/// Main functions with documentation are considered intentional and allowed.
///
/// Errors with `CannotPublishWithDefaultMain` when any undocumented `main`
/// is still the scaffolded `io.println("Hello from <name>!")`.
fn check_for_default_main(package: &Package) -> Result<(), Error> {
    let package_name = &package.config.name;
    let has_default_main = package
        .modules
        .iter()
        .flat_map(|module| module.ast.definitions.functions.iter())
        .filter_map(|function| function.main_function())
        .any(|main| main.documentation.is_none() && is_default_main(main, package_name));
    if has_default_main {
        return Err(Error::CannotPublishWithDefaultMain {
            package_name: package_name.clone(),
        });
    }
    Ok(())
}
/// True when `main` is still exactly the scaffolded body:
/// a single `io.println("Hello from <package_name>!")` statement.
fn is_default_main(main: &TypedFunction, package_name: &EcoString) -> bool {
    // Must be a single expression statement...
    if main.body.len() != 1 {
        return false;
    }
    let expression = match main.body.first() {
        Some(Statement::Expression(expression)) => expression,
        _ => return false,
    };
    // ...that is a println call...
    if !expression.is_println() {
        return false;
    }
    let TypedExpr::Call { arguments, .. } = expression else {
        return false;
    };
    // ...with exactly the default string literal argument.
    if arguments.len() != 1 {
        return false;
    }
    match arguments.first() {
        Some(CallArg {
            value: TypedExpr::String { value, .. },
            ..
        }) => {
            let default_argument = format!("Hello from {package_name}!");
            value == &default_argument
        }
        _ => false,
    }
}
/// Warn (and prompt, unless `i_am_sure`) when the package defines more than
/// one top-level module, since that pollutes the consumer's namespace.
/// Declining the prompt exits the process.
fn check_for_multiple_top_level_modules(package: &Package, i_am_sure: bool) -> Result<(), Error> {
    // Collect top-level module names
    let mut top_level_module_names = package
        .modules
        .iter()
        .filter_map(|module| {
            // Top-level modules are those that don't contain any path separators
            if module.name.contains('/') {
                None
            } else {
                Some(module.name.clone())
            }
        })
        .collect::<Vec<_>>();
    // Remove duplicates
    top_level_module_names.sort_unstable();
    top_level_module_names.dedup();
    // If more than one top-level module name is found, prompt for confirmation
    if top_level_module_names.len() > 1 {
        let text = wrap(&format!(
            "Your package defines multiple top-level modules: {}.
Defining multiple top-level modules can lead to namespace pollution \
and potential conflicts for consumers.
To fix this, move all your modules under a single top-level module of your choice.
For example:
src/{1}.gleam
src/{1}/module1.gleam
src/{1}/module2.gleam",
            top_level_module_names.join(", "),
            package.config.name
        ));
        println!("{text}\n");
        let should_publish =
            i_am_sure || cli::confirm("\nDo you wish to continue publishing this package?")?;
        println!();
        if !should_publish {
            println!("Not publishing.");
            std::process::exit(0);
        }
    }
    Ok(())
}
/// Verify that the configured repository URL responds successfully.
///
/// Returns `Ok(true)` when no repository is configured or the URL is
/// reachable; otherwise asks the user (skipped when `i_am_sure`) whether
/// to continue anyway.
fn check_repo_url(config: &PackageConfig, i_am_sure: bool) -> Result<bool, Error> {
    let Some(repo) = config.repository.as_ref() else {
        return Ok(true);
    };
    let url = repo.url();
    // A short-lived runtime just for this one HTTP GET.
    let runtime = tokio::runtime::Runtime::new().expect("Unable to start Tokio async runtime");
    let response = runtime.block_on(reqwest::get(&url)).map_err(Error::http)?;
    if response.status().is_success() {
        return Ok(true);
    }
    println!(
        "The repository configuration in your `gleam.toml` file does not appear to be
valid, {} returned status {}",
        &url,
        response.status()
    );
    let should_publish = i_am_sure || cli::confirm("\nDo you wish to continue?")?;
    println!();
    Ok(should_publish)
}
/// Ask for confirmation if the package version is a v0.x.x version.
///
/// Returns `Ok(true)` for versions >= 1.0.0, otherwise only when the user
/// types the exact acknowledgement phrase.
fn check_for_version_zero(config: &PackageConfig) -> Result<bool, Error> {
    if config.version.major != 0 {
        return Ok(true);
    }
    println!(
        "You are about to publish a release that is below version 1.0.0.
Semantic versioning doesn't apply to version 0.x.x releases, so your
users will not be protected from breaking changes. This can result
in a poor user experience where packages can break unexpectedly with
updates that would normally be safe.
If your package is not ready to be used in production it should not
be published.
\n"
    );
    let should_publish = cli::confirm_with_text("I am not using semantic versioning")?;
    println!();
    Ok(should_publish)
}
/// Ask for confirmation if the package name is `gleam_*`.
///
/// The `gleam_` prefix is reserved for core-team packages (except the
/// community namespace `gleam_community_*`), so publishing one requires
/// the core team password.
fn check_for_gleam_prefix(config: &PackageConfig) -> Result<bool, Error> {
    if !config.name.starts_with("gleam_") || config.name.starts_with("gleam_community_") {
        return Ok(true);
    }
    println!(
        "You are about to publish a package with a name that starts with
the prefix `gleam_`, which is for packages maintained by the Gleam
core team.\n",
    );
    let password = cli::ask_password("Please enter the core team password to continue")?;
    println!();
    Ok(password == CORE_TEAM_PUBLISH_PASSWORD)
}
/// Everything produced while assembling a Hex release tarball, kept so the
/// publish flow can reuse the compile output for docs and user prompts.
struct Tarball {
    // The compiled root package.
    compile_result: Package,
    // Interfaces of all compiled modules, needed for docs generation.
    cached_modules: im::HashMap<EcoString, type_::ModuleInterface>,
    // The outer Hex tarball bytes, ready for upload.
    data: Vec<u8>,
    // Source files included, shown to the user before confirming.
    src_files_added: Vec<Utf8PathBuf>,
    // Generated (path, contents) pairs included, e.g. compiled Erlang.
    generated_files_added: Vec<(Utf8PathBuf, String)>,
    dependencies: HashMap<EcoString, Dependency>,
}
/// Build the Hex release tarball and return only its bytes.
/// See `do_build_hex_tarball` for the checks performed along the way.
pub fn build_hex_tarball(paths: &ProjectPaths, config: &mut PackageConfig) -> Result<Vec<u8>> {
    let Tarball { data, .. } = do_build_hex_tarball(paths, config)?;
    Ok(data)
}
/// Compile the project and assemble the Hex release tarball
/// (VERSION + metadata.config + contents.tar.gz + CHECKSUM).
///
/// Also enforces publishability: required config fields, a correct `gleam`
/// version constraint (inferring one into `config` when absent — this is
/// why `config` is `&mut`), no remaining `todo`/`echo`, and no modules
/// that export nothing.
fn do_build_hex_tarball(paths: &ProjectPaths, config: &mut PackageConfig) -> Result<Tarball> {
    let target = config.target;
    check_config_for_publishing(config)?;
    // Reset the build directory so we know the state of the project
    fs::delete_directory(&paths.build_directory_for_target(Mode::Prod, target))?;
    let manifest = build::download_dependencies(paths, cli::Reporter::new())?;
    // Record the kind of every dependency for the docs generator.
    let dependencies = manifest
        .packages
        .iter()
        .map(|package| {
            (
                package.name.clone(),
                Dependency {
                    version: package.version.clone(),
                    kind: match &package.source {
                        ManifestPackageSource::Hex { .. } => DependencyKind::Hex,
                        ManifestPackageSource::Git { .. } => DependencyKind::Git,
                        ManifestPackageSource::Local { .. } => DependencyKind::Path,
                    },
                },
            )
        })
        .collect();
    // Build the project to check that it is valid
    let built = build::main(
        paths,
        Options {
            root_target_support: TargetSupport::Enforced,
            warnings_as_errors: false,
            mode: Mode::Prod,
            target: Some(target),
            codegen: Codegen::All,
            compile: Compile::All,
            no_print_progress: false,
        },
        manifest,
    )?;
    let minimum_required_version = built.minimum_required_version();
    match &config.gleam_version {
        // If the package has no explicit `gleam` version in its `gleam.toml`
        // then we want to add the automatically inferred one so we know it's
        // correct and folks getting the package from Hex won't have unpleasant
        // surprises if the author forgot to manually write it down.
        None => {
            // If we're automatically adding the minimum required version
            // constraint we want it to at least be `>= 1.0.0`, even if the
            // inferred lower bound could be lower.
            let minimum_required_version =
                std::cmp::max(minimum_required_version, Version::new(1, 0, 0));
            let inferred_version_range = pubgrub::Range::higher_than(minimum_required_version);
            config.gleam_version = Some(GleamVersion::from_pubgrub(inferred_version_range));
        }
        // Otherwise we need to check that the annotated version range is
        // correct and includes the minimum required version.
        Some(gleam_version) => {
            if let Some(lowest_allowed_version) = gleam_version.lowest_version()
                && lowest_allowed_version < minimum_required_version
            {
                return Err(Error::CannotPublishWrongVersion {
                    minimum_required_version: SmallVersion::from_hexpm(minimum_required_version),
                    wrongfully_allowed_version: SmallVersion::from_hexpm(lowest_allowed_version),
                });
            }
        }
    }
    // If any of the modules in the package contain a todo or an echo then
    // refuse to publish as the package is not yet finished.
    let mut modules_containing_todo = vec![];
    let mut modules_containing_echo = vec![];
    for module in built.root_package.modules.iter() {
        if module.ast.type_info.contains_todo() {
            modules_containing_todo.push(module.name.clone());
        } else if module.ast.type_info.contains_echo {
            modules_containing_echo.push(module.name.clone());
        }
    }
    if !modules_containing_todo.is_empty() {
        return Err(Error::CannotPublishTodo {
            unfinished: modules_containing_todo,
        });
    }
    if !modules_containing_echo.is_empty() {
        return Err(Error::CannotPublishEcho {
            unfinished: modules_containing_echo,
        });
    }
    // empty_modules is a list of modules that do not export any values or types.
    // We do not allow publishing packages that contain empty modules.
    let empty_modules: Vec<_> = built
        .root_package
        .modules
        .iter()
        .filter(|module| {
            built
                .module_interfaces
                .get(&module.name)
                .map(|interface| {
                    // Check if the module exports any values or types
                    interface.values.is_empty() && interface.types.is_empty()
                })
                .unwrap_or(false)
        })
        .map(|module| module.name.clone())
        .collect();
    if !empty_modules.is_empty() {
        return Err(Error::CannotPublishEmptyModules {
            unfinished: empty_modules,
        });
    }
    // TODO: If any of the modules in the package contain a leaked internal type then
    // refuse to publish as the package is not yet finished.
    // We need to move aliases in to the type system first.
    // context: https://discord.com/channels/768594524158427167/768594524158427170/1227250677734969386
    // Collect all the files we want to include in the tarball
    let generated_files = match target {
        Target::Erlang => generated_erlang_files(paths, &built.root_package)?,
        Target::JavaScript => vec![],
    };
    let src_files = project_files(Utf8Path::new(""))?;
    let contents_tar_gz = contents_tarball(&src_files, &generated_files)?;
    // "3" is the Hex package format version.
    let version = "3";
    let metadata = metadata_config(&built.root_package.config, &src_files, &generated_files)?;
    // Calculate checksum
    let mut hasher = sha2::Sha256::new();
    hasher.update(version.as_bytes());
    hasher.update(metadata.as_bytes());
    hasher.update(contents_tar_gz.as_slice());
    let checksum = base16::encode_upper(&hasher.finalize());
    tracing::info!(checksum = %checksum, "Generated Hex package inner checksum");
    // Build tarball
    let mut tarball = Vec::new();
    {
        let mut tarball = tar::Builder::new(&mut tarball);
        add_to_tar(&mut tarball, "VERSION", version.as_bytes())?;
        add_to_tar(&mut tarball, "metadata.config", metadata.as_bytes())?;
        add_to_tar(&mut tarball, "contents.tar.gz", contents_tar_gz.as_slice())?;
        add_to_tar(&mut tarball, "CHECKSUM", checksum.as_bytes())?;
        tarball.finish().map_err(Error::finish_tar)?;
    }
    tracing::info!("Generated package Hex release tarball");
    Ok(Tarball {
        compile_result: built.root_package,
        cached_modules: built.module_interfaces,
        data: tarball,
        src_files_added: src_files,
        generated_files_added: generated_files,
        dependencies,
    })
}
fn check_config_for_publishing(config: &PackageConfig) -> Result<()> {
// These fields are required to publish a Hex package. Hex will reject
// packages without them.
if config.description.is_empty() || config.licences.is_empty() {
Err(Error::MissingHexPublishFields {
description_missing: config.description.is_empty(),
licence_missing: config.licences.is_empty(),
})
} else {
Ok(())
}
}
/// Render the `metadata.config` Erlang-term document for a Hex release.
///
/// Fails with `PublishNonHexDependencies` if any dependency is a git or
/// path dependency, since Hex releases may only depend on Hex packages.
fn metadata_config<'a>(
    config: &'a PackageConfig,
    source_files: &[Utf8PathBuf],
    generated_files: &[(Utf8PathBuf, String)],
) -> Result<String> {
    // A missing or unparseable repository URL is simply left out of links.
    let repo_url = http::Uri::try_from(
        config
            .repository
            .as_ref()
            .map(|r| r.url())
            .unwrap_or_default(),
    )
    .ok();
    let requirements: Result<Vec<ReleaseRequirement<'a>>> = config
        .dependencies
        .iter()
        .map(|(name, requirement)| match requirement {
            Requirement::Hex { version } => Ok(ReleaseRequirement {
                name,
                requirement: version,
            }),
            _ => Err(Error::PublishNonHexDependencies {
                package: name.to_string(),
            }),
        })
        .collect();
    let metadata = ReleaseMetadata {
        name: &config.name,
        version: &config.version,
        description: &config.description,
        source_files,
        generated_files,
        licenses: &config.licences,
        links: config
            .links
            .iter()
            .map(|l| (l.title.as_str(), l.href.clone()))
            .chain(repo_url.into_iter().map(|u| ("Repository", u)))
            .collect(),
        requirements: requirements?,
        build_tools: vec!["gleam"],
    }
    .as_erlang();
    tracing::info!(contents = ?metadata, "Generated Hex metadata.config");
    Ok(metadata)
}
/// Build the inner gzipped `contents.tar.gz`: on-disk source files plus
/// in-memory generated (path, contents) pairs.
fn contents_tarball(
    files: &[Utf8PathBuf],
    data_files: &[(Utf8PathBuf, String)],
) -> Result<Vec<u8>, Error> {
    let mut contents_tar_gz = Vec::new();
    {
        let mut tarball =
            tar::Builder::new(GzEncoder::new(&mut contents_tar_gz, Compression::default()));
        for path in files {
            add_path_to_tar(&mut tarball, path)?;
        }
        for (path, contents) in data_files {
            add_to_tar(&mut tarball, path, contents.as_bytes())?;
        }
        tarball.finish().map_err(Error::finish_tar)?;
    }
    tracing::info!("Generated contents.tar.gz");
    Ok(contents_tar_gz)
}
/// Collect the files exported when publishing the project at `base_path`:
/// Gleam and native sources under `src/`, everything under `priv/`, plus
/// any of the well-known metadata files that exist at the root.
fn project_files(base_path: &Utf8Path) -> Result<Vec<Utf8PathBuf>> {
    let src = base_path.join(Utf8Path::new("src"));
    let mut files: Vec<Utf8PathBuf> = fs::gleam_files(&src)
        .chain(fs::native_files(&src))
        .collect();
    let private = base_path.join(Utf8Path::new("priv"));
    files.extend(fs::private_files(&private));
    // Optional root-level metadata files, included only when present.
    const METADATA_FILES: &[&str] = &[
        "README",
        "README.md",
        "README.txt",
        "gleam.toml",
        "LICENSE",
        "LICENCE",
        "LICENSE.md",
        "LICENCE.md",
        "LICENSE.txt",
        "LICENCE.txt",
        "NOTICE",
        "NOTICE.md",
        "NOTICE.txt",
    ];
    for name in METADATA_FILES {
        let path = base_path.join(name);
        if path.exists() {
            files.push(path);
        }
    }
    Ok(files)
}
// TODO: test
/// Collect the Erlang artefacts to ship when publishing for the Erlang
/// target: compiled `.erl` modules for `src/` code, any `.hrl` headers,
/// and the `<name>.app.src` application resource file.
fn generated_erlang_files(
    paths: &ProjectPaths,
    package: &Package,
) -> Result<Vec<(Utf8PathBuf, String)>> {
    let mut files = vec![];
    let dir = paths.build_directory_for_package(Mode::Prod, Target::Erlang, &package.config.name);
    let ebin = dir.join("ebin");
    let build = dir.join(paths::ARTEFACT_DIRECTORY_NAME);
    let include = dir.join("include");
    // Destination directories inside the tarball.
    let tar_src = Utf8Path::new("src");
    let tar_include = Utf8Path::new("include");
    // Erlang modules
    for module in &package.modules {
        // Do not publish test/ and dev/ code
        if !module.origin.is_src() {
            continue;
        }
        let name = module.compiled_erlang_path();
        files.push((tar_src.join(&name), fs::read(build.join(name))?));
    }
    // Erlang headers
    if include.is_dir() {
        for file in fs::erlang_files(&include) {
            let name = file.file_name().expect("generated_files include file name");
            files.push((tar_include.join(name), fs::read(file)?));
        }
    }
    // src/package.app.src file
    let app = format!("{}.app", &package.config.name);
    let appsrc = format!("{}.src", &app);
    files.push((tar_src.join(appsrc), fs::read(ebin.join(app))?));
    Ok(files)
}
/// Append an in-memory byte buffer to `tarball` at `path`.
fn add_to_tar<P, W>(tarball: &mut tar::Builder<W>, path: P, data: &[u8]) -> Result<()>
where
    P: AsRef<Utf8Path>,
    W: Write,
{
    let path = path.as_ref();
    tracing::info!(file=?path, "Adding file to tarball");
    let mut header = tar::Header::new_gnu();
    // Owner read/write only; release entries need no execute bit.
    header.set_mode(0o600);
    header.set_size(data.len() as u64);
    header.set_cksum();
    tarball
        .append_data(&mut header, path, data)
        .map_err(|e| Error::add_tar(path, e))
}
/// Append a file from disk to `tarball`, stored under the same path.
fn add_path_to_tar<P, W>(tarball: &mut tar::Builder<W>, path: P) -> Result<()>
where
    P: AsRef<Utf8Path>,
    W: Write,
{
    let path = path.as_ref();
    tracing::info!(file=?path, "Adding file to tarball");
    tarball
        .append_path(path)
        .map_err(|e| Error::add_tar(path, e))
}
/// The data rendered into a Hex release's `metadata.config` document.
#[derive(Debug, Clone)]
pub struct ReleaseMetadata<'a> {
    name: &'a str,
    version: &'a Version,
    description: &'a str,
    // Files copied from disk into the tarball.
    source_files: &'a [Utf8PathBuf],
    // Generated (path, contents) pairs, e.g. compiled Erlang.
    generated_files: &'a [(Utf8PathBuf, String)],
    licenses: &'a Vec<SpdxLicense>,
    // (title, url) pairs shown on the package's Hex page.
    links: Vec<(&'a str, http::Uri)>,
    requirements: Vec<ReleaseRequirement<'a>>,
    build_tools: Vec<&'a str>,
    // What should this be? I can't find it in the API anywhere.
    // extra: (kvlist(string => kvlist(...))) (optional)
}
impl ReleaseMetadata<'_> {
    /// Render the metadata as the Erlang consult-format document Hex
    /// expects as `metadata.config` inside the release tarball.
    /// Files are listed sorted; links and requirements keep their order.
    pub fn as_erlang(&self) -> String {
        // One `{<<"title">>, <<"url">>}` tuple per link.
        fn link(link: &(&str, http::Uri)) -> String {
            format!(
                "\n {{<<\"{name}\">>, <<\"{url}\">>}}",
                name = link.0,
                url = link.1
            )
        }
        // One `<<"path">>` binary per exported file.
        fn file(name: impl AsRef<Utf8Path>) -> String {
            format!("\n <<\"{name}\">>", name = name.as_ref())
        }
        format!(
            r#"{{<<"name">>, <<"{name}">>}}.
{{<<"app">>, <<"{name}">>}}.
{{<<"version">>, <<"{version}">>}}.
{{<<"description">>, <<"{description}"/utf8>>}}.
{{<<"licenses">>, [{licenses}]}}.
{{<<"build_tools">>, [{build_tools}]}}.
{{<<"links">>, [{links}
]}}.
{{<<"requirements">>, [{requirements}
]}}.
{{<<"files">>, [{files}
]}}.
"#,
            name = self.name,
            version = self.version,
            description = self.description,
            files = self
                .source_files
                .iter()
                .chain(self.generated_files.iter().map(|(p, _)| p))
                .map(file)
                .sorted()
                .join(","),
            links = self.links.iter().map(link).join(","),
            licenses = self.licenses.iter().map(|l| quotes(l.as_ref())).join(", "),
            build_tools = self.build_tools.iter().map(|l| quotes(l)).join(", "),
            requirements = self
                .requirements
                .iter()
                .map(ReleaseRequirement::as_erlang)
                .join(",")
        )
    }
}
/// A single Hex dependency entry in the release metadata.
#[derive(Debug, Clone)]
struct ReleaseRequirement<'a> {
    name: &'a str,
    // optional: bool,
    requirement: &'a Range,
    // Support alternate repositories at a later date.
    // repository: String,
}
impl ReleaseRequirement<'_> {
    /// Render this dependency as an entry in the metadata's
    /// `requirements` proplist.
    pub fn as_erlang(&self) -> String {
        format!(
            r#"
{{<<"{app}">>, [
{{<<"app">>, <<"{app}">>}},
{{<<"optional">>, false}},
{{<<"requirement">>, <<"{requirement}">>}}
]}}"#,
            app = self.name,
            requirement = self.requirement,
        )
    }
}
// Snapshot of the full metadata.config rendering: files sorted, URLs
// normalised by `http::Uri` (note the trailing slash on the homepage).
#[test]
fn release_metadata_as_erlang() {
    let licences = vec![
        SpdxLicense {
            licence: "MIT".into(),
        },
        SpdxLicense {
            licence: "MPL-2.0".into(),
        },
    ];
    let version = "1.2.3".try_into().unwrap();
    let homepage = "https://gleam.run".parse().unwrap();
    let github = "https://github.com/lpil/myapp".parse().unwrap();
    let req1 = Range::new("~> 1.2.3 or >= 5.0.0".into()).unwrap();
    let req2 = Range::new("~> 1.2".into()).unwrap();
    let meta = ReleaseMetadata {
        name: "myapp",
        version: &version,
        description: "description goes here 🌈",
        source_files: &[
            Utf8PathBuf::from("gleam.toml"),
            Utf8PathBuf::from("src/thingy.gleam"),
            Utf8PathBuf::from("src/whatever.gleam"),
        ],
        generated_files: &[
            (Utf8PathBuf::from("src/myapp.app"), "".into()),
            (Utf8PathBuf::from("src/thingy.erl"), "".into()),
            (Utf8PathBuf::from("src/whatever.erl"), "".into()),
        ],
        licenses: &licences,
        links: vec![("homepage", homepage), ("github", github)],
        requirements: vec![
            ReleaseRequirement {
                name: "wibble",
                requirement: &req1,
            },
            ReleaseRequirement {
                name: "wobble",
                requirement: &req2,
            },
        ],
        build_tools: vec!["gleam", "rebar3"],
    };
    assert_eq!(
        meta.as_erlang(),
        r#"{<<"name">>, <<"myapp">>}.
{<<"app">>, <<"myapp">>}.
{<<"version">>, <<"1.2.3">>}.
{<<"description">>, <<"description goes here 🌈"/utf8>>}.
{<<"licenses">>, [<<"MIT">>, <<"MPL-2.0">>]}.
{<<"build_tools">>, [<<"gleam">>, <<"rebar3">>]}.
{<<"links">>, [
{<<"homepage">>, <<"https://gleam.run/">>},
{<<"github">>, <<"https://github.com/lpil/myapp">>}
]}.
{<<"requirements">>, [
{<<"wibble">>, [
{<<"app">>, <<"wibble">>},
{<<"optional">>, false},
{<<"requirement">>, <<"~> 1.2.3 or >= 5.0.0">>}
]},
{<<"wobble">>, [
{<<"app">>, <<"wobble">>},
{<<"optional">>, false},
{<<"requirement">>, <<"~> 1.2">>}
]}
]}.
{<<"files">>, [
<<"gleam.toml">>,
<<"src/myapp.app">>,
<<"src/thingy.erl">>,
<<"src/thingy.gleam">>,
<<"src/whatever.erl">>,
<<"src/whatever.gleam">>
]}.
"#
        .to_string()
    );
}
// Path dependencies cannot be expressed in Hex metadata, so publishing
// must fail with `PublishNonHexDependencies`.
#[test]
fn prevent_publish_local_dependency() {
    let config = PackageConfig {
        dependencies: [("provided".into(), Requirement::path("./path/to/package"))].into(),
        ..Default::default()
    };
    assert_eq!(
        metadata_config(&config, &[], &[]),
        Err(Error::PublishNonHexDependencies {
            package: "provided".into()
        })
    );
}
// Git dependencies cannot be expressed in Hex metadata, so publishing
// must fail with `PublishNonHexDependencies`.
#[test]
fn prevent_publish_git_dependency() {
    let config = PackageConfig {
        dependencies: [(
            "provided".into(),
            Requirement::git("https://github.com/gleam-lang/gleam.git", "da6e917"),
        )]
        .into(),
        ..Default::default()
    };
    assert_eq!(
        metadata_config(&config, &[], &[]),
        Err(Error::PublishNonHexDependencies {
            package: "provided".into()
        })
    );
}
/// Wrap `x` in an Erlang binary literal: `<<"x">>`.
fn quotes(x: &str) -> String {
    let mut quoted = String::with_capacity(x.len() + 6);
    quoted.push_str("<<\"");
    quoted.push_str(x);
    quoted.push_str("\">>");
    quoted
}
// End-to-end check of `project_files`: build a project tree containing
// both exportable and non-exportable files, then assert exactly the
// expected set is selected. Note that .gitignore has no effect on what
// is published — ignored src/priv files are still exported.
#[test]
fn exported_project_files_test() {
    let tmp = tempfile::tempdir().unwrap();
    let path = Utf8PathBuf::from_path_buf(tmp.path().join("my_project")).expect("Non Utf8 Path");
    let exported_project_files = &[
        "LICENCE",
        "LICENCE.md",
        "LICENCE.txt",
        "LICENSE",
        "LICENSE.md",
        "LICENSE.txt",
        "NOTICE",
        "NOTICE.md",
        "NOTICE.txt",
        "README",
        "README.md",
        "README.txt",
        "gleam.toml",
        "priv/ignored",
        "priv/wibble",
        "priv/wobble.js",
        "src/.hidden/hidden_ffi.erl",
        "src/.hidden/hidden_ffi.mjs",
        "src/.hidden_ffi.erl",
        "src/.hidden_ffi.mjs",
        "src/exported.gleam",
        "src/exported_ffi.erl",
        "src/exported_ffi.ex",
        "src/exported_ffi.hrl",
        "src/exported_ffi.js",
        "src/exported_ffi.mjs",
        "src/exported_ffi.ts",
        "src/ignored.gleam",
        "src/ignored_ffi.erl",
        "src/ignored_ffi.mjs",
        "src/nested/exported.gleam",
        "src/nested/exported_ffi.erl",
        "src/nested/exported_ffi.ex",
        "src/nested/exported_ffi.hrl",
        "src/nested/exported_ffi.js",
        "src/nested/exported_ffi.mjs",
        "src/nested/exported_ffi.ts",
        "src/nested/ignored.gleam",
        "src/nested/ignored_ffi.erl",
        "src/nested/ignored_ffi.mjs",
    ];
    // Files that must never end up in a published package: VCS metadata,
    // build output, and all test/ and dev/ code.
    let unexported_project_files = &[
        ".git/",
        ".github/workflows/test.yml",
        ".gitignore",
        "build/",
        "ignored.txt",
        "src/.hidden/hidden.gleam", // Not a valid Gleam module path
        "src/.hidden.gleam",        // Not a valid Gleam module name
        "src/also-ignored.gleam",   // Not a valid Gleam module name
        "test/exported_test.gleam",
        "test/exported_test_ffi.erl",
        "test/exported_test_ffi.ex",
        "test/exported_test_ffi.hrl",
        "test/exported_test_ffi.js",
        "test/exported_test_ffi.mjs",
        "test/exported_test_ffi.ts",
        "test/ignored_test.gleam",
        "test/ignored_test_ffi.erl",
        "test/ignored_test_ffi.mjs",
        "test/nested/exported_test.gleam",
        "test/nested/exported_test_ffi.erl",
        "test/nested/exported_test_ffi.ex",
        "test/nested/exported_test_ffi.hrl",
        "test/nested/exported_test_ffi.js",
        "test/nested/exported_test_ffi.mjs",
        "test/nested/exported_test_ffi.ts",
        "test/nested/ignored.gleam",
        "test/nested/ignored_test_ffi.erl",
        "test/nested/ignored_test_ffi.mjs",
        "dev/exported_test_ffi.erl",
        "dev/exported_test_ffi.ex",
        "dev/exported_test_ffi.hrl",
        "dev/exported_test_ffi.js",
        "dev/exported_test_ffi.mjs",
        "dev/exported_test_ffi.ts",
        "dev/ignored_test.gleam",
        "dev/ignored_test_ffi.erl",
        "dev/ignored_test_ffi.mjs",
        "dev/nested/exported_test.gleam",
        "dev/nested/exported_test_ffi.erl",
        "dev/nested/exported_test_ffi.ex",
        "dev/nested/exported_test_ffi.hrl",
        "dev/nested/exported_test_ffi.js",
        "dev/nested/exported_test_ffi.mjs",
        "dev/nested/exported_test_ffi.ts",
        "dev/nested/ignored.gleam",
        "dev/nested/ignored_test_ffi.erl",
        "dev/nested/ignored_test_ffi.mjs",
        "unrelated-file.txt",
    ];
    let gitignore = "ignored*
src/also-ignored.gleam";
    // Create the whole fixture tree; entries ending in `/` are directories.
    for &file in exported_project_files
        .iter()
        .chain(unexported_project_files)
    {
        if file.ends_with("/") {
            fs::mkdir(path.join(file)).unwrap();
            continue;
        }
        let contents = match file {
            ".gitignore" => gitignore,
            _ => "",
        };
        fs::write(&path.join(file), contents).unwrap();
    }
    let mut chosen_exported_files = project_files(&path).unwrap();
    chosen_exported_files.sort_unstable();
    let expected_exported_files = exported_project_files
        .iter()
        .map(|s| path.join(s))
        .collect_vec();
    assert_eq!(expected_exported_files, chosen_exported_files);
}
| rust | Apache-2.0 | f424547f02e621f1c5f28749786e05eda7feb098 | 2026-01-04T15:40:22.554517Z | false |
gleam-lang/gleam | https://github.com/gleam-lang/gleam/blob/f424547f02e621f1c5f28749786e05eda7feb098/compiler-cli/src/lib.rs | compiler-cli/src/lib.rs | #![warn(
clippy::all,
clippy::dbg_macro,
clippy::todo,
clippy::mem_forget,
clippy::use_self,
clippy::filter_map_next,
clippy::needless_continue,
clippy::needless_borrow,
clippy::match_wildcard_for_single_variants,
clippy::imprecise_flops,
clippy::suboptimal_flops,
clippy::lossy_float_literal,
clippy::rest_pat_in_fully_bound_structs,
clippy::fn_params_excessive_bools,
clippy::inefficient_to_string,
clippy::linkedlist,
clippy::macro_use_imports,
clippy::option_option,
clippy::verbose_file_reads,
clippy::unnested_or_patterns,
rust_2018_idioms,
missing_debug_implementations,
missing_copy_implementations,
trivial_casts,
trivial_numeric_casts,
nonstandard_style,
unexpected_cfgs,
unused_import_braces,
unused_qualifications
)]
#![deny(
clippy::await_holding_lock,
clippy::if_let_mutex,
clippy::indexing_slicing,
clippy::mem_forget,
clippy::ok_expect,
clippy::unimplemented,
clippy::unwrap_used,
unsafe_code,
unstable_features,
unused_results
)]
#![allow(
clippy::match_single_binding,
clippy::inconsistent_struct_constructor,
clippy::assign_op_pattern,
clippy::len_without_is_empty
)]
#[cfg(test)]
#[macro_use]
extern crate pretty_assertions;
mod add;
mod beam_compiler;
mod build;
mod build_lock;
mod cli;
mod compile_package;
mod config;
mod dependencies;
mod docs;
mod export;
mod fix;
mod format;
pub mod fs;
mod hex;
mod http;
mod lsp;
mod new;
mod owner;
mod panic;
mod publish;
mod remove;
pub mod run;
mod shell;
mod text_layout;
use config::root_config;
use fs::{get_current_directory, get_project_root};
pub use gleam_core::error::{Error, Result};
use gleam_core::{
analyse::TargetSupport,
build::{Codegen, Compile, Mode, NullTelemetry, Options, Runtime, Target},
hex::RetirementReason,
paths::ProjectPaths,
version::COMPILER_VERSION,
};
use std::str::FromStr;
use camino::Utf8PathBuf;
use clap::{
Args, Parser, Subcommand,
builder::{PossibleValuesParser, Styles, TypedValueParser, styling},
};
use strum::VariantNames;
// Shared argument set for `gleam update` and `gleam deps update` (the two
// commands dispatch to the same `dependencies::update` call).
#[derive(Args, Debug, Clone)]
struct UpdateOptions {
    /// (optional) Names of the packages to update
    /// If omitted, all dependencies will be updated
    #[arg(verbatim_doc_comment)]
    packages: Vec<String>,
}
// Arguments for `gleam deps tree`. `--package` and `--invert` are mutually
// exclusive: each declares `conflicts_with` the other.
#[derive(Args, Debug, Clone)]
struct TreeOptions {
    /// Name of the package to get the dependency tree for
    #[arg(
        short,
        long,
        ignore_case = true,
        conflicts_with = "invert",
        help = "Package to be used as the root of the tree"
    )]
    package: Option<String>,
    /// Name of the package to get the inverted dependency tree for
    #[arg(
        short,
        long,
        ignore_case = true,
        conflicts_with = "package",
        help = "Invert the tree direction and focus on the given package",
        value_name = "PACKAGE"
    )]
    invert: Option<String>,
}
// Top-level CLI grammar: one variant per `gleam` subcommand, parsed by clap's
// derive API. Note that the `///` doc comments below double as the `--help`
// text shown to users, so edits to them change CLI output.
#[derive(Parser, Debug)]
#[command(
    version,
    name = "gleam",
    next_display_order = None,
    help_template = "\
{before-help}{name} {version}
{usage-heading} {usage}
{all-args}{after-help}",
    styles = Styles::styled()
        .header(styling::AnsiColor::Yellow.on_default())
        .usage(styling::AnsiColor::Yellow.on_default())
        .literal(styling::AnsiColor::Green.on_default())
)]
enum Command {
    /// Build the project
    Build {
        /// Emit compile time warnings as errors
        #[arg(long)]
        warnings_as_errors: bool,
        #[arg(short, long, ignore_case = true, help = target_doc())]
        target: Option<Target>,
        /// Don't print progress information
        #[clap(long)]
        no_print_progress: bool,
    },
    /// Type check the project
    Check {
        #[arg(short, long, ignore_case = true, help = target_doc())]
        target: Option<Target>,
    },
    /// Publish the project to the Hex package manager
    ///
    /// This command uses the environment variable:
    ///
    /// - HEXPM_API_KEY: (optional) A Hex API key to authenticate with the Hex package manager.
    ///
    #[command(verbatim_doc_comment)]
    Publish {
        #[arg(long)]
        replace: bool,
        #[arg(short, long)]
        yes: bool,
    },
    /// Render HTML documentation
    #[command(subcommand)]
    Docs(Docs),
    /// Work with dependency packages
    #[command(subcommand)]
    Deps(Dependencies),
    /// Update dependency packages to their latest versions
    Update(UpdateOptions),
    /// Work with the Hex package manager
    #[command(subcommand)]
    Hex(Hex),
    /// Create a new project
    New(NewOptions),
    /// Format source code
    Format {
        /// Files to format
        #[arg(default_value = ".")]
        files: Vec<String>,
        /// Read source from STDIN
        #[arg(long)]
        stdin: bool,
        /// Check if inputs are formatted without changing them
        #[arg(long)]
        check: bool,
    },
    /// Rewrite deprecated Gleam code
    Fix,
    /// Start an Erlang shell
    Shell,
    /// Run the project
    ///
    /// This command runs the `main` function from the `<PROJECT_NAME>` module.
    #[command(trailing_var_arg = true)]
    Run {
        #[arg(short, long, ignore_case = true, help = target_doc())]
        target: Option<Target>,
        #[arg(long, ignore_case = true, help = runtime_doc())]
        runtime: Option<Runtime>,
        /// The module to run
        #[arg(short, long)]
        module: Option<String>,
        /// Don't print progress information
        #[clap(long)]
        no_print_progress: bool,
        // Collects everything after the flags, thanks to `trailing_var_arg`,
        // and is forwarded to the program being run.
        arguments: Vec<String>,
    },
    /// Run the project tests
    ///
    /// This command runs the `main` function from the `<PROJECT_NAME>_test` module.
    #[command(trailing_var_arg = true)]
    Test {
        #[arg(short, long, ignore_case = true, help = target_doc())]
        target: Option<Target>,
        #[arg(long, ignore_case = true, help = runtime_doc())]
        runtime: Option<Runtime>,
        arguments: Vec<String>,
    },
    /// Run the project development entrypoint
    ///
    /// This command runs the `main` function from the `<PROJECT_NAME>_dev` module.
    #[command(trailing_var_arg = true)]
    Dev {
        #[arg(short, long, ignore_case = true, help = target_doc())]
        target: Option<Target>,
        #[arg(long, ignore_case = true, help = runtime_doc())]
        runtime: Option<Runtime>,
        arguments: Vec<String>,
    },
    /// Compile a single Gleam package
    // Hidden from `--help`: internal plumbing rather than a user-facing command.
    #[command(hide = true)]
    CompilePackage(CompilePackage),
    /// Read and print gleam.toml for debugging
    #[command(hide = true)]
    PrintConfig,
    /// Add new project dependencies
    Add {
        /// The names of Hex packages to add
        #[arg(required = true)]
        packages: Vec<String>,
        /// Add the packages as dev-only dependencies
        #[arg(long)]
        dev: bool,
    },
    /// Remove project dependencies
    Remove {
        /// The names of packages to remove
        #[arg(required = true)]
        packages: Vec<String>,
    },
    /// Clean build artifacts
    Clean,
    /// Run the language server, to be used by editors
    #[command(name = "lsp")]
    LanguageServer,
    /// Export something useful from the Gleam project
    #[command(subcommand)]
    Export(ExportTarget),
}
/// Help text for the `--template` option of `gleam new`.
fn template_doc() -> &'static str {
    "The template to use"
}
/// Help text listing every supported compilation target.
fn target_doc() -> String {
    let variants = Target::VARIANTS.join("|");
    format!("The platform to target ({variants})")
}
/// Help text listing every supported JavaScript runtime.
fn runtime_doc() -> String {
    let variants = Runtime::VARIANTS.join("|");
    format!("The runtime to target ({variants})")
}
// Things `gleam export` can produce. The `///` doc comments are the user
// visible `--help` text for each subcommand.
#[derive(Subcommand, Debug, Clone)]
pub enum ExportTarget {
    /// Precompiled Erlang, suitable for deployment
    ErlangShipment,
    /// The package bundled into a tarball, suitable for publishing to Hex
    HexTarball,
    /// The JavaScript prelude module
    JavascriptPrelude,
    /// The TypeScript prelude module
    TypescriptPrelude,
    /// Information on the modules, functions, and types in the project in JSON format
    PackageInterface {
        #[arg(long = "out", required = true)]
        /// The path to write the JSON file to
        output: Utf8PathBuf,
    },
    /// Package information (gleam.toml) in JSON format
    PackageInformation {
        #[arg(long = "out", required = true)]
        /// The path to write the JSON file to
        output: Utf8PathBuf,
    },
}
// Arguments for `gleam new`; passed wholesale to `new::create`.
#[derive(Args, Debug, Clone)]
pub struct NewOptions {
    /// Location of the project root
    pub project_root: String,
    /// Name of the project
    #[arg(long)]
    pub name: Option<String>,
    #[arg(long, ignore_case = true, default_value = "erlang", help = template_doc())]
    pub template: new::Template,
    /// Skip git initialization and creation of .gitignore, .git/* and .github/* files
    #[arg(long)]
    pub skip_git: bool,
    /// Skip creation of .github/* files
    #[arg(long)]
    pub skip_github: bool,
}
// Arguments for the hidden `gleam compile-package` command, consumed by
// `compile_package::command`. The `///` doc comments are user-facing help
// text.
#[derive(Args, Debug)]
pub struct CompilePackage {
    /// The compilation target for the generated project
    #[arg(long, ignore_case = true)]
    target: Target,
    /// The directory of the Gleam package
    #[arg(long = "package")]
    package_directory: Utf8PathBuf,
    /// A directory to write compiled package to
    #[arg(long = "out")]
    output_directory: Utf8PathBuf,
    // Fixed grammar in the user-facing help text ("A directories" -> "A
    // directory").
    /// A directory of precompiled Gleam projects
    #[arg(long = "lib")]
    libraries_directory: Utf8PathBuf,
    /// The location of the JavaScript prelude module, relative to the `out`
    /// directory.
    ///
    /// Required when compiling to JavaScript.
    ///
    /// This likely wants to be a `.mjs` file as NodeJS does not permit
    /// importing of other JavaScript file extensions.
    ///
    #[arg(verbatim_doc_comment, long = "javascript-prelude")]
    javascript_prelude: Option<Utf8PathBuf>,
    /// Skip Erlang to BEAM bytecode compilation if given
    #[arg(long = "no-beam")]
    skip_beam_compilation: bool,
}
// Subcommands of `gleam deps`.
#[derive(Subcommand, Debug)]
enum Dependencies {
    /// List all dependency packages
    List,
    /// Download all dependency packages
    Download,
    /// List all outdated dependencies
    Outdated,
    /// Update dependency packages to their latest versions
    Update(UpdateOptions),
    /// Tree of all the dependency packages
    Tree(TreeOptions),
}
// Subcommands of `gleam hex`, for interacting with the Hex package manager.
#[derive(Subcommand, Debug)]
enum Hex {
    /// Retire a release from Hex
    ///
    /// This command uses the environment variable:
    ///
    /// - HEXPM_API_KEY: (optional) A Hex API key to authenticate with the Hex package manager.
    ///
    #[command(verbatim_doc_comment)]
    Retire {
        package: String,
        version: String,
        // The `unwrap` cannot fail: `PossibleValuesParser` restricts the raw
        // input to `RetirementReason::VARIANTS`, all of which parse.
        #[arg(value_parser = PossibleValuesParser::new(RetirementReason::VARIANTS).map(|s| RetirementReason::from_str(&s).unwrap()))]
        reason: RetirementReason,
        message: Option<String>,
    },
    /// Un-retire a release from Hex
    ///
    /// This command uses this environment variable:
    ///
    /// - HEXPM_API_KEY: (optional) A Hex API key to authenticate with the Hex package manager.
    ///
    #[command(verbatim_doc_comment)]
    Unretire { package: String, version: String },
    /// Revert a release from Hex
    ///
    /// This command uses this environment variable:
    ///
    /// - HEXPM_API_KEY: (optional) A Hex API key to authenticate with the Hex package manager.
    ///
    #[command(verbatim_doc_comment)]
    Revert {
        #[arg(long)]
        package: Option<String>,
        #[arg(long)]
        version: Option<String>,
    },
    /// Deal with package ownership
    #[command(subcommand)]
    Owner(Owner),
    /// Authenticate with Hex
    Authenticate,
}
// Subcommands of `gleam hex owner`.
#[derive(Subcommand, Debug)]
enum Owner {
    /// Transfers ownership of the given package to a new Hex user
    ///
    /// This command uses this environment variable:
    ///
    /// - HEXPM_API_KEY: (optional) A Hex API key to authenticate against the Hex package manager.
    ///
    #[command(verbatim_doc_comment)]
    Transfer {
        package: String,
        /// The username or email of the new owner
        #[arg(long = "to")]
        username_or_email: String,
    },
}
// Subcommands of `gleam docs`.
#[derive(Subcommand, Debug)]
enum Docs {
    /// Render HTML docs locally
    Build {
        /// Opens the docs in a browser after rendering
        #[arg(long)]
        open: bool,
        #[arg(short, long, ignore_case = true, help = target_doc())]
        target: Option<Target>,
    },
    /// Publish HTML docs to HexDocs
    ///
    /// This command uses this environment variable:
    ///
    /// - HEXPM_API_KEY: (optional) A Hex API key to authenticate with the Hex package manager.
    ///
    #[command(verbatim_doc_comment)]
    Publish,
    /// Remove HTML docs from HexDocs
    ///
    /// This command uses this environment variable:
    ///
    /// - HEXPM_API_KEY: (optional) A Hex API key to authenticate with the Hex package manager.
    ///
    #[command(verbatim_doc_comment)]
    Remove {
        /// The name of the package
        #[arg(long)]
        package: String,
        /// The version of the docs to remove
        #[arg(long)]
        version: String,
    },
}
pub fn main() {
initialise_logger();
panic::add_handler();
let stderr = cli::stderr_buffer_writer();
let result = parse_and_run_command();
match result {
Ok(_) => {
tracing::info!("Successfully completed");
}
Err(error) => {
tracing::error!(error = ?error, "Failed");
let mut buffer = stderr.buffer();
error.pretty(&mut buffer);
stderr.print(&buffer).expect("Final result error writing");
std::process::exit(1);
}
}
}
/// Parse the command line and dispatch to the implementation of the chosen
/// subcommand.
///
/// Commands that operate on a project first locate its root (via
/// `find_project_paths`, which walks up to the nearest directory containing
/// a `gleam.toml`); global commands such as `gleam new`, `gleam lsp`, and
/// `gleam hex authenticate` do not.
fn parse_and_run_command() -> Result<(), Error> {
    match Command::parse() {
        Command::Build {
            target,
            warnings_as_errors,
            no_print_progress,
        } => {
            let paths = find_project_paths()?;
            command_build(&paths, target, warnings_as_errors, no_print_progress)
        }
        Command::Check { target } => {
            let paths = find_project_paths()?;
            command_check(&paths, target)
        }
        Command::Docs(Docs::Build { open, target }) => {
            let paths = find_project_paths()?;
            docs::build(&paths, docs::BuildOptions { open, target })
        }
        Command::Docs(Docs::Publish) => {
            let paths = find_project_paths()?;
            docs::publish(&paths)
        }
        Command::Docs(Docs::Remove { package, version }) => docs::remove(package, version),
        Command::Format {
            stdin,
            files,
            check,
        } => format::run(stdin, check, files),
        Command::Fix => {
            let paths = find_project_paths()?;
            fix::run(&paths)
        }
        Command::Deps(Dependencies::List) => {
            let paths = find_project_paths()?;
            dependencies::list(&paths)
        }
        Command::Deps(Dependencies::Download) => {
            let paths = find_project_paths()?;
            download_dependencies(&paths)
        }
        Command::Deps(Dependencies::Outdated) => {
            let paths = find_project_paths()?;
            dependencies::outdated(&paths)
        }
        Command::Deps(Dependencies::Update(options)) => {
            let paths = find_project_paths()?;
            dependencies::update(&paths, options.packages)
        }
        Command::Deps(Dependencies::Tree(options)) => {
            let paths = find_project_paths()?;
            dependencies::tree(&paths, options)
        }
        Command::Hex(Hex::Authenticate) => hex::authenticate(),
        Command::New(options) => new::create(options, COMPILER_VERSION),
        Command::Shell => {
            let paths = find_project_paths()?;
            shell::command(&paths)
        }
        // `run`, `test`, and `dev` all funnel into `run::command`; they
        // differ in the `Which` entrypoint and which flags they expose.
        Command::Run {
            target,
            arguments,
            runtime,
            module,
            no_print_progress,
        } => {
            let paths = find_project_paths()?;
            run::command(
                &paths,
                arguments,
                target,
                runtime,
                module,
                run::Which::Src,
                no_print_progress,
            )
        }
        Command::Test {
            target,
            arguments,
            runtime,
        } => {
            let paths = find_project_paths()?;
            run::command(
                &paths,
                arguments,
                target,
                runtime,
                None,
                run::Which::Test,
                false,
            )
        }
        Command::Dev {
            target,
            arguments,
            runtime,
        } => {
            let paths = find_project_paths()?;
            run::command(
                &paths,
                arguments,
                target,
                runtime,
                None,
                run::Which::Dev,
                false,
            )
        }
        Command::CompilePackage(opts) => compile_package::command(opts),
        Command::Publish { replace, yes } => {
            let paths = find_project_paths()?;
            publish::command(&paths, replace, yes)
        }
        Command::PrintConfig => {
            let paths = find_project_paths()?;
            print_config(&paths)
        }
        Command::Hex(Hex::Retire {
            package,
            version,
            reason,
            message,
        }) => hex::retire(package, version, reason, message),
        Command::Hex(Hex::Unretire { package, version }) => hex::unretire(package, version),
        Command::Hex(Hex::Revert { package, version }) => {
            let paths = find_project_paths()?;
            hex::revert(&paths, package, version)
        }
        Command::Hex(Hex::Owner(Owner::Transfer {
            package,
            username_or_email,
        })) => owner::transfer(package, username_or_email),
        Command::Add { packages, dev } => {
            let paths = find_project_paths()?;
            add::command(&paths, packages, dev)
        }
        Command::Remove { packages } => {
            let paths = find_project_paths()?;
            remove::command(&paths, packages)
        }
        // `gleam update` is shorthand for `gleam deps update`.
        Command::Update(options) => {
            let paths = find_project_paths()?;
            dependencies::update(&paths, options.packages)
        }
        Command::Clean => {
            let paths = find_project_paths()?;
            clean(&paths)
        }
        Command::LanguageServer => lsp::main(),
        Command::Export(ExportTarget::ErlangShipment) => {
            let paths = find_project_paths()?;
            export::erlang_shipment(&paths)
        }
        Command::Export(ExportTarget::HexTarball) => {
            let paths = find_project_paths()?;
            export::hex_tarball(&paths)
        }
        Command::Export(ExportTarget::JavascriptPrelude) => export::javascript_prelude(),
        Command::Export(ExportTarget::TypescriptPrelude) => export::typescript_prelude(),
        Command::Export(ExportTarget::PackageInterface { output }) => {
            let paths = find_project_paths()?;
            export::package_interface(&paths, output)
        }
        Command::Export(ExportTarget::PackageInformation { output }) => {
            let paths = find_project_paths()?;
            export::package_information(&paths, output)
        }
    }
}
/// Implements `gleam check`: download dependencies, then type check the
/// project without generating code for the root package
/// (`Codegen::DepsOnly`).
fn command_check(paths: &ProjectPaths, target: Option<Target>) -> Result<()> {
    let manifest = build::download_dependencies(paths, cli::Reporter::new())?;
    let options = Options {
        root_target_support: TargetSupport::Enforced,
        warnings_as_errors: false,
        codegen: Codegen::DepsOnly,
        compile: Compile::All,
        mode: Mode::Dev,
        target,
        no_print_progress: false,
    };
    let _ = build::main(paths, options, manifest)?;
    Ok(())
}
/// Implements `gleam build`: download dependencies then compile the whole
/// project with full code generation.
fn command_build(
    paths: &ProjectPaths,
    target: Option<Target>,
    warnings_as_errors: bool,
    no_print_progress: bool,
) -> Result<()> {
    // Swap in the null telemetry sink when progress output is suppressed.
    let manifest = match no_print_progress {
        true => build::download_dependencies(paths, NullTelemetry)?,
        false => build::download_dependencies(paths, cli::Reporter::new())?,
    };
    let options = Options {
        root_target_support: TargetSupport::Enforced,
        warnings_as_errors,
        codegen: Codegen::All,
        compile: Compile::All,
        mode: Mode::Dev,
        target,
        no_print_progress,
    };
    let _ = build::main(paths, options, manifest)?;
    Ok(())
}
/// Implements the hidden `gleam print-config` debugging command: parse
/// `gleam.toml` and pretty-print the resulting configuration struct.
fn print_config(paths: &ProjectPaths) -> Result<()> {
    let configuration = root_config(paths)?;
    println!("{configuration:#?}");
    Ok(())
}
/// Implements `gleam clean`: remove the project's `build/` directory.
fn clean(paths: &ProjectPaths) -> Result<()> {
    let build_directory = paths.build_directory();
    fs::delete_directory(&build_directory)
}
/// Configure the global tracing subscriber. Verbosity is controlled by the
/// `GLEAM_LOG` env var (default "off") and colour output is disabled when
/// `GLEAM_LOG_NOCOLOUR` is set to anything.
fn initialise_logger() {
    let colours_enabled = std::env::var("GLEAM_LOG_NOCOLOUR").is_err();
    let filter = std::env::var("GLEAM_LOG").unwrap_or_else(|_| "off".into());
    tracing_subscriber::fmt()
        .with_writer(std::io::stderr)
        .with_env_filter(filter)
        .with_target(false)
        .with_ansi(colours_enabled)
        .without_time()
        .init();
}
/// Locate the enclosing Gleam project by walking up from the current
/// directory to the nearest `gleam.toml`.
fn find_project_paths() -> Result<ProjectPaths> {
    let root = get_project_root(get_current_directory()?)?;
    Ok(ProjectPaths::new(root))
}
/// Test helper: build a `ProjectPaths` rooted at the current directory
/// without requiring a `gleam.toml` to exist.
#[cfg(test)]
fn project_paths_at_current_directory_without_toml() -> ProjectPaths {
    let cwd = get_current_directory().expect("Failed to get current directory");
    ProjectPaths::new(cwd)
}
/// Implements `gleam deps download`: resolve and fetch all dependencies,
/// discarding the resulting manifest.
fn download_dependencies(paths: &ProjectPaths) -> Result<()> {
    let config = dependencies::DependencyManagerConfig {
        use_manifest: dependencies::UseManifest::Yes,
        check_major_versions: dependencies::CheckMajorVersions::No,
    };
    let _manifest =
        dependencies::resolve_and_download(paths, cli::Reporter::new(), None, Vec::new(), config)?;
    Ok(())
}
| rust | Apache-2.0 | f424547f02e621f1c5f28749786e05eda7feb098 | 2026-01-04T15:40:22.554517Z | false |
gleam-lang/gleam | https://github.com/gleam-lang/gleam/blob/f424547f02e621f1c5f28749786e05eda7feb098/compiler-cli/src/http.rs | compiler-cli/src/http.rs | use std::convert::TryInto;
use std::sync::OnceLock;
use async_trait::async_trait;
use camino::Utf8PathBuf;
use gleam_core::{
Error, Result,
error::{FileIoAction, FileKind},
};
use http::{Request, Response};
use reqwest::{Certificate, Client};
use crate::fs;
static REQWEST_CLIENT: OnceLock<Client> = OnceLock::new();
/// Stateless handle providing the compiler's HTTP transport; the actual
/// reqwest client behind it is a process-wide singleton (see `init_client`).
#[derive(Debug)]
pub struct HttpClient;

impl HttpClient {
    /// Construct a client handle.
    pub fn new() -> Self {
        HttpClient
    }

    /// Construct a boxed client handle, convenient for trait-object use.
    pub fn boxed() -> Box<Self> {
        Box::new(HttpClient)
    }
}
#[async_trait]
impl gleam_core::io::HttpClient for HttpClient {
    /// Send an HTTP request via the shared reqwest client and buffer the
    /// entire response body into memory.
    async fn send(&self, request: Request<Vec<u8>>) -> Result<Response<Vec<u8>>> {
        // The `http::Request` -> reqwest request conversion is treated as
        // infallible here.
        let request = request
            .try_into()
            .expect("Unable to convert HTTP request for use by reqwest library");
        let client = init_client().map_err(Error::http)?;
        let mut response = client.execute(request).await.map_err(Error::http)?;
        let mut builder = Response::builder()
            .status(response.status())
            .version(response.version());
        // Move the headers from the reqwest response into the builder
        // without cloning them.
        if let Some(headers) = builder.headers_mut() {
            std::mem::swap(headers, response.headers_mut());
        }
        builder
            .body(response.bytes().await.map_err(Error::http)?.to_vec())
            .map_err(Error::http)
    }
}
fn init_client() -> Result<&'static Client, Error> {
if let Some(client) = REQWEST_CLIENT.get() {
return Ok(client);
}
let certificate_path = match std::env::var("GLEAM_CACERTS_PATH") {
Ok(path) => path,
Err(_) => {
return Ok(REQWEST_CLIENT.get_or_init(|| {
Client::builder()
.build()
.expect("Failed to create reqwest client")
}));
}
};
let certificate_bytes = fs::read_bytes(&certificate_path)?;
let certificate = Certificate::from_pem(&certificate_bytes).map_err(|error| Error::FileIo {
kind: FileKind::File,
action: FileIoAction::Parse,
path: Utf8PathBuf::from(&certificate_path),
err: Some(error.to_string()),
})?;
Ok(REQWEST_CLIENT.get_or_init(|| {
Client::builder()
.add_root_certificate(certificate)
.build()
.expect("Failed to create reqwest client")
}))
}
| rust | Apache-2.0 | f424547f02e621f1c5f28749786e05eda7feb098 | 2026-01-04T15:40:22.554517Z | false |
gleam-lang/gleam | https://github.com/gleam-lang/gleam/blob/f424547f02e621f1c5f28749786e05eda7feb098/compiler-cli/src/fix.rs | compiler-cli/src/fix.rs | use std::rc::Rc;
use gleam_core::{
Error, Result, Warning,
analyse::TargetSupport,
build::{Codegen, Compile, Mode, Options},
error::{FileIoAction, FileKind},
paths::ProjectPaths,
type_,
warning::VectorWarningEmitterIO,
};
use hexpm::version::Version;
use crate::{build, cli};
/// Implements `gleam fix`: compile the project while capturing (rather than
/// printing) its warnings, then rewrite `gleam.toml` based on what the
/// warnings reveal.
pub fn run(paths: &ProjectPaths) -> Result<()> {
    // Warnings are accumulated into a vector instead of being emitted, so
    // they can be inspected below while keeping the output quiet.
    let warning_sink = Rc::new(VectorWarningEmitterIO::new());
    let options = Options {
        root_target_support: TargetSupport::Enforced,
        warnings_as_errors: false,
        codegen: Codegen::DepsOnly,
        compile: Compile::All,
        mode: Mode::Dev,
        target: None,
        no_print_progress: false,
    };
    let manifest = build::download_dependencies(paths, cli::Reporter::new())?;
    let _built = build::main_with_warnings(paths, options, manifest, warning_sink.clone())?;

    fix_minimum_required_version(paths, warning_sink.take())?;
    println!("Done!");
    Ok(())
}
fn fix_minimum_required_version(paths: &ProjectPaths, warnings: Vec<Warning>) -> Result<()> {
let Some(minimum_required_version) = minimum_required_version_from_warnings(warnings) else {
return Ok(());
};
// Set the version requirement in gleam.toml
let root_config = paths.root_config();
let mut toml = crate::fs::read(&root_config)?
.parse::<toml_edit::DocumentMut>()
.map_err(|e| Error::FileIo {
kind: FileKind::File,
action: FileIoAction::Parse,
path: root_config.to_path_buf(),
err: Some(e.to_string()),
})?;
#[allow(clippy::indexing_slicing)]
{
toml["gleam"] = toml_edit::value(format!(">= {minimum_required_version}"));
}
// Write the updated config
crate::fs::write(root_config.as_path(), &toml.to_string())?;
println!("- Set required Gleam version to \">= {minimum_required_version}\"");
Ok(())
}
/// Returns the highest minimum required version among all warnings requiring
/// a specific Gleam version that is not allowed by the `gleam` version
/// constraint in the `gleam.toml`.
fn minimum_required_version_from_warnings(warnings: Vec<Warning>) -> Option<Version> {
    warnings
        .iter()
        .filter_map(|warning| {
            if let Warning::Type {
                warning:
                    type_::Warning::FeatureRequiresHigherGleamVersion {
                        minimum_required_version,
                        ..
                    },
                ..
            } = warning
            {
                Some(minimum_required_version)
            } else {
                None
            }
        })
        .max()
        .cloned()
}
| rust | Apache-2.0 | f424547f02e621f1c5f28749786e05eda7feb098 | 2026-01-04T15:40:22.554517Z | false |
gleam-lang/gleam | https://github.com/gleam-lang/gleam/blob/f424547f02e621f1c5f28749786e05eda7feb098/compiler-cli/src/fs.rs | compiler-cli/src/fs.rs | use gleam_core::{
Result, Warning,
build::{NullTelemetry, Target},
error::{Error, FileIoAction, FileKind, OS, ShellCommandFailureReason, parse_os},
io::{
BeamCompiler, Command, CommandExecutor, Content, DirEntry, FileSystemReader,
FileSystemWriter, OutputFile, ReadDir, Stdio, WrappedReader, is_native_file_extension,
},
manifest::Manifest,
paths::ProjectPaths,
warning::WarningEmitterIO,
};
use gleam_language_server::{DownloadDependencies, Locker, MakeLocker};
use std::{
collections::HashSet,
fmt::Debug,
fs::{File, exists},
io::{self, BufRead, BufReader, Write},
sync::{Arc, Mutex, OnceLock},
time::SystemTime,
};
use camino::{ReadDirUtf8, Utf8Path, Utf8PathBuf};
use crate::{dependencies, lsp::LspLocker};
#[cfg(test)]
mod tests;
/// Return the current directory as a UTF-8 Path
pub fn get_current_directory() -> Result<Utf8PathBuf, Error> {
let curr_dir = std::env::current_dir().map_err(|e| Error::FileIo {
kind: FileKind::Directory,
action: FileIoAction::Open,
path: ".".into(),
err: Some(e.to_string()),
})?;
Utf8PathBuf::from_path_buf(curr_dir.clone()).map_err(|_| Error::NonUtf8Path { path: curr_dir })
}
// Return the first directory with a gleam.toml as a UTF-8 Path
pub fn get_project_root(path: Utf8PathBuf) -> Result<Utf8PathBuf, Error> {
fn walk(dir: Utf8PathBuf) -> Option<Utf8PathBuf> {
match dir.join("gleam.toml").is_file() {
true => Some(dir),
false => match dir.parent() {
Some(p) => walk(p.into()),
None => None,
},
}
}
walk(path.clone()).ok_or(Error::UnableToFindProjectRoot {
path: path.to_string(),
})
}
/// Detect the host operating system, including the Linux distribution id
/// when running on Linux.
pub fn get_os() -> OS {
    let distro = get_distro_str();
    parse_os(std::env::consts::OS, distro.as_str())
}
// try to extract the distro id from /etc/os-release
pub fn extract_distro_id(os_release: String) -> String {
    // Look for the first `ID=` line, take the value after the `=`, and strip
    // any surrounding double quotes. An empty string means "not found".
    os_release
        .lines()
        .find(|line| line.starts_with("ID="))
        .and_then(|line| line.split('=').nth(1))
        .map(|id| id.replace('"', ""))
        .unwrap_or_default()
}
/// Read `/etc/os-release` and return the distribution id, or `"other"` when
/// not on Linux or when the file is missing or unreadable.
pub fn get_distro_str() -> String {
    let path = Utf8Path::new("/etc/os-release");
    if std::env::consts::OS != "linux" || !path.exists() {
        return "other".to_string();
    }
    match read(path) {
        Ok(contents) => extract_distro_id(contents),
        Err(_) => "other".to_string(),
    }
}
/// A `FileWriter` implementation that writes to the file system.
#[derive(Debug, Clone, Default)]
pub struct ProjectIO {
    // Handle to the BEAM compiler driver. `Arc<Mutex<..>>` means clones of
    // `ProjectIO` share one instance, with exclusive access when compiling.
    beam_compiler: Arc<Mutex<crate::beam_compiler::BeamCompiler>>,
}
impl ProjectIO {
    /// Create an IO handle with a fresh (default) BEAM compiler slot.
    pub fn new() -> Self {
        Self::default()
    }

    /// Like [`ProjectIO::new`], but heap-allocated.
    pub fn boxed() -> Box<Self> {
        Box::new(Self::new())
    }
}
// Read-side filesystem access for the compiler; each method delegates to
// `std::fs` directly or to a free function in this module.
impl FileSystemReader for ProjectIO {
    fn read(&self, path: &Utf8Path) -> Result<String, Error> {
        read(path)
    }
    fn read_bytes(&self, path: &Utf8Path) -> Result<Vec<u8>, Error> {
        read_bytes(path)
    }
    fn is_file(&self, path: &Utf8Path) -> bool {
        path.is_file()
    }
    fn is_directory(&self, path: &Utf8Path) -> bool {
        path.is_dir()
    }
    fn reader(&self, path: &Utf8Path) -> Result<WrappedReader, Error> {
        reader(path)
    }
    // Wraps each raw directory entry into the compiler's `DirEntry` type.
    fn read_dir(&self, path: &Utf8Path) -> Result<ReadDir> {
        read_dir(path).map(|entries| {
            entries
                .map(|result| result.map(|entry| DirEntry::from_path(entry.path())))
                .collect()
        })
    }
    fn modification_time(&self, path: &Utf8Path) -> Result<SystemTime, Error> {
        // Falls back to the current time when the platform cannot report a
        // modification time, so callers always get a usable timestamp.
        path.metadata()
            .map(|m| m.modified().unwrap_or_else(|_| SystemTime::now()))
            .map_err(|e| Error::FileIo {
                action: FileIoAction::ReadMetadata,
                kind: FileKind::File,
                path: path.to_path_buf(),
                err: Some(e.to_string()),
            })
    }
    fn canonicalise(&self, path: &Utf8Path) -> Result<Utf8PathBuf, Error> {
        canonicalise(path)
    }
}
// Write-side filesystem access; each method delegates to the corresponding
// free function in this module (which carry the error-mapping logic).
impl FileSystemWriter for ProjectIO {
    fn delete_directory(&self, path: &Utf8Path) -> Result<()> {
        delete_directory(path)
    }
    fn copy(&self, from: &Utf8Path, to: &Utf8Path) -> Result<()> {
        copy(from, to)
    }
    fn copy_dir(&self, from: &Utf8Path, to: &Utf8Path) -> Result<()> {
        copy_dir(from, to)
    }
    fn mkdir(&self, path: &Utf8Path) -> Result<(), Error> {
        mkdir(path)
    }
    fn hardlink(&self, from: &Utf8Path, to: &Utf8Path) -> Result<(), Error> {
        hardlink(from, to)
    }
    fn symlink_dir(&self, from: &Utf8Path, to: &Utf8Path) -> Result<(), Error> {
        symlink_dir(from, to)
    }
    fn delete_file(&self, path: &Utf8Path) -> Result<()> {
        delete_file(path)
    }
    fn write(&self, path: &Utf8Path, content: &str) -> Result<(), Error> {
        write(path, content)
    }
    fn write_bytes(&self, path: &Utf8Path, content: &[u8]) -> Result<(), Error> {
        write_bytes(path, content)
    }
    fn exists(&self, path: &Utf8Path) -> bool {
        path.exists()
    }
}
impl CommandExecutor for ProjectIO {
    /// Run an external program synchronously and return its exit code.
    ///
    /// The child inherits the given environment variables and working
    /// directory (defaulting to `./` when none is supplied); stdin and
    /// stdout are both configured from the requested `stdio` mode.
    fn exec(&self, command: Command) -> Result<i32, Error> {
        let Command {
            program,
            args,
            env,
            cwd,
            stdio,
        } = command;
        tracing::trace!(program=program, args=?args.join(" "), env=?env, cwd=?cwd, "command_exec");
        let result = std::process::Command::new(&program)
            .args(args)
            .stdin(stdio.get_process_stdio())
            .stdout(stdio.get_process_stdio())
            .envs(env.iter().map(|pair| (&pair.0, &pair.1)))
            .current_dir(cwd.unwrap_or_else(|| Utf8Path::new("./").to_path_buf()))
            .status();
        match result {
            // `code()` is `None` when the child had no exit code (e.g. it
            // was killed by a signal on unix); that reports as 0 here.
            Ok(status) => Ok(status.code().unwrap_or_default()),
            Err(error) => Err(match error.kind() {
                // A missing executable gets a dedicated, friendlier error.
                io::ErrorKind::NotFound => Error::ShellProgramNotFound {
                    program,
                    os: get_os(),
                },
                other => Error::ShellCommand {
                    program,
                    reason: ShellCommandFailureReason::IoError(other),
                },
            }),
        }
    }
}
impl BeamCompiler for ProjectIO {
    /// Compile the given modules to BEAM bytecode using the shared compiler
    /// instance.
    ///
    /// Panics (via the `expect`) if the `beam_compiler` mutex has been
    /// poisoned by a panic on another thread.
    fn compile_beam(
        &self,
        out: &Utf8Path,
        lib: &Utf8Path,
        modules: &HashSet<Utf8PathBuf>,
        stdio: Stdio,
    ) -> Result<Vec<String>, Error> {
        self.beam_compiler
            .lock()
            .as_mut()
            .expect("could not get beam_compiler")
            .compile(self, out, lib, modules, stdio)
    }
}
impl MakeLocker for ProjectIO {
    /// Create a build lock for the given project and target, boxed as a
    /// trait object for the language server.
    fn make_locker(&self, paths: &ProjectPaths, target: Target) -> Result<Box<dyn Locker>> {
        Ok(Box::new(LspLocker::new(paths, target)?))
    }
}
impl DownloadDependencies for ProjectIO {
    /// Resolve and fetch the project's dependencies silently (null
    /// telemetry), honouring the existing manifest.
    fn download_dependencies(&self, paths: &ProjectPaths) -> Result<Manifest> {
        let config = dependencies::DependencyManagerConfig {
            use_manifest: dependencies::UseManifest::Yes,
            check_major_versions: dependencies::CheckMajorVersions::No,
        };
        dependencies::resolve_and_download(paths, NullTelemetry, None, Vec::new(), config)
    }
}
/// Recursively delete `dir`; a missing directory is not an error.
pub fn delete_directory(dir: &Utf8Path) -> Result<(), Error> {
    tracing::trace!(path=?dir, "deleting_directory");
    if !dir.exists() {
        tracing::trace!(path=?dir, "directory_did_not_exist_for_deletion");
        return Ok(());
    }
    std::fs::remove_dir_all(dir).map_err(|e| Error::FileIo {
        action: FileIoAction::Delete,
        kind: FileKind::Directory,
        path: dir.to_path_buf(),
        err: Some(e.to_string()),
    })
}
/// Delete `file`; a missing file is not an error.
pub fn delete_file(file: &Utf8Path) -> Result<(), Error> {
    tracing::trace!("Deleting file {:?}", file);
    if !file.exists() {
        tracing::trace!("Did not exist for deletion: {:?}", file);
        return Ok(());
    }
    std::fs::remove_file(file).map_err(|e| Error::FileIo {
        action: FileIoAction::Delete,
        kind: FileKind::File,
        path: file.to_path_buf(),
        err: Some(e.to_string()),
    })
}
/// Write each output file beneath `base`, stopping at the first failure.
pub fn write_outputs_under(outputs: &[OutputFile], base: &Utf8Path) -> Result<(), Error> {
    for output in outputs {
        let destination = base.join(&output.path);
        match &output.content {
            Content::Binary(bytes) => write_bytes(&destination, bytes)?,
            Content::Text(text) => write(&destination, text)?,
        }
    }
    Ok(())
}
/// Write a single output file to its own path, choosing the binary or text
/// writer based on its content kind.
pub fn write_output(file: &OutputFile) -> Result<(), Error> {
    match &file.content {
        Content::Binary(bytes) => write_bytes(&file.path, bytes),
        Content::Text(text) => write(&file.path, text),
    }
}
/// Write UTF-8 text to `path`; delegates to [`write_bytes`], which also
/// creates any missing parent directories.
pub fn write(path: &Utf8Path, text: &str) -> Result<(), Error> {
    write_bytes(path, text.as_bytes())
}
/// Mark `path` as executable: mode `0o755`, i.e. `rwxr-xr-x`.
#[cfg(target_family = "unix")]
pub fn make_executable(path: impl AsRef<Utf8Path>) -> Result<(), Error> {
    use std::os::unix::fs::PermissionsExt;
    tracing::trace!(path = ?path.as_ref(), "setting_permissions");
    std::fs::set_permissions(path.as_ref(), std::fs::Permissions::from_mode(0o755)).map_err(
        |e| Error::FileIo {
            action: FileIoAction::UpdatePermissions,
            kind: FileKind::File,
            path: path.as_ref().to_path_buf(),
            err: Some(e.to_string()),
        },
    )?;
    Ok(())
}
/// No-op on non-unix targets, where the unix permission bits do not apply.
#[cfg(not(target_family = "unix"))]
pub fn make_executable(_path: impl AsRef<Utf8Path>) -> Result<(), Error> {
    Ok(())
}
/// Write `bytes` to `path`, first creating any missing parent directories.
///
/// # Errors
/// Returns `Error::FileIo` if the path has no parent, a directory cannot be
/// created, or the file cannot be created or written to.
pub fn write_bytes(path: &Utf8Path, bytes: &[u8]) -> Result<(), Error> {
    tracing::trace!(path=?path, "writing_file");
    // A path with no parent (e.g. a bare root) cannot be written to.
    let dir_path = path.parent().ok_or_else(|| Error::FileIo {
        action: FileIoAction::FindParent,
        kind: FileKind::Directory,
        path: path.to_path_buf(),
        err: None,
    })?;
    std::fs::create_dir_all(dir_path).map_err(|e| Error::FileIo {
        action: FileIoAction::Create,
        kind: FileKind::Directory,
        path: dir_path.to_path_buf(),
        err: Some(e.to_string()),
    })?;
    let mut f = File::create(path).map_err(|e| Error::FileIo {
        action: FileIoAction::Create,
        kind: FileKind::File,
        path: path.to_path_buf(),
        err: Some(e.to_string()),
    })?;
    f.write_all(bytes).map_err(|e| Error::FileIo {
        action: FileIoAction::WriteTo,
        kind: FileKind::File,
        path: path.to_path_buf(),
        err: Some(e.to_string()),
    })?;
    Ok(())
}
/// Returns true if `path`, taken relative to `dir`, is a valid Gleam module
/// path: one or more lowercase alphanumeric/underscore segments ending in a
/// `.gleam` file name.
///
/// Panics if `path` is not under `dir` (callers walk `dir`, so it always is).
fn is_gleam_path(path: &Utf8Path, dir: impl AsRef<Utf8Path>) -> bool {
    use regex::Regex;
    // Compiled once and cached for the lifetime of the process.
    static RE: OnceLock<Regex> = OnceLock::new();
    RE.get_or_init(|| {
        Regex::new(&format!(
            "^({module}{slash})*{module}\\.gleam$",
            module = "[a-z][_a-z0-9]*",
            // Accept both separators so the check also works on Windows paths.
            slash = "(/|\\\\)",
        ))
        .expect("is_gleam_path() RE regex")
    })
    .is_match(
        path.strip_prefix(dir.as_ref())
            .expect("is_gleam_path(): strip_prefix")
            .as_str(),
    )
}
/// Returns true if the entry is a `build` directory sitting next to a
/// `gleam.toml`, i.e. a Gleam project's build output directory.
fn is_gleam_build_dir(e: &ignore::DirEntry) -> bool {
    if !e.path().is_dir() || !e.path().ends_with("build") {
        return false;
    }
    let Some(parent_path) = e.path().parent() else {
        return false;
    };
    parent_path.join("gleam.toml").exists()
}
/// Walks through all Gleam module files in the directory, even if ignored,
/// except for those in the `build/` directory. Excludes any Gleam files within
/// invalid module paths, for example if they or a folder they're in contain a
/// dot or a hyphen within their names.
pub fn gleam_files(dir: &Utf8Path) -> impl Iterator<Item = Utf8PathBuf> + '_ {
    ignore::WalkBuilder::new(dir)
        .follow_links(true)
        // Disable gitignore/hidden-file filtering: every file is considered.
        .standard_filters(false)
        .filter_entry(|entry| !is_gleam_build_dir(entry))
        .build()
        .filter_map(Result::ok)
        .filter(|entry| {
            entry
                .file_type()
                .map(|type_| type_.is_file())
                .unwrap_or(false)
        })
        .map(ignore::DirEntry::into_path)
        .map(|path| Utf8PathBuf::from_path_buf(path).expect("Non Utf-8 Path"))
        .filter(move |d| is_gleam_path(d, dir))
}
/// Walks through all native files in the directory, such as `.mjs` and `.erl`,
/// even if ignored.
pub fn native_files(dir: &Utf8Path) -> impl Iterator<Item = Utf8PathBuf> + '_ {
    ignore::WalkBuilder::new(dir)
        .follow_links(true)
        .standard_filters(false)
        .filter_entry(|entry| !is_gleam_build_dir(entry))
        .build()
        .filter_map(Result::ok)
        .filter(|entry| {
            entry
                .file_type()
                .map(|type_| type_.is_file())
                .unwrap_or(false)
        })
        .map(ignore::DirEntry::into_path)
        .map(|path| Utf8PathBuf::from_path_buf(path).expect("Non Utf-8 Path"))
        .filter(|path| {
            // Keep only extensions recognised by `is_native_file_extension`.
            let extension = path.extension().unwrap_or_default();
            is_native_file_extension(extension)
        })
}
/// Walks through all files in the directory, even if ignored.
pub fn private_files(dir: &Utf8Path) -> impl Iterator<Item = Utf8PathBuf> + '_ {
    ignore::WalkBuilder::new(dir)
        .follow_links(true)
        .standard_filters(false)
        .build()
        .filter_map(Result::ok)
        .filter(|entry| {
            entry
                .file_type()
                .map(|type_| type_.is_file())
                .unwrap_or(false)
        })
        .map(ignore::DirEntry::into_path)
        .map(|path| Utf8PathBuf::from_path_buf(path).expect("Non Utf-8 Path"))
}
/// Walks through all `.erl` and `.hrl` files in the directory, even if ignored.
pub fn erlang_files(dir: &Utf8Path) -> impl Iterator<Item = Utf8PathBuf> + '_ {
    ignore::WalkBuilder::new(dir)
        .follow_links(true)
        .standard_filters(false)
        .build()
        .filter_map(Result::ok)
        .filter(|entry| {
            entry
                .file_type()
                .map(|type_| type_.is_file())
                .unwrap_or(false)
        })
        .map(ignore::DirEntry::into_path)
        .map(|path| Utf8PathBuf::from_path_buf(path).expect("Non Utf-8 Path"))
        .filter(|path| {
            let extension = path.extension().unwrap_or_default();
            extension == "erl" || extension == "hrl"
        })
}
/// Build a gzipped tar archive in memory from the given output files.
///
/// Uses `Builder::append_data` rather than `Header::set_path` + `append`:
/// `set_path` fails for paths longer than the 100-byte header name field,
/// while `append_data` transparently emits a GNU long-name extension entry
/// for such paths (and computes the header checksum itself).
///
/// # Errors
/// Returns `Error::AddTar` if an entry cannot be appended, `Error::TarFinish`
/// if the archive cannot be finalised, and `Error::Gzip` if compression
/// cannot be completed.
pub fn create_tar_archive(outputs: Vec<OutputFile>) -> Result<Vec<u8>, Error> {
    tracing::trace!("creating_tar_archive");
    let encoder = flate2::write::GzEncoder::new(vec![], flate2::Compression::default());
    let mut builder = tar::Builder::new(encoder);
    for file in outputs {
        let bytes = file.content.as_bytes();
        let mut header = tar::Header::new_gnu();
        // `append_data` does not set the size; it must be set up front.
        header.set_size(bytes.len() as u64);
        builder
            .append_data(&mut header, &file.path, bytes)
            .map_err(|e| Error::AddTar {
                path: file.path.clone(),
                err: e.to_string(),
            })?;
    }
    builder
        .into_inner()
        .map_err(|e| Error::TarFinish(e.to_string()))?
        .finish()
        .map_err(|e| Error::Gzip(e.to_string()))
}
/// Create the directory and any missing parents; succeeds if it already exists.
pub fn mkdir(path: impl AsRef<Utf8Path> + Debug) -> Result<(), Error> {
    if path.as_ref().exists() {
        return Ok(());
    }
    tracing::trace!(path=?path, "creating_directory");
    std::fs::create_dir_all(path.as_ref()).map_err(|err| Error::FileIo {
        kind: FileKind::Directory,
        path: Utf8PathBuf::from(path.as_ref()),
        action: FileIoAction::Create,
        err: Some(err.to_string()),
    })
}
/// Open the directory for iteration over its entries as UTF-8 paths.
pub fn read_dir(path: impl AsRef<Utf8Path> + Debug) -> Result<ReadDirUtf8, Error> {
    tracing::trace!(path=?path,"reading_directory");
    Utf8Path::read_dir_utf8(path.as_ref()).map_err(|e| Error::FileIo {
        action: FileIoAction::Read,
        kind: FileKind::Directory,
        path: Utf8PathBuf::from(path.as_ref()),
        err: Some(e.to_string()),
    })
}
/// List the `.cache` module metadata files directly inside `path`.
/// Unreadable entries are silently skipped.
pub fn module_caches_paths(
    path: impl AsRef<Utf8Path> + Debug,
) -> Result<impl Iterator<Item = Utf8PathBuf>, Error> {
    Ok(read_dir(path)?
        .filter_map(Result::ok)
        .map(|f| f.into_path())
        .filter(|p| p.extension() == Some("cache")))
}
/// Read the entire file into a UTF-8 string.
pub fn read(path: impl AsRef<Utf8Path> + Debug) -> Result<String, Error> {
    tracing::trace!(path=?path,"reading_file");
    std::fs::read_to_string(path.as_ref()).map_err(|err| Error::FileIo {
        action: FileIoAction::Read,
        kind: FileKind::File,
        path: Utf8PathBuf::from(path.as_ref()),
        err: Some(err.to_string()),
    })
}
/// Read the entire file as raw bytes.
pub fn read_bytes(path: impl AsRef<Utf8Path> + Debug) -> Result<Vec<u8>, Error> {
    tracing::trace!(path=?path,"reading_file");
    std::fs::read(path.as_ref()).map_err(|err| Error::FileIo {
        action: FileIoAction::Read,
        kind: FileKind::File,
        path: Utf8PathBuf::from(path.as_ref()),
        err: Some(err.to_string()),
    })
}
/// Open the file for reading, returning a reader tagged with its path so
/// later read errors can report which file they came from.
pub fn reader(path: impl AsRef<Utf8Path> + Debug) -> Result<WrappedReader, Error> {
    tracing::trace!(path=?path,"opening_file_reader");
    let reader = File::open(path.as_ref()).map_err(|err| Error::FileIo {
        action: FileIoAction::Open,
        kind: FileKind::File,
        path: Utf8PathBuf::from(path.as_ref()),
        err: Some(err.to_string()),
    })?;
    Ok(WrappedReader::new(path.as_ref(), Box::new(reader)))
}
/// Open the file for buffered reading.
pub fn buffered_reader<P: AsRef<Utf8Path> + Debug>(path: P) -> Result<impl BufRead, Error> {
    tracing::trace!(path=?path,"opening_file_buffered_reader");
    let reader = File::open(path.as_ref()).map_err(|err| Error::FileIo {
        action: FileIoAction::Open,
        kind: FileKind::File,
        path: Utf8PathBuf::from(path.as_ref()),
        err: Some(err.to_string()),
    })?;
    Ok(BufReader::new(reader))
}
/// Copy a single file. On failure the reported path is the source only.
pub fn copy(
    path: impl AsRef<Utf8Path> + Debug,
    to: impl AsRef<Utf8Path> + Debug,
) -> Result<(), Error> {
    tracing::trace!(from=?path, to=?to, "copying_file");
    // TODO: include the destination in the error message
    std::fs::copy(path.as_ref(), to.as_ref())
        .map_err(|err| Error::FileIo {
            action: FileIoAction::Copy,
            kind: FileKind::File,
            path: Utf8PathBuf::from(path.as_ref()),
            err: Some(err.to_string()),
        })
        .map(|_| ())
}
// pub fn rename(path: impl AsRef<Utf8Path> + Debug, to: impl AsRef<Utf8Path> + Debug) -> Result<(), Error> {
//     tracing::trace!(from=?path, to=?to, "renaming_file");
//     // TODO: include the destination in the error message
//     std::fs::rename(&path, &to)
//         .map_err(|err| Error::FileIo {
//             action: FileIoAction::Rename,
//             kind: FileKind::File,
//             path: Utf8PathBuf::from(path.as_ref()),
//             err: Some(err.to_string()),
//         })
//         .map(|_| ())
// }
/// Recursively copy the *contents* of a directory into `to` (the source
/// directory itself is not recreated inside the destination).
pub fn copy_dir(
    path: impl AsRef<Utf8Path> + Debug,
    to: impl AsRef<Utf8Path> + Debug,
) -> Result<(), Error> {
    tracing::trace!(from=?path, to=?to, "copying_directory");
    // TODO: include the destination in the error message
    fs_extra::dir::copy(
        path.as_ref(),
        to.as_ref(),
        &fs_extra::dir::CopyOptions::new()
            .copy_inside(false)
            .content_only(true),
    )
    .map_err(|err| Error::FileIo {
        action: FileIoAction::Copy,
        kind: FileKind::Directory,
        path: Utf8PathBuf::from(path.as_ref()),
        err: Some(err.to_string()),
    })
    .map(|_| ())
}
/// Create a symbolic link at `dest` pointing at `src`.
pub fn symlink_dir(
    src: impl AsRef<Utf8Path> + Debug,
    dest: impl AsRef<Utf8Path> + Debug,
) -> Result<(), Error> {
    tracing::trace!(src=?src, dest=?dest, "symlinking");
    // Canonicalise first so the link target is absolute and symlink-free.
    let src = canonicalise(src.as_ref())?;
    #[cfg(target_family = "windows")]
    let result = std::os::windows::fs::symlink_dir(src, dest.as_ref());
    #[cfg(not(target_family = "windows"))]
    let result = std::os::unix::fs::symlink(src, dest.as_ref());
    result.map_err(|err| Error::FileIo {
        action: FileIoAction::Link,
        kind: FileKind::File,
        path: Utf8PathBuf::from(dest.as_ref()),
        err: Some(err.to_string()),
    })?;
    Ok(())
}
/// Create a hard link at `to` referring to the existing file at `from`.
pub fn hardlink(
    from: impl AsRef<Utf8Path> + Debug,
    to: impl AsRef<Utf8Path> + Debug,
) -> Result<(), Error> {
    tracing::trace!(from=?from, to=?to, "hardlinking");
    std::fs::hard_link(from.as_ref(), to.as_ref())
        .map_err(|err| Error::FileIo {
            action: FileIoAction::Link,
            kind: FileKind::File,
            path: Utf8PathBuf::from(from.as_ref()),
            err: Some(err.to_string()),
        })
        .map(|_| ())
}
/// Check if the given path is inside a git work tree.
/// This is done by running `git rev-parse --is-inside-work-tree --quiet` in the
/// given path. If git is not installed then we assume we're not in a git work
/// tree.
///
fn is_inside_git_work_tree(path: &Utf8Path) -> Result<bool, Error> {
    tracing::trace!(path=?path, "checking_for_git_repo");
    let args: Vec<&str> = vec!["rev-parse", "--is-inside-work-tree", "--quiet"];
    // Ignore all output, rely on the exit code instead.
    // git will display a fatal error on stderr if rev-parse isn't run inside of a git work tree,
    // so send stderr to /dev/null
    let result = std::process::Command::new("git")
        .args(args)
        .stdin(std::process::Stdio::null())
        .stdout(std::process::Stdio::null())
        .stderr(std::process::Stdio::null())
        .current_dir(path)
        .status();
    match result {
        Ok(status) => Ok(status.success()),
        Err(error) => match error.kind() {
            // git binary not on PATH: treat as "not in a work tree".
            io::ErrorKind::NotFound => Ok(false),
            other => Err(Error::ShellCommand {
                program: "git".into(),
                reason: ShellCommandFailureReason::IoError(other),
            }),
        },
    }
}
/// Returns true if `path` itself is the root of a git repository, i.e. it
/// directly contains a `.git` entry. Lookup failures count as "not a root".
pub(crate) fn is_git_work_tree_root(path: &Utf8Path) -> bool {
    tracing::trace!(path=?path, "checking_for_git_repo_root");
    exists(path.join(".git")).unwrap_or(false)
}
/// Run `git init` in the given path.
/// If git is not installed then we do nothing.
pub fn git_init(path: &Utf8Path) -> Result<(), Error> {
    tracing::trace!(path=?path, "initializing git");
    if is_inside_git_work_tree(path)? {
        tracing::trace!(path=?path, "git_repo_already_exists");
        return Ok(());
    }
    let args = vec!["init".into(), "--quiet".into(), path.to_string()];
    let command = Command {
        program: "git".to_string(),
        args,
        env: vec![],
        cwd: None,
        stdio: Stdio::Inherit,
    };
    match ProjectIO::new().exec(command) {
        Ok(_) => Ok(()),
        Err(err) => match err {
            // Missing git is tolerated: the project is simply left
            // uninitialised rather than failing project creation.
            Error::ShellProgramNotFound { .. } => Ok(()),
            _ => Err(Error::GitInitialization {
                error: err.to_string(),
            }),
        },
    }
}
pub fn canonicalise(path: &Utf8Path) -> Result<Utf8PathBuf, Error> {
std::fs::canonicalize(path)
.map_err(|err| Error::FileIo {
action: FileIoAction::Canonicalise,
kind: FileKind::File,
path: Utf8PathBuf::from(path),
err: Some(err.to_string()),
})
.map(|pb| Utf8PathBuf::from_path_buf(pb).expect("Non Utf8 Path"))
}
/// Warning emitter that pretty-prints each warning to standard error.
#[derive(Debug, Clone, Copy)]
pub struct ConsoleWarningEmitter;
impl WarningEmitterIO for ConsoleWarningEmitter {
    fn emit_warning(&self, warning: Warning) {
        let buffer_writer = crate::cli::stderr_buffer_writer();
        let mut buffer = buffer_writer.buffer();
        warning.pretty(&mut buffer);
        buffer_writer
            .print(&buffer)
            .expect("Writing warning to stderr");
    }
}
| rust | Apache-2.0 | f424547f02e621f1c5f28749786e05eda7feb098 | 2026-01-04T15:40:22.554517Z | false |
gleam-lang/gleam | https://github.com/gleam-lang/gleam/blob/f424547f02e621f1c5f28749786e05eda7feb098/compiler-cli/src/cli.rs | compiler-cli/src/cli.rs | use gleam_core::{
build::Telemetry,
error::{Error, StandardIoAction},
manifest::{Changed, ChangedGit, PackageChanges},
};
use hexpm::version::Version;
use itertools::Itertools as _;
use std::{
io::{IsTerminal, Write},
time::{Duration, Instant},
};
use termcolor::{BufferWriter, Color, ColorChoice, ColorSpec, WriteColor};
/// Console progress reporter used for builds run from the CLI.
#[derive(Debug, Default, Clone)]
pub struct Reporter;
impl Reporter {
    pub fn new() -> Self {
        Self
    }
}
// Each telemetry event is forwarded to the matching console printer below.
impl Telemetry for Reporter {
    fn compiled_package(&self, duration: Duration) {
        print_compiled(duration);
    }
    fn compiling_package(&self, name: &str) {
        print_compiling(name);
    }
    fn checked_package(&self, duration: Duration) {
        print_checked(duration);
    }
    fn checking_package(&self, name: &str) {
        print_checking(name);
    }
    fn downloading_package(&self, name: &str) {
        print_downloading(name)
    }
    fn packages_downloaded(&self, start: Instant, count: usize) {
        print_packages_downloaded(start, count)
    }
    fn resolving_package_versions(&self) {
        print_resolving_versions()
    }
    fn running(&self, name: &str) {
        print_running(name);
    }
    fn waiting_for_build_directory_lock(&self) {
        print_waiting_for_build_directory_lock()
    }
    fn resolved_package_versions(&self, changes: &PackageChanges) {
        print_package_changes(changes)
    }
}
/// Print `question` as a prompt and read one line from stdin, returning the
/// answer with surrounding whitespace trimmed.
pub fn ask(question: &str) -> Result<String, Error> {
    print!("{question}: ");
    std::io::stdout().flush().expect("ask stdout flush");
    let mut answer = String::new();
    // The byte count from read_line is not needed, only the buffer contents.
    let _ = std::io::stdin()
        .read_line(&mut answer)
        .map_err(|e| Error::StandardIo {
            action: StandardIoAction::Read,
            err: Some(e.kind()),
        })?;
    Ok(answer.trim().to_string())
}
/// Ask `question` and interpret the reply as a yes/no answer.
///
/// Any casing of "y" or "yes" counts as confirmation ("y", "Y", "Yes",
/// "YES", ...); every other reply, including an empty one, counts as "no".
/// Previously only the exact all-lowercase and all-uppercase variants were
/// recognised, so e.g. "Yes" was silently treated as a refusal.
pub fn confirm(question: &str) -> Result<bool, Error> {
    let answer = ask(&format!("{question} [y/n]"))?;
    Ok(answer.eq_ignore_ascii_case("y") || answer.eq_ignore_ascii_case("yes"))
}
/// Ask the user to type `response` verbatim; returns true only on an exact
/// match of the trimmed input.
pub fn confirm_with_text(response: &str) -> Result<bool, Error> {
    let prompt = format!("Type '{response}' to continue");
    let answer = ask(&prompt)?;
    Ok(answer == response)
}
/// Prompt for a secret without echoing the typed characters, returning the
/// input trimmed of surrounding whitespace.
pub fn ask_password(question: &str) -> Result<String, Error> {
    let prompt = format!("{question} (will not be printed as you type): ");
    rpassword::prompt_password(prompt)
        .map_err(|e| Error::StandardIo {
            action: StandardIoAction::Read,
            err: Some(e.kind()),
        })
        .map(|s| s.trim().to_string())
}
// Console printers: each writes one line to stderr with a coloured,
// right-aligned verb prefix via `print_colourful_prefix`.
pub fn print_publishing(name: &str, version: &Version) {
    print_colourful_prefix("Publishing", &format!("{name} v{version}"))
}
pub fn print_published(duration: Duration) {
    print_colourful_prefix("Published", &format!("in {}", seconds(duration)))
}
pub fn print_retired(package: &str, version: &str) {
    print_colourful_prefix("Retired", &format!("{package} {version}"))
}
pub fn print_unretired(package: &str, version: &str) {
    print_colourful_prefix("Unretired", &format!("{package} {version}"))
}
pub fn print_publishing_documentation() {
    print_colourful_prefix("Publishing", "documentation");
}
fn print_downloading(text: &str) {
    print_colourful_prefix("Downloading", text)
}
fn print_waiting_for_build_directory_lock() {
    print_colourful_prefix("Waiting", "for build directory lock")
}
fn print_resolving_versions() {
    print_colourful_prefix("Resolving", "versions")
}
fn print_compiling(text: &str) {
    print_colourful_prefix("Compiling", text)
}
pub(crate) fn print_exported(text: &str) {
    print_colourful_prefix("Exported", text)
}
pub(crate) fn print_checking(text: &str) {
    print_colourful_prefix("Checking", text)
}
pub(crate) fn print_compiled(duration: Duration) {
    print_colourful_prefix("Compiled", &format!("in {}", seconds(duration)))
}
pub(crate) fn print_checked(duration: Duration) {
    print_colourful_prefix("Checked", &format!("in {}", seconds(duration)))
}
pub(crate) fn print_running(text: &str) {
    print_colourful_prefix("Running", text)
}
/// Print one line per dependency that was added, changed or removed during
/// version resolution, in sorted order within each category.
pub(crate) fn print_package_changes(changes: &PackageChanges) {
    for (name, version) in changes.added.iter().sorted() {
        print_added(&format!("{name} v{version}"));
    }
    for Changed { name, old, new } in changes.changed.iter().sorted_by_key(|p| &p.name) {
        print_changed(&format!("{name} v{old} -> v{new}"));
    }
    // Git dependencies are identified by commit hash rather than version.
    for ChangedGit {
        name,
        old_hash,
        new_hash,
    } in changes.changed_git.iter().sorted_by_key(|p| &p.name)
    {
        print_changed(&format!("{name} {old_hash} -> {new_hash}"));
    }
    for name in changes.removed.iter().sorted() {
        print_removed(name);
    }
}
fn print_added(text: &str) {
    print_colourful_prefix("Added", text)
}
fn print_changed(text: &str) {
    print_colourful_prefix("Changed", text)
}
fn print_removed(text: &str) {
    print_colourful_prefix("Removed", text)
}
pub(crate) fn print_generating_documentation() {
    print_colourful_prefix("Generating", "documentation")
}
pub(crate) fn print_transferring_ownership() {
    print_colourful_prefix("Transferring", "ownership");
}
pub(crate) fn print_transferred_ownership() {
    print_colourful_prefix("Transferred", "ownership");
}
fn print_packages_downloaded(start: Instant, count: usize) {
    let elapsed = seconds(start.elapsed());
    // Singular/plural phrasing for the package count.
    let msg = match count {
        1 => format!("1 package in {elapsed}"),
        _ => format!("{count} packages in {elapsed}"),
    };
    print_colourful_prefix("Downloaded", &msg)
}
/// Render a `Duration` as whole seconds with two decimal places, e.g. "1.50s".
/// Precision below one millisecond is discarded.
pub fn seconds(duration: Duration) -> String {
    let secs = duration.as_millis() as f32 / 1000.;
    format!("{secs:.2}s")
}
/// Write `prefix` right-aligned in 11 columns in intense magenta, followed by
/// `text` in the default colour, as one line on standard error.
pub fn print_colourful_prefix(prefix: &str, text: &str) {
    let buffer_writer = stderr_buffer_writer();
    let mut buffer = buffer_writer.buffer();
    // NOTE(review): the "print_green_prefix" expect messages appear to
    // predate the switch to magenta; they are only shown if writing fails.
    buffer
        .set_color(
            ColorSpec::new()
                .set_intense(true)
                .set_fg(Some(Color::Magenta)),
        )
        .expect("print_green_prefix");
    write!(buffer, "{prefix: >11}").expect("print_green_prefix");
    buffer
        .set_color(&ColorSpec::new())
        .expect("print_green_prefix");
    writeln!(buffer, " {text}").expect("print_green_prefix");
    buffer_writer.print(&buffer).expect("print_green_prefix");
}
/// Buffer writer for stderr with colouring decided by `color_choice`.
pub fn stderr_buffer_writer() -> BufferWriter {
    // Don't add color codes to the output if standard error isn't connected to a terminal
    BufferWriter::stderr(color_choice())
}
/// True when the `FORCE_COLOR` environment variable is set to a non-empty
/// value, forcing coloured output even without a terminal.
fn colour_forced() -> bool {
    if let Ok(force) = std::env::var("FORCE_COLOR") {
        !force.is_empty()
    } else {
        false
    }
}
/// Colouring policy: forced on by `FORCE_COLOR`, automatic when stderr is a
/// terminal, otherwise disabled.
fn color_choice() -> ColorChoice {
    if colour_forced() {
        ColorChoice::Always
    } else if std::io::stderr().is_terminal() {
        ColorChoice::Auto
    } else {
        ColorChoice::Never
    }
}
| rust | Apache-2.0 | f424547f02e621f1c5f28749786e05eda7feb098 | 2026-01-04T15:40:22.554517Z | false |
gleam-lang/gleam | https://github.com/gleam-lang/gleam/blob/f424547f02e621f1c5f28749786e05eda7feb098/compiler-cli/src/panic.rs | compiler-cli/src/panic.rs | #![allow(clippy::unwrap_used)]
use std::panic::PanicHookInfo;
/// Install a process-wide panic hook that prints a "fatal compiler bug"
/// report (with issue-tracker link) instead of the default panic output.
pub fn add_handler() {
    std::panic::set_hook(Box::new(move |info: &PanicHookInfo<'_>| {
        print_compiler_bug_message(info)
    }));
}
/// Format the panic payload and location into a bug-report message and print
/// it to standard error with a bold red "error" prefix.
fn print_compiler_bug_message(info: &PanicHookInfo<'_>) {
    // Panic payloads are `&str` for literal messages and `String` for
    // formatted ones; anything else is reported as an unknown error.
    let message = match (
        info.payload().downcast_ref::<&str>(),
        info.payload().downcast_ref::<String>(),
    ) {
        (Some(s), _) => (*s).to_string(),
        (_, Some(s)) => s.to_string(),
        (None, None) => "unknown error".into(),
    };
    let location = match info.location() {
        None => "".into(),
        Some(location) => format!("{}:{}\n\t", location.file(), location.line()),
    };
    let buffer_writer = crate::cli::stderr_buffer_writer();
    let mut buffer = buffer_writer.buffer();
    use std::io::Write;
    use termcolor::{Color, ColorSpec, WriteColor};
    buffer
        .set_color(ColorSpec::new().set_bold(true).set_fg(Some(Color::Red)))
        .unwrap();
    write!(buffer, "error").unwrap();
    buffer.set_color(ColorSpec::new().set_bold(true)).unwrap();
    write!(buffer, ": Fatal compiler bug!\n\n").unwrap();
    buffer.set_color(&ColorSpec::new()).unwrap();
    writeln!(
        buffer,
        "This is a bug in the Gleam compiler, sorry!
Please report this crash to https://github.com/gleam-lang/gleam/issues/new
and include this error message with your report.
Panic: {location}{message}
Gleam version: {version}
Operating system: {os}
If you can also share your code and say what file you were editing or any
steps to reproduce the crash that would be a great help.
You may also want to try again with the `GLEAM_LOG=trace` environment
variable set.
",
        location = location,
        message = message,
        version = env!("CARGO_PKG_VERSION"),
        os = std::env::consts::OS,
    )
    .unwrap();
    buffer_writer.print(&buffer).unwrap();
}
| rust | Apache-2.0 | f424547f02e621f1c5f28749786e05eda7feb098 | 2026-01-04T15:40:22.554517Z | false |
gleam-lang/gleam | https://github.com/gleam-lang/gleam/blob/f424547f02e621f1c5f28749786e05eda7feb098/compiler-cli/src/hex.rs | compiler-cli/src/hex.rs | mod auth;
use crate::{cli, http::HttpClient};
use gleam_core::{
Error, Result,
hex::{self, RetirementReason},
io::HttpClient as _,
paths::ProjectPaths,
};
pub use auth::HexAuthentication;
/// Mark a published package version as retired on Hex, then print
/// confirmation. Blocks on a fresh Tokio runtime for the API call.
pub fn retire(
    package: String,
    version: String,
    reason: RetirementReason,
    message: Option<String>,
) -> Result<()> {
    let runtime = tokio::runtime::Runtime::new().expect("Unable to start Tokio async runtime");
    let config = hexpm::Config::new();
    // Requires a Hex API key; one is created interactively if none is stored.
    let api_key = HexAuthentication::new(&runtime, config.clone()).get_or_create_api_key()?;
    runtime.block_on(hex::retire_release(
        &package,
        &version,
        reason,
        message.as_deref(),
        &api_key,
        &config,
        &HttpClient::new(),
    ))?;
    cli::print_retired(&package, &version);
    Ok(())
}
/// Remove the retired status from a published package version on Hex, then
/// print confirmation.
pub fn unretire(package: String, version: String) -> Result<()> {
    let runtime = tokio::runtime::Runtime::new().expect("Unable to start Tokio async runtime");
    let config = hexpm::Config::new();
    let api_key = HexAuthentication::new(&runtime, config.clone()).get_or_create_api_key()?;
    runtime.block_on(hex::unretire_release(
        &package,
        &version,
        &api_key,
        &config,
        &HttpClient::new(),
    ))?;
    cli::print_unretired(&package, &version);
    Ok(())
}
/// Permanently remove a published release from Hex after interactive
/// confirmation.
///
/// Missing package/version arguments are filled in from the project config
/// or by prompting the user.
pub fn revert(
    paths: &ProjectPaths,
    package: Option<String>,
    version: Option<String>,
) -> Result<()> {
    let (package, version) = match (package, version) {
        (Some(pkg), Some(ver)) => (pkg, ver),
        // No package: default to the current project's name.
        (None, Some(ver)) => (crate::config::root_config(paths)?.name.to_string(), ver),
        // No version: ask the user which version to revert.
        (Some(pkg), None) => {
            let query = format!("Which version of package {pkg} do you want to revert?");
            let ver = cli::ask(&query)?;
            (pkg, ver)
        }
        (None, None) => {
            // Only want to access root_config once rather than twice
            let config = crate::config::root_config(paths)?;
            (config.name.to_string(), config.version.to_string())
        }
    };
    let question = format!("Do you wish to revert {package} version {version}?");
    if !cli::confirm(&question)? {
        println!("Not reverting.");
        return Ok(());
    }
    let runtime = tokio::runtime::Runtime::new().expect("Unable to start Tokio async runtime");
    let hex_config = hexpm::Config::new();
    let api_key = HexAuthentication::new(&runtime, hex_config.clone()).get_or_create_api_key()?;
    let http = HttpClient::new();
    // Revert release from API
    let request = hexpm::api_revert_release_request(&package, &version, &api_key, &hex_config)
        .map_err(Error::hex)?;
    let response = runtime.block_on(http.send(request))?;
    hexpm::api_revert_release_response(response).map_err(Error::hex)?;
    // Done!
    println!("{package} {version} has been removed from Hex");
    Ok(())
}
/// Create and store a new Hex API key, deleting any previously stored key
/// from the Hex server after the user confirms the replacement.
pub(crate) fn authenticate() -> Result<()> {
    let runtime = tokio::runtime::Runtime::new().expect("Unable to start Tokio async runtime");
    let http = HttpClient::new();
    let config = hexpm::Config::new();
    let mut auth = HexAuthentication::new(&runtime, config.clone());
    let previous = auth.read_stored_api_key()?;
    if previous.is_some() {
        let question = "You already have a local Hex API token. Would you like to replace it
with a new one?";
        if !cli::confirm(question)? {
            return Ok(());
        }
    }
    let new_key = auth.create_and_store_api_key()?;
    if let Some(previous) = previous {
        // Revoke the old key server-side so it cannot be used any more.
        println!("Deleting previous key `{}` from Hex", previous.name);
        runtime.block_on(hex::remove_api_key(
            &previous.name,
            &config,
            &new_key.unencrypted,
            &http,
        ))?;
    }
    Ok(())
}
| rust | Apache-2.0 | f424547f02e621f1c5f28749786e05eda7feb098 | 2026-01-04T15:40:22.554517Z | false |
gleam-lang/gleam | https://github.com/gleam-lang/gleam/blob/f424547f02e621f1c5f28749786e05eda7feb098/compiler-cli/src/shell.rs | compiler-cli/src/shell.rs | use gleam_core::{
analyse::TargetSupport,
build::{Codegen, Compile, Mode, Options, Target},
error::{Error, ShellCommandFailureReason},
paths::ProjectPaths,
};
use std::process::Command;
/// Build the project for the Erlang target, then start an interactive `erl`
/// shell with every compiled package's `ebin` directory on the code path.
pub fn command(paths: &ProjectPaths) -> Result<(), Error> {
    // Build project
    let _ = crate::build::main(
        paths,
        Options {
            root_target_support: TargetSupport::Enforced,
            warnings_as_errors: false,
            codegen: Codegen::All,
            compile: Compile::All,
            mode: Mode::Dev,
            target: Some(Target::Erlang),
            no_print_progress: false,
        },
        crate::build::download_dependencies(paths, crate::cli::Reporter::new())?,
    )?;
    // Don't exit on ctrl+c as it is used by child erlang shell
    ctrlc::set_handler(move || {}).expect("Error setting Ctrl-C handler");
    // Prepare the Erlang shell command
    let mut command = Command::new("erl");
    // Print character lists as lists
    let _ = command.arg("-stdlib").arg("shell_strings").arg("false");
    // Specify locations of .beam files
    let packages = paths.build_directory_for_target(Mode::Dev, Target::Erlang);
    for entry in crate::fs::read_dir(packages)?.filter_map(Result::ok) {
        let _ = command.arg("-pa").arg(entry.path().join("ebin"));
    }
    crate::cli::print_running("Erlang shell");
    // Run the shell
    tracing::info!("Running OS process {:?}", command);
    let _ = command.status().map_err(|e| Error::ShellCommand {
        program: "erl".into(),
        reason: ShellCommandFailureReason::IoError(e.kind()),
    })?;
    Ok(())
}
| rust | Apache-2.0 | f424547f02e621f1c5f28749786e05eda7feb098 | 2026-01-04T15:40:22.554517Z | false |
gleam-lang/gleam | https://github.com/gleam-lang/gleam/blob/f424547f02e621f1c5f28749786e05eda7feb098/compiler-cli/src/compile_package.rs | compiler-cli/src/compile_package.rs | use crate::{
CompilePackage, config,
fs::{self, ConsoleWarningEmitter, ProjectIO},
};
use camino::Utf8Path;
use ecow::EcoString;
use gleam_core::{
Error, Result,
build::{
Mode, NullTelemetry, PackageCompiler, StaleTracker, Target, TargetCodegenConfiguration,
},
metadata,
paths::{self, ProjectPaths},
type_::ModuleInterface,
uid::UniqueIdGenerator,
warning::WarningEmitter,
};
use std::{collections::HashSet, rc::Rc};
/// Compile a single Gleam package against a directory of precompiled
/// libraries, without running a full project build.
pub fn command(options: CompilePackage) -> Result<()> {
    let ids = UniqueIdGenerator::new();
    // Type information for the already-compiled dependency modules.
    let mut type_manifests = load_libraries(&ids, &options.libraries_directory)?;
    let mut defined_modules = im::HashMap::new();
    let warnings = WarningEmitter::new(Rc::new(ConsoleWarningEmitter));
    let paths = ProjectPaths::new(options.package_directory.clone());
    let config = config::read(paths.root_config())?;
    let target = match options.target {
        Target::Erlang => TargetCodegenConfiguration::Erlang { app_file: None },
        Target::JavaScript => TargetCodegenConfiguration::JavaScript {
            emit_typescript_definitions: false,
            // The JavaScript prelude location must be supplied by the caller.
            prelude_location: options
                .javascript_prelude
                .ok_or_else(|| Error::JavaScriptPreludeRequired)?,
        },
    };
    tracing::info!("Compiling package");
    let mut compiler = PackageCompiler::new(
        &config,
        Mode::Dev,
        &options.package_directory,
        &options.output_directory,
        &options.libraries_directory,
        &target,
        ids,
        ProjectIO::new(),
    );
    compiler.write_entrypoint = false;
    compiler.write_metadata = true;
    compiler.compile_beam_bytecode = !options.skip_beam_compilation;
    compiler
        .compile(
            &warnings,
            &mut type_manifests,
            &mut defined_modules,
            &mut StaleTracker::default(),
            &mut HashSet::new(),
            &NullTelemetry,
        )
        .into_result()
        .map(|_| ())
}
/// Read precompiled module metadata (`*.cache` files) from every library's
/// artefact directory under `lib`, keyed by module name.
fn load_libraries(
    ids: &UniqueIdGenerator,
    lib: &Utf8Path,
) -> Result<im::HashMap<EcoString, ModuleInterface>> {
    tracing::info!("Reading precompiled module metadata files");
    let mut manifests = im::HashMap::new();
    for lib in fs::read_dir(lib)?.filter_map(Result::ok) {
        let path = lib.path().join(paths::ARTEFACT_DIRECTORY_NAME);
        // Entries without an artefact directory are not compiled libraries.
        if !path.is_dir() {
            continue;
        }
        for module in fs::module_caches_paths(path)? {
            let reader = fs::buffered_reader(module)?;
            let module = metadata::ModuleDecoder::new(ids.clone()).read(reader)?;
            let _ = manifests.insert(module.name.clone(), module);
        }
    }
    Ok(manifests)
}
| rust | Apache-2.0 | f424547f02e621f1c5f28749786e05eda7feb098 | 2026-01-04T15:40:22.554517Z | false |
gleam-lang/gleam | https://github.com/gleam-lang/gleam/blob/f424547f02e621f1c5f28749786e05eda7feb098/compiler-cli/src/build.rs | compiler-cli/src/build.rs | use std::{rc::Rc, time::Instant};
use gleam_core::{
Result,
build::{Built, Codegen, NullTelemetry, Options, ProjectCompiler, Telemetry},
manifest::Manifest,
paths::ProjectPaths,
warning::WarningEmitterIO,
};
use crate::{
build_lock::BuildLock,
cli, dependencies,
fs::{self, ConsoleWarningEmitter},
};
/// Resolve dependency versions and download them, reusing the existing
/// manifest and skipping the major-version check.
pub fn download_dependencies(paths: &ProjectPaths, telemetry: impl Telemetry) -> Result<Manifest> {
    dependencies::resolve_and_download(
        paths,
        telemetry,
        None,
        Vec::new(),
        dependencies::DependencyManagerConfig {
            use_manifest: dependencies::UseManifest::Yes,
            check_major_versions: dependencies::CheckMajorVersions::No,
        },
    )
}
/// Build the project, reporting any warnings to the console.
pub fn main(paths: &ProjectPaths, options: Options, manifest: Manifest) -> Result<Built> {
    let warning_emitter = Rc::new(ConsoleWarningEmitter);
    main_with_warnings(paths, options, manifest, warning_emitter)
}
/// Build the project with a caller-supplied warning emitter, holding the
/// build directory lock for the duration of compilation.
pub(crate) fn main_with_warnings(
    paths: &ProjectPaths,
    options: Options,
    manifest: Manifest,
    warnings: Rc<dyn WarningEmitterIO>,
) -> Result<Built> {
    let perform_codegen = options.codegen;
    let root_config = crate::config::root_config(paths)?;
    let telemetry: &'static dyn Telemetry = if options.no_print_progress {
        &NullTelemetry
    } else {
        &cli::Reporter
    };
    let io = fs::ProjectIO::new();
    let start = Instant::now();
    // One lock per mode/target pair; concurrent builds of the same pair wait.
    let lock = BuildLock::new_target(
        paths,
        options.mode,
        options.target.unwrap_or(root_config.target),
    )?;
    tracing::info!("Compiling packages");
    let result = {
        let _guard = lock.lock(telemetry);
        let compiler = ProjectCompiler::new(
            root_config,
            options,
            manifest.packages,
            telemetry,
            warnings,
            paths.clone(),
            io,
        );
        compiler.compile()?
    };
    // Report total time with the verb matching the work that was performed.
    match perform_codegen {
        Codegen::All | Codegen::DepsOnly => telemetry.compiled_package(start.elapsed()),
        Codegen::None => telemetry.checked_package(start.elapsed()),
    };
    Ok(result)
}
| rust | Apache-2.0 | f424547f02e621f1c5f28749786e05eda7feb098 | 2026-01-04T15:40:22.554517Z | false |
gleam-lang/gleam | https://github.com/gleam-lang/gleam/blob/f424547f02e621f1c5f28749786e05eda7feb098/compiler-cli/src/lsp.rs | compiler-cli/src/lsp.rs | use crate::{
build_lock::{BuildLock, Guard},
fs::ProjectIO,
};
use gleam_core::{
Result,
build::{Mode, NullTelemetry, Target},
paths::ProjectPaths,
};
use gleam_language_server::{LanguageServer, LockGuard, Locker};
/// Start the language server over stdio, blocking until the client triggers
/// the LSP exit event.
pub fn main() -> Result<()> {
    tracing::info!("language_server_starting");
    // Warn humans who ran `gleam lsp` directly in a terminal by mistake.
    if std::io::IsTerminal::is_terminal(&std::io::stdin()) {
        eprintln!(
            "Hello human!
This command is intended to be run by language server clients such
as a text editor rather than being run directly in the console.
Many editors will automatically start the language server for you
when you open a Gleam project. If yours does not you may need to
look up how to configure your editor to use a language server.
If you are seeing this in the logs of your editor you can safely
ignore this message.
If you have run `gleam lsp` yourself in your terminal then exit
this program by pressing ctrl+c.
"
        );
    }
    // Create the transport. Includes the stdio (stdin and stdout) versions but this could
    // also be implemented to use sockets or HTTP.
    let (connection, io_threads) = lsp_server::Connection::stdio();
    // Run the server and wait for the two threads to end, typically by trigger
    // LSP Exit event.
    LanguageServer::new(&connection, ProjectIO::new())?.run()?;
    // Shut down gracefully.
    drop(connection);
    io_threads.join().expect("joining_lsp_threads");
    tracing::info!("language_server_stopped");
    Ok(())
}
/// `Locker` implementation for the language server, wrapping the CLI build
/// lock in LSP mode.
#[derive(Debug)]
pub struct LspLocker(BuildLock);
impl LspLocker {
    pub fn new(paths: &ProjectPaths, target: Target) -> Result<Self> {
        let build_lock = BuildLock::new_target(paths, Mode::Lsp, target)?;
        Ok(Self(build_lock))
    }
}
impl Locker for LspLocker {
    fn lock_for_build(&self) -> Result<LockGuard> {
        // Null telemetry: the LSP must not print progress to its stdio
        // transport while waiting for the lock.
        let guard: Guard = self.0.lock(&NullTelemetry)?;
        Ok(LockGuard(Box::new(guard)))
    }
}
| rust | Apache-2.0 | f424547f02e621f1c5f28749786e05eda7feb098 | 2026-01-04T15:40:22.554517Z | false |
gleam-lang/gleam | https://github.com/gleam-lang/gleam/blob/f424547f02e621f1c5f28749786e05eda7feb098/compiler-cli/src/build_lock.rs | compiler-cli/src/build_lock.rs | use camino::Utf8PathBuf;
use gleam_core::{
Error, Result,
build::{Mode, Target, Telemetry},
error::{FileIoAction, FileKind},
paths::ProjectPaths,
};
use strum::IntoEnumIterator;
/// A file-based lock guarding a build or packages directory against
/// concurrent gleam processes.
#[derive(Debug)]
pub(crate) struct BuildLock {
    // Directory the lock file lives in.
    directory: Utf8PathBuf,
    // Lock file name: derived from mode and target, or fixed for packages.
    filename: String,
}
impl BuildLock {
    /// Lock the build directory for the specified mode and target.
    pub fn new_target(paths: &ProjectPaths, mode: Mode, target: Target) -> Result<Self> {
        let directory = paths.build_directory();
        crate::fs::mkdir(&directory)?;
        Ok(Self {
            directory,
            filename: format!("gleam-{mode}-{target}.lock"),
        })
    }
    /// Lock the packages directory.
    pub fn new_packages(paths: &ProjectPaths) -> Result<Self> {
        let directory = paths.build_packages_directory();
        crate::fs::mkdir(&directory)?;
        Ok(Self {
            directory,
            filename: "gleam.lock".to_string(),
        })
    }
    /// Construct the lock file path
    pub fn lock_path(&self) -> Utf8PathBuf {
        self.directory.join(&self.filename)
    }
    /// Lock the directory specified by the lock
    pub fn lock<Telem: Telemetry + ?Sized>(&self, telemetry: &Telem) -> Result<Guard> {
        let lock_path = self.lock_path();
        tracing::debug!(path=?lock_path, "locking_directory");
        crate::fs::mkdir(&self.directory)?;
        let mut file = fslock::LockFile::open(lock_path.as_str()).map_err(|e| Error::FileIo {
            kind: FileKind::File,
            path: lock_path.clone(),
            action: FileIoAction::Create,
            err: Some(e.to_string()),
        })?;
        // Try without blocking first so a waiting message can be printed
        // before falling back to the blocking acquisition.
        if !file.try_lock_with_pid().expect("Trying directory locking") {
            telemetry.waiting_for_build_directory_lock();
            file.lock_with_pid().expect("Directory locking")
        }
        Ok(Guard(file))
    }
    /// Lock all build directories. Does not lock the packages directory.
    pub fn lock_all_build<Telem: Telemetry>(
        paths: &ProjectPaths,
        telemetry: &Telem,
    ) -> Result<Vec<Guard>> {
        let mut locks = vec![];
        for mode in Mode::iter() {
            for target in Target::iter() {
                locks.push(Self::new_target(paths, mode, target)?.lock(telemetry)?);
            }
        }
        Ok(locks)
    }
}
/// An acquired directory lock. The underlying file lock is released when
/// this guard is dropped.
#[derive(Debug)]
pub(crate) struct Guard(
    // False positive. This is used in `drop`. Presumably the lint error is a
    // bug in clippy.
    #[allow(dead_code)] fslock::LockFile,
);
// The tests below simply exercise lock acquisition for the packages
// directory and for each mode/target combination; success is acquiring the
// lock without panicking.
#[test]
fn locking_global() {
    let paths = crate::project_paths_at_current_directory_without_toml();
    let lock = BuildLock::new_packages(&paths).expect("make lock");
    let _guard1: Guard = lock.lock(&gleam_core::build::NullTelemetry).unwrap();
    println!("Locked!")
}

#[test]
fn locking_dev_erlang() {
    let paths = crate::project_paths_at_current_directory_without_toml();
    let lock = BuildLock::new_target(&paths, Mode::Dev, Target::Erlang).expect("make lock");
    let _guard1: Guard = lock.lock(&gleam_core::build::NullTelemetry).unwrap();
    println!("Locked!")
}

#[test]
fn locking_prod_erlang() {
    let paths = crate::project_paths_at_current_directory_without_toml();
    let lock = BuildLock::new_target(&paths, Mode::Prod, Target::Erlang).expect("make lock");
    let _guard1: Guard = lock.lock(&gleam_core::build::NullTelemetry).unwrap();
    println!("Locked!")
}

#[test]
fn locking_lsp_erlang() {
    let paths = crate::project_paths_at_current_directory_without_toml();
    let lock = BuildLock::new_target(&paths, Mode::Lsp, Target::Erlang).expect("make lock");
    let _guard1: Guard = lock.lock(&gleam_core::build::NullTelemetry).unwrap();
    println!("Locked!")
}

#[test]
fn locking_dev_javascript() {
    let paths = crate::project_paths_at_current_directory_without_toml();
    let lock = BuildLock::new_target(&paths, Mode::Dev, Target::JavaScript).expect("make lock");
    let _guard1: Guard = lock.lock(&gleam_core::build::NullTelemetry).unwrap();
    println!("Locked!")
}

#[test]
fn locking_prod_javascript() {
    let paths = crate::project_paths_at_current_directory_without_toml();
    let lock = BuildLock::new_target(&paths, Mode::Prod, Target::JavaScript).expect("make lock");
    let _guard1: Guard = lock.lock(&gleam_core::build::NullTelemetry).unwrap();
    println!("Locked!")
}

#[test]
fn locking_lsp_javascript() {
    let paths = crate::project_paths_at_current_directory_without_toml();
    let lock = BuildLock::new_target(&paths, Mode::Lsp, Target::JavaScript).expect("make lock");
    let _guard1: Guard = lock.lock(&gleam_core::build::NullTelemetry).unwrap();
    println!("Locked!")
}
| rust | Apache-2.0 | f424547f02e621f1c5f28749786e05eda7feb098 | 2026-01-04T15:40:22.554517Z | false |
gleam-lang/gleam | https://github.com/gleam-lang/gleam/blob/f424547f02e621f1c5f28749786e05eda7feb098/compiler-cli/src/format.rs | compiler-cli/src/format.rs | use gleam_core::{
error::{Error, FileIoAction, FileKind, Result, StandardIoAction, Unformatted},
io::Content,
io::OutputFile,
};
use std::{io::Read, str::FromStr};
use camino::{Utf8Path, Utf8PathBuf};
/// Entry point for `gleam format`: format either standard input or the
/// given files/directories, optionally only checking for differences.
pub fn run(stdin: bool, check: bool, files: Vec<String>) -> Result<()> {
    if stdin {
        return process_stdin(check);
    }
    process_files(check, files)
}
fn process_stdin(check: bool) -> Result<()> {
let src = read_stdin()?.into();
let mut out = String::new();
gleam_core::format::pretty(&mut out, &src, Utf8Path::new("<stdin>"))?;
if !check {
print!("{out}");
return Ok(());
}
if src != out {
return Err(Error::Format {
problem_files: vec![Unformatted {
source: Utf8PathBuf::from("<standard input>"),
destination: Utf8PathBuf::from("<standard output>"),
input: src,
output: out,
}],
});
}
Ok(())
}
/// Dispatch file formatting: report-only in check mode, rewrite otherwise.
fn process_files(check: bool, files: Vec<String>) -> Result<()> {
    if check {
        return check_files(files);
    }
    format_files(files)
}
fn check_files(files: Vec<String>) -> Result<()> {
let problem_files = unformatted_files(files)?;
if problem_files.is_empty() {
Ok(())
} else {
Err(Error::Format { problem_files })
}
}
/// Rewrite every file whose formatted output differs from its contents.
fn format_files(files: Vec<String>) -> Result<()> {
    unformatted_files(files)?.into_iter().try_for_each(|file| {
        crate::fs::write_output(&OutputFile {
            path: file.destination,
            content: Content::Text(file.output),
        })
    })
}
/// Collect the files (expanding directories to their contained `.gleam`
/// sources) whose current contents differ from their formatted form.
/// Nothing is written to disk.
pub fn unformatted_files(files: Vec<String>) -> Result<Vec<Unformatted>> {
    let mut problem_files = Vec::with_capacity(files.len());

    for file_path in files {
        let path = Utf8PathBuf::from_str(&file_path).map_err(|e| Error::FileIo {
            action: FileIoAction::Open,
            kind: FileKind::File,
            path: Utf8PathBuf::from(file_path),
            err: Some(e.to_string()),
        })?;

        if path.is_dir() {
            // Recurse into the directory, checking every Gleam source file.
            for path in crate::fs::gleam_files(&path) {
                format_file(&mut problem_files, path)?;
            }
        } else {
            format_file(&mut problem_files, path)?;
        }
    }

    Ok(problem_files)
}
/// Format a single file in memory, recording it in `problem_files` if the
/// formatted output differs from the file's current contents.
fn format_file(problem_files: &mut Vec<Unformatted>, path: Utf8PathBuf) -> Result<()> {
    let src = crate::fs::read(&path)?.into();
    let mut output = String::new();
    gleam_core::format::pretty(&mut output, &src, &path)?;

    if src != output {
        problem_files.push(Unformatted {
            source: path.clone(),
            // Formatting rewrites files in place, so destination == source.
            destination: path,
            input: src,
            output,
        });
    }
    Ok(())
}
/// Read all of standard input into a `String`, mapping I/O failures to the
/// gleam `Error::StandardIo` variant.
pub fn read_stdin() -> Result<String> {
    let mut src = String::new();
    std::io::stdin()
        .read_to_string(&mut src)
        .map_err(|e| Error::StandardIo {
            action: StandardIoAction::Read,
            err: Some(e.kind()),
        })?;
    Ok(src)
}
| rust | Apache-2.0 | f424547f02e621f1c5f28749786e05eda7feb098 | 2026-01-04T15:40:22.554517Z | false |
gleam-lang/gleam | https://github.com/gleam-lang/gleam/blob/f424547f02e621f1c5f28749786e05eda7feb098/compiler-cli/src/run.rs | compiler-cli/src/run.rs | use std::sync::OnceLock;
use camino::Utf8PathBuf;
use ecow::EcoString;
use gleam_core::{
analyse::TargetSupport,
build::{Built, Codegen, Compile, Mode, NullTelemetry, Options, Runtime, Target, Telemetry},
config::{DenoFlag, PackageConfig},
error::Error,
io::{Command, CommandExecutor, Stdio},
paths::ProjectPaths,
type_::ModuleFunction,
};
use crate::{config::PackageKind, fs::ProjectIO};
/// Which source directory's default module should be run when no explicit
/// module is given: the package's main module, its test module, or its dev
/// module.
#[derive(Debug, Clone, Copy)]
pub enum Which {
    Src,
    Test,
    Dev,
}
// TODO: test
/// Build the project then execute the requested module, exiting this
/// process with the child program's exit status. Does not return on
/// success.
pub fn command(
    paths: &ProjectPaths,
    arguments: Vec<String>,
    target: Option<Target>,
    runtime: Option<Runtime>,
    module: Option<String>,
    which: Which,
    no_print_progress: bool,
) -> Result<(), Error> {
    // Don't exit on ctrl+c as it is used by child erlang shell
    ctrlc::set_handler(move || {}).expect("Error setting Ctrl-C handler");
    let command = setup(
        paths,
        arguments,
        target,
        runtime,
        module,
        which,
        no_print_progress,
    )?;
    let status = ProjectIO::new().exec(command)?;
    std::process::exit(status);
}
/// Validate the requested module, download dependencies, build the project
/// and construct the runtime command (`erl`, `node`, `deno` or `bun`) that
/// will run the module's `main` function.
///
/// Errors if the module name is invalid, the build fails, the module has no
/// public `main`, or a JavaScript runtime was requested for the Erlang
/// target.
pub fn setup(
    paths: &ProjectPaths,
    arguments: Vec<String>,
    target: Option<Target>,
    runtime: Option<Runtime>,
    module: Option<String>,
    which: Which,
    no_print_progress: bool,
) -> Result<Command, Error> {
    // Validate the module path
    if let Some(mod_path) = &module
        && !is_gleam_module(mod_path)
    {
        return Err(Error::InvalidModuleName {
            module: mod_path.to_owned(),
        });
    };

    let telemetry: &'static dyn Telemetry = if no_print_progress {
        &NullTelemetry
    } else {
        &crate::cli::Reporter
    };

    // Download dependencies
    let manifest = if no_print_progress {
        crate::build::download_dependencies(paths, NullTelemetry)?
    } else {
        crate::build::download_dependencies(paths, crate::cli::Reporter::new())?
    };

    // Get the config for the module that is being run to check the target.
    // Also get the kind of the package the module belongs to: whether the module
    // belongs to a dependency or to the root package.
    let (mod_config, package_kind) = match &module {
        Some(mod_path) => {
            crate::config::find_package_config_for_module(mod_path, &manifest, paths)?
        }
        _ => (crate::config::root_config(paths)?, PackageKind::Root),
    };

    // The root config is required to run the project.
    let root_config = crate::config::root_config(paths)?;

    // Determine which module to run
    let module = module.unwrap_or(match which {
        Which::Src => root_config.name.to_string(),
        Which::Test => format!("{}_test", &root_config.name),
        Which::Dev => format!("{}_dev", &root_config.name),
    });

    let target = target.unwrap_or(mod_config.target);

    let options = Options {
        warnings_as_errors: false,
        compile: match package_kind {
            // If we're trying to run a dependency module we do not compile and
            // check the root package. So we can run the main function from a
            // dependency's module even if the root package doesn't compile.
            PackageKind::Dependency => Compile::DepsOnly,
            PackageKind::Root => Compile::All,
        },
        codegen: Codegen::All,
        mode: Mode::Dev,
        target: Some(target),
        root_target_support: match package_kind {
            // The module we want to run is in the root package, so we make sure that the package
            // can compile successfully for the current target.
            PackageKind::Root => TargetSupport::Enforced,
            // On the other hand, if we're trying to run a module that belongs to a dependency, we
            // only care if the dependency can compile for the current target.
            PackageKind::Dependency => TargetSupport::NotEnforced,
        },
        no_print_progress,
    };

    let built = crate::build::main(paths, options, manifest)?;

    // A module can not be run if it does not exist or does not have a public main function.
    let main_function = get_or_suggest_main_function(built, &module, target)?;

    telemetry.running(&format!("{module}.main"));

    // Get the command to run the project.
    match target {
        Target::Erlang => match runtime {
            Some(r) => Err(Error::InvalidRuntime {
                target: Target::Erlang,
                invalid_runtime: r,
            }),
            _ => run_erlang_command(paths, &root_config.name, &module, arguments),
        },
        Target::JavaScript => match runtime.unwrap_or(mod_config.javascript.runtime) {
            Runtime::Deno => run_javascript_deno_command(
                paths,
                &root_config,
                &main_function.package,
                &module,
                arguments,
            ),
            Runtime::NodeJs => {
                run_javascript_node_command(paths, &main_function.package, &module, arguments)
            }
            Runtime::Bun => {
                run_javascript_bun_command(paths, &main_function.package, &module, arguments)
            }
        },
    }
}
/// Build the `erl` invocation that runs a compiled Gleam module on the BEAM.
fn run_erlang_command(
    paths: &ProjectPaths,
    package: &str,
    module: &str,
    arguments: Vec<String>,
) -> Result<Command, Error> {
    let mut args = vec![];

    // Make every compiled package's ebin directory visible to the VM.
    let packages = paths.build_directory_for_target(Mode::Dev, Target::Erlang);
    for entry in crate::fs::read_dir(packages)?.filter_map(Result::ok) {
        args.push("-pa".into());
        args.push(entry.path().join("ebin").into());
    }

    // gleam modules are separated by `/`. Erlang modules are separated by `@`.
    let module = module.replace('/', "@");

    // Evaluate the generated entrypoint, then stop without an Erlang shell.
    args.push("-eval".into());
    args.push(format!("{package}@@main:run({module})"));
    args.push("-noshell".into());

    // Everything after `-extra` is passed through to the program itself.
    args.push("-extra".into());
    args.extend(arguments);

    Ok(Command {
        program: "erl".to_string(),
        args,
        env: vec![],
        cwd: None,
        stdio: Stdio::Inherit,
    })
}
/// Build the `bun run` invocation for the generated JavaScript entrypoint.
fn run_javascript_bun_command(
    paths: &ProjectPaths,
    package: &str,
    module: &str,
    arguments: Vec<String>,
) -> Result<Command, Error> {
    let entry = write_javascript_entrypoint(paths, package, module)?;

    let mut args = vec!["run".to_string(), entry.to_string()];
    args.extend(arguments);

    Ok(Command {
        program: "bun".to_string(),
        args,
        env: vec![],
        cwd: None,
        stdio: Stdio::Inherit,
    })
}
/// Build the `node` invocation for the generated JavaScript entrypoint.
fn run_javascript_node_command(
    paths: &ProjectPaths,
    package: &str,
    module: &str,
    arguments: Vec<String>,
) -> Result<Command, Error> {
    let entry = write_javascript_entrypoint(paths, package, module)?;

    let mut args = vec![entry.to_string()];
    args.extend(arguments);

    Ok(Command {
        program: "node".to_string(),
        args,
        env: vec![],
        cwd: None,
        stdio: Stdio::Inherit,
    })
}
/// Write a `gleam.main.mjs` entrypoint module into the package's build
/// directory that imports the target module and calls its `main` function,
/// returning the path of the written file.
fn write_javascript_entrypoint(
    paths: &ProjectPaths,
    package: &str,
    module: &str,
) -> Result<Utf8PathBuf, Error> {
    let path = paths
        .build_directory_for_package(Mode::Dev, Target::JavaScript, package)
        .to_path_buf()
        .join("gleam.main.mjs");
    let module = format!(
        r#"import {{ main }} from "./{module}.mjs";
main();
"#,
    );
    crate::fs::write(&path, &module)?;
    Ok(path)
}
/// Build the `deno run` invocation for the generated JavaScript entrypoint,
/// translating the `[javascript.deno]` configuration from gleam.toml into
/// Deno permission flags.
fn run_javascript_deno_command(
    paths: &ProjectPaths,
    config: &PackageConfig,
    package: &str,
    module: &str,
    arguments: Vec<String>,
) -> Result<Command, Error> {
    let mut args = vec![];

    // Run the main function.
    args.push("run".into());

    // Enable unstable features and APIs
    if config.javascript.deno.unstable {
        args.push("--unstable".into())
    }

    // Enable location API
    if let Some(location) = &config.javascript.deno.location {
        args.push(format!("--location={location}"));
    }

    // Set deno permissions
    if config.javascript.deno.allow_all {
        // Allow all
        args.push("--allow-all".into())
    } else {
        // Allow env
        add_deno_flag(&mut args, "--allow-env", &config.javascript.deno.allow_env);

        // Allow sys
        if config.javascript.deno.allow_sys {
            args.push("--allow-sys".into())
        }

        // Allow hrtime
        if config.javascript.deno.allow_hrtime {
            args.push("--allow-hrtime".into())
        }

        // Allow net
        add_deno_flag(&mut args, "--allow-net", &config.javascript.deno.allow_net);

        // Allow ffi
        if config.javascript.deno.allow_ffi {
            args.push("--allow-ffi".into())
        }

        // Allow read
        add_deno_flag(
            &mut args,
            "--allow-read",
            &config.javascript.deno.allow_read,
        );

        // Allow run
        add_deno_flag(&mut args, "--allow-run", &config.javascript.deno.allow_run);

        // Allow write
        add_deno_flag(
            &mut args,
            "--allow-write",
            &config.javascript.deno.allow_write,
        );
    }

    // The entrypoint path and then any user arguments follow the flags.
    let entrypoint = write_javascript_entrypoint(paths, package, module)?;
    args.push(entrypoint.to_string());

    for argument in arguments.into_iter() {
        args.push(argument);
    }

    Ok(Command {
        program: "deno".to_string(),
        args,
        env: vec![],
        cwd: None,
        stdio: Stdio::Inherit,
    })
}
/// Append a Deno permission flag to `args`.
///
/// `DenoFlag::AllowAll` produces the bare flag (e.g. `--allow-net`), while
/// `DenoFlag::Allow` produces `flag=a,b,c` — or nothing at all when the
/// allow-list is empty, since an empty list grants no permission.
fn add_deno_flag(args: &mut Vec<String>, flag: &str, flags: &DenoFlag) {
    match flags {
        DenoFlag::AllowAll => args.push(flag.to_owned()),
        DenoFlag::Allow(allow) => {
            if !allow.is_empty() {
                // `flag` is formatted by reference: the previous
                // `flag.to_owned()` here allocated an intermediate String
                // for no reason.
                args.push(format!("{flag}={}", allow.join(",")));
            }
        }
    }
}
/// Check if a module name is a valid gleam module name.
///
/// A valid name is one or more `/`-separated segments, each matching
/// `[a-z][_a-z0-9]*`: a lowercase ASCII letter followed by any number of
/// lowercase ASCII letters, digits, or underscores.
///
/// This used to compile a regex (cached in a `OnceLock`) for what is a
/// simple per-character check; the direct implementation below is
/// equivalent, dependency-free and cheaper.
fn is_gleam_module(module: &str) -> bool {
    // One segment of a module path: `[a-z][_a-z0-9]*`.
    fn valid_segment(segment: &str) -> bool {
        let mut chars = segment.chars();
        let Some(first) = chars.next() else {
            // Empty segments reject "", "a//b", and leading/trailing slashes.
            return false;
        };
        first.is_ascii_lowercase()
            && chars.all(|c| c.is_ascii_lowercase() || c.is_ascii_digit() || c == '_')
    }

    // `split` always yields at least one segment, so the empty string fails
    // via the empty-segment check above.
    module.split('/').all(valid_segment)
}
/// If provided module is not executable, suggest a possible valid module.
///
/// When `module` has no public `main` for `target`, check whether the user
/// accidentally included a source-directory prefix (`src/`, `test/` or
/// `dev/`); if stripping it yields a runnable module, error with that name
/// as a suggestion. Otherwise the original lookup error is returned.
fn get_or_suggest_main_function(
    built: Built,
    module: &str,
    target: Target,
) -> Result<ModuleFunction, Error> {
    // Check if the module exists
    let error = match built.get_main_function(&module.into(), target) {
        Ok(main_fn) => return Ok(main_fn),
        Err(error) => error,
    };

    // Otherwise see if the module has been prefixed with "src/", "test/" or "dev/".
    for prefix in ["src/", "test/", "dev/"] {
        let other = match module.strip_prefix(prefix) {
            Some(other) => other.into(),
            None => continue,
        };
        if built.get_main_function(&other, target).is_ok() {
            return Err(Error::ModuleDoesNotExist {
                module: EcoString::from(module),
                suggestion: Some(other),
            });
        }
    }

    // No suggestion found: report the original failure.
    Err(error)
}
#[test]
fn invalid_module_names() {
    // Empty names, stray slashes, and characters outside [_a-z0-9] are all
    // rejected.
    for mod_name in [
        "",
        "/mod/name",
        "/mod/name/",
        "mod/name/",
        "/mod/",
        "mod/",
        "common-invalid-character",
    ] {
        assert!(!is_gleam_module(mod_name));
    }
}

#[test]
fn valid_module_names() {
    // Lowercase segments separated by single slashes are accepted.
    for mod_name in ["valid", "valid/name", "valid/mod/name"] {
        assert!(is_gleam_module(mod_name));
    }
}
| rust | Apache-2.0 | f424547f02e621f1c5f28749786e05eda7feb098 | 2026-01-04T15:40:22.554517Z | false |
gleam-lang/gleam | https://github.com/gleam-lang/gleam/blob/f424547f02e621f1c5f28749786e05eda7feb098/compiler-cli/src/owner.rs | compiler-cli/src/owner.rs | use crate::{cli, http::HttpClient};
use gleam_core::{Result, hex};
/// Transfer ownership of a Hex package to another user, after an
/// interactive confirmation in which the user must re-type the package
/// name.
pub fn transfer(package: String, new_owner_username_or_email: String) -> Result<()> {
    println!(
        "Transferring ownership of this package will remove all current owners and make
{new_owner_username_or_email} its new owner.
Do you wish to transfer ownership of `{package}` to {new_owner_username_or_email}?",
    );
    let should_transfer_ownership = cli::confirm_with_text(&package)?;
    if !should_transfer_ownership {
        println!("Not transferring ownership.");
        return Ok(());
    }

    // The Hex API client is async, so run it on a Tokio runtime.
    let runtime = tokio::runtime::Runtime::new().expect("Unable to start Tokio async runtime");
    let hex_config = hexpm::Config::new();
    let api_key =
        crate::hex::HexAuthentication::new(&runtime, hex_config.clone()).get_or_create_api_key()?;

    cli::print_transferring_ownership();
    runtime.block_on(hex::transfer_owner(
        &api_key,
        package,
        new_owner_username_or_email,
        &hex_config,
        &HttpClient::new(),
    ))?;

    cli::print_transferred_ownership();
    Ok(())
}
| rust | Apache-2.0 | f424547f02e621f1c5f28749786e05eda7feb098 | 2026-01-04T15:40:22.554517Z | false |
gleam-lang/gleam | https://github.com/gleam-lang/gleam/blob/f424547f02e621f1c5f28749786e05eda7feb098/compiler-cli/src/export.rs | compiler-cli/src/export.rs | use camino::Utf8PathBuf;
use gleam_core::{
Result,
analyse::TargetSupport,
build::{Codegen, Compile, Mode, Options, Target},
paths::ProjectPaths,
};
static ENTRYPOINT_FILENAME_POWERSHELL: &str = "entrypoint.ps1";
static ENTRYPOINT_FILENAME_POSIX_SHELL: &str = "entrypoint.sh";
static ENTRYPOINT_TEMPLATE_POWERSHELL: &str =
include_str!("../templates/erlang-shipment-entrypoint.ps1");
static ENTRYPOINT_TEMPLATE_POSIX_SHELL: &str =
include_str!("../templates/erlang-shipment-entrypoint.sh");
// TODO: start in embedded mode
// TODO: test
/// Generate a directory of precompiled Erlang along with a start script.
/// Suitable for deployment to a server.
///
/// For each Erlang application (aka package) directory these directories are
/// copied across:
/// - ebin
/// - include
/// - priv
pub(crate) fn erlang_shipment(paths: &ProjectPaths) -> Result<()> {
    let target = Target::Erlang;
    let mode = Mode::Prod;
    let build = paths.build_directory_for_target(mode, target);
    let out = paths.erlang_shipment_directory();

    crate::fs::mkdir(&out)?;

    // Reset the directories to ensure we have a clean slate and no old code
    crate::fs::delete_directory(&build)?;
    crate::fs::delete_directory(&out)?;

    // Build project in production mode
    let built = crate::build::main(
        paths,
        Options {
            root_target_support: TargetSupport::Enforced,
            warnings_as_errors: false,
            codegen: Codegen::All,
            compile: Compile::All,
            mode,
            target: Some(target),
            no_print_progress: false,
        },
        crate::build::download_dependencies(paths, crate::cli::Reporter::new())?,
    )?;

    for entry in crate::fs::read_dir(&build)?.filter_map(Result::ok) {
        let path = entry.path();

        // We are only interested in package directories
        if !path.is_dir() {
            continue;
        }

        let name = path.file_name().expect("Directory name");
        let build = build.join(name);
        let out = out.join(name);
        crate::fs::mkdir(&out)?;

        // Copy desired package subdirectories
        for subdirectory in ["ebin", "priv", "include"] {
            let source = build.join(subdirectory);
            if source.is_dir() {
                // Resolve symlinks so the copy contains real files.
                let source = crate::fs::canonicalise(&source)?;
                let out = out.join(subdirectory);
                crate::fs::copy_dir(source, &out)?;
            }
        }
    }

    // PowerShell entry point script.
    write_entrypoint_script(
        &out.join(ENTRYPOINT_FILENAME_POWERSHELL),
        ENTRYPOINT_TEMPLATE_POWERSHELL,
        &built.root_package.config.name,
    )?;

    // POSIX Shell entry point script.
    write_entrypoint_script(
        &out.join(ENTRYPOINT_FILENAME_POSIX_SHELL),
        ENTRYPOINT_TEMPLATE_POSIX_SHELL,
        &built.root_package.config.name,
    )?;

    crate::cli::print_exported(&built.root_package.config.name);

    println!(
        "
Your Erlang shipment has been generated to {out}.
It can be copied to a compatible server with Erlang installed and run with
one of the following scripts:
  - {ENTRYPOINT_FILENAME_POWERSHELL} (PowerShell script)
  - {ENTRYPOINT_FILENAME_POSIX_SHELL} (POSIX Shell script)
",
    );

    Ok(())
}
/// Render an entrypoint script template and write it to disk as an
/// executable file.
///
/// `entrypoint_template` is the template *contents* (loaded at compile time
/// with `include_str!`), not a path — the parameter has been renamed
/// accordingly.
fn write_entrypoint_script(
    entrypoint_output_path: &Utf8PathBuf,
    entrypoint_template: &str,
    package_name: &str,
) -> Result<()> {
    // Substitute the placeholder used by both shipment script templates.
    let text = entrypoint_template.replace("$PACKAGE_NAME_FROM_GLEAM", package_name);
    crate::fs::write(entrypoint_output_path, &text)?;
    crate::fs::make_executable(entrypoint_output_path)?;
    Ok(())
}
/// Build the project's Hex release tarball and write it into the build
/// export directory without publishing it.
pub fn hex_tarball(paths: &ProjectPaths) -> Result<()> {
    let mut config = crate::config::root_config(paths)?;
    let data: Vec<u8> = crate::publish::build_hex_tarball(paths, &mut config)?;

    let path = paths.build_export_hex_tarball(&config.name, &config.version.to_string());
    crate::fs::write_bytes(&path, &data)?;
    println!(
        "
Your hex tarball has been generated in {}.
",
        &path
    );
    Ok(())
}
/// Print the JavaScript prelude module to standard output.
pub fn javascript_prelude() -> Result<()> {
    print!("{}", gleam_core::javascript::PRELUDE);
    Ok(())
}

/// Print the TypeScript definitions for the JavaScript prelude to standard
/// output.
pub fn typescript_prelude() -> Result<()> {
    print!("{}", gleam_core::javascript::PRELUDE_TS_DEF);
    Ok(())
}
/// Build the project and write its JSON package interface (public modules,
/// types, values and their documentation) to `out`.
pub fn package_interface(paths: &ProjectPaths, out: Utf8PathBuf) -> Result<()> {
    // Build the project
    let mut built = crate::build::main(
        paths,
        Options {
            mode: Mode::Prod,
            target: None,
            codegen: Codegen::All,
            compile: Compile::All,
            warnings_as_errors: false,
            root_target_support: TargetSupport::Enforced,
            no_print_progress: false,
        },
        crate::build::download_dependencies(paths, crate::cli::Reporter::new())?,
    )?;

    // Doc comments must be attached so they appear in the interface output.
    built.root_package.attach_doc_and_module_comments();

    let out = gleam_core::docs::generate_json_package_interface(
        out,
        &built.root_package,
        &built.module_interfaces,
    );
    crate::fs::write_outputs_under(&[out], paths.root())?;
    Ok(())
}
/// Write the project's gleam.toml configuration as a JSON document to `out`.
pub fn package_information(paths: &ProjectPaths, out: Utf8PathBuf) -> Result<()> {
    let config = crate::config::root_config(paths)?;
    let out = gleam_core::docs::generate_json_package_information(out, config);
    crate::fs::write_outputs_under(&[out], paths.root())?;
    Ok(())
}
| rust | Apache-2.0 | f424547f02e621f1c5f28749786e05eda7feb098 | 2026-01-04T15:40:22.554517Z | false |
gleam-lang/gleam | https://github.com/gleam-lang/gleam/blob/f424547f02e621f1c5f28749786e05eda7feb098/compiler-cli/src/dependencies.rs | compiler-cli/src/dependencies.rs | mod dependency_manager;
use std::{
cell::RefCell,
collections::{HashMap, HashSet},
io::ErrorKind,
process::Command,
rc::Rc,
time::Instant,
};
use camino::{Utf8Path, Utf8PathBuf};
use ecow::{EcoString, eco_format};
use flate2::read::GzDecoder;
use gleam_core::{
Error, Result,
build::{Mode, Target, Telemetry},
config::PackageConfig,
dependency::{self, PackageFetchError},
error::{FileIoAction, FileKind, ShellCommandFailureReason, StandardIoAction},
hex::{self, HEXPM_PUBLIC_KEY},
io::{HttpClient as _, TarUnpacker, WrappedReader},
manifest::{Base16Checksum, Manifest, ManifestPackage, ManifestPackageSource, PackageChanges},
paths::ProjectPaths,
requirement::Requirement,
};
use hexpm::version::Version;
use itertools::Itertools;
use same_file::is_same_file;
use strum::IntoEnumIterator;
pub use dependency_manager::DependencyManagerConfig;
#[cfg(test)]
mod tests;
use crate::{
TreeOptions,
build_lock::{BuildLock, Guard},
cli,
fs::{self, ProjectIO},
http::HttpClient,
text_layout::space_table,
};
/// Box-drawing characters used to render the dependency tree.
struct Symbols {
    down: &'static str,  // vertical continuation bar
    tee: &'static str,   // branch with further siblings below
    ell: &'static str,   // final branch in a group
    right: &'static str, // horizontal connector
}
// UTF-8 box-drawing symbols for `gleam deps tree` output.
static UTF8_SYMBOLS: Symbols = Symbols {
    down: "│",
    tee: "├",
    ell: "└",
    right: "─",
};
/// When set to `Yes`, the cli will check for major version updates of direct dependencies and
/// print them to the console if the major versions are not upgradeable due to constraints.
#[derive(Debug, Clone, Copy)]
pub enum CheckMajorVersions {
    // Report blocked major-version upgrades.
    Yes,
    // Skip the major-version check entirely.
    No,
}
/// Print the resolved packages and their versions as a two-column table.
pub fn list(paths: &ProjectPaths) -> Result<()> {
    let (_, manifest) = get_manifest_details(paths)?;
    list_manifest_packages(std::io::stdout(), manifest)
}
/// Print the dependency tree to standard output.
///
/// The root package is synthesised from gleam.toml since it is not recorded
/// in the manifest itself.
pub fn tree(paths: &ProjectPaths, options: TreeOptions) -> Result<()> {
    let (config, manifest) = get_manifest_details(paths)?;

    // Initialize the root package since it is not part of the manifest
    let root_package = ManifestPackage {
        build_tools: vec![],
        name: config.name.clone(),
        requirements: config.all_direct_dependencies()?.keys().cloned().collect(),
        version: config.version.clone(),
        source: ManifestPackageSource::Local {
            path: paths.root().to_path_buf(),
        },
        otp_app: None,
    };

    // Get the manifest packages and add the root package to the vec.
    // (Previously this went via `append(&mut vec![root_package.clone()])`
    // and then cloned the whole vec again; a plain `push` and move suffice.)
    let mut packages = manifest.packages.iter().cloned().collect_vec();
    packages.push(root_package);

    list_package_and_dependencies_tree(std::io::stdout(), options, packages, config.name)
}
/// Load the project config and resolve the dependency manifest, honouring
/// the versions already locked in manifest.toml.
fn get_manifest_details(paths: &ProjectPaths) -> Result<(PackageConfig, Manifest)> {
    // Hex API calls made during resolution are async; run them on Tokio.
    let runtime = tokio::runtime::Runtime::new().expect("Unable to start Tokio async runtime");
    let config = crate::config::root_config(paths)?;
    let package_fetcher = PackageFetcher::new(runtime.handle().clone());
    let dependency_manager = DependencyManagerConfig {
        use_manifest: UseManifest::Yes,
        check_major_versions: CheckMajorVersions::No,
    }
    .into_dependency_manager(
        runtime.handle().clone(),
        package_fetcher,
        cli::Reporter::new(),
        Mode::Dev,
    );
    let manifest = dependency_manager
        .resolve_versions(paths, &config, Vec::new())?
        .manifest;
    Ok((config, manifest))
}
fn list_manifest_packages<W: std::io::Write>(mut buffer: W, manifest: Manifest) -> Result<()> {
let packages = manifest
.packages
.into_iter()
.map(|package| vec![package.name.to_string(), package.version.to_string()])
.collect_vec();
let out = space_table(&["Package", "Version"], packages);
write!(buffer, "{out}").map_err(|e| Error::StandardIo {
action: StandardIoAction::Write,
err: Some(e.kind()),
})
}
/// Render the dependency tree rooted at the requested package and write it
/// to `buffer`, one line per node. With `--invert` the tree shows the
/// packages that *depend on* the target instead of its dependencies.
fn list_package_and_dependencies_tree<W: std::io::Write>(
    mut buffer: W,
    options: TreeOptions,
    packages: Vec<ManifestPackage>,
    root_package_name: EcoString,
) -> Result<()> {
    let mut invert = false;

    // Pick the tree root: an explicit --package, an inverted target, or the
    // project's own root package.
    let package: Option<&ManifestPackage> = if let Some(input_package_name) = options.package {
        packages.iter().find(|p| p.name == input_package_name)
    } else if let Some(input_package_name) = options.invert {
        invert = true;
        packages.iter().find(|p| p.name == input_package_name)
    } else {
        packages.iter().find(|p| p.name == root_package_name)
    };

    if let Some(package) = package {
        let tree = Vec::from([eco_format!("{0} v{1}", package.name, package.version)]);
        // `tree` is moved into the recursive builder; the previous
        // implementation cloned it here needlessly.
        let tree = list_dependencies_tree(tree, package.clone(), packages, EcoString::new(), invert);

        tree.iter()
            .try_for_each(|line| writeln!(buffer, "{line}"))
            .map_err(|e| Error::StandardIo {
                action: StandardIoAction::Write,
                err: Some(e.kind()),
            })
    } else {
        writeln!(buffer, "Package not found. Please check the package name.").map_err(|e| {
            Error::StandardIo {
                action: StandardIoAction::Write,
                err: Some(e.kind()),
            }
        })
    }
}
/// Recursively append one tree level per dependency of `package` (or per
/// dependent, when `invert` is set) to `tree`, returning the extended tree.
/// `accum` carries the box-drawing prefix inherited from ancestor levels.
fn list_dependencies_tree(
    mut tree: Vec<EcoString>,
    package: ManifestPackage,
    packages: Vec<ManifestPackage>,
    accum: EcoString,
    invert: bool,
) -> Vec<EcoString> {
    // Normal mode: packages that `package` requires.
    // Inverted mode: packages that require `package`.
    let dependencies = packages
        .iter()
        .filter(|p| {
            (invert && p.requirements.contains(&package.name))
                || (!invert && package.requirements.contains(&p.name))
        })
        .cloned()
        .collect_vec();

    let dependencies = dependencies.iter().sorted().enumerate();
    let deps_length = dependencies.len();
    for (index, dependency) in dependencies {
        let is_last = index == deps_length - 1;
        let prefix = if is_last {
            UTF8_SYMBOLS.ell
        } else {
            UTF8_SYMBOLS.tee
        };
        tree.push(eco_format!(
            "{0}{1}{2}{2} {3} v{4}",
            accum,
            prefix,
            UTF8_SYMBOLS.right,
            dependency.name,
            dependency.version
        ));

        // Children of the last sibling need no continuation bar below it.
        let child_accum = accum.clone() + (if !is_last { UTF8_SYMBOLS.down } else { " " }) + " ";
        // `tree` is threaded through the recursion by move; the previous
        // implementation cloned both it and the accumulator on every call.
        tree = list_dependencies_tree(
            tree,
            dependency.clone(),
            packages.clone(),
            child_accum,
            invert,
        );
    }
    tree
}
/// Check Hex for newer versions of the packages in the manifest and print
/// any available updates. Prints nothing when everything is up to date.
pub fn outdated(paths: &ProjectPaths) -> Result<()> {
    let (_, manifest) = get_manifest_details(paths)?;
    let runtime = tokio::runtime::Runtime::new().expect("Unable to start Tokio async runtime");
    let package_fetcher = PackageFetcher::new(runtime.handle().clone());

    let version_updates = dependency::check_for_version_updates(&manifest, &package_fetcher);

    if !version_updates.is_empty() {
        print!("{}", pretty_print_version_updates(version_updates));
    }

    Ok(())
}
/// Whether dependency resolution should honour the versions locked in
/// manifest.toml (`Yes`) or resolve from scratch (`No`).
#[derive(Debug, Clone, Copy)]
pub enum UseManifest {
    Yes,
    No,
}
/// Update dependencies to the newest versions their requirements allow.
/// With an empty `packages` list everything is updated; otherwise only the
/// named packages are unlocked and re-resolved.
pub fn update(paths: &ProjectPaths, packages: Vec<String>) -> Result<()> {
    // With no explicit packages the manifest is ignored so every dependency
    // is free to move; otherwise the manifest pins everything else in place.
    let use_manifest = if packages.is_empty() {
        UseManifest::No
    } else {
        UseManifest::Yes
    };

    // Update specific packages
    _ = resolve_and_download(
        paths,
        cli::Reporter::new(),
        None,
        packages.into_iter().map(EcoString::from).collect(),
        DependencyManagerConfig {
            use_manifest,
            check_major_versions: CheckMajorVersions::Yes,
        },
    )?;
    Ok(())
}
/// Edit the manifest.toml file in this project, removing all extra requirements and packages
/// that are no longer present in the gleam.toml config.
pub fn cleanup<Telem: Telemetry>(paths: &ProjectPaths, telemetry: Telem) -> Result<Manifest> {
    let span = tracing::info_span!("remove_deps");
    let _enter = span.enter();

    // We do this before acquiring the build lock so that we don't create the
    // build directory if there is no gleam.toml
    crate::config::ensure_config_exists(paths)?;

    // Hold the packages directory lock while mutating it.
    let lock = BuildLock::new_packages(paths)?;
    let _guard: Guard = lock.lock(&telemetry)?;

    // Read the project config
    let config = crate::root_config(paths)?;

    let old_manifest = read_manifest_from_disc(paths)?;
    let mut manifest = old_manifest.clone();
    remove_extra_requirements(&config, &mut manifest)?;

    // Remove any packages that are no longer required due to manifest changes
    let local = LocalPackages::read_from_disc(paths)?;
    remove_extra_packages(paths, &local, &manifest, &telemetry)?;

    // Record new state of the packages directory
    tracing::debug!("writing_manifest_toml");
    write_manifest_to_disc(paths, &manifest)?;
    LocalPackages::from_manifest(&manifest).write_to_disc(paths)?;

    // Report which packages changed between the old and new manifests.
    let changes = PackageChanges::between_manifests(&old_manifest, &manifest);
    telemetry.resolved_package_versions(&changes);

    Ok(manifest)
}
/// Remove requirements and unneeded packages from manifest that are no longer present in config.
fn remove_extra_requirements(config: &PackageConfig, manifest: &mut Manifest) -> Result<()> {
    // "extra requirements" are all packages that are requirements in the manifest, but no longer
    // part of the gleam.toml config.
    let is_extra_requirement = |name: &EcoString| {
        !config.dev_dependencies.contains_key(name) && !config.dependencies.contains_key(name)
    };

    // If a requirement is also used as a dependency, we do not want to force-unlock it.
    // If the dependents get deleted as well, this transitive dependency will be dropped.
    let is_unlockable_requirement = |name: &EcoString| {
        manifest
            .packages
            .iter()
            .all(|p| !p.requirements.contains(name))
    };

    let extra_requirements = manifest
        .requirements
        .keys()
        .filter(|&name| is_extra_requirement(name) && is_unlockable_requirement(name))
        .cloned()
        .collect::<Vec<_>>();

    manifest
        .requirements
        .retain(|name, _| !is_extra_requirement(name));

    // Unlock all packages that we want to remove - this removes them and all unneeded
    // dependencies from `locked`.
    let mut locked = config.locked(Some(manifest))?;
    unlock_packages(&mut locked, extra_requirements.as_slice(), Some(manifest))?;
    // Remove all unlocked packages from the manifest - these are truly no longer needed.
    manifest
        .packages
        .retain(|package| locked.contains_key(&package.name));
    Ok(())
}
/// Parse a `name@version` specifier as used by `gleam add`.
///
/// With no `@version` suffix the requirement defaults to `>= 0.0.0`. A
/// partial version (`1` or `1.2`) becomes a range bounded by the next major
/// version; a full `MAJOR.MINOR.PATCH` version is used as-is. More than
/// three dotted components is an error.
pub fn parse_gleam_add_specifier(package: &str) -> Result<(EcoString, Requirement)> {
    let Some((package, version)) = package.split_once('@') else {
        // Default to the latest version available.
        return Ok((
            package.into(),
            Requirement::hex(">= 0.0.0").expect("'>= 0.0.0' should be a valid pubgrub range"),
        ));
    };

    // Parse the major and minor from the provided semantic version.
    let parts = version.split('.').collect::<Vec<_>>();
    let major = match parts.first() {
        Some(major) => Ok(major),
        // NOTE(review): `split` always yields at least one element, so this
        // branch looks unreachable; it also reports `package` rather than
        // `version` as the offending input — confirm before relying on it.
        None => Err(Error::InvalidVersionFormat {
            input: package.to_string(),
            error: "Failed to parse semantic major version".to_string(),
        }),
    }?;
    let minor = match parts.get(1) {
        Some(minor) => minor,
        None => "0",
    };

    // Using the major version specifier, calculate the maximum
    // allowable version (i.e., the next major version).
    let Ok(num) = major.parse::<usize>() else {
        return Err(Error::InvalidVersionFormat {
            input: version.to_string(),
            error: "Failed to parse semantic major version as integer".to_string(),
        });
    };
    let max_ver = [&(num + 1).to_string(), "0", "0"].join(".");

    // Pad the provided version specifier with zeros map to a Hex version.
    let requirement = match parts.len() {
        1 | 2 => {
            let min_ver = [major, minor, "0"].join(".");
            Requirement::hex(&[">=", &min_ver, "and", "<", &max_ver].join(" "))
        }
        3 => Requirement::hex(version),
        n => {
            return Err(Error::InvalidVersionFormat {
                input: version.to_string(),
                error: format!(
                    "Expected up to 3 numbers in version specifier (MAJOR.MINOR.PATCH), found {n}"
                ),
            });
        }
    }?;

    Ok((package.into(), requirement))
}
/// Resolve dependency versions for the project at `paths` and download any
/// packages that are not already present, returning the resulting manifest.
///
/// `new_package` optionally carries packages being added by `gleam add`, along
/// with whether they are dev dependencies. `packages_to_update` lists packages
/// whose locked versions may be changed.
pub fn resolve_and_download<Telem: Telemetry>(
    paths: &ProjectPaths,
    telemetry: Telem,
    new_package: Option<(Vec<(EcoString, Requirement)>, bool)>,
    packages_to_update: Vec<EcoString>,
    config: DependencyManagerConfig,
) -> Result<Manifest> {
    // The Hex API is asynchronous, so a Tokio runtime is needed to drive it.
    let runtime = tokio::runtime::Runtime::new().expect("Unable to start Tokio async runtime");
    let handle = runtime.handle().clone();
    let fetcher = PackageFetcher::new(handle.clone());
    let manager = config.into_dependency_manager(handle, fetcher, telemetry, Mode::Dev);
    manager.resolve_and_download_versions(paths, new_package, packages_to_update)
}
/// Convert a set of package version diffs into table rows of
/// `[name, current version, latest version]` strings, sorted lexicographically.
fn format_versions_and_extract_longest_parts(
    versions: dependency::PackageVersionDiffs,
) -> Vec<Vec<String>> {
    let mut rows: Vec<Vec<String>> = versions
        .iter()
        .map(|(name, (current, latest))| {
            vec![name.to_string(), current.to_string(), latest.to_string()]
        })
        .collect();
    rows.sort();
    rows
}
/// Render a notice listing dependencies for which a new major version exists,
/// as an aligned package/current/latest table.
fn pretty_print_major_versions_available(versions: dependency::PackageVersionDiffs) -> String {
    let rows = format_versions_and_extract_longest_parts(versions);
    let table = space_table(&["Package", "Current", "Latest"], &rows);
    format!("\nThe following dependencies have new major versions available:\n\n{table}")
}
/// Render an aligned package/current/latest table for packages whose versions
/// have changed.
fn pretty_print_version_updates(versions: dependency::PackageVersionDiffs) -> EcoString {
    let rows = format_versions_and_extract_longest_parts(versions);
    space_table(&["Package", "Current", "Latest"], &rows)
}
/// Download every package that is listed in the manifest but missing from the
/// local `build/packages` directory.
///
/// Hex packages are downloaded in one batch via the Hex downloader; git
/// packages are fetched individually by cloning their repositories.
async fn add_missing_packages<Telem: Telemetry>(
    paths: &ProjectPaths,
    fs: Box<ProjectIO>,
    manifest: &Manifest,
    local: &LocalPackages,
    project_name: EcoString,
    telemetry: &Telem,
) -> Result<(), Error> {
    let missing_packages = local.missing_local_packages(manifest, &project_name);
    let mut num_to_download = 0;
    // Eagerly collected, so the counter includes all git packages up front.
    let missing_git_packages = missing_packages
        .iter()
        .copied()
        .filter(|package| package.is_git())
        .inspect(|_| {
            num_to_download += 1;
        })
        .collect_vec();
    // Lazy: this `inspect` only runs as the downloader consumes the iterator,
    // so the counter is only complete once `download_hex_packages` returns.
    let mut missing_hex_packages = missing_packages
        .iter()
        .copied()
        .filter(|package| package.is_hex())
        .inspect(|_| {
            num_to_download += 1;
        })
        .peekable();
    // If we need to download at-least one package
    if missing_hex_packages.peek().is_some() || !missing_git_packages.is_empty() {
        let http = HttpClient::boxed();
        let downloader = hex::Downloader::new(fs.clone(), fs, http, Untar::boxed(), paths.clone());
        let start = Instant::now();
        telemetry.downloading_package("packages");
        downloader
            .download_hex_packages(missing_hex_packages, &project_name)
            .await?;
        for package in missing_git_packages {
            // Expected to always match, since only git packages were collected
            // above; non-git sources are skipped defensively.
            let ManifestPackageSource::Git { repo, commit } = &package.source else {
                continue;
            };
            let _ = download_git_package(&package.name, repo, commit, paths)?;
        }
        telemetry.packages_downloaded(start, num_to_download);
    }
    Ok(())
}
/// Delete locally downloaded packages (and their build artefacts) that are no
/// longer present in the manifest.
///
/// Holds the global build lock while deleting so no concurrent build observes
/// a half-removed package.
fn remove_extra_packages<Telem: Telemetry>(
    paths: &ProjectPaths,
    local: &LocalPackages,
    manifest: &Manifest,
    telemetry: &Telem,
) -> Result<()> {
    let _guard = BuildLock::lock_all_build(paths, telemetry)?;
    for (package_name, version) in local.extra_local_packages(manifest) {
        // TODO: test
        // Delete the package source
        let path = paths.build_packages_package(&package_name);
        if path.exists() {
            tracing::debug!(package=%package_name, version=%version, "removing_unneeded_package");
            fs::delete_directory(&path)?;
        }
        // The on-disc application name may differ from the package name, so
        // look it up once here rather than repeating the manifest search for
        // every mode/target combination below.
        let name = manifest
            .packages
            .iter()
            .find(|p| p.name == package_name)
            .map(|p| p.application_name().as_str())
            .unwrap_or(package_name.as_str());
        // TODO: test
        // Delete any build artefacts for the package
        for mode in Mode::iter() {
            for target in Target::iter() {
                let path = paths.build_directory_for_package(mode, target, name);
                if path.exists() {
                    tracing::debug!(package=%package_name, version=%version, "deleting_build_cache");
                    fs::delete_directory(&path)?;
                }
            }
        }
    }
    Ok(())
}
fn read_manifest_from_disc(paths: &ProjectPaths) -> Result<Manifest> {
tracing::debug!("reading_manifest_toml");
let manifest_path = paths.manifest();
let toml = fs::read(&manifest_path)?;
let manifest = toml::from_str(&toml).map_err(|e| Error::FileIo {
action: FileIoAction::Parse,
kind: FileKind::File,
path: manifest_path.clone(),
err: Some(e.to_string()),
})?;
Ok(manifest)
}
/// Serialise the manifest and write it to `manifest.toml` in the project root.
fn write_manifest_to_disc(paths: &ProjectPaths, manifest: &Manifest) -> Result<()> {
    let destination = paths.manifest();
    let contents = manifest.to_toml(paths.root());
    fs::write(&destination, &contents)
}
/// The container for locally pinned packages, representing the current contents of
/// the `project/build/packages` directory.
/// For descriptions of packages provided by paths and git deps, see the ProvidedPackage struct.
/// The same package may appear in both at different times.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
struct LocalPackages {
    // Package name -> version currently on disc (see `from_manifest`).
    packages: HashMap<String, Version>,
}
impl LocalPackages {
    /// Packages present locally but absent from the manifest (or present at a
    /// different version), i.e. ones that are no longer needed.
    pub fn extra_local_packages(&self, manifest: &Manifest) -> Vec<(String, Version)> {
        let manifest_packages: HashSet<_> = manifest
            .packages
            .iter()
            .map(|p| (&p.name, &p.version))
            .collect();
        self.packages
            .iter()
            .filter(|(n, v)| !manifest_packages.contains(&(&EcoString::from(*n), v)))
            .map(|(n, v)| (n.clone(), v.clone()))
            .collect()
    }
    /// Manifest packages that still need to be fetched: everything except the
    /// root package, local path packages, and packages already present at the
    /// required version.
    pub fn missing_local_packages<'a>(
        &self,
        manifest: &'a Manifest,
        root: &str,
    ) -> Vec<&'a ManifestPackage> {
        manifest
            .packages
            .iter()
            // We don't need to download the root package
            .filter(|p| p.name != root)
            // We don't need to download local packages because we use the linked source directly
            .filter(|p| !p.is_local())
            // We don't need to download packages which we have the correct version of
            .filter(|p| self.packages.get(p.name.as_str()) != Some(&p.version))
            .collect()
    }
    /// Load `packages.toml` from the build directory, or an empty set if the
    /// file does not exist yet.
    pub fn read_from_disc(paths: &ProjectPaths) -> Result<Self> {
        let path = paths.build_packages_toml();
        if !path.exists() {
            return Ok(Self {
                packages: HashMap::new(),
            });
        }
        let toml = fs::read(&path)?;
        toml::from_str(&toml).map_err(|e| Error::FileIo {
            action: FileIoAction::Parse,
            kind: FileKind::File,
            path: path.clone(),
            err: Some(e.to_string()),
        })
    }
    /// Serialise this state to `packages.toml` in the build directory.
    pub fn write_to_disc(&self, paths: &ProjectPaths) -> Result<()> {
        let path = paths.build_packages_toml();
        let toml = toml::to_string(&self).expect("packages.toml serialization");
        fs::write(&path, &toml)
    }
    /// The local package state that a fully synced manifest would produce.
    pub fn from_manifest(manifest: &Manifest) -> Self {
        Self {
            packages: manifest
                .packages
                .iter()
                .map(|p| (p.name.to_string(), p.version.clone()))
                .collect(),
        }
    }
}
/// Compare two requirement maps for equality, resolving `Path` requirements
/// to canonical paths (via `same_requirements`) so that equivalent relative
/// and absolute paths compare equal.
fn is_same_requirements(
    requirements1: &HashMap<EcoString, Requirement>,
    requirements2: &HashMap<EcoString, Requirement>,
    root_path: &Utf8Path,
) -> Result<bool> {
    // Different sizes can never match, and the size check means a one-sided
    // key lookup below is sufficient.
    if requirements1.len() != requirements2.len() {
        return Ok(false);
    }
    for (name, left) in requirements1 {
        let right = requirements2.get(name);
        if !same_requirements(left, right, root_path)? {
            return Ok(false);
        }
    }
    Ok(true)
}
/// Compare a requirement against an optional counterpart.
///
/// `None` never matches. Two `Path` requirements are compared by their
/// canonicalised paths (resolved against `root_path`); any other combination
/// is compared structurally.
fn same_requirements(
    requirement1: &Requirement,
    requirement2: Option<&Requirement>,
    root_path: &Utf8Path,
) -> Result<bool> {
    let Some(requirement2) = requirement2 else {
        return Ok(false);
    };
    if let (Requirement::Path { path: path1 }, Requirement::Path { path: path2 }) =
        (requirement1, requirement2)
    {
        // Canonicalise both so equivalent spellings of the same path match.
        let canonical1 = fs::canonicalise(&root_path.join(path1))?;
        let canonical2 = fs::canonicalise(&root_path.join(path2))?;
        return Ok(canonical1 == canonical2);
    }
    Ok(requirement1 == requirement2)
}
/// A package provided from somewhere other than Hex — a local path or a git
/// repository — so it can take part in dependency resolution.
#[derive(Clone, Eq, PartialEq, Debug)]
struct ProvidedPackage {
    version: Version,
    source: ProvidedPackageSource,
    // Requirement ranges of the provided package's own dependencies.
    requirements: HashMap<EcoString, hexpm::version::Range>,
}
/// Where a provided package's source code comes from.
#[derive(Clone, Eq, Debug)]
enum ProvidedPackageSource {
    Git { repo: EcoString, commit: EcoString },
    Local { path: Utf8PathBuf },
}
impl ProvidedPackage {
    /// Convert to a `hexpm::Package` with a single release, so this provided
    /// package can be fed into Hex version resolution alongside real Hex
    /// packages.
    fn to_hex_package(&self, name: &EcoString) -> hexpm::Package {
        let requirements = self
            .requirements
            .iter()
            .map(|(name, version)| {
                (
                    name.as_str().into(),
                    hexpm::Dependency {
                        requirement: version.clone(),
                        optional: false,
                        app: None,
                        repository: None,
                    },
                )
            })
            .collect();
        let release = hexpm::Release {
            version: self.version.clone(),
            requirements,
            retirement_status: None,
            // No checksum: this package does not come from the Hex registry.
            outer_checksum: vec![],
            meta: (),
        };
        hexpm::Package {
            name: name.as_str().into(),
            // Placeholder repository name for non-Hex packages.
            repository: "local".into(),
            releases: vec![release],
        }
    }
    /// Convert to a `ManifestPackage` entry, with requirements sorted for a
    /// stable manifest serialisation.
    fn to_manifest_package(&self, name: &str) -> ManifestPackage {
        let mut package = ManifestPackage {
            name: name.into(),
            version: self.version.clone(),
            otp_app: None, // Note, this will probably need to be set to something eventually
            build_tools: vec!["gleam".into()],
            requirements: self.requirements.keys().cloned().collect(),
            source: self.source.to_manifest_package_source(),
        };
        package.requirements.sort();
        package
    }
}
impl ProvidedPackageSource {
    /// Convert to the equivalent manifest source variant.
    fn to_manifest_package_source(&self) -> ManifestPackageSource {
        match self {
            Self::Git { repo, commit } => ManifestPackageSource::Git {
                repo: repo.clone(),
                commit: commit.clone(),
            },
            Self::Local { path } => ManifestPackageSource::Local { path: path.clone() },
        }
    }
    /// Render the source as a short inline-table string, used in
    /// dependency-conflict error messages.
    fn to_toml(&self) -> String {
        match self {
            Self::Git { repo, commit } => {
                format!(r#"{{ repo: "{repo}", commit: "{commit}" }}"#)
            }
            Self::Local { path } => {
                format!(r#"{{ path: "{path}" }}"#)
            }
        }
    }
}
impl PartialEq for ProvidedPackageSource {
    /// `Local` sources are compared by whether the two paths refer to the same
    /// file on disc, not by string equality; `Git` sources compare by repo and
    /// commit.
    ///
    /// NOTE(review): `is_same_file` errors (e.g. a path that does not exist)
    /// are treated as "not equal", so a `Local` source may compare unequal to
    /// itself even though `Eq` is implemented — confirm this cannot be hit in
    /// practice.
    fn eq(&self, other: &Self) -> bool {
        match (self, other) {
            (Self::Local { path: own_path }, Self::Local { path: other_path }) => {
                is_same_file(own_path, other_path).unwrap_or(false)
            }
            (
                Self::Git {
                    repo: own_repo,
                    commit: own_commit,
                },
                Self::Git {
                    repo: other_repo,
                    commit: other_commit,
                },
            ) => own_repo == other_repo && own_commit == other_commit,
            (Self::Git { .. }, Self::Local { .. }) | (Self::Local { .. }, Self::Git { .. }) => {
                false
            }
        }
    }
}
/// Provide a package from a local project
///
/// Relative paths are resolved against `parent_path` (the directory of the
/// project that declared this dependency) and canonicalised before being
/// recorded as the package's source.
fn provide_local_package(
    package_name: EcoString,
    package_path: &Utf8Path,
    parent_path: &Utf8Path,
    project_paths: &ProjectPaths,
    provided: &mut HashMap<EcoString, ProvidedPackage>,
    parents: &mut Vec<EcoString>,
) -> Result<hexpm::version::Range> {
    // Absolute paths are used as-is; relative ones are resolved and
    // canonicalised so the same project always yields the same source path.
    let package_path = if package_path.is_absolute() {
        package_path.to_path_buf()
    } else {
        fs::canonicalise(&parent_path.join(package_path))?
    };
    let package_source = ProvidedPackageSource::Local {
        path: package_path.clone(),
    };
    provide_package(
        package_name,
        package_path,
        package_source,
        project_paths,
        provided,
        parents,
    )
}
/// Run `command` to completion, returning its output on success and a shell
/// command error otherwise.
///
/// NOTE(review): the error values hard-code the program name as "git", so this
/// helper is only suitable for running git commands — confirm before reusing
/// it for anything else.
fn execute_command(command: &mut Command) -> Result<std::process::Output> {
    let result = command.output();
    match result {
        // Ran and exited successfully.
        Ok(output) if output.status.success() => Ok(output),
        // Ran but exited with a failure status: surface stderr if it is valid
        // UTF-8.
        Ok(output) => {
            let reason = match String::from_utf8(output.stderr) {
                Ok(stderr) => ShellCommandFailureReason::ShellCommandError(stderr),
                Err(_) => ShellCommandFailureReason::Unknown,
            };
            Err(Error::ShellCommand {
                program: "git".into(),
                reason,
            })
        }
        // The command could not be started at all.
        Err(error) => Err(match error.kind() {
            ErrorKind::NotFound => Error::ShellProgramNotFound {
                program: "git".into(),
                os: fs::get_os(),
            },
            other => Error::ShellCommand {
                program: "git".into(),
                reason: ShellCommandFailureReason::IoError(other),
            },
        }),
    }
}
/// Downloads a git package from a remote repository. The commands that are run
/// looks like this:
///
/// ```sh
/// git init
/// git remote remove origin
/// git remote add origin <repo>
/// git fetch origin
/// git checkout <ref>
/// git rev-parse HEAD
/// ```
///
/// This is somewhat inefficient as we have to fetch the entire git history before
/// switching to the exact commit we want. There a few alternatives to this:
///
/// - `git clone --depth 1 --branch="<ref>"` This works, but only allows us to use
///   branch names as refs, however we want to allow commit hashes as well.
/// - `git fetch --depth 1 origin <ref>` Similarly, this imposes an unwanted
///   restriction. `git fetch` only allows branch names or full commit hashes,
///   but we want to allow partial hashes as well.
///
/// Since Git dependencies will be used quite rarely, this option was settled upon
/// because it allows branch names, full and partial commit hashes as refs.
///
/// In the future we can optimise this more, for example first checking if we
/// are already checked out to the commit stored in the manifest, or by only
/// fetching the history without the objects to resolve partial commit hashes.
/// For now though this is good enough until it become an actual performance
/// problem.
///
fn download_git_package(
    package_name: &str,
    repo: &str,
    ref_: &str,
    project_paths: &ProjectPaths,
) -> Result<EcoString> {
    let package_path = project_paths.build_packages_package(package_name);
    // If the package path exists but is not inside a git work tree, we need to
    // remove the directory because running `git init` in a non-empty directory
    // followed by `git checkout ...` is an error. See
    // https://github.com/gleam-lang/gleam/issues/4488 for details.
    if !fs::is_git_work_tree_root(&package_path) {
        fs::delete_directory(&package_path)?;
    }
    fs::mkdir(&package_path)?;
    let _ = execute_command(Command::new("git").arg("init").current_dir(&package_path))?;
    // If this directory already exists, but the remote URL has been edited in
    // `gleam.toml` without a `gleam clean`, `git remote add` will fail, causing
    // the remote to be stuck as the original value. Here we remove the remote
    // first, which ensures that `git remote add` properly add the remote each
    // time. If this fails, that means we haven't set the remote in the first
    // place, so we can safely ignore the error.
    let _ = Command::new("git")
        .arg("remote")
        .arg("remove")
        .arg("origin")
        .current_dir(&package_path)
        .output();
    let _ = execute_command(
        Command::new("git")
            .arg("remote")
            .arg("add")
            .arg("origin")
            .arg(repo)
            .current_dir(&package_path),
    )?;
    // Fetch the history and switch to the requested ref.
    let _ = execute_command(
        Command::new("git")
            .arg("fetch")
            .arg("origin")
            .current_dir(&package_path),
    )?;
    let _ = execute_command(
        Command::new("git")
            .arg("checkout")
            .arg(ref_)
            .current_dir(&package_path),
    )?;
    // Record the exact commit that was checked out so it can be pinned.
    let output = execute_command(
        Command::new("git")
            .arg("rev-parse")
            .arg("HEAD")
            .current_dir(&package_path),
    )?;
    let commit = String::from_utf8(output.stdout)
        .expect("Output should be UTF-8")
        .trim()
        .into();
    Ok(commit)
}
/// Provide a package from a git repository
///
/// The repository is fetched into the project's `build/packages` directory and
/// the resolved commit hash is recorded as the package's source.
fn provide_git_package(
    package_name: EcoString,
    repo: &str,
    // A git ref, such as a branch name, commit hash or tag name
    ref_: &str,
    project_paths: &ProjectPaths,
    provided: &mut HashMap<EcoString, ProvidedPackage>,
    parents: &mut Vec<EcoString>,
) -> Result<hexpm::version::Range> {
    let commit = download_git_package(&package_name, repo, ref_, project_paths)?;
    let package_source = ProvidedPackageSource::Git {
        repo: repo.into(),
        commit,
    };
    let package_path = fs::canonicalise(&project_paths.build_packages_package(&package_name))?;
    provide_package(
        package_name,
        package_path,
        package_source,
        project_paths,
        provided,
        parents,
    )
}
/// Adds a gleam project located at a specific path to the list of "provided packages"
fn provide_package(
package_name: EcoString,
package_path: Utf8PathBuf,
package_source: ProvidedPackageSource,
project_paths: &ProjectPaths,
provided: &mut HashMap<EcoString, ProvidedPackage>,
parents: &mut Vec<EcoString>,
) -> Result<hexpm::version::Range> {
// Return early if a package cycle is detected
if parents.contains(&package_name) {
let mut last_cycle = parents
.split(|p| p == &package_name)
.next_back()
.unwrap_or_default()
.to_vec();
last_cycle.push(package_name);
return Err(Error::PackageCycle {
packages: last_cycle,
});
}
// Check that we do not have a cached version of this package already
match provided.get(&package_name) {
Some(package) if package.source == package_source => {
// This package has already been provided from this source, return the version
let version = hexpm::version::Range::new(format!("== {}", &package.version))
.expect("== {version} should be a valid range");
return Ok(version);
}
Some(package) => {
// This package has already been provided from a different source which conflicts
return Err(Error::ProvidedDependencyConflict {
package: package_name.into(),
source_1: package_source.to_toml(),
source_2: package.source.to_toml(),
});
}
None => (),
}
// Load the package
let config = crate::config::read(package_path.join("gleam.toml"))?;
// Check that we are loading the correct project
if config.name != package_name {
return Err(Error::WrongDependencyProvided {
expected: package_name.into(),
path: package_path.to_path_buf(),
found: config.name.into(),
});
};
// Walk the requirements of the package
let mut requirements = HashMap::new();
parents.push(package_name);
for (name, requirement) in config.dependencies.into_iter() {
let version = match requirement {
| rust | Apache-2.0 | f424547f02e621f1c5f28749786e05eda7feb098 | 2026-01-04T15:40:22.554517Z | true |
gleam-lang/gleam | https://github.com/gleam-lang/gleam/blob/f424547f02e621f1c5f28749786e05eda7feb098/compiler-cli/src/add.rs | compiler-cli/src/add.rs | use camino::{Utf8Path, Utf8PathBuf};
use gleam_core::{
Error, Result,
error::{FileIoAction, FileKind},
paths::ProjectPaths,
};
use crate::{
cli,
dependencies::{self, parse_gleam_add_specifier},
fs,
};
/// The `gleam add` command: add new dependencies to the project.
///
/// Versions are resolved and downloaded, then a requirement locked to the
/// selected major version (`>= x.y.z and < (x+1).0.0`) is written to
/// `gleam.toml` — under `dependencies`, or `dev-dependencies` when `dev` is
/// set — and to `manifest.toml`.
pub fn command(paths: &ProjectPaths, packages_to_add: Vec<String>, dev: bool) -> Result<()> {
    let config = crate::config::root_config(paths)?;
    // Refuse to add the project as a dependency of itself.
    if packages_to_add.iter().any(|name| name == &config.name) {
        return Err(Error::CannotAddSelfAsDependency {
            name: config.name.clone(),
        });
    }
    let mut new_package_requirements = Vec::with_capacity(packages_to_add.len());
    for specifier in packages_to_add {
        new_package_requirements.push(parse_gleam_add_specifier(&specifier)?);
    }
    // Insert the new packages into the manifest and perform dependency
    // resolution to determine suitable versions
    let manifest = dependencies::resolve_and_download(
        paths,
        cli::Reporter::new(),
        Some((new_package_requirements.clone(), dev)),
        Vec::new(),
        dependencies::DependencyManagerConfig {
            use_manifest: dependencies::UseManifest::Yes,
            check_major_versions: dependencies::CheckMajorVersions::No,
        },
    )?;
    // Read gleam.toml and manifest.toml so we can insert new deps into it.
    // `toml_edit` is used so the user's existing formatting is preserved.
    let mut gleam_toml = read_toml_edit(&paths.root_config())?;
    let mut manifest_toml = read_toml_edit(&paths.manifest())?;
    // Insert the new deps
    for (added_package, _) in new_package_requirements {
        let added_package = added_package.to_string();
        // Pull the selected version out of the new manifest so we know what it is
        let version = &manifest
            .packages
            .iter()
            .find(|package| package.name == *added_package)
            .expect("Added package not found in resolved manifest")
            .version;
        tracing::info!(version=%version, "new_package_version_resolved");
        // Produce a version requirement locked to the major version.
        // i.e. if 1.2.3 is selected we want >= 1.2.3 and < 2.0.0
        let range = format!(
            ">= {}.{}.{} and < {}.0.0",
            version.major,
            version.minor,
            version.patch,
            version.major + 1
        );
        // False positive. This package doesn't use the indexing API correctly.
        #[allow(clippy::indexing_slicing)]
        {
            if dev {
                // Create the target table first if it is not already present.
                if !gleam_toml.as_table().contains_key("dev-dependencies") {
                    gleam_toml["dev-dependencies"] = toml_edit::table();
                }
                gleam_toml["dev-dependencies"][&added_package] = toml_edit::value(range.clone());
            } else {
                if !gleam_toml.as_table().contains_key("dependencies") {
                    gleam_toml["dependencies"] = toml_edit::table();
                }
                gleam_toml["dependencies"][&added_package] = toml_edit::value(range.clone());
            };
            manifest_toml["requirements"][&added_package]["version"] = range.into();
        }
    }
    // Write the updated config
    fs::write(&paths.root_config(), &gleam_toml.to_string())?;
    fs::write(&paths.manifest(), &manifest_toml.to_string())?;
    Ok(())
}
/// Read and parse a TOML file into an editable document that preserves the
/// file's formatting and comments.
///
/// # Errors
///
/// Returns a `FileIo` error if the file cannot be read or is not valid TOML.
fn read_toml_edit(name: &Utf8Path) -> Result<toml_edit::DocumentMut, Error> {
    fs::read(name)?
        .parse::<toml_edit::DocumentMut>()
        .map_err(|e| Error::FileIo {
            kind: FileKind::File,
            action: FileIoAction::Parse,
            // Report the file that actually failed. Previously this was
            // hard-coded to "gleam.toml", which was wrong when this helper was
            // used to read manifest.toml.
            path: name.to_path_buf(),
            err: Some(e.to_string()),
        })
}
| rust | Apache-2.0 | f424547f02e621f1c5f28749786e05eda7feb098 | 2026-01-04T15:40:22.554517Z | false |
gleam-lang/gleam | https://github.com/gleam-lang/gleam/blob/f424547f02e621f1c5f28749786e05eda7feb098/compiler-cli/src/docs.rs | compiler-cli/src/docs.rs | use std::{
collections::HashMap,
time::{Instant, SystemTime},
};
use camino::{Utf8Path, Utf8PathBuf};
use ecow::EcoString;
use crate::{cli, fs::ProjectIO, http::HttpClient};
use gleam_core::{
Result,
analyse::TargetSupport,
build::{Codegen, Compile, Mode, Options, Package, Target},
config::{DocsPage, PackageConfig},
docs::{Dependency, DependencyKind, DocContext},
error::Error,
hex,
io::HttpClient as _,
manifest::ManifestPackageSource,
paths::ProjectPaths,
type_,
};
/// Remove the published documentation for the given package version from
/// HexDocs.
pub fn remove(package: String, version: String) -> Result<()> {
    let runtime = tokio::runtime::Runtime::new().expect("Unable to start Tokio async runtime");
    let hex_config = hexpm::Config::new();
    // Obtain (or create) a Hex API key to authenticate the request.
    let api_key =
        crate::hex::HexAuthentication::new(&runtime, hex_config.clone()).get_or_create_api_key()?;
    let http = HttpClient::new();
    // Remove docs from API
    let request = hexpm::api_remove_docs_request(&package, &version, &api_key, &hex_config)
        .map_err(Error::hex)?;
    let response = runtime.block_on(http.send(request))?;
    hexpm::api_remove_docs_response(response).map_err(Error::hex)?;
    // Done!
    println!("The docs for {package} {version} have been removed from HexDocs");
    Ok(())
}
/// Options for building documentation locally.
#[derive(Debug)]
pub struct BuildOptions {
    /// Whether to open the docs after building.
    pub open: bool,
    /// Optional compilation target to build the documentation for.
    pub target: Option<Target>,
}
/// Compile the project in production mode, render its HTML documentation into
/// the build directory, and optionally open it in the system's default
/// program.
pub fn build(paths: &ProjectPaths, options: BuildOptions) -> Result<()> {
    let config = crate::config::root_config(paths)?;
    // Reset the build directory so we know the state of the project
    crate::fs::delete_directory(&paths.build_directory_for_target(Mode::Prod, config.target))?;
    let out = paths.build_documentation_directory(&config.name);
    let manifest = crate::build::download_dependencies(paths, cli::Reporter::new())?;
    // Map each dependency to its version and source kind (Hex, git or local
    // path) for the docs renderer.
    let dependencies = manifest
        .packages
        .iter()
        .map(|package| {
            (
                package.name.clone(),
                Dependency {
                    version: package.version.clone(),
                    kind: match &package.source {
                        ManifestPackageSource::Hex { .. } => DependencyKind::Hex,
                        ManifestPackageSource::Git { .. } => DependencyKind::Git,
                        ManifestPackageSource::Local { .. } => DependencyKind::Path,
                    },
                },
            )
        })
        .collect();
    // Compile everything before rendering the documentation.
    let mut built = crate::build::main(
        paths,
        Options {
            mode: Mode::Prod,
            target: options.target,
            codegen: Codegen::All,
            compile: Compile::All,
            warnings_as_errors: false,
            root_target_support: TargetSupport::Enforced,
            no_print_progress: false,
        },
        manifest,
    )?;
    let outputs = build_documentation(
        paths,
        &config,
        dependencies,
        &mut built.root_package,
        DocContext::Build,
        &built.module_interfaces,
    )?;
    // Write
    crate::fs::delete_directory(&out)?;
    crate::fs::write_outputs_under(&outputs, &out)?;
    let index_html = out.join("index.html");
    println!(
        "\nThe documentation for {package} has been rendered to \n{index_html}",
        package = config.name,
        index_html = index_html
    );
    if options.open {
        open_docs(&index_html)?;
    }
    // We're done!
    Ok(())
}
/// Opens the indicated path in the default program configured by the system.
///
/// For the docs this will generally be a browser (unless some other program is
/// configured as the default for `.html` files).
fn open_docs(path: &Utf8Path) -> Result<()> {
opener::open(path).map_err(|error| Error::FailedToOpenDocs {
path: path.to_path_buf(),
error: error.to_string(),
})?;
Ok(())
}
/// Render HTML documentation (plus the JSON package interface file) for a
/// compiled package, returning the output files to be written.
///
/// `context` records whether the documentation is being built locally or for
/// publishing to Hex. (Renamed from the misleading `is_hex_publish` — it is a
/// `DocContext`, not a boolean; parameter names are not part of the call
/// syntax in Rust so callers are unaffected.)
pub(crate) fn build_documentation(
    paths: &ProjectPaths,
    config: &PackageConfig,
    dependencies: HashMap<EcoString, Dependency>,
    compiled: &mut Package,
    context: DocContext,
    cached_modules: &im::HashMap<EcoString, type_::ModuleInterface>,
) -> Result<Vec<gleam_core::io::OutputFile>, Error> {
    compiled.attach_doc_and_module_comments();
    cli::print_generating_documentation();
    // The README is always the landing page; further pages come from the
    // package configuration.
    let mut pages = vec![DocsPage {
        title: "README".into(),
        path: "index.html".into(),
        source: paths.readme(), // TODO: support non markdown READMEs. Or a default if there is none.
    }];
    pages.extend(config.documentation.pages.iter().cloned());
    let mut outputs = gleam_core::docs::generate_html(
        paths,
        gleam_core::docs::DocumentationConfig {
            package_config: config,
            dependencies,
            analysed: compiled.modules.as_slice(),
            docs_pages: &pages,
            rendering_timestamp: SystemTime::now(),
            context,
        },
        ProjectIO::new(),
    );
    outputs.push(gleam_core::docs::generate_json_package_interface(
        Utf8PathBuf::from("package-interface.json"),
        compiled,
        cached_modules,
    ));
    Ok(outputs)
}
/// Build the project's documentation and upload it to HexDocs for the
/// configured package name and version.
pub fn publish(paths: &ProjectPaths) -> Result<()> {
    let config = crate::config::root_config(paths)?;
    let runtime = tokio::runtime::Runtime::new().expect("Unable to start Tokio async runtime");
    let hex_config = hexpm::Config::new();
    // Obtain (or create) a Hex API key before doing any build work.
    let api_key =
        crate::hex::HexAuthentication::new(&runtime, hex_config.clone()).get_or_create_api_key()?;
    // Reset the build directory so we know the state of the project
    crate::fs::delete_directory(&paths.build_directory_for_target(Mode::Prod, config.target))?;
    let manifest = crate::build::download_dependencies(paths, cli::Reporter::new())?;
    // Map each dependency to its version and source kind (Hex, git or local
    // path) for the docs renderer.
    // NOTE(review): this mapping duplicates the one in `build` — consider
    // extracting a shared helper.
    let dependencies = manifest
        .packages
        .iter()
        .map(|package| {
            (
                package.name.clone(),
                Dependency {
                    version: package.version.clone(),
                    kind: match &package.source {
                        ManifestPackageSource::Hex { .. } => DependencyKind::Hex,
                        ManifestPackageSource::Git { .. } => DependencyKind::Git,
                        ManifestPackageSource::Local { .. } => DependencyKind::Path,
                    },
                },
            )
        })
        .collect();
    let mut built = crate::build::main(
        paths,
        Options {
            root_target_support: TargetSupport::Enforced,
            warnings_as_errors: false,
            codegen: Codegen::All,
            compile: Compile::All,
            mode: Mode::Prod,
            target: None,
            no_print_progress: false,
        },
        manifest,
    )?;
    let outputs = build_documentation(
        paths,
        &config,
        dependencies,
        &mut built.root_package,
        DocContext::HexPublish,
        &built.module_interfaces,
    )?;
    // Bundle the rendered docs into a tarball and upload it to Hex.
    let archive = crate::fs::create_tar_archive(outputs)?;
    let start = Instant::now();
    cli::print_publishing_documentation();
    runtime.block_on(hex::publish_documentation(
        &config.name,
        &config.version,
        archive,
        &api_key,
        &hex_config,
        &HttpClient::new(),
    ))?;
    cli::print_published(start.elapsed());
    Ok(())
}
| rust | Apache-2.0 | f424547f02e621f1c5f28749786e05eda7feb098 | 2026-01-04T15:40:22.554517Z | false |
gleam-lang/gleam | https://github.com/gleam-lang/gleam/blob/f424547f02e621f1c5f28749786e05eda7feb098/compiler-cli/src/remove.rs | compiler-cli/src/remove.rs | use gleam_core::{
Error, Result,
error::{FileIoAction, FileKind},
paths::ProjectPaths,
};
use crate::{cli, fs};
/// The `gleam remove` command: remove dependencies from the project.
///
/// Deletes the given packages from `[dependencies]` and/or
/// `[dev-dependencies]` in `gleam.toml`, erroring if any was present in
/// neither table, then cleans up packages that are no longer needed.
pub fn command(paths: &ProjectPaths, packages: Vec<String>) -> Result<()> {
    // Read gleam.toml so we can remove deps from it
    let root_config = paths.root_config();
    let mut toml = fs::read(&root_config)?
        .parse::<toml_edit::DocumentMut>()
        .map_err(|e| Error::FileIo {
            kind: FileKind::File,
            action: FileIoAction::Parse,
            path: root_config.to_path_buf(),
            err: Some(e.to_string()),
        })?;
    // Remove the specified dependencies
    let mut packages_not_exist = vec![];
    for package_to_remove in packages.iter() {
        // Try both tables; `remove` yields the removed item if it was present.
        #[allow(clippy::indexing_slicing)]
        let maybe_removed_item = toml["dependencies"]
            .as_table_like_mut()
            .and_then(|deps| deps.remove(package_to_remove));
        #[allow(clippy::indexing_slicing)]
        let maybe_removed_dev_item = toml["dev-dependencies"]
            .as_table_like_mut()
            .and_then(|deps| deps.remove(package_to_remove));
        // Present in neither table: collect so all missing packages are
        // reported in a single error.
        if maybe_removed_item.or(maybe_removed_dev_item).is_none() {
            packages_not_exist.push(package_to_remove.into());
        }
    }
    if !packages_not_exist.is_empty() {
        return Err(Error::RemovedPackagesNotExist {
            packages: packages_not_exist,
        });
    }
    // Write the updated config
    fs::write(root_config.as_path(), &toml.to_string())?;
    // Delete packages that are no longer required from the build directory.
    _ = crate::dependencies::cleanup(paths, cli::Reporter::new())?;
    Ok(())
}
| rust | Apache-2.0 | f424547f02e621f1c5f28749786e05eda7feb098 | 2026-01-04T15:40:22.554517Z | false |
gleam-lang/gleam | https://github.com/gleam-lang/gleam/blob/f424547f02e621f1c5f28749786e05eda7feb098/compiler-cli/src/text_layout.rs | compiler-cli/src/text_layout.rs | use ecow::EcoString;
/// Generates a string delimited table with 2 spaces between each column, columns padded with
/// enough spaces to be aligned, and hyphens under the headers (excluding the final column of each
/// row). Rows should have the right number of columns.
///
/// ## Example
///
/// ```txt
/// Package  Current  Latest
/// -------  -------  ------
/// wibble   1.4.0    1.4.1
/// wobble   1.0.1    2.3.0
/// ```
///
pub fn space_table<Grid, Row, Cell>(headers: &[impl AsRef<str>], data: Grid) -> EcoString
where
    Grid: AsRef<[Row]>,
    Row: AsRef<[Cell]>,
    Cell: AsRef<str>,
{
    let mut output = EcoString::new();
    // Each column is as wide as its widest cell, headers included.
    let mut column_widths: Vec<usize> =
        headers.iter().map(|header| header.as_ref().len()).collect();
    for row in data.as_ref() {
        for (index, cell) in row.as_ref().iter().enumerate() {
            // Cells beyond the header count do not get a column.
            if let Some(width) = column_widths.get_mut(index) {
                *width = (*width).max(cell.as_ref().len());
            }
        }
    }
    // Index of the final column; that column is never right-padded.
    let last = headers.len().saturating_sub(1);
    // Header row.
    for (index, (header, width)) in headers.iter().zip(column_widths.iter()).enumerate() {
        push_cell(&mut output, index, last, header.as_ref(), *width);
    }
    output.push('\n');
    // Separator row: hyphens matching each header's length.
    for (index, (header, width)) in headers.iter().zip(column_widths.iter()).enumerate() {
        push_cell(&mut output, index, last, &"-".repeat(header.as_ref().len()), *width);
    }
    output.push('\n');
    // Data rows; extra cells are truncated by the zip with the column widths.
    for row in data.as_ref() {
        for (index, (cell, width)) in row.as_ref().iter().zip(column_widths.iter()).enumerate() {
            push_cell(&mut output, index, last, cell.as_ref(), *width);
        }
        output.push('\n');
    }
    output
}
/// Write one table cell to `output`: the two-space column separator (for all
/// but the first column), the cell text, then — for every column except the
/// last — enough spaces to pad the cell out to the column width.
fn push_cell(output: &mut EcoString, index: usize, last: usize, text: &str, width: usize) {
    if index > 0 {
        output.push_str("  ");
    }
    output.push_str(text);
    if index < last {
        let padding = width - text.len();
        if padding > 0 {
            output.push_str(&" ".repeat(padding));
        }
    }
}
| rust | Apache-2.0 | f424547f02e621f1c5f28749786e05eda7feb098 | 2026-01-04T15:40:22.554517Z | false |
gleam-lang/gleam | https://github.com/gleam-lang/gleam/blob/f424547f02e621f1c5f28749786e05eda7feb098/compiler-cli/src/beam_compiler.rs | compiler-cli/src/beam_compiler.rs | use gleam_core::{
Result,
error::{Error, ShellCommandFailureReason},
io::{FileSystemWriter, Stdio},
paths,
};
use crate::fs::get_os;
use std::{
collections::HashSet,
io::{self, BufRead, BufReader, Write},
process::{Child, ChildStdin, ChildStdout},
};
use camino::{Utf8Path, Utf8PathBuf};
use itertools::Itertools;
/// A running `escript` helper process plus the pipes used to talk to it.
#[derive(Debug)]
struct BeamCompilerInner {
    process: Child,
    stdin: ChildStdin,
    stdout: BufReader<ChildStdout>,
}
/// Compiles Erlang sources to BEAM bytecode by driving a long-lived `escript`
/// helper process, which is spawned on first use and reused across calls.
#[derive(Debug, Default)]
pub struct BeamCompiler {
    // `None` until the first compile; replaced if the helper process exits.
    inner: Option<BeamCompilerInner>,
}
impl BeamCompiler {
    /// Compile the given module files, returning the values the helper reports
    /// via `gleam-compile-module:` lines (presumably module names — defined by
    /// the `gleam@@compile.erl` template).
    ///
    /// The request is written to the helper's stdin as an Erlang term
    /// `{Lib, Ebin, Modules}.` and stdout is read line by line for
    /// `gleam-compile-...` protocol markers; any other output is forwarded or
    /// discarded according to `stdio`.
    pub fn compile<IO: FileSystemWriter>(
        &mut self,
        io: &IO,
        out: &Utf8Path,
        lib: &Utf8Path,
        modules: &HashSet<Utf8PathBuf>,
        stdio: Stdio,
    ) -> Result<Vec<String>, Error> {
        // Reuse the existing helper if it is still running, otherwise spawn a
        // fresh one.
        let inner = match self.inner {
            Some(ref mut inner) => match inner.process.try_wait() {
                Ok(None) => inner,
                _ => self.inner.insert(self.spawn(io, out)?),
            },
            None => self.inner.insert(self.spawn(io, out)?),
        };
        // Build the Erlang tuple `{Lib, Ebin, [Module, ...]}` as text.
        let args = format!(
            "{{\"{}\", \"{}\", [\"{}\"]}}",
            escape_path(lib),
            escape_path(out.join("ebin")),
            modules
                .iter()
                .map(|module| escape_path(out.join(paths::ARTEFACT_DIRECTORY_NAME).join(module)))
                .join("\", \"")
        );
        tracing::debug!(args=?args, "call_beam_compiler");
        writeln!(inner.stdin, "{args}.").map_err(|e| Error::ShellCommand {
            program: "escript".into(),
            reason: ShellCommandFailureReason::IoError(e.kind()),
        })?;
        let mut buf = String::new();
        let mut accumulated_modules: Vec<String> = Vec::new();
        // Read lines until the helper reports success/failure or exits.
        while let (Ok(_), Ok(None)) = (inner.stdout.read_line(&mut buf), inner.process.try_wait()) {
            match buf.trim() {
                "gleam-compile-result-ok" => {
                    // Return Ok with the accumulated modules
                    return Ok(accumulated_modules);
                }
                "gleam-compile-result-error" => {
                    return Err(Error::ShellCommand {
                        program: "escript".into(),
                        reason: ShellCommandFailureReason::Unknown,
                    });
                }
                s if s.starts_with("gleam-compile-module:") => {
                    if let Some(module_content) = s.strip_prefix("gleam-compile-module:") {
                        accumulated_modules.push(module_content.to_string());
                    }
                }
                // Anything else is the compiler's own output; forward or drop
                // it depending on the requested stdio handling.
                _ => match stdio {
                    Stdio::Inherit => print!("{buf}"),
                    Stdio::Null => {}
                },
            }
            buf.clear()
        }
        // if we get here, stdout got closed before we got an "ok" or "err".
        Err(Error::ShellCommand {
            program: "escript".into(),
            reason: ShellCommandFailureReason::Unknown,
        })
    }
    /// Write the bundled `gleam@@compile.erl` escript into the artefact
    /// directory and spawn it with piped stdin/stdout.
    fn spawn<IO: FileSystemWriter>(
        &self,
        io: &IO,
        out: &Utf8Path,
    ) -> Result<BeamCompilerInner, Error> {
        let escript_path = out
            .join(paths::ARTEFACT_DIRECTORY_NAME)
            .join("gleam@@compile.erl");
        // The helper script is embedded in the gleam binary at build time.
        let escript_source = std::include_str!("../templates/gleam@@compile.erl");
        io.write(&escript_path, escript_source)?;
        tracing::trace!(escript_path=?escript_path, "spawn_beam_compiler");
        let mut process = std::process::Command::new("escript")
            .arg(escript_path)
            .stdin(std::process::Stdio::piped())
            .stdout(std::process::Stdio::piped())
            .spawn()
            .map_err(|e| match e.kind() {
                io::ErrorKind::NotFound => Error::ShellProgramNotFound {
                    program: "escript".into(),
                    os: get_os(),
                },
                other => Error::ShellCommand {
                    program: "escript".into(),
                    reason: ShellCommandFailureReason::IoError(other),
                },
            })?;
        let stdin = process.stdin.take().expect("could not get child stdin");
        let stdout = process.stdout.take().expect("could not get child stdout");
        Ok(BeamCompilerInner {
            process,
            stdin,
            stdout: BufReader::new(stdout),
        })
    }
}
impl Drop for BeamCompiler {
    // Shut the compiler process down cleanly when the wrapper is dropped.
    fn drop(&mut self) {
        if let Some(mut inner) = self.inner.take() {
            // closing stdin will cause the erlang process to exit.
            drop(inner.stdin);
            // Reap the child so it does not linger as a zombie.
            let _ = inner.process.wait();
        }
    }
}
/// Escape a path for embedding inside a double-quoted Erlang string
/// literal. Backslashes (Windows separators) and double quotes would
/// otherwise terminate or corrupt the generated term sent to `escript`.
fn escape_path<T: AsRef<str>>(path: T) -> String {
    // Escape backslashes first so the quote escapes added below are not
    // themselves double-escaped.
    path.as_ref().replace('\\', "\\\\").replace('"', "\\\"")
}
| rust | Apache-2.0 | f424547f02e621f1c5f28749786e05eda7feb098 | 2026-01-04T15:40:22.554517Z | false |
gleam-lang/gleam | https://github.com/gleam-lang/gleam/blob/f424547f02e621f1c5f28749786e05eda7feb098/compiler-cli/src/new.rs | compiler-cli/src/new.rs | use camino::{Utf8Path, Utf8PathBuf};
use clap::ValueEnum;
use gleam_core::{
Result, erlang, error,
error::{Error, FileIoAction, FileKind, InvalidProjectNameReason},
parse,
};
use serde::{Deserialize, Serialize};
use std::fs::File;
use std::{env, io::Write};
use strum::{Display, EnumIter, EnumString, IntoEnumIterator, VariantNames};
#[cfg(test)]
mod tests;
use crate::{NewOptions, fs::get_current_directory};
// Version requirements written into generated `gleam.toml` files.
const GLEAM_STDLIB_REQUIREMENT: &str = ">= 0.44.0 and < 2.0.0";
const GLEEUNIT_REQUIREMENT: &str = ">= 1.0.0 and < 2.0.0";
// Tool versions pinned in the generated GitHub Actions workflow.
const ERLANG_OTP_VERSION: &str = "28";
const REBAR3_VERSION: &str = "3";
const ELIXIR_VERSION: &str = "1";
/// Project template selectable with `gleam new`. `Erlang` and
/// `JavaScript` decide the compile target written into the generated
/// `gleam.toml`; `Lib` is hidden from the CLI.
#[derive(
    Debug, Serialize, Deserialize, Display, EnumString, VariantNames, ValueEnum, Clone, Copy,
)]
#[strum(serialize_all = "lowercase")]
#[clap(rename_all = "lower")]
pub enum Template {
    #[clap(skip)]
    Lib,
    Erlang,
    JavaScript,
}
/// Everything needed to scaffold a new project: the resolved output
/// directories, the validated project name, and the user's options.
#[derive(Debug)]
pub struct Creator {
    root: Utf8PathBuf,
    src: Utf8PathBuf,
    test: Utf8PathBuf,
    github: Utf8PathBuf,
    workflows: Utf8PathBuf,
    // Compiler version string interpolated into the CI workflow file.
    gleam_version: &'static str,
    options: NewOptions,
    project_name: String,
}
/// The files a new project may contain. `EnumIter` lets the creator and
/// the pre-flight duplicate check walk every potential file.
#[derive(EnumIter, PartialEq, Eq, Debug, Hash)]
enum FileToCreate {
    Readme,
    Gitignore,
    SrcModule,
    TestModule,
    GleamToml,
    GithubCi,
}
impl FileToCreate {
    /// The on-disc path this file will be written to, inside the
    /// directories resolved by `creator`.
    pub fn location(&self, creator: &Creator) -> Utf8PathBuf {
        let project_name = &creator.project_name;
        match self {
            Self::Readme => creator.root.join(Utf8PathBuf::from("README.md")),
            Self::Gitignore => creator.root.join(Utf8PathBuf::from(".gitignore")),
            Self::SrcModule => creator
                .src
                .join(Utf8PathBuf::from(format!("{project_name}.gleam"))),
            Self::TestModule => creator
                .test
                .join(Utf8PathBuf::from(format!("{project_name}_test.gleam"))),
            Self::GleamToml => creator.root.join(Utf8PathBuf::from("gleam.toml")),
            Self::GithubCi => creator.workflows.join(Utf8PathBuf::from("test.yml")),
        }
    }
    /// The rendered file contents, or `None` when the file should be
    /// skipped for the chosen options (e.g. git/GitHub files when the
    /// corresponding skip flag is set).
    pub fn contents(&self, creator: &Creator) -> Option<String> {
        let project_name = &creator.project_name;
        let skip_git = creator.options.skip_git;
        let skip_github = creator.options.skip_github;
        let gleam_version = creator.gleam_version;
        // Only the JavaScript template overrides the default target.
        let target = match creator.options.template {
            Template::JavaScript => "target = \"javascript\"\n",
            Template::Lib | Template::Erlang => "",
        };
        match self {
            Self::Readme => Some(format!(
                r#"# {project_name}
[](https://hex.pm/packages/{project_name})
[](https://hexdocs.pm/{project_name}/)
```sh
gleam add {project_name}@1
```
```gleam
import {project_name}
pub fn main() -> Nil {{
  // TODO: An example of the project in use
}}
```
Further documentation can be found at <https://hexdocs.pm/{project_name}>.
## Development
```sh
gleam run # Run the project
gleam test # Run the tests
```
"#,
            )),
            Self::Gitignore if !skip_git => Some(
                "*.beam
*.ez
/build
erl_crash.dump
"
                .into(),
            ),
            Self::SrcModule => Some(format!(
                r#"import gleam/io
pub fn main() -> Nil {{
  io.println("Hello from {project_name}!")
}}
"#,
            )),
            Self::TestModule => Some(
                r#"import gleeunit
pub fn main() -> Nil {
  gleeunit.main()
}
// gleeunit test functions end in `_test`
pub fn hello_world_test() {
  let name = "Joe"
  let greeting = "Hello, " <> name <> "!"
  assert greeting == "Hello, Joe!"
}
"#
                .into(),
            ),
            Self::GleamToml => Some(format!(
                r#"name = "{project_name}"
version = "1.0.0"
{target}
# Fill out these fields if you intend to generate HTML documentation or publish
# your project to the Hex package manager.
#
# description = ""
# licences = ["Apache-2.0"]
# repository = {{ type = "github", user = "", repo = "" }}
# links = [{{ title = "Website", href = "" }}]
#
# For a full reference of all the available options, you can have a look at
# https://gleam.run/writing-gleam/gleam-toml/.
[dependencies]
gleam_stdlib = "{GLEAM_STDLIB_REQUIREMENT}"
[dev-dependencies]
gleeunit = "{GLEEUNIT_REQUIREMENT}"
"#,
            )),
            Self::GithubCi if !skip_git && !skip_github => Some(format!(
                r#"name: test
on:
  push:
    branches:
      - master
      - main
  pull_request:
jobs:
  test:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: erlef/setup-beam@v1
        with:
          otp-version: "{ERLANG_OTP_VERSION}"
          gleam-version: "{gleam_version}"
          rebar3-version: "{REBAR3_VERSION}"
          # elixir-version: "{ELIXIR_VERSION}"
      - run: gleam deps download
      - run: gleam test
      - run: gleam format --check src test
"#,
            )),
            // Git/GitHub files suppressed by the skip flags above.
            Self::GithubCi | Self::Gitignore => None,
        }
    }
}
impl Creator {
    /// Build a `Creator`, using the interactive CLI prompt to confirm any
    /// suggested replacement for an invalid project name.
    fn new(options: NewOptions, gleam_version: &'static str) -> Result<Self, Error> {
        Self::new_with_confirmation(options, gleam_version, crate::cli::confirm)
    }
    /// Validate (and possibly rename) the project, resolve all target
    /// directories, and ensure no generated file would be overwritten.
    /// `confirm` is injected so tests can bypass the interactive prompt.
    fn new_with_confirmation(
        mut options: NewOptions,
        gleam_version: &'static str,
        confirm: impl Fn(&str) -> Result<bool, Error>,
    ) -> Result<Self, Error> {
        let name =
            get_valid_project_name(options.name.as_deref(), &options.project_root, &confirm)?;
        // When the name was derived from the folder and then changed,
        // move the project root so the directory matches the new name.
        options.project_root = name.project_root(&options.project_root);
        let root = get_current_directory()?.join(&options.project_root);
        let src = root.join("src");
        let test = root.join("test");
        let github = root.join(".github");
        let workflows = github.join("workflows");
        let me = Self {
            root: root.clone(),
            src,
            test,
            github,
            workflows,
            gleam_version,
            options,
            project_name: name.decided().to_string(),
        };
        // Refuse to clobber files already present in the target folder.
        validate_root_folder(&me)?;
        Ok(me)
    }
    /// Create the directory layout and write every template file the
    /// chosen options call for.
    fn run(&self) -> Result<()> {
        crate::fs::mkdir(&self.root)?;
        crate::fs::mkdir(&self.src)?;
        crate::fs::mkdir(&self.test)?;
        if !self.options.skip_git && !self.options.skip_github {
            crate::fs::mkdir(&self.github)?;
            crate::fs::mkdir(&self.workflows)?;
        }
        if !self.options.skip_git {
            crate::fs::git_init(&self.root)?;
        }
        match self.options.template {
            Template::Lib | Template::Erlang | Template::JavaScript => {
                for file in FileToCreate::iter() {
                    let path = file.location(self);
                    // `contents` returns None for files the options skip.
                    if let Some(contents) = file.contents(self) {
                        write(path, &contents)?;
                    }
                }
            }
        }
        Ok(())
    }
}
/// Entry point for `gleam new`: scaffold the project, then print
/// follow-up instructions, omitting the `cd` step when the project was
/// created in the current directory.
pub fn create(options: NewOptions, version: &'static str) -> Result<()> {
    let creator = Creator::new(options.clone(), version)?;
    creator.run()?;
    let cd_folder = if options.project_root == "." {
        "".into()
    } else {
        // Use the creator's root: it may differ from the requested one
        // if a suggested name was accepted.
        format!("\tcd {}\n", creator.options.project_root)
    };
    println!(
        "Your Gleam project {} has been successfully created.
The project can be compiled and tested by running these commands:
{}\tgleam test
",
        creator.project_name, cd_folder,
    );
    Ok(())
}
fn write(path: Utf8PathBuf, contents: &str) -> Result<()> {
let mut f = File::create(&path).map_err(|err| Error::FileIo {
kind: FileKind::File,
path: path.clone(),
action: FileIoAction::Create,
err: Some(err.to_string()),
})?;
f.write_all(contents.as_bytes())
.map_err(|err| Error::FileIo {
kind: FileKind::File,
path,
action: FileIoAction::WriteTo,
err: Some(err.to_string()),
})?;
Ok(())
}
fn validate_root_folder(creator: &Creator) -> Result<(), Error> {
let mut duplicate_files: Vec<Utf8PathBuf> = Vec::new();
for t in FileToCreate::iter() {
let full_path = t.location(creator);
let content = t.contents(creator);
if full_path.exists() && content.is_some() {
duplicate_files.push(full_path);
}
}
if !duplicate_files.is_empty() {
return Err(Error::OutputFilesAlreadyExist {
file_names: duplicate_files,
});
}
Ok(())
}
/// Check that `name` is a legal Gleam project name, returning the most
/// specific `InvalidProjectNameReason` when it is not. The order of the
/// checks matters: reserved-word checks run before the general format
/// checks so the reported reason is as precise as possible.
fn validate_name(name: &str) -> Result<(), Error> {
    if name.starts_with("gleam_") {
        Err(Error::InvalidProjectName {
            name: name.to_string(),
            reason: InvalidProjectNameReason::GleamPrefix,
        })
    } else if erlang::is_erlang_reserved_word(name) {
        Err(Error::InvalidProjectName {
            name: name.to_string(),
            reason: InvalidProjectNameReason::ErlangReservedWord,
        })
    } else if erlang::is_erlang_standard_library_module(name) {
        Err(Error::InvalidProjectName {
            name: name.to_string(),
            reason: InvalidProjectNameReason::ErlangStandardLibraryModule,
        })
    } else if parse::lexer::str_to_keyword(name).is_some() {
        Err(Error::InvalidProjectName {
            name: name.to_string(),
            reason: InvalidProjectNameReason::GleamReservedWord,
        })
    } else if name == "gleam" {
        Err(Error::InvalidProjectName {
            name: name.to_string(),
            reason: InvalidProjectNameReason::GleamReservedModule,
        })
    } else if regex::Regex::new("^[a-z][a-z0-9_]*$")
        .expect("failed regex to match valid name format")
        .is_match(name)
    {
        Ok(())
    } else if regex::Regex::new("^[a-zA-Z][a-zA-Z0-9_]*$")
        .expect("failed regex to match valid but non-lowercase name format")
        .is_match(name)
    {
        // Alphanumeric but contains uppercase: fixable by lowercasing.
        Err(Error::InvalidProjectName {
            name: name.to_string(),
            reason: InvalidProjectNameReason::FormatNotLowercase,
        })
    } else {
        Err(Error::InvalidProjectName {
            name: name.to_string(),
            reason: InvalidProjectNameReason::Format,
        })
    }
}
/// Propose a corrected project name for the given rejection reason, or
/// `None` when no sensible automatic fix exists. Derived suggestions are
/// re-validated before being offered.
fn suggest_valid_name(invalid_name: &str, reason: &InvalidProjectNameReason) -> Option<String> {
    match reason {
        InvalidProjectNameReason::GleamPrefix => match invalid_name.strip_prefix("gleam_") {
            // `"gleam_"` alone would leave an empty name; suggest nothing.
            Some(stripped) if invalid_name != "gleam_" => {
                let suggestion = stripped.to_string();
                match validate_name(&suggestion) {
                    Ok(_) => Some(suggestion),
                    Err(_) => None,
                }
            }
            _ => None,
        },
        InvalidProjectNameReason::ErlangReservedWord => Some(format!("{invalid_name}_app")),
        InvalidProjectNameReason::ErlangStandardLibraryModule => {
            Some(format!("{invalid_name}_app"))
        }
        InvalidProjectNameReason::GleamReservedWord => Some(format!("{invalid_name}_app")),
        InvalidProjectNameReason::GleamReservedModule => {
            if invalid_name == "gleam" {
                // "gleam_app" would hit the GleamPrefix rule, so prepend.
                Some("app_gleam".into())
            } else {
                Some(format!("{invalid_name}_app"))
            }
        }
        InvalidProjectNameReason::FormatNotLowercase => Some(invalid_name.to_lowercase()),
        InvalidProjectNameReason::Format => {
            // Lowercase, replace every illegal character with "_", then
            // squash the runs of "_" left by consecutive replacements.
            let suggestion = regex::Regex::new(r"[^a-z0-9]")
                .expect("failed regex to match any non-lowercase and non-alphanumeric characters")
                .replace_all(&invalid_name.to_lowercase(), "_")
                .to_string();
            let suggestion = regex::Regex::new(r"_+")
                .expect("failed regex to match consecutive underscores")
                .replace_all(&suggestion, "_")
                .to_string();
            match validate_name(&suggestion) {
                Ok(_) => Some(suggestion),
                Err(_) => None,
            }
        }
    }
}
/// Resolve the final project name: take the provided name (or derive one
/// from the target folder), validate it, and when invalid offer the user
/// an automatically corrected suggestion via `confirm`.
fn get_valid_project_name(
    provided_name: Option<&str>,
    project_root: &str,
    confirm: impl Fn(&str) -> Result<bool, Error>,
) -> Result<ProjectName, Error> {
    let initial_name = match provided_name {
        Some(name) => name.trim().to_string(),
        None => get_foldername(project_root)?.trim().to_string(),
    };
    let invalid_reason = match validate_name(&initial_name) {
        // Valid as-is: record where the name came from and return.
        Ok(_) => {
            return Ok(match provided_name {
                Some(_) => ProjectName::Provided {
                    decided: initial_name,
                },
                None => ProjectName::Derived {
                    folder: initial_name.clone(),
                    decided: initial_name,
                },
            });
        }
        Err(Error::InvalidProjectName { reason, .. }) => reason,
        Err(error) => return Err(error),
    };
    let suggested_name = match suggest_valid_name(&initial_name, &invalid_reason) {
        Some(suggested_name) => suggested_name,
        // No automatic fix available: report the original failure.
        None => {
            return Err(Error::InvalidProjectName {
                name: initial_name,
                reason: invalid_reason,
            });
        }
    };
    let prompt_for_suggested_name = error::format_invalid_project_name_error(
        &initial_name,
        &invalid_reason,
        &Some(suggested_name.clone()),
    );
    if confirm(&prompt_for_suggested_name)? {
        return Ok(match provided_name {
            Some(_) => ProjectName::Provided {
                decided: suggested_name,
            },
            None => ProjectName::Derived {
                folder: initial_name,
                decided: suggested_name,
            },
        });
    }
    // Suggestion declined: surface the original validation error.
    Err(Error::InvalidProjectName {
        name: initial_name,
        reason: invalid_reason,
    })
}
/// Derive a project name from the target path: the current directory's
/// name when the path is `.`, otherwise the path's final component.
fn get_foldername(path: &str) -> Result<String, Error> {
    match path {
        "." => env::current_dir()
            .expect("invalid folder")
            .file_name()
            .and_then(|x| x.to_str())
            .map(ToString::to_string)
            .ok_or(Error::UnableToFindProjectRoot {
                path: path.to_string(),
            }),
        _ => Utf8Path::new(path)
            .file_name()
            .map(ToString::to_string)
            .ok_or(Error::UnableToFindProjectRoot {
                path: path.to_string(),
            }),
    }
}
/// Where the final project name came from: given explicitly on the
/// command line (`Provided`) or derived from the target folder
/// (`Derived`, which remembers the original folder name).
#[derive(Debug, Clone)]
enum ProjectName {
    Provided { decided: String },
    Derived { folder: String, decided: String },
}
impl ProjectName {
    /// The name the project will actually be created with.
    fn decided(&self) -> &str {
        match self {
            Self::Provided { decided } | Self::Derived { decided, .. } => decided,
        }
    }
    /// The directory to create the project in. When the name was derived
    /// from the folder and a different suggestion was accepted, the final
    /// path component is replaced so it matches the new name.
    fn project_root(&self, current_root: &str) -> String {
        match self {
            Self::Provided { .. } => current_root.to_string(),
            Self::Derived { folder, decided } => {
                // "." means "create in place"; never rename in that case.
                if current_root == "." || folder == decided {
                    return current_root.to_string();
                }
                // If the name was invalid and generated suggestion was accepted,
                // align the directory path with the new name.
                let original_root = Utf8Path::new(current_root);
                let new_root = match original_root.parent() {
                    Some(parent) if !parent.as_str().is_empty() => parent.join(decided),
                    Some(_) | None => Utf8PathBuf::from(decided),
                };
                new_root.to_string()
            }
        }
    }
}
| rust | Apache-2.0 | f424547f02e621f1c5f28749786e05eda7feb098 | 2026-01-04T15:40:22.554517Z | false |
gleam-lang/gleam | https://github.com/gleam-lang/gleam/blob/f424547f02e621f1c5f28749786e05eda7feb098/compiler-cli/src/hex/auth.rs | compiler-cli/src/hex/auth.rs | use crate::{cli, fs::ConsoleWarningEmitter, http::HttpClient};
use gleam_core::{
Error, Result, Warning, encryption, hex,
paths::global_hexpm_credentials_path,
warning::{DeprecatedEnvironmentVariable, WarningEmitter},
};
use std::{rc::Rc, time::SystemTime};
// Prompts shown for interactive credential entry, and the environment
// variables that may supply the same values non-interactively (the
// HEXPM_USER/HEXPM_PASS variables are deprecated — see the ask_*
// helpers below, which warn when they are used).
pub const USER_PROMPT: &str = "https://hex.pm username";
pub const USER_ENV_NAME: &str = "HEXPM_USER";
pub const PASS_PROMPT: &str = "https://hex.pm password";
pub const LOCAL_PASS_PROMPT: &str = "Local password";
pub const PASS_ENV_NAME: &str = "HEXPM_PASS";
pub const API_ENV_NAME: &str = "HEXPM_API_KEY";
/// A Hex API key as stored on disc: its name plus the ciphertext
/// produced by encrypting the key with the user's local password.
#[derive(Debug)]
pub struct EncryptedApiKey {
    pub name: String,
    pub encrypted: String,
}
/// A plaintext Hex API key, ready to authenticate API requests.
#[derive(Debug)]
pub struct UnencryptedApiKey {
    pub unencrypted: String,
}
/// Obtains and caches Hex API credentials, creating and locally
/// encrypting a new API key when none is available.
pub struct HexAuthentication<'runtime> {
    runtime: &'runtime tokio::runtime::Runtime,
    http: HttpClient,
    // Cached so the user is asked for the local password at most once.
    local_password: Option<String>,
    hex_config: hexpm::Config,
    // Warnings gathered here are emitted when this value is dropped.
    warnings: Vec<Warning>,
    warning_emitter: WarningEmitter,
}
impl<'runtime> HexAuthentication<'runtime> {
    /// Construct a new authenticator. Nothing is read from disc or the
    /// network until one of the key-fetching methods is called.
    ///
    pub fn new(runtime: &'runtime tokio::runtime::Runtime, hex_config: hexpm::Config) -> Self {
        Self {
            runtime,
            http: HttpClient::new(),
            local_password: None,
            hex_config,
            warnings: vec![],
            warning_emitter: WarningEmitter::new(Rc::new(ConsoleWarningEmitter)),
        }
    }
    /// Create a new API key, removing the previous one if it already exists.
    ///
    pub fn create_and_store_api_key(&mut self) -> Result<UnencryptedApiKey> {
        let name = generate_api_key_name();
        let path = global_hexpm_credentials_path();
        // Get login creds from user
        let username = ask_username(&mut self.warnings)?;
        let password = ask_password(&mut self.warnings)?;
        // Get API key
        let future = hex::create_api_key(&name, &username, &password, &self.hex_config, &self.http);
        let api_key = self.runtime.block_on(future)?;
        // Only explain the local password when it has not been asked yet.
        if self.local_password.is_none() {
            println!(
                "
Please enter a new unique password. This will be used to locally
encrypt your Hex API key.
"
            );
        }
        let password = self.ask_local_password()?;
        let encrypted = encryption::encrypt_with_passphrase(api_key.as_bytes(), &password)
            .map_err(|e| Error::FailedToEncryptLocalHexApiKey {
                detail: e.to_string(),
            })?;
        // Stored format: key name on the first line, ciphertext after.
        crate::fs::write(&path, &format!("{name}\n{encrypted}"))?;
        println!("Encrypted Hex API key written to {path}");
        Ok(UnencryptedApiKey {
            unencrypted: api_key,
        })
    }
    // Prompt for the local password, caching it for later calls.
    fn ask_local_password(&mut self) -> Result<String> {
        if let Some(pw) = self.local_password.as_ref() {
            return Ok(pw.clone());
        }
        let pw = ask_local_password(&mut self.warnings)?;
        self.local_password = Some(pw.clone());
        Ok(pw)
    }
    /// Get an API key from
    /// 1. the HEXPM_API_KEY env var
    /// 2. the file system (encrypted)
    /// 3. the Hex API
    pub fn get_or_create_api_key(&mut self) -> Result<String> {
        if let Some(key) = Self::read_env_api_key()? {
            return Ok(key);
        }
        if let Some(key) = self.read_and_decrypt_stored_api_key()? {
            return Ok(key.unencrypted);
        }
        Ok(self.create_and_store_api_key()?.unencrypted)
    }
    // Read HEXPM_API_KEY, treating an unset or blank value as absent.
    fn read_env_api_key() -> Result<Option<String>> {
        let api_key = std::env::var(API_ENV_NAME).unwrap_or_default();
        if api_key.trim().is_empty() {
            Ok(None)
        } else {
            Ok(Some(api_key))
        }
    }
    // Load the stored key, if any, and decrypt it with the local password.
    fn read_and_decrypt_stored_api_key(&mut self) -> Result<Option<UnencryptedApiKey>> {
        let Some(EncryptedApiKey { encrypted, .. }) = self.read_stored_api_key()? else {
            return Ok(None);
        };
        let password = self.ask_local_password()?;
        let unencrypted = encryption::decrypt_with_passphrase(encrypted.as_bytes(), &password)
            .map_err(|e| Error::FailedToDecryptLocalHexApiKey {
                detail: e.to_string(),
            })?;
        Ok(Some(UnencryptedApiKey { unencrypted }))
    }
    /// Read the encrypted API key from the global credentials file, if it
    /// exists. The first line is the key name, the remainder ciphertext.
    pub fn read_stored_api_key(&self) -> Result<Option<EncryptedApiKey>> {
        let path = global_hexpm_credentials_path();
        if !path.exists() {
            return Ok(None);
        }
        let text = crate::fs::read(&path)?;
        // Split on the first newline only: the ciphertext may be long.
        let mut chunks = text.splitn(2, '\n');
        let Some(name) = chunks.next() else {
            return Ok(None);
        };
        let Some(encrypted) = chunks.next() else {
            return Ok(None);
        };
        Ok(Some(EncryptedApiKey {
            name: name.to_string(),
            encrypted: encrypted.to_string(),
        }))
    }
}
impl Drop for HexAuthentication<'_> {
    // Emit any deprecation warnings gathered during authentication. Done
    // on drop so they appear after interactive prompting has finished.
    fn drop(&mut self) {
        while let Some(warning) = self.warnings.pop() {
            self.warning_emitter.emit(warning);
        }
    }
}
/// Obtain the local encryption password, preferring the deprecated
/// HEXPM_PASS environment variable over an interactive prompt. Using the
/// variable records a deprecation warning.
fn ask_local_password(warnings: &mut Vec<Warning>) -> std::result::Result<String, Error> {
    match std::env::var(PASS_ENV_NAME) {
        Ok(password) => {
            warnings.push(Warning::DeprecatedEnvironmentVariable {
                variable: DeprecatedEnvironmentVariable::HexpmPass,
            });
            Ok(password)
        }
        Err(_) => cli::ask_password(LOCAL_PASS_PROMPT),
    }
}
/// Obtain the hex.pm account password, preferring the deprecated
/// HEXPM_PASS environment variable over an interactive prompt. Using the
/// variable records a deprecation warning.
fn ask_password(warnings: &mut Vec<Warning>) -> std::result::Result<String, Error> {
    match std::env::var(PASS_ENV_NAME) {
        Ok(password) => {
            warnings.push(Warning::DeprecatedEnvironmentVariable {
                variable: DeprecatedEnvironmentVariable::HexpmPass,
            });
            Ok(password)
        }
        Err(_) => cli::ask_password(PASS_PROMPT),
    }
}
/// Obtain the hex.pm username, preferring the deprecated HEXPM_USER
/// environment variable over an interactive prompt. Using the variable
/// records a deprecation warning.
fn ask_username(warnings: &mut Vec<Warning>) -> std::result::Result<String, Error> {
    match std::env::var(USER_ENV_NAME) {
        Ok(username) => {
            warnings.push(Warning::DeprecatedEnvironmentVariable {
                variable: DeprecatedEnvironmentVariable::HexpmUser,
            });
            Ok(username)
        }
        Err(_) => cli::ask(USER_PROMPT),
    }
}
/// Build a unique-ish name for a new Hex API key: the machine's
/// hostname joined with the current Unix timestamp in seconds.
pub fn generate_api_key_name() -> String {
    let hostname = hostname::get()
        .expect("Looking up hostname")
        .to_string_lossy()
        .into_owned();
    let seconds_since_epoch = SystemTime::now()
        .duration_since(SystemTime::UNIX_EPOCH)
        .expect("This function must only be called after January 1, 1970. Sorry time traveller!")
        .as_secs();
    format!("{hostname}-{seconds_since_epoch}")
}
| rust | Apache-2.0 | f424547f02e621f1c5f28749786e05eda7feb098 | 2026-01-04T15:40:22.554517Z | false |
gleam-lang/gleam | https://github.com/gleam-lang/gleam/blob/f424547f02e621f1c5f28749786e05eda7feb098/compiler-cli/src/new/tests.rs | compiler-cli/src/new/tests.rs | use std::path::PathBuf;
use camino::{Utf8Path, Utf8PathBuf};
use gleam_core::Error;
// A full run with git and GitHub enabled creates the whole expected
// file tree and a gleam.toml carrying the right project name.
#[test]
fn new() {
    let tmp = tempfile::tempdir().unwrap();
    let path = Utf8PathBuf::from_path_buf(tmp.path().join("my_project")).expect("Non Utf8 Path");
    let creator = super::Creator::new(
        super::NewOptions {
            project_root: path.to_string(),
            template: super::Template::Erlang,
            name: None,
            skip_git: false,
            skip_github: false,
        },
        "1.0.0-gleam",
    )
    .unwrap();
    creator.run().unwrap();
    assert!(path.join(".git").exists());
    assert!(path.join("README.md").exists());
    assert!(path.join("gleam.toml").exists());
    assert!(path.join("src/my_project.gleam").exists());
    assert!(path.join("test/my_project_test.gleam").exists());
    assert!(path.join(".github/workflows/test.yml").exists());
    let toml = crate::fs::read(path.join("gleam.toml")).unwrap();
    assert!(toml.contains("name = \"my_project\""));
}
// Snapshot every generated file for the default (Erlang) template.
#[test]
fn new_with_default_template() {
    let tmp = tempfile::tempdir().unwrap();
    // NOTE(review): `into_path` leaks the temp dir (it is never deleted);
    // presumably needed so `insta::glob!` can re-read it — confirm.
    let path = Utf8PathBuf::from_path_buf(tmp.into_path()).expect("Non Utf8 Path");
    let creator = super::Creator::new(
        super::NewOptions {
            project_root: path.join("my_project").to_string(),
            template: super::Template::Erlang,
            name: None,
            skip_git: false,
            skip_github: true,
        },
        "1.0.0-gleam",
    )
    .unwrap();
    creator.run().unwrap();
    insta::glob!(&path, "my_project/[^.]**/*.*", |file_path| {
        if !file_path.is_dir() {
            insta::assert_snapshot!(
                crate::fs::read(
                    Utf8PathBuf::from_path_buf(file_path.to_path_buf()).expect("Non Utf8 Path"),
                )
                .unwrap()
            );
        }
    });
}
// Same snapshot test for the JavaScript template.
#[test]
fn new_with_javascript_template() {
    let tmp = tempfile::tempdir().unwrap();
    let path = Utf8PathBuf::from_path_buf(tmp.into_path()).expect("Non Utf8 Path");
    let creator = super::Creator::new(
        super::NewOptions {
            project_root: path.join("my_project").to_string(),
            template: super::Template::JavaScript,
            name: None,
            skip_git: false,
            skip_github: true,
        },
        "1.0.0-gleam",
    )
    .unwrap();
    creator.run().unwrap();
    insta::glob!(&path, "my_project/[^.]**/*.*", |file_path| {
        if !file_path.is_dir() {
            insta::assert_snapshot!(
                crate::fs::read(
                    Utf8PathBuf::from_path_buf(file_path.to_path_buf()).expect("Non Utf8 Path"),
                )
                .unwrap()
            );
        }
    });
}
// skip_git suppresses both the git repo and the GitHub workflow files.
#[test]
fn new_with_skip_git() {
    let tmp = tempfile::tempdir().unwrap();
    let path = Utf8PathBuf::from_path_buf(tmp.path().join("my_project")).expect("Non Utf8 Path");
    let creator = super::Creator::new(
        super::NewOptions {
            project_root: path.to_string(),
            template: super::Template::Erlang,
            name: None,
            skip_git: true,
            skip_github: false,
        },
        "1.0.0-gleam",
    )
    .unwrap();
    creator.run().unwrap();
    assert!(!path.join(".git").exists());
    assert!(!path.join(".github").exists());
}
// skip_github keeps the git repo but omits the workflow files.
#[test]
fn new_with_skip_github() {
    let tmp = tempfile::tempdir().unwrap();
    let path = Utf8PathBuf::from_path_buf(tmp.path().join("my_project")).expect("Non Utf8 Path");
    let creator = super::Creator::new(
        super::NewOptions {
            project_root: path.to_string(),
            template: super::Template::Erlang,
            name: None,
            skip_git: false,
            skip_github: true,
        },
        "1.0.0-gleam",
    )
    .unwrap();
    creator.run().unwrap();
    assert!(path.join(".git").exists());
    assert!(!path.join(".github").exists());
    assert!(!path.join(".github/workflows/test.yml").exists());
}
// Both flags together produce neither git nor GitHub files.
#[test]
fn new_with_skip_git_and_github() {
    let tmp = tempfile::tempdir().unwrap();
    let path = Utf8PathBuf::from_path_buf(tmp.path().join("my_project")).expect("Non Utf8 Path");
    let creator = super::Creator::new(
        super::NewOptions {
            project_root: path.to_string(),
            template: super::Template::Erlang,
            name: None,
            skip_git: true,
            skip_github: true,
        },
        "1.0.0-gleam",
    )
    .unwrap();
    creator.run().unwrap();
    assert!(!path.join(".git").exists());
    assert!(!path.join(".github").exists());
    assert!(!path.join(".github/workflows/test.yml").exists());
}
// A folder name that cannot be auto-corrected is rejected at
// construction time (no interactive confirm in plain `new`).
#[test]
fn invalid_path() {
    let tmp = tempfile::tempdir().unwrap();
    let path = Utf8PathBuf::from_path_buf(tmp.path().join("-------")).expect("Non Utf8 Path");
    assert!(
        super::Creator::new(
            super::NewOptions {
                project_root: path.to_string(),
                template: super::Template::Erlang,
                name: None,
                skip_git: false,
                skip_github: false,
            },
            "1.0.0-gleam",
        )
        .is_err()
    );
}
// An explicitly provided invalid name is likewise rejected.
#[test]
fn invalid_name() {
    let tmp = tempfile::tempdir().unwrap();
    let path = Utf8PathBuf::from_path_buf(tmp.path().join("projec")).expect("Non Utf8 Path");
    assert!(
        super::Creator::new(
            super::NewOptions {
                project_root: path.to_string(),
                template: super::Template::Erlang,
                name: Some("-".into()),
                skip_git: false,
                skip_github: false,
            },
            "1.0.0-gleam",
        )
        .is_err()
    );
}
// Creating into an existing but empty directory is allowed.
#[test]
fn existing_directory_no_files() {
    let tmp = tempfile::tempdir().unwrap();
    let path = Utf8PathBuf::from_path_buf(tmp.path().join("my_project")).expect("Non Utf8 Path");
    crate::fs::mkdir(&path).unwrap();
    let creator = super::Creator::new(
        super::NewOptions {
            project_root: path.to_string(),
            template: super::Template::Erlang,
            name: None,
            skip_git: true,
            skip_github: true,
        },
        "1.0.0-gleam",
    )
    .unwrap();
    creator.run().unwrap();
    assert!(path.join("README.md").exists());
}
// A directory already holding a would-be-generated file is rejected.
#[test]
fn existing_directory_with_one_existing_file() {
    let tmp = tempfile::tempdir().unwrap();
    let path = Utf8PathBuf::from_path_buf(tmp.path().join("my_project")).expect("Non Utf8 Path");
    crate::fs::mkdir(&path).unwrap();
    let _ = std::fs::File::create(PathBuf::from(&path).join("README.md")).unwrap();
    let _ = std::fs::File::create(PathBuf::from(&path).join("my_project.gleam")).unwrap();
    assert!(
        super::Creator::new(
            super::NewOptions {
                project_root: path.to_string(),
                template: super::Template::Erlang,
                name: None,
                skip_git: true,
                skip_github: true,
            },
            "1.0.0-gleam",
        )
        .is_err()
    );
}
// Files that `gleam new` would not generate do not block creation and
// are left untouched.
#[test]
fn existing_directory_with_non_generated_file() {
    let tmp = tempfile::tempdir().unwrap();
    let path = Utf8PathBuf::from_path_buf(tmp.path().join("my_project")).expect("Non Utf8 Path");
    crate::fs::mkdir(&path).unwrap();
    let file_path = PathBuf::from(&path).join("some_fake_thing_that_is_not_generated.md");
    let _ = std::fs::File::create(file_path).unwrap();
    let creator = super::Creator::new(
        super::NewOptions {
            project_root: path.to_string(),
            template: super::Template::Erlang,
            name: None,
            skip_git: true,
            skip_github: true,
        },
        "1.0.0-gleam",
    )
    .unwrap();
    creator.run().unwrap();
    assert!(path.join("README.md").exists());
    assert!(
        path.join("some_fake_thing_that_is_not_generated.md")
            .exists()
    );
}
// The conflict error names exactly the files that already exist.
#[test]
fn conflict_with_existing_files() {
    let tmp = tempfile::tempdir().unwrap();
    let path = Utf8PathBuf::from_path_buf(tmp.path().join("my_project")).expect("Non Utf8 Path");
    crate::fs::mkdir(&path).unwrap();
    let _ = std::fs::File::create(PathBuf::from(&path).join("README.md")).unwrap();
    assert_eq!(
        super::Creator::new(
            super::NewOptions {
                project_root: path.to_string(),
                template: super::Template::Erlang,
                name: None,
                skip_git: true,
                skip_github: true,
            },
            "1.0.0-gleam",
        )
        .err(),
        Some(Error::OutputFilesAlreadyExist {
            file_names: vec![path.join("README.md")]
        })
    );
}
// With skip_git a pre-existing .gitignore is not treated as a conflict
// (the file would not be generated in this configuration).
#[test]
fn skip_existing_git_files_when_skip_git_is_true() {
    let tmp = tempfile::tempdir().unwrap();
    let path = Utf8PathBuf::from_path_buf(tmp.path().join("my_project")).expect("Non Utf8 Path");
    crate::fs::mkdir(&path).unwrap();
    let file_path = PathBuf::from(&path).join(".gitignore");
    let _ = std::fs::File::create(file_path).unwrap();
    let creator = super::Creator::new(
        super::NewOptions {
            project_root: path.to_string(),
            template: super::Template::Erlang,
            name: None,
            skip_git: true,
            skip_github: true,
        },
        "1.0.0-gleam",
    )
    .unwrap();
    creator.run().unwrap();
    assert!(path.join("README.md").exists());
    assert!(path.join(".gitignore").exists());
}
// Accepting a suggested name (confirm closure returns true) renames the
// target directory to match: gleam_testproject -> testproject.
#[test]
fn suggested_project_name_updates_directory() {
    let tmp = tempfile::tempdir().unwrap();
    let base = Utf8PathBuf::from_path_buf(tmp.path().to_path_buf()).expect("Non Utf8 Path");
    let original_root = base.join("gleam_testproject");
    let creator = super::Creator::new_with_confirmation(
        super::NewOptions {
            project_root: original_root.to_string(),
            template: super::Template::Erlang,
            name: None,
            skip_git: true,
            skip_github: true,
        },
        "1.0.0-gleam",
        |_| Ok::<bool, Error>(true),
    )
    .unwrap();
    let expected_root = base.join("testproject");
    assert_eq!(creator.project_name, "testproject");
    assert_eq!(
        Utf8Path::new(&creator.options.project_root),
        expected_root.as_path()
    );
    creator.run().unwrap();
    assert!(expected_root.exists());
    assert!(!original_root.exists());
}
// validate_name distinguishes fixable-by-lowercasing names from names
// with structurally illegal characters.
#[test]
fn validate_name_format() {
    assert!(crate::new::validate_name("project").is_ok());
    assert!(crate::new::validate_name("project_name").is_ok());
    assert!(crate::new::validate_name("project2").is_ok());
    let invalid = ["Project", "PROJECT", "Project_Name"];
    for name in invalid {
        assert!(matches!(
            crate::new::validate_name(name),
            Err(Error::InvalidProjectName {
                name: _,
                reason: crate::new::InvalidProjectNameReason::FormatNotLowercase
            })
        ));
    }
    let invalid = ["0project", "_project", "project-name"];
    for name in invalid {
        assert!(matches!(
            crate::new::validate_name(name),
            Err(Error::InvalidProjectName {
                name: _,
                reason: crate::new::InvalidProjectNameReason::Format
            })
        ));
    }
}
// One case per InvalidProjectNameReason, including the None cases where
// no automatic suggestion is possible.
#[test]
fn suggest_valid_names() {
    assert_eq!(
        crate::new::suggest_valid_name(
            "gleam_",
            &crate::new::InvalidProjectNameReason::GleamPrefix
        ),
        None
    );
    assert_eq!(
        crate::new::suggest_valid_name(
            "gleam_project",
            &crate::new::InvalidProjectNameReason::GleamPrefix
        ),
        Some("project".to_string())
    );
    assert_eq!(
        crate::new::suggest_valid_name(
            "try",
            &crate::new::InvalidProjectNameReason::ErlangReservedWord
        ),
        Some("try_app".to_string())
    );
    assert_eq!(
        crate::new::suggest_valid_name(
            "erl_eval",
            &crate::new::InvalidProjectNameReason::ErlangStandardLibraryModule
        ),
        Some("erl_eval_app".to_string())
    );
    assert_eq!(
        crate::new::suggest_valid_name(
            "assert",
            &crate::new::InvalidProjectNameReason::GleamReservedWord
        ),
        Some("assert_app".to_string())
    );
    assert_eq!(
        crate::new::suggest_valid_name(
            "gleam",
            &crate::new::InvalidProjectNameReason::GleamReservedModule
        ),
        Some("app_gleam".to_string())
    );
    assert_eq!(
        crate::new::suggest_valid_name(
            "Project_Name",
            &crate::new::InvalidProjectNameReason::FormatNotLowercase
        ),
        Some("project_name".to_string())
    );
    assert_eq!(
        crate::new::suggest_valid_name(
            "Pr0ject-n4me!",
            &crate::new::InvalidProjectNameReason::Format
        ),
        Some("pr0ject_n4me_".to_string())
    );
    assert_eq!(
        crate::new::suggest_valid_name(
            "Pr0ject--n4me!",
            &crate::new::InvalidProjectNameReason::Format
        ),
        Some("pr0ject_n4me_".to_string())
    );
    assert_eq!(
        crate::new::suggest_valid_name(
            "_pr0ject-name",
            &crate::new::InvalidProjectNameReason::Format
        ),
        None
    );
}
| rust | Apache-2.0 | f424547f02e621f1c5f28749786e05eda7feb098 | 2026-01-04T15:40:22.554517Z | false |
gleam-lang/gleam | https://github.com/gleam-lang/gleam/blob/f424547f02e621f1c5f28749786e05eda7feb098/compiler-cli/src/dependencies/tests.rs | compiler-cli/src/dependencies/tests.rs | use std::collections::HashMap;
use camino::{Utf8Path, Utf8PathBuf};
use ecow::EcoString;
use hexpm::version::Version;
use pretty_assertions::assert_eq;
use gleam_core::{
Error,
build::Runtime,
config::{DenoConfig, DenoFlag, Docs, ErlangConfig, JavaScriptConfig},
manifest::{Base16Checksum, Manifest, ManifestPackage, ManifestPackageSource},
requirement::Requirement,
};
use crate::dependencies::*;
// The package listing renders as a two-column, space-padded table in
// manifest order. (Table literal below matches the padding behaviour of
// the formatter: first column padded to the widest entry, single-space
// column gap, last column unpadded.)
#[test]
fn list_manifest_format() {
    let mut buffer = vec![];
    let manifest = Manifest {
        requirements: HashMap::new(),
        packages: vec![
            ManifestPackage {
                name: "root".into(),
                version: Version::parse("1.0.0").unwrap(),
                build_tools: ["gleam".into()].into(),
                otp_app: None,
                requirements: vec![],
                source: ManifestPackageSource::Hex {
                    outer_checksum: Base16Checksum(vec![1, 2, 3, 4]),
                },
            },
            ManifestPackage {
                name: "aaa".into(),
                version: Version::new(0, 4, 2),
                build_tools: ["rebar3".into(), "make".into()].into(),
                otp_app: Some("aaa_app".into()),
                requirements: vec!["zzz".into(), "gleam_stdlib".into()],
                source: ManifestPackageSource::Hex {
                    outer_checksum: Base16Checksum(vec![3, 22]),
                },
            },
            ManifestPackage {
                name: "zzz".into(),
                version: Version::new(0, 4, 0),
                build_tools: ["mix".into()].into(),
                otp_app: None,
                requirements: vec![],
                source: ManifestPackageSource::Hex {
                    outer_checksum: Base16Checksum(vec![3, 22]),
                },
            },
        ],
    };
    list_manifest_packages(&mut buffer, manifest).unwrap();
    assert_eq!(
        std::str::from_utf8(&buffer).unwrap(),
        "Package Version
------- -------
root    1.0.0
aaa     0.4.2
zzz     0.4.0
"
    )
}
#[test]
fn tree_format() {
    // With no filter options the full dependency tree is printed starting
    // from the root package, with shared dependencies repeated per parent.
    let mut output = vec![];
    let manifest = Manifest {
        requirements: HashMap::new(),
        packages: vec![
            ManifestPackage {
                name: "deps_proj".into(),
                version: Version::new(1, 0, 0),
                build_tools: vec![],
                otp_app: None,
                requirements: vec!["gleam_regexp".into(), "gleam_stdlib".into()],
                source: ManifestPackageSource::Hex {
                    outer_checksum: Base16Checksum(vec![1, 2, 3, 4]),
                },
            },
            ManifestPackage {
                name: "gleam_stdlib".into(),
                version: Version::new(0, 52, 0),
                build_tools: vec!["rebar3".into(), "make".into()],
                otp_app: Some("aaa_app".into()),
                requirements: vec![],
                source: ManifestPackageSource::Hex {
                    outer_checksum: Base16Checksum(vec![3, 22]),
                },
            },
            ManifestPackage {
                name: "gleam_regexp".into(),
                version: Version::new(1, 0, 0),
                build_tools: vec!["mix".into()],
                otp_app: None,
                requirements: vec!["gleam_stdlib".into()],
                source: ManifestPackageSource::Hex {
                    outer_checksum: Base16Checksum(vec![3, 22]),
                },
            },
        ],
    };
    let options = TreeOptions {
        invert: None,
        package: None,
    };
    list_package_and_dependencies_tree(
        &mut output,
        options,
        manifest.packages.clone(),
        EcoString::from("deps_proj"),
    )
    .unwrap();
    assert_eq!(
        std::str::from_utf8(&output).unwrap(),
        r#"deps_proj v1.0.0
├── gleam_regexp v1.0.0
│ └── gleam_stdlib v0.52.0
└── gleam_stdlib v0.52.0
"#
    )
}
#[test]
fn tree_package_format() {
    // The `package` option restricts the tree to the chosen package's own
    // subtree rather than starting from the project root.
    let mut output = vec![];
    let manifest = Manifest {
        requirements: HashMap::new(),
        packages: vec![
            ManifestPackage {
                name: "gleam_stdlib".into(),
                version: Version::new(0, 52, 0),
                build_tools: vec!["rebar3".into(), "make".into()],
                otp_app: Some("aaa_app".into()),
                requirements: vec![],
                source: ManifestPackageSource::Hex {
                    outer_checksum: Base16Checksum(vec![3, 22]),
                },
            },
            ManifestPackage {
                name: "deps_proj".into(),
                version: Version::new(1, 0, 0),
                build_tools: vec![],
                otp_app: None,
                requirements: vec!["gleam_stdlib".into(), "gleam_regexp".into()],
                source: ManifestPackageSource::Hex {
                    outer_checksum: Base16Checksum(vec![1, 2, 3, 4]),
                },
            },
            ManifestPackage {
                name: "gleam_regexp".into(),
                version: Version::new(1, 0, 0),
                build_tools: vec!["mix".into()],
                otp_app: None,
                requirements: vec!["gleam_stdlib".into()],
                source: ManifestPackageSource::Hex {
                    outer_checksum: Base16Checksum(vec![3, 22]),
                },
            },
        ],
    };
    let options = TreeOptions {
        invert: None,
        package: Some("gleam_regexp".to_string()),
    };
    list_package_and_dependencies_tree(
        &mut output,
        options,
        manifest.packages.clone(),
        EcoString::from("deps_proj"),
    )
    .unwrap();
    assert_eq!(
        std::str::from_utf8(&output).unwrap(),
        r#"gleam_regexp v1.0.0
└── gleam_stdlib v0.52.0
"#
    )
}
#[test]
fn tree_invert_format() {
    // The `invert` option prints the reverse tree: which packages depend on
    // the chosen one, transitively up to the project root.
    let mut output = vec![];
    let manifest = Manifest {
        requirements: HashMap::new(),
        packages: vec![
            ManifestPackage {
                name: "gleam_stdlib".into(),
                version: Version::new(0, 52, 0),
                build_tools: vec!["rebar3".into(), "make".into()],
                otp_app: Some("aaa_app".into()),
                requirements: vec![],
                source: ManifestPackageSource::Hex {
                    outer_checksum: Base16Checksum(vec![3, 22]),
                },
            },
            ManifestPackage {
                name: "deps_proj".into(),
                version: Version::new(1, 0, 0),
                build_tools: vec![],
                otp_app: None,
                requirements: vec!["gleam_stdlib".into(), "gleam_regexp".into()],
                source: ManifestPackageSource::Hex {
                    outer_checksum: Base16Checksum(vec![1, 2, 3, 4]),
                },
            },
            ManifestPackage {
                name: "gleam_regexp".into(),
                version: Version::new(1, 0, 0),
                build_tools: vec!["mix".into()],
                otp_app: None,
                requirements: vec!["gleam_stdlib".into()],
                source: ManifestPackageSource::Hex {
                    outer_checksum: Base16Checksum(vec![3, 22]),
                },
            },
        ],
    };
    let options = TreeOptions {
        invert: Some("gleam_stdlib".to_string()),
        package: None,
    };
    list_package_and_dependencies_tree(
        &mut output,
        options,
        manifest.packages.clone(),
        EcoString::from("deps_proj"),
    )
    .unwrap();
    assert_eq!(
        std::str::from_utf8(&output).unwrap(),
        r#"gleam_stdlib v0.52.0
├── deps_proj v1.0.0
└── gleam_regexp v1.0.0
└── deps_proj v1.0.0
"#
    )
}
#[test]
fn list_tree_invalid_package_format() {
    // Asking for the tree of a package that is not in the manifest prints a
    // friendly "not found" message instead of failing.
    let mut output = vec![];
    let manifest = Manifest {
        requirements: HashMap::new(),
        packages: vec![
            ManifestPackage {
                name: "gleam_stdlib".into(),
                version: Version::new(0, 52, 0),
                build_tools: vec!["rebar3".into(), "make".into()],
                otp_app: Some("aaa_app".into()),
                requirements: vec![],
                source: ManifestPackageSource::Hex {
                    outer_checksum: Base16Checksum(vec![3, 22]),
                },
            },
            ManifestPackage {
                name: "gleam_regexp".into(),
                version: Version::new(1, 0, 0),
                build_tools: vec!["mix".into()],
                otp_app: None,
                requirements: vec!["gleam_stdlib".into()],
                source: ManifestPackageSource::Hex {
                    outer_checksum: Base16Checksum(vec![3, 22]),
                },
            },
            ManifestPackage {
                name: "root".into(),
                version: Version::new(1, 0, 0),
                build_tools: vec![],
                otp_app: None,
                requirements: vec!["gleam_regexp".into(), "gleam_stdlib".into()],
                source: ManifestPackageSource::Hex {
                    outer_checksum: Base16Checksum(vec![1, 2, 3, 4]),
                },
            },
        ],
    };
    let options = TreeOptions {
        invert: None,
        package: Some("zzzzzz".to_string()),
    };
    list_package_and_dependencies_tree(
        &mut output,
        options,
        manifest.packages.clone(),
        EcoString::from("deps_proj"),
    )
    .unwrap();
    assert_eq!(
        std::str::from_utf8(&output).unwrap(),
        r#"Package not found. Please check the package name.
"#
    )
}
#[test]
fn parse_gleam_add_specifier_invalid_semver() {
    // A four-segment version is not valid semver, so parsing must fail.
    let result = parse_gleam_add_specifier("some_package@1.2.3.4");
    assert!(result.is_err());
}
#[test]
fn parse_gleam_add_specifier_non_numeric_version() {
    // Version segments must be numeric; arbitrary text is rejected.
    let result = parse_gleam_add_specifier("some_package@not_a_version");
    assert!(result.is_err());
}
#[test]
fn parse_gleam_add_specifier_default() {
    // With no `@version` suffix the requirement defaults to any version.
    let (package, version) = parse_gleam_add_specifier("some_package").unwrap();
    assert_eq!(package, "some_package");
    assert_eq!(version, Requirement::hex(">= 0.0.0").unwrap());
}
#[test]
fn parse_gleam_add_specifier_major_only() {
    // `@1` expands to a range covering the whole 1.x major series.
    let (package, version) = parse_gleam_add_specifier("wobble@1").unwrap();
    assert_eq!(package, "wobble");
    assert_eq!(version, Requirement::hex(">= 1.0.0 and < 2.0.0").unwrap());
}
#[test]
fn parse_gleam_add_specifier_major_and_minor() {
    // `@1.2` expands to "at least 1.2.0, below the next major".
    let (package, version) = parse_gleam_add_specifier("wibble@1.2").unwrap();
    assert_eq!(package, "wibble");
    assert_eq!(version, Requirement::hex(">= 1.2.0 and < 2.0.0").unwrap());
}
#[test]
fn parse_gleam_add_specifier_major_minor_and_patch() {
    // A fully specified version pins that exact release.
    let (package, version) = parse_gleam_add_specifier("bobble@1.2.3").unwrap();
    assert_eq!(package, "bobble");
    assert_eq!(version, Requirement::hex("1.2.3").unwrap());
}
#[test]
fn missing_local_packages() {
    // Packages in the manifest that are absent from the local cache, or
    // cached at a different version, are reported as missing. The root
    // package itself is never reported.
    let manifest = Manifest {
        requirements: HashMap::new(),
        packages: vec![
            ManifestPackage {
                name: "root".into(),
                version: Version::new(1, 0, 0),
                build_tools: vec!["gleam".into()],
                otp_app: None,
                requirements: vec![],
                source: ManifestPackageSource::Hex {
                    outer_checksum: Base16Checksum(vec![1, 2, 3, 4]),
                },
            },
            ManifestPackage {
                name: "local1".into(),
                version: Version::new(1, 0, 0),
                build_tools: vec!["gleam".into()],
                otp_app: None,
                requirements: vec![],
                source: ManifestPackageSource::Hex {
                    outer_checksum: Base16Checksum(vec![1, 2, 3, 4, 5]),
                },
            },
            ManifestPackage {
                name: "local2".into(),
                version: Version::new(3, 0, 0),
                build_tools: vec!["gleam".into()],
                otp_app: None,
                requirements: vec![],
                source: ManifestPackageSource::Hex {
                    outer_checksum: Base16Checksum(vec![1, 2, 3, 4, 5]),
                },
            },
        ],
    };
    let local = LocalPackages {
        packages: [
            ("local2".into(), Version::new(2, 0, 0)),
            ("local3".into(), Version::new(3, 0, 0)),
        ]
        .into(),
    };
    let mut missing = local.missing_local_packages(&manifest, "root");
    missing.sort();
    assert_eq!(
        missing,
        [
            &ManifestPackage {
                name: "local1".into(),
                version: Version::new(1, 0, 0),
                build_tools: vec!["gleam".into()],
                otp_app: None,
                requirements: vec![],
                source: ManifestPackageSource::Hex {
                    outer_checksum: Base16Checksum(vec![1, 2, 3, 4, 5]),
                },
            },
            &ManifestPackage {
                name: "local2".into(),
                version: Version::new(3, 0, 0),
                build_tools: vec!["gleam".into()],
                otp_app: None,
                requirements: vec![],
                source: ManifestPackageSource::Hex {
                    outer_checksum: Base16Checksum(vec![1, 2, 3, 4, 5]),
                },
            },
        ]
    )
}
#[test]
fn extra_local_packages() {
    // Locally cached packages that the manifest no longer mentions, or that
    // are cached at a version different from the manifest's, are "extra".
    let local = LocalPackages {
        packages: [
            ("local1".into(), Version::new(1, 0, 0)),
            ("local2".into(), Version::new(2, 0, 0)),
            ("local3".into(), Version::new(3, 0, 0)),
        ]
        .into(),
    };
    let manifest = Manifest {
        requirements: HashMap::new(),
        packages: vec![
            ManifestPackage {
                name: "local1".into(),
                version: Version::new(1, 0, 0),
                build_tools: vec!["gleam".into()],
                otp_app: None,
                requirements: vec![],
                source: ManifestPackageSource::Hex {
                    outer_checksum: Base16Checksum(vec![1, 2, 3, 4, 5]),
                },
            },
            ManifestPackage {
                name: "local2".into(),
                version: Version::new(3, 0, 0),
                build_tools: vec!["gleam".into()],
                otp_app: None,
                requirements: vec![],
                source: ManifestPackageSource::Hex {
                    outer_checksum: Base16Checksum(vec![4, 5]),
                },
            },
        ],
    };
    let mut extra = local.extra_local_packages(&manifest);
    extra.sort();
    assert_eq!(
        extra,
        [
            ("local2".into(), Version::new(2, 0, 0)),
            ("local3".into(), Version::new(3, 0, 0)),
        ]
    )
}
#[test]
fn provide_wrong_package() {
    // Providing a local package whose gleam.toml declares a different name
    // must fail with `WrongDependencyProvided`, reporting both names.
    let mut provided = HashMap::new();
    let project_paths = crate::project_paths_at_current_directory_without_toml();
    let result = provide_local_package(
        "wrong_name".into(),
        Utf8Path::new("./test/hello_world"),
        Utf8Path::new("./"),
        &project_paths,
        &mut provided,
        &mut vec!["root".into(), "subpackage".into()],
    );
    let Err(Error::WrongDependencyProvided {
        expected, found, ..
    }) = result
    else {
        panic!("Expected WrongDependencyProvided error")
    };
    assert_eq!(expected, "wrong_name");
    assert_eq!(found, "hello_world");
}
#[test]
fn provide_existing_package() {
    // Providing the same local package twice is idempotent: both calls
    // succeed and report the same exact-version requirement.
    let mut provided = HashMap::new();
    let project_paths = crate::project_paths_at_current_directory_without_toml();
    for _ in 0..2 {
        let result = provide_local_package(
            "hello_world".into(),
            Utf8Path::new("./test/hello_world"),
            Utf8Path::new("./"),
            &project_paths,
            &mut provided,
            &mut vec!["root".into(), "subpackage".into()],
        );
        assert_eq!(
            result,
            Ok(hexpm::version::Range::new("== 0.1.0".into()).unwrap())
        );
    }
}
#[test]
fn provide_conflicting_package() {
    // Once a package name has been provided from one source, providing the
    // same name from a different source is a conflict.
    let mut provided = HashMap::new();
    let project_paths = crate::project_paths_at_current_directory_without_toml();
    let result = provide_local_package(
        "hello_world".into(),
        Utf8Path::new("./test/hello_world"),
        Utf8Path::new("./"),
        &project_paths,
        &mut provided,
        &mut vec!["root".into(), "subpackage".into()],
    );
    assert_eq!(
        result,
        Ok(hexpm::version::Range::new("== 0.1.0".into()).unwrap())
    );
    let result = provide_package(
        "hello_world".into(),
        Utf8PathBuf::from("./test/other"),
        ProvidedPackageSource::Local {
            path: Utf8Path::new("./test/other").to_path_buf(),
        },
        &project_paths,
        &mut provided,
        &mut vec!["root".into(), "subpackage".into()],
    );
    let Err(Error::ProvidedDependencyConflict { package, .. }) = result else {
        panic!("Expected ProvidedDependencyConflict error")
    };
    assert_eq!(package, "hello_world");
}
#[test]
fn provided_is_absolute() {
    // Relative paths handed to provide_local_package must be stored as
    // absolute paths in the provided-package source.
    let mut provided = HashMap::new();
    let project_paths = crate::project_paths_at_current_directory_without_toml();
    let result = provide_local_package(
        "hello_world".into(),
        Utf8Path::new("./test/hello_world"),
        Utf8Path::new("./"),
        &project_paths,
        &mut provided,
        &mut vec!["root".into(), "subpackage".into()],
    );
    assert_eq!(
        result,
        Ok(hexpm::version::Range::new("== 0.1.0".into()).unwrap())
    );
    let package = provided.get("hello_world").unwrap().clone();
    let ProvidedPackageSource::Local { path } = package.source else {
        panic!("Provide_local_package provided a package that is not local!")
    };
    assert!(path.is_absolute());
}
#[test]
fn provided_recursive() {
    // "hello_world" is already on the parents stack, so providing it again
    // must be detected as a dependency cycle.
    let mut provided = HashMap::new();
    let project_paths = crate::project_paths_at_current_directory_without_toml();
    let result = provide_local_package(
        "hello_world".into(),
        Utf8Path::new("./test/hello_world"),
        Utf8Path::new("./"),
        &project_paths,
        &mut provided,
        &mut vec!["root".into(), "hello_world".into(), "subpackage".into()],
    );
    let expected = Err(Error::PackageCycle {
        packages: vec!["subpackage".into(), "hello_world".into()],
    });
    assert_eq!(result, expected)
}
#[test]
fn provided_local_to_hex() {
    // A locally provided package converts to a single-release hex package in
    // the "local" repository, carrying the same version and requirements.
    let provided = ProvidedPackage {
        source: ProvidedPackageSource::Local {
            path: "canonical/path/to/package".into(),
        },
        version: Version::new(1, 0, 0),
        requirements: [
            (
                "req_1".into(),
                hexpm::version::Range::new("~> 1.0.0".into()).unwrap(),
            ),
            (
                "req_2".into(),
                hexpm::version::Range::new("== 1.0.0".into()).unwrap(),
            ),
        ]
        .into(),
    };
    let expected = hexpm::Package {
        name: "package".into(),
        repository: "local".into(),
        releases: vec![hexpm::Release {
            version: Version::new(1, 0, 0),
            requirements: [
                (
                    "req_1".into(),
                    hexpm::Dependency {
                        requirement: hexpm::version::Range::new("~> 1.0.0".into()).unwrap(),
                        optional: false,
                        app: None,
                        repository: None,
                    },
                ),
                (
                    "req_2".into(),
                    hexpm::Dependency {
                        requirement: hexpm::version::Range::new("== 1.0.0".into()).unwrap(),
                        optional: false,
                        app: None,
                        repository: None,
                    },
                ),
            ]
            .into(),
            retirement_status: None,
            outer_checksum: vec![],
            meta: (),
        }],
    };
    assert_eq!(provided.to_hex_package(&"package".into()), expected);
}
#[test]
fn provided_git_to_hex() {
    // A git-provided package converts to a hex package exactly like a local
    // one does; the git source does not alter the hex representation.
    let provided = ProvidedPackage {
        source: ProvidedPackageSource::Git {
            repo: "https://github.com/gleam-lang/gleam.git".into(),
            commit: "bd9fe02f72250e6a136967917bcb1bdccaffa3c8".into(),
        },
        version: Version::new(1, 0, 0),
        requirements: [
            (
                "req_1".into(),
                hexpm::version::Range::new("~> 1.0.0".into()).unwrap(),
            ),
            (
                "req_2".into(),
                hexpm::version::Range::new("== 1.0.0".into()).unwrap(),
            ),
        ]
        .into(),
    };
    let expected = hexpm::Package {
        name: "package".into(),
        repository: "local".into(),
        releases: vec![hexpm::Release {
            version: Version::new(1, 0, 0),
            requirements: [
                (
                    "req_1".into(),
                    hexpm::Dependency {
                        requirement: hexpm::version::Range::new("~> 1.0.0".into()).unwrap(),
                        optional: false,
                        app: None,
                        repository: None,
                    },
                ),
                (
                    "req_2".into(),
                    hexpm::Dependency {
                        requirement: hexpm::version::Range::new("== 1.0.0".into()).unwrap(),
                        optional: false,
                        app: None,
                        repository: None,
                    },
                ),
            ]
            .into(),
            retirement_status: None,
            outer_checksum: vec![],
            meta: (),
        }],
    };
    assert_eq!(provided.to_hex_package(&"package".into()), expected);
}
#[test]
fn provided_local_to_manifest() {
    // Conversion to a manifest package records "gleam" as the build tool,
    // flattens the requirements map into a name list, and keeps the source.
    let provided = ProvidedPackage {
        source: ProvidedPackageSource::Local {
            path: "canonical/path/to/package".into(),
        },
        version: Version::new(1, 0, 0),
        requirements: [
            (
                "req_1".into(),
                hexpm::version::Range::new("~> 1.0.0".into()).unwrap(),
            ),
            (
                "req_2".into(),
                hexpm::version::Range::new("== 1.0.0".into()).unwrap(),
            ),
        ]
        .into(),
    };
    let expected = ManifestPackage {
        name: "package".into(),
        version: Version::new(1, 0, 0),
        otp_app: None,
        build_tools: vec!["gleam".into()],
        requirements: vec!["req_1".into(), "req_2".into()],
        source: ManifestPackageSource::Local {
            path: "canonical/path/to/package".into(),
        },
    };
    assert_eq!(provided.to_manifest_package("package"), expected);
}
#[test]
fn provided_git_to_manifest() {
    // Same as the local case, but the manifest source must carry the git
    // repository URL and the resolved commit hash.
    let provided = ProvidedPackage {
        source: ProvidedPackageSource::Git {
            repo: "https://github.com/gleam-lang/gleam.git".into(),
            commit: "bd9fe02f72250e6a136967917bcb1bdccaffa3c8".into(),
        },
        version: Version::new(1, 0, 0),
        requirements: [
            (
                "req_1".into(),
                hexpm::version::Range::new("~> 1.0.0".into()).unwrap(),
            ),
            (
                "req_2".into(),
                hexpm::version::Range::new("== 1.0.0".into()).unwrap(),
            ),
        ]
        .into(),
    };
    let expected = ManifestPackage {
        name: "package".into(),
        version: Version::new(1, 0, 0),
        otp_app: None,
        build_tools: vec!["gleam".into()],
        requirements: vec!["req_1".into(), "req_2".into()],
        source: ManifestPackageSource::Git {
            repo: "https://github.com/gleam-lang/gleam.git".into(),
            commit: "bd9fe02f72250e6a136967917bcb1bdccaffa3c8".into(),
        },
    };
    assert_eq!(provided.to_manifest_package("package"), expected);
}
#[test]
fn verified_requirements_equality_with_canonicalized_paths() {
    // Two path requirements that point at the same file through different
    // spellings (canonical vs `./..`-laden relative) must compare equal.
    let temp_dir = tempfile::tempdir().expect("Failed to create a temp directory");
    let temp_path = Utf8PathBuf::from_path_buf(temp_dir.path().to_path_buf())
        .expect("Path should be valid UTF-8");
    let sub_dir = temp_path.join("subdir");
    std::fs::create_dir(&sub_dir).expect("Failed to create a subdir");
    let file_path = sub_dir.join("file.txt");
    fs::write(&file_path, "content").expect("Failed to write to file");
    let canonical_path = std::fs::canonicalize(&file_path).expect("Failed to canonicalize path");
    let relative_path = temp_path.join("./subdir/../subdir/./file.txt");
    let canonical = HashMap::from([(
        EcoString::from("dep1"),
        Requirement::Path {
            path: Utf8PathBuf::from(canonical_path.to_str().expect("Path should be valid UTF-8")),
        },
    )]);
    let relative = HashMap::from([(
        EcoString::from("dep1"),
        Requirement::Path {
            path: Utf8PathBuf::from(relative_path.to_string()),
        },
    )]);
    assert!(
        is_same_requirements(&canonical, &relative, &temp_path)
            .expect("Requirements should be the same")
    );
}
fn create_testable_unlock_manifest(
packages: Vec<(EcoString, Version, Vec<EcoString>)>,
requirements: Vec<(EcoString, EcoString)>,
) -> Manifest {
let manifest_packages = packages
.into_iter()
.map(|(name, version, requirements)| ManifestPackage {
name,
version,
build_tools: vec!["gleam".into()],
otp_app: None,
requirements,
source: ManifestPackageSource::Hex {
outer_checksum: Base16Checksum(vec![]),
},
})
.collect();
let root_requirements = requirements
.into_iter()
.map(|(name, range)| {
(
name,
Requirement::Hex {
version: hexpm::version::Range::new(range.into()).unwrap(),
},
)
})
.collect();
Manifest {
packages: manifest_packages,
requirements: root_requirements,
}
}
#[test]
fn test_unlock_package() {
    // Unlocking a package also unlocks its transitive dependencies, while
    // unrelated packages stay locked.
    let mut locked = HashMap::from([
        ("package_a".into(), Version::new(1, 0, 0)),
        ("package_b".into(), Version::new(2, 0, 0)),
        ("package_c".into(), Version::new(3, 0, 0)),
        ("package_d".into(), Version::new(4, 0, 0)),
    ]);
    let manifest = create_testable_unlock_manifest(
        vec![
            (
                "package_a".into(),
                Version::new(1, 0, 0),
                vec!["package_b".into()],
            ),
            (
                "package_b".into(),
                Version::new(2, 0, 0),
                vec!["package_c".into()],
            ),
            ("package_c".into(), Version::new(3, 0, 0), vec![]),
            ("package_d".into(), Version::new(4, 0, 0), vec![]),
        ],
        Vec::new(),
    );
    let to_unlock = vec!["package_a".into()];
    unlock_packages(&mut locked, &to_unlock, Some(&manifest)).unwrap();
    for name in ["package_a", "package_b", "package_c"] {
        assert!(!locked.contains_key(name));
    }
    assert!(locked.contains_key("package_d"));
}
#[test]
fn test_unlock_package_without_manifest() {
    // Without a manifest there is no dependency information, so only the
    // named package itself is unlocked.
    let mut locked = HashMap::from([
        ("package_a".into(), Version::new(1, 0, 0)),
        ("package_b".into(), Version::new(2, 0, 0)),
        ("package_c".into(), Version::new(3, 0, 0)),
    ]);
    let to_unlock = vec!["package_a".into()];
    unlock_packages(&mut locked, &to_unlock, None).unwrap();
    assert!(!locked.contains_key("package_a"));
    assert!(locked.contains_key("package_b"));
    assert!(locked.contains_key("package_c"));
}
#[test]
fn test_unlock_nonexistent_package() {
    // Asking to unlock a package that is not locked is a no-op.
    let initial_locked = HashMap::from([
        ("package_a".into(), Version::new(1, 0, 0)),
        ("package_b".into(), Version::new(2, 0, 0)),
    ]);
    let manifest = create_testable_unlock_manifest(
        vec![
            (
                "package_a".into(),
                Version::new(1, 0, 0),
                vec!["package_b".into()],
            ),
            ("package_b".into(), Version::new(2, 0, 0), vec![]),
        ],
        Vec::new(),
    );
    let mut locked = initial_locked.clone();
    let to_unlock = vec!["nonexistent_package".into()];
    unlock_packages(&mut locked, &to_unlock, Some(&manifest)).unwrap();
    assert_eq!(
        initial_locked, locked,
        "Locked packages should remain unchanged"
    );
}
#[test]
fn test_unlock_multiple_packages() {
    // Unlocking several roots at once unlocks every transitive dependency of
    // each of them.
    let mut locked = HashMap::from([
        ("package_a".into(), Version::new(1, 0, 0)),
        ("package_b".into(), Version::new(2, 0, 0)),
        ("package_c".into(), Version::new(3, 0, 0)),
        ("package_d".into(), Version::new(4, 0, 0)),
        ("package_e".into(), Version::new(5, 0, 0)),
    ]);
    let manifest = create_testable_unlock_manifest(
        vec![
            (
                "package_a".into(),
                Version::new(1, 0, 0),
                vec!["package_b".into()],
            ),
            (
                "package_b".into(),
                Version::new(2, 0, 0),
                vec!["package_c".into()],
            ),
            ("package_c".into(), Version::new(3, 0, 0), vec![]),
            (
                "package_d".into(),
                Version::new(4, 0, 0),
                vec!["package_e".into()],
            ),
            ("package_e".into(), Version::new(5, 0, 0), vec![]),
        ],
        Vec::new(),
    );
    let to_unlock = vec!["package_a".into(), "package_d".into()];
    unlock_packages(&mut locked, &to_unlock, Some(&manifest)).unwrap();
    for name in [
        "package_a",
        "package_b",
        "package_c",
        "package_d",
        "package_e",
    ] {
        assert!(!locked.contains_key(name));
    }
}
#[test]
fn test_unlock_packages_empty_input() {
    // An empty unlock list leaves the locked set untouched.
    let initial_locked = HashMap::from([
        ("package_a".into(), Version::new(1, 0, 0)),
        ("package_b".into(), Version::new(2, 0, 0)),
    ]);
    let manifest = create_testable_unlock_manifest(
        vec![
            (
                "package_a".into(),
                Version::new(1, 0, 0),
                vec!["package_b".into()],
            ),
            ("package_b".into(), Version::new(2, 0, 0), vec![]),
        ],
        Vec::new(),
    );
    let mut locked = initial_locked.clone();
    let to_unlock: Vec<EcoString> = vec![];
    unlock_packages(&mut locked, &to_unlock, Some(&manifest)).unwrap();
    assert_eq!(
        initial_locked, locked,
        "Locked packages should remain unchanged when no packages are specified to unlock"
    );
}
#[test]
fn test_unlock_package_preserve_shared_deps() {
let mut locked = HashMap::from([
("package_a".into(), Version::new(1, 0, 0)),
("package_b".into(), Version::new(2, 0, 0)),
("package_c".into(), Version::new(3, 0, 0)),
]);
let packages = vec![
(
"package_a".into(),
Version::new(1, 0, 0),
vec!["package_c".into()],
),
(
"package_b".into(),
Version::new(2, 0, 0),
vec!["package_c".into()],
),
("package_c".into(), Version::new(3, 0, 0), vec![]),
];
| rust | Apache-2.0 | f424547f02e621f1c5f28749786e05eda7feb098 | 2026-01-04T15:40:22.554517Z | true |
gleam-lang/gleam | https://github.com/gleam-lang/gleam/blob/f424547f02e621f1c5f28749786e05eda7feb098/compiler-cli/src/dependencies/dependency_manager.rs | compiler-cli/src/dependencies/dependency_manager.rs | use ecow::EcoString;
use futures::future;
use gleam_core::{
Error, Result,
build::{Mode, Telemetry},
config::PackageConfig,
dependency,
manifest::{Manifest, ManifestPackageSource, PackageChanges, Resolved},
paths::ProjectPaths,
requirement::Requirement,
};
use std::collections::HashMap;
use crate::{
build_lock::BuildLock,
dependencies::{pretty_print_major_versions_available, write_manifest_to_disc},
fs::ProjectIO,
};
use super::{
CheckMajorVersions, LocalPackages, UseManifest, add_missing_packages, is_same_requirements,
lookup_package, provide_git_package, provide_local_package, read_manifest_from_disc,
remove_extra_packages, unlock_packages,
};
/// Options that control how dependency resolution behaves, kept separate from
/// the runtime services (tokio handle, package fetcher, telemetry) needed to
/// actually perform it. Combined into a [`DependencyManager`] via
/// [`DependencyManagerConfig::into_dependency_manager`].
pub struct DependencyManagerConfig {
    /// If `Yes` we read the manifest from disc. If not set then we ignore any
    /// manifest which will result in the latest versions of the dependency
    /// packages being resolved (not the locked ones).
    pub use_manifest: UseManifest,
    /// When set to `Yes`, the cli will check for major version updates of direct dependencies and
    /// print them to the console if the major versions are not upgradeable due to constraints.
    pub check_major_versions: CheckMajorVersions,
}
impl DependencyManagerConfig {
pub fn into_dependency_manager<Telem: Telemetry, P: dependency::PackageFetcher>(
self,
runtime: tokio::runtime::Handle,
package_fetcher: P,
telemetry: Telem,
mode: Mode,
) -> DependencyManager<Telem, P> {
DependencyManager {
runtime,
package_fetcher,
telemetry,
mode,
use_manifest: self.use_manifest,
check_major_versions: self.check_major_versions,
}
}
}
/// Resolves and downloads a project's dependency packages, combining the
/// user-facing options from [`DependencyManagerConfig`] with the runtime
/// services required to fetch packages.
pub struct DependencyManager<Telem, P> {
    // Tokio handle used to drive async work (package downloads, lookups)
    // to completion from synchronous code.
    runtime: tokio::runtime::Handle,
    // Source of package metadata used by the resolver.
    package_fetcher: P,
    // Build mode passed to `config.dependencies_for` when selecting which
    // requirements to resolve.
    mode: Mode,
    // Whether an existing manifest should inform (lock) resolution.
    use_manifest: UseManifest,
    // Progress/diagnostics reporting sink.
    telemetry: Telem,
    // Whether to report available major version upgrades after resolving.
    check_major_versions: CheckMajorVersions,
}
impl<Telem, P> DependencyManager<Telem, P>
where
    P: dependency::PackageFetcher,
    Telem: Telemetry,
{
    /// Resolve the dependency versions used by a package.
    ///
    /// If the `use_manifest` configuration was set to `false` then it'll always resolve all the
    /// versions, even if there are already versions locked in the manifest.
    pub fn resolve_versions(
        &self,
        paths: &ProjectPaths,
        config: &PackageConfig,
        packages_to_update: Vec<EcoString>,
    ) -> Result<Resolved> {
        // If there's no manifest then the only thing we can do is attempt to update the versions.
        if !paths.manifest().exists() {
            tracing::debug!("manifest_not_present");
            let manifest = self.perform_version_resolution(paths, config, None, Vec::new())?;
            return Ok(Resolved::all_added(manifest));
        }
        let existing_manifest = read_manifest_from_disc(paths)?;
        // If we have been asked not to use the manifest then resolution runs
        // with no locked versions at all, as if from scratch.
        let (requirements_changed, manifest_for_resolver) = match self.use_manifest {
            UseManifest::No => (true, None),
            UseManifest::Yes => {
                let same_requirements = is_same_requirements(
                    &existing_manifest.requirements,
                    &config.all_direct_dependencies()?,
                    paths.root(),
                )?;
                // If the manifest is to be used and the requirements have not changed then there's
                // no point in performing resolution, it'll always result in the same versions
                // already specified in the manifest.
                if packages_to_update.is_empty() && same_requirements {
                    return Ok(Resolved::no_change(existing_manifest));
                }
                // Otherwise, use the manifest to inform resolution.
                (!same_requirements, Some(&existing_manifest))
            }
        };
        tracing::debug!("manifest_outdated");
        let new_manifest = self.perform_version_resolution(
            paths,
            config,
            manifest_for_resolver,
            packages_to_update,
        )?;
        let resolved = Resolved {
            package_changes: PackageChanges::between_manifests(&existing_manifest, &new_manifest),
            manifest: new_manifest,
            requirements_changed,
        };
        Ok(resolved)
    }

    /// Resolve dependency versions for the project at `paths` and synchronise
    /// the local packages cache with the result: no-longer-needed packages
    /// are removed, missing ones are downloaded, and the manifest is
    /// rewritten if anything changed.
    ///
    /// `new_package` optionally carries requirements to insert into the
    /// project config first; its `bool` selects dev-dependencies when true.
    pub fn resolve_and_download_versions(
        &self,
        paths: &ProjectPaths,
        new_package: Option<(Vec<(EcoString, Requirement)>, bool)>,
        packages_to_update: Vec<EcoString>,
    ) -> Result<Manifest> {
        let span = tracing::info_span!("download_deps");
        let _enter = span.enter();
        // We do this before acquiring the build lock so that we don't create the
        // build directory if there is no gleam.toml
        crate::config::ensure_config_exists(paths)?;
        let lock = BuildLock::new_packages(paths)?;
        let _guard = lock.lock(&self.telemetry);
        let fs = ProjectIO::boxed();
        // Read the project config
        let mut config = crate::config::read(paths.root_config())?;
        let project_name = config.name.clone();
        // Insert the new packages to add, if it exists
        if let Some((packages, dev)) = new_package {
            for (package, requirement) in packages {
                if dev {
                    _ = config.dev_dependencies.insert(package, requirement);
                } else {
                    _ = config.dependencies.insert(package, requirement);
                };
            }
        }
        // Determine what versions we need
        let resolved = self.resolve_versions(paths, &config, packages_to_update)?;
        let local = LocalPackages::read_from_disc(paths)?;
        // Remove any packages that are no longer required due to gleam.toml changes
        remove_extra_packages(paths, &local, &resolved.manifest, &self.telemetry)?;
        // Download them from Hex to the local cache
        self.runtime.block_on(add_missing_packages(
            paths,
            fs,
            &resolved.manifest,
            &local,
            project_name,
            &self.telemetry,
        ))?;
        if resolved.any_changes() {
            // Record new state of the packages directory
            // TODO: test
            tracing::debug!("writing_manifest_toml");
            write_manifest_to_disc(paths, &resolved.manifest)?;
        }
        LocalPackages::from_manifest(&resolved.manifest).write_to_disc(paths)?;
        // Display the changes in versions to the user.
        self.telemetry
            .resolved_package_versions(&resolved.package_changes);
        // If requested to do so, check if there are major upgrades that could be performed with
        // more relaxed version requirements, and inform the user if so.
        if let CheckMajorVersions::Yes = self.check_major_versions {
            let major_versions_available = dependency::check_for_major_version_updates(
                &resolved.manifest,
                &self.package_fetcher,
            );
            if !major_versions_available.is_empty() {
                eprintln!(
                    "{}",
                    pretty_print_major_versions_available(major_versions_available)
                );
            }
        }
        Ok(resolved.manifest)
    }

    // Run version resolution over the project's requirements, treating path
    // and git dependencies as locally "provided" packages that are converted
    // to hex packages for the resolver's benefit.
    fn perform_version_resolution(
        &self,
        project_paths: &ProjectPaths,
        config: &PackageConfig,
        manifest: Option<&Manifest>,
        packages_to_update: Vec<EcoString>,
    ) -> Result<Manifest, Error> {
        self.telemetry.resolving_package_versions();
        let dependencies = config.dependencies_for(self.mode)?;
        let mut locked = config.locked(manifest)?;
        if !packages_to_update.is_empty() {
            unlock_packages(&mut locked, &packages_to_update, manifest)?;
        }
        // Packages which are provided directly instead of downloaded from hex
        let mut provided_packages = HashMap::new();
        // The version requirements of the current project
        let mut root_requirements = HashMap::new();
        // Populate the provided_packages and root_requirements maps
        for (name, requirement) in dependencies.into_iter() {
            let version = match requirement {
                Requirement::Hex { version } => version,
                Requirement::Path { path } => provide_local_package(
                    name.clone(),
                    &path,
                    project_paths.root(),
                    project_paths,
                    &mut provided_packages,
                    &mut vec![],
                )?,
                Requirement::Git { git, ref_ } => {
                    // If this package is locked and we already resolved a commit
                    // hash for it, we want to use that hash rather than pulling
                    // the latest commit.
                    let ref_to_use = if locked.contains_key(&name)
                        && let Some(manifest) = manifest
                        && let Some(package) = manifest
                            .packages
                            .iter()
                            .find(|package| package.name == name)
                        && let ManifestPackageSource::Git { commit, .. } = &package.source
                    {
                        commit
                    } else {
                        // If the package is unlocked or we haven't resolved a version yet, we use
                        // the ref specified in `gleam.toml`.
                        &ref_
                    };
                    provide_git_package(
                        name.clone(),
                        &git,
                        ref_to_use,
                        project_paths,
                        &mut provided_packages,
                        &mut Vec::new(),
                    )?
                }
            };
            let _ = root_requirements.insert(name, version);
        }
        // Convert provided packages into hex packages for pub-grub resolve
        let provided_hex_packages = provided_packages
            .iter()
            .map(|(name, package)| (name.clone(), package.to_hex_package(name)))
            .collect();
        let resolved = dependency::resolve_versions(
            &self.package_fetcher,
            provided_hex_packages,
            config.name.clone(),
            root_requirements.into_iter(),
            &locked,
        )?;
        // Convert the hex packages and local packages into manifest packages
        let manifest_packages = self.runtime.block_on(future::try_join_all(
            resolved
                .into_iter()
                .map(|(name, version)| lookup_package(name, version, &provided_packages)),
        ))?;
        let manifest = Manifest {
            packages: manifest_packages,
            requirements: config.all_direct_dependencies()?,
        };
        Ok(manifest)
    }
}
| rust | Apache-2.0 | f424547f02e621f1c5f28749786e05eda7feb098 | 2026-01-04T15:40:22.554517Z | false |
gleam-lang/gleam | https://github.com/gleam-lang/gleam/blob/f424547f02e621f1c5f28749786e05eda7feb098/compiler-cli/src/fs/tests.rs | compiler-cli/src/fs/tests.rs | use camino::Utf8Path;
use itertools::Itertools;
/// A freshly created directory is not inside a git work tree until
/// `git_init` has been run on it.
#[test]
fn is_inside_git_work_tree_ok() {
    let temp = tempfile::tempdir().unwrap();
    let dir = Utf8Path::from_path(temp.path()).expect("Non Utf-8 Path");
    // Before init: not a work tree.
    assert!(!super::is_inside_git_work_tree(dir).unwrap());
    // Initialise a repository in place.
    assert_eq!(super::git_init(dir), Ok(()));
    // After init: the same directory now reports as inside a work tree.
    assert!(super::is_inside_git_work_tree(dir).unwrap())
}
/// `git_init` creates a `.git` directory in a previously untracked folder.
#[test]
fn git_init_success() {
    let temp = tempfile::tempdir().unwrap();
    let root = Utf8Path::from_path(temp.path()).expect("Non Utf-8 Path");
    let git_dir = root.join(".git");
    // No repository metadata exists yet.
    assert!(!git_dir.exists());
    assert_eq!(super::git_init(root), Ok(()));
    // Init must have created the `.git` directory.
    assert!(git_dir.exists());
}
// `git_init` must still succeed — but create no nested repository — when
// the target directory already lives inside an existing git work tree.
#[test]
fn git_init_already_in_git() {
    let tmp_dir = tempfile::tempdir().unwrap();
    let git = Utf8Path::from_path(tmp_dir.path())
        .expect("Non Utf-8 Path")
        .join(".git");
    assert!(!git.exists());
    // First init: creates the repository at the temp dir root.
    assert_eq!(
        super::git_init(Utf8Path::from_path(tmp_dir.path()).expect("Non Utf-8 Path")),
        Ok(())
    );
    assert!(git.exists());
    let sub = Utf8Path::from_path(tmp_dir.path())
        .expect("Non Utf-8 Path")
        .join("subproject");
    // Shadow `git` with the would-be nested repository path.
    let git = sub.join(".git");
    crate::fs::mkdir(&sub).unwrap();
    assert!(!git.exists());
    // Second init inside the existing work tree: succeeds without
    // creating a second `.git` directory.
    assert_eq!(super::git_init(&sub), Ok(()));
    assert!(!git.exists());
}
// A `build` directory under a directory containing a `gleam.toml` is
// skipped when collecting `.gleam` sources, while a directory merely named
// `build` elsewhere is treated as ordinary source.
#[test]
fn exclude_build_dir() {
    /*
    a
    |-- gleam.toml
    |-- build
    |   |-- f.gleam # do not count as gleam file
    b
    |-- build
    |   |-- f.gleam # count as gleam file
    */
    let tmp_dir = tempfile::tempdir().unwrap();
    let path = Utf8Path::from_path(tmp_dir.path()).expect("Non Utf-8 Path");
    // excluded gleam file: `a` holds a `gleam.toml`, so `a/build` is
    // treated as build output and skipped.
    {
        let gleam_toml = path.join("a/gleam.toml").to_path_buf();
        super::write(&gleam_toml, "").unwrap();
        let gleam_file = path.join("a/build/f.gleam").to_path_buf();
        super::write(&gleam_file, "").unwrap();
    };
    // included gleam file: `b` has no `gleam.toml`, so its `build`
    // directory is walked like any other.
    let gleam_file = path.join("b/build/f.gleam").to_path_buf();
    super::write(&gleam_file, "").unwrap();
    let files = super::gleam_files(path).collect::<Vec<_>>();
    assert_eq!(files, vec![gleam_file]);
}
// `erlang_files` must return every `.erl`/`.hrl` file under the directory
// — including hidden files and files matched by `.gitignore` — while
// leaving out files of any other extension.
#[test]
fn erlang_files_include_gitignored_files() {
    let tmp_dir = tempfile::tempdir().unwrap();
    let path = Utf8Path::from_path(tmp_dir.path()).expect("Non Utf-8 Path");
    // All of these must be returned even though the `.gitignore` written
    // below matches `build/` and `ignored.*`.
    let included_files = &[
        ".hidden.erl",
        "abc.erl",
        "abc.hrl",
        "build/include/abc.erl",
        "build/include/abc.hrl",
        "ignored.erl",
        "ignored.hrl",
    ];
    // Filtered out purely by extension.
    let excluded_files = &[
        ".gitignore",
        "abc.gleam",
        "abc.js",
        "build/abc.gleam",
        "build/abc.js",
    ];
    let gitignore = "build/
ignored.*";
    // The `.gitignore` fixture gets the ignore rules as its contents;
    // every other fixture file is written empty.
    for &file in included_files.iter().chain(excluded_files) {
        let contents = match file {
            ".gitignore" => gitignore,
            _ => "",
        };
        super::write(&path.join(file), contents).unwrap();
    }
    let mut chosen_files = super::erlang_files(path).collect_vec();
    // Sort so the comparison is independent of directory-walk order.
    chosen_files.sort_unstable();
    let expected_files = included_files.iter().map(|s| path.join(s)).collect_vec();
    assert_eq!(expected_files, chosen_files);
}
/// Paths with valid module path segments under the given prefix are
/// recognised as gleam source paths.
#[test]
fn is_gleam_path_test() {
    let prefix = Utf8Path::new("/some-prefix/");
    for module_path in [
        "/some-prefix/a.gleam",
        "/some-prefix/one_two/a.gleam",
        "/some-prefix/one_two/a123.gleam",
        "/some-prefix/one_2/a123.gleam",
    ] {
        assert!(super::is_gleam_path(Utf8Path::new(module_path), prefix));
    }
}
// `extract_distro_id` pulls the value of the `ID=` line out of an
// `/etc/os-release` style document. The assertions pin its edge cases:
// surrounding quotes are stripped, value whitespace is preserved verbatim,
// the `ID` key is case sensitive, and the first `ID=` line wins when the
// key appears more than once.
#[test]
fn extract_distro_id_test() {
    // Typical os-release documents.
    let os_release = "
PRETTY_NAME=\"Debian GNU/Linux 12 (bookworm)\"
NAME=\"Debian GNU/Linux\"
VERSION_ID=\"12\"
VERSION=\"12 (bookworm)\"
VERSION_CODENAME=bookworm
ID=debian
HOME_URL=\"https://www.debian.org/\"
";
    assert_eq!(super::extract_distro_id(os_release.to_string()), "debian");
    let os_release = "
VERSION_CODENAME=jammy
ID=ubuntu
ID_LIKE=debian
HOME_URL=\"https://www.ubuntu.com/\"
";
    assert_eq!(super::extract_distro_id(os_release.to_string()), "ubuntu");
    // Degenerate inputs yield the empty string.
    assert_eq!(super::extract_distro_id("".to_string()), "");
    assert_eq!(super::extract_distro_id("\n".to_string()), "");
    assert_eq!(super::extract_distro_id("ID=".to_string()), "");
    // Whitespace in the value is kept exactly as written.
    assert_eq!(super::extract_distro_id("ID= ".to_string()), " ");
    assert_eq!(
        super::extract_distro_id("ID= space test ".to_string()),
        " space test "
    );
    // The key is case sensitive: lowercase `id=` does not match.
    assert_eq!(super::extract_distro_id("id=ubuntu".to_string()), "");
    assert_eq!(
        super::extract_distro_id("NAME=\"Debian\"\nID=debian".to_string()),
        "debian"
    );
    assert_eq!(
        super::extract_distro_id("\n\nNAME=\n\n\nID=test123\n".to_string()),
        "test123"
    );
    // Quoted values are unquoted, and the first `ID=` line takes
    // precedence over later ones.
    assert_eq!(
        super::extract_distro_id("\nID=\"id first\"\nID=another_id".to_string()),
        "id first"
    );
}
| rust | Apache-2.0 | f424547f02e621f1c5f28749786e05eda7feb098 | 2026-01-04T15:40:22.554517Z | false |
gleam-lang/gleam | https://github.com/gleam-lang/gleam/blob/f424547f02e621f1c5f28749786e05eda7feb098/test-output/src/lib.rs | test-output/src/lib.rs | #[cfg(test)]
mod tests;
| rust | Apache-2.0 | f424547f02e621f1c5f28749786e05eda7feb098 | 2026-01-04T15:40:22.554517Z | false |
gleam-lang/gleam | https://github.com/gleam-lang/gleam/blob/f424547f02e621f1c5f28749786e05eda7feb098/test-output/src/tests.rs | test-output/src/tests.rs | #[cfg(test)]
mod echo;
| rust | Apache-2.0 | f424547f02e621f1c5f28749786e05eda7feb098 | 2026-01-04T15:40:22.554517Z | false |
gleam-lang/gleam | https://github.com/gleam-lang/gleam/blob/f424547f02e621f1c5f28749786e05eda7feb098/test-output/src/tests/echo.rs | test-output/src/tests/echo.rs | use std::{io::Read, process::Stdio};
use camino::{Utf8Path, Utf8PathBuf};
use gleam_core::{
build::{Runtime, Target},
io::Command,
paths::ProjectPaths,
};
use gleam_cli::{
fs,
run::{self, Which},
};
/// Builds and runs the `main` module of the project at `project_directory`
/// for the given target/runtime, then renders the module's source together
/// with the captured run output as one snapshot-friendly string.
fn run_and_produce_pretty_snapshot(
    target: Option<Target>,
    runtime: Option<Runtime>,
    project_directory: Utf8PathBuf,
) -> String {
    let project_root = fs::get_project_root(project_directory).expect("project root");
    let paths = ProjectPaths::new(project_root);
    let output = run_and_capture_output(&paths, "main", target, runtime)
        // Since the echo output contains a path we replace the `\` with a `/`
        // so that the snapshot doesn't fail on Windows in CI.
        .replace("src\\", "src/");
    let main_module_content =
        fs::read(paths.src_directory().join("main.gleam")).expect("read main module");
    format!(
        "--- main.gleam ----------------------
{main_module_content}
--- gleam run output ----------------
{output}
"
    )
}
/// Wipes the project's build directory, asks `run::setup` for the command
/// that executes `main_module` under the requested target/runtime, spawns
/// that command, and returns everything it wrote to stderr (the stream
/// these snapshot tests capture).
fn run_and_capture_output(
    paths: &ProjectPaths,
    main_module: &str,
    target: Option<Target>,
    runtime: Option<Runtime>,
) -> String {
    // Start from a clean build so artifacts from a previous run (possibly
    // for a different target) cannot affect the output.
    fs::delete_directory(&paths.build_directory()).expect("delete build directory content");
    let Command {
        program,
        args,
        env,
        cwd: _,
        stdio: _,
    } = run::setup(
        paths,
        vec![],
        target,
        runtime,
        Some(main_module.into()),
        Which::Src,
        true,
    )
    .expect("run setup");
    // Spawn the produced command ourselves so stderr can be captured;
    // stdin and stdout are discarded.
    let mut process = std::process::Command::new(&program)
        .args(args)
        .stdin(Stdio::null())
        .stdout(Stdio::null())
        .stderr(Stdio::piped())
        .envs(env.iter().map(|pair| (&pair.0, &pair.1)))
        .current_dir(paths.root())
        .spawn()
        .unwrap_or_else(|e| panic!("Failed to spawn process '{}': {}", &program, &e));
    // Drain stderr before waiting so the child cannot block on a full pipe.
    let mut stderr = process.stderr.take().expect("take stderr");
    let mut output = String::new();
    let _ = stderr.read_to_string(&mut output).expect("read stderr");
    let _ = process.wait().expect("run with no errors");
    output
}
// Snapshot-tests the output of the test project at
// `test-output/cases/<project_name>`.
//
// Forms:
// - `assert_echo!(project)`: runs on Erlang plus every JavaScript runtime
//   (Bun, Deno, NodeJs), asserting one shared snapshot for all of them.
// - `assert_echo!(target, project)`: restricts to one target (still every
//   runtime when the target is JavaScript), for output that differs per
//   target.
// - The three/four-argument form is internal: it performs a single run and
//   asserts the already-named snapshot.
macro_rules! assert_echo {
    ($project_name: expr) => {
        let snapshot_name = snapshot_name(None, None, $project_name);
        // `allow_duplicates!` lets each target/runtime combination assert
        // against the same snapshot file.
        insta::allow_duplicates! {
            assert_echo!(&snapshot_name, Some(Target::Erlang), None, $project_name);
            assert_echo!(&snapshot_name, Some(Target::JavaScript), Some(Runtime::Bun), $project_name);
            assert_echo!(&snapshot_name, Some(Target::JavaScript), Some(Runtime::Deno), $project_name);
            assert_echo!(&snapshot_name, Some(Target::JavaScript), Some(Runtime::NodeJs), $project_name);
        }
    };
    ($target: expr, $project_name: expr) => {
        let snapshot_name = snapshot_name(Some($target), None, $project_name);
        match $target {
            Target::JavaScript => insta::allow_duplicates! {
                assert_echo!(&snapshot_name, Some($target), Some(Runtime::Bun), $project_name);
                assert_echo!(&snapshot_name, Some($target), Some(Runtime::Deno), $project_name);
                assert_echo!(&snapshot_name, Some($target), Some(Runtime::NodeJs), $project_name);
            },
            Target::Erlang => {
                assert_echo!(&snapshot_name, Some($target), None, $project_name);
            }
        }
    };
    ($snapshot_name: expr, $target: expr, $runtime: expr, $project_name: expr) => {
        let path = fs::canonicalise(&Utf8Path::new("../test-output/cases").join($project_name))
            .expect("canonicalise path");
        let output = run_and_produce_pretty_snapshot($target, $runtime, path);
        insta::assert_snapshot!($snapshot_name.to_string(), output);
    };
}
/// Builds the snapshot name for one run: an optional `<target>-` and
/// `<runtime>-` prefix (each included only when present) followed by the
/// project-name suffix.
fn snapshot_name(target: Option<Target>, runtime: Option<Runtime>, suffix: &str) -> String {
    let mut name = String::new();
    if let Some(target) = target {
        name.push_str(&format!("{target}-"));
    }
    if let Some(runtime) = runtime {
        name.push_str(&format!("{runtime}-"));
    }
    name.push_str(suffix);
    name
}
// One test per `test-output/cases/echo_*` project. Projects whose rendered
// output is identical on every target use the one-argument form of
// `assert_echo!`, sharing a single snapshot; projects whose formatting
// differs between Erlang and JavaScript assert one snapshot per target.
#[test]
fn echo_bitarray() {
    assert_echo!(Target::JavaScript, "echo_bitarray");
    assert_echo!(Target::Erlang, "echo_bitarray");
}
#[test]
fn echo_bool() {
    assert_echo!("echo_bool");
}
#[test]
fn echo_charlist() {
    assert_echo!("echo_charlist");
}
#[test]
fn echo_custom_type() {
    assert_echo!(Target::Erlang, "echo_custom_type");
    assert_echo!(Target::JavaScript, "echo_custom_type");
}
#[test]
fn echo_dict() {
    assert_echo!("echo_dict");
}
#[test]
fn echo_float() {
    assert_echo!(Target::Erlang, "echo_float");
    assert_echo!(Target::JavaScript, "echo_float");
}
#[test]
fn echo_function() {
    assert_echo!("echo_function");
}
#[test]
fn echo_importing_module_named_inspect() {
    assert_echo!("echo_importing_module_named_inspect");
}
#[test]
fn echo_int() {
    assert_echo!("echo_int");
}
#[test]
fn echo_list() {
    assert_echo!("echo_list");
}
#[test]
fn echo_nil() {
    assert_echo!("echo_nil");
}
#[test]
fn echo_string() {
    assert_echo!("echo_string");
}
#[test]
fn echo_tuple() {
    assert_echo!("echo_tuple");
}
// Only run on the Erlang target.
#[test]
fn echo_non_record_atom_tag() {
    assert_echo!(Target::Erlang, "echo_non_record_atom_tag");
}
// Only run on the JavaScript target.
#[test]
fn echo_circular_reference() {
    assert_echo!(Target::JavaScript, "echo_circular_reference");
}
#[test]
fn echo_singleton() {
    assert_echo!("echo_singleton");
}
#[test]
fn echo_with_message() {
    assert_echo!("echo_with_message");
}
| rust | Apache-2.0 | f424547f02e621f1c5f28749786e05eda7feb098 | 2026-01-04T15:40:22.554517Z | false |
gleam-lang/gleam | https://github.com/gleam-lang/gleam/blob/f424547f02e621f1c5f28749786e05eda7feb098/test-project-compiler/build.rs | test-project-compiler/build.rs | use std::path::PathBuf;
pub fn main() {
println!("cargo:rerun-if-changed=cases");
let mut module = "//! This file is generated by build.rs
//! Do not edit it directly, instead add new test cases to ./cases
use gleam_core::build::Mode;
"
.to_string();
let cases = PathBuf::from("./cases");
let mut names: Vec<_> = std::fs::read_dir(&cases)
.unwrap()
.map(|entry| entry.unwrap().file_name().into_string().unwrap())
.collect();
names.sort();
for name in names {
let path = cases.join(&name);
let path = path.to_str().unwrap().replace('\\', "/");
module.push_str(&testcase(&name, &path, "Dev"));
module.push_str(&testcase(&name, &path, "Prod"));
module.push_str(&testcase(&name, &path, "Lsp"));
}
let out = PathBuf::from("./src/generated_tests.rs");
std::fs::write(out, module).unwrap();
}
/// Renders the Rust source for one generated snapshot test.
///
/// The generated test function (and its snapshot) is named
/// `<name>_<mode lowercased>`; it prepares the project at `path` in the
/// given build `mode` and asserts the resulting snapshot.
fn testcase(name: &str, path: &str, mode: &str) -> String {
    // The `name =` binding below overrides every `{name}` placeholder;
    // `path` and `mode` are captured implicitly from the arguments.
    let test_name = format!("{name}_{}", mode.to_lowercase());
    format!(
        r#"
#[rustfmt::skip]
#[test]
fn {name}() {{
let output = crate::prepare("{path}", Mode::{mode});
insta::assert_snapshot!(
"{name}",
output,
"{path}",
);
}}
"#,
        name = test_name
    )
}
| rust | Apache-2.0 | f424547f02e621f1c5f28749786e05eda7feb098 | 2026-01-04T15:40:22.554517Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.