repo stringlengths 6 65 | file_url stringlengths 81 311 | file_path stringlengths 6 227 | content stringlengths 0 32.8k | language stringclasses 1
value | license stringclasses 7
values | commit_sha stringlengths 40 40 | retrieved_at stringdate 2026-01-04 15:31:58 2026-01-04 20:25:31 | truncated bool 2
classes |
|---|---|---|---|---|---|---|---|---|
wasmerio/ate | https://github.com/wasmerio/ate/blob/87635b5b49c4163885ce840f4f1c2f30977f40cc/wasmer-deploy-cli/src/bus/server.rs | wasmer-deploy-cli/src/bus/server.rs | #![allow(unused_imports, dead_code)]
use async_trait::async_trait;
use ate::loader::DummyLoader;
use ate::prelude::*;
use ate::utils::LoadProgress;
use wasmer_auth::prelude::*;
use ate_files::codes::*;
use ate_files::prelude::*;
use derivative::*;
use std::io::Write;
use std::sync::Arc;
use std::time::Duration;
#[allow(unused_imports, dead_code)]
use tracing::{debug, error, info, trace, warn};
use wasmer_bus::prelude::*;
use wasmer_bus_deploy::api;
use wasmer_bus_deploy::prelude::*;
use wasmer_auth::cmd::query_command;
use wasmer_auth::request::QueryResponse;
use wasmer_auth::error::QueryError;
use wasmer_auth::error::QueryErrorKind;
use super::file_system::FileSystem;
use crate::opt::OptsBus;
#[derive(Derivative)]
#[derivative(Debug)]
pub struct DeployServer {
#[derivative(Debug = "ignore")]
registry: Arc<Registry>,
#[derivative(Debug = "ignore")]
opts: Arc<OptsBus>,
conf: AteConfig,
session_user: AteSessionUser,
auth_url: url::Url,
}
impl DeployServer {
pub async fn listen(
opts: Arc<OptsBus>,
registry: Arc<Registry>,
session_user: AteSessionUser,
conf: AteConfig,
auth_url: url::Url,
) -> Result<(), crate::error::BusError> {
// Register so we can respond to calls
let server = Arc::new(DeployServer {
registry,
opts,
conf,
session_user,
auth_url,
});
api::TokService::listen(server);
Ok(())
}
}
#[async_trait]
impl api::TokSimplified for DeployServer {
async fn user_exists(
&self,
email: String,
) -> api::TokResult<bool> {
let query = query_command(&self.registry, email, self.auth_url.clone()).await;
match query {
Ok(_) => Ok(true),
Err(QueryError(QueryErrorKind::Banned, _)) => Ok(true),
Err(QueryError(QueryErrorKind::Suspended, _)) => Ok(true),
Err(QueryError(QueryErrorKind::NotFound, _)) => Ok(false),
Err(QueryError(QueryErrorKind::InternalError(code), _)) => Err(api::TokError::InternalError(code)),
Err(err) => {
let code = ate::utils::obscure_error(err);
Err(api::TokError::InternalError(code))
}
}
}
async fn user_create(
&self,
_email: String,
_password: String
) -> api::TokResult<()> {
return Err(api::TokError::NotImplemented);
}
async fn login(
&self,
_email: String,
_password: String,
_code: Option<String>
) -> Result<Arc<dyn api::Session>, wasmer_bus_deploy::prelude::BusError> {
return Err(wasmer_bus_deploy::prelude::BusError::Unsupported);
}
}
| rust | Apache-2.0 | 87635b5b49c4163885ce840f4f1c2f30977f40cc | 2026-01-04T20:14:33.413949Z | false |
wasmerio/ate | https://github.com/wasmerio/ate/blob/87635b5b49c4163885ce840f4f1c2f30977f40cc/wasmer-deploy-cli/src/bus/main.rs | wasmer-deploy-cli/src/bus/main.rs | #![allow(unused_imports, dead_code)]
use crate::opt::OptsBus;
use ate::prelude::*;
use wasmer_auth::prelude::*;
use ate_files::codes::*;
use ate_files::prelude::*;
use std::sync::Arc;
use std::sync::Mutex;
use std::time::Duration;
use tokio::sync::mpsc;
use tokio::sync::watch;
#[allow(unused_imports, dead_code)]
use tracing::{debug, error, info, trace, warn};
use wasmer_bus::abi::CallHandle;
use wasmer_bus::abi::SerializationFormat;
use wasmer_bus::task::listen;
use wasmer_bus::task::respond_to;
use wasmer_bus::task::serve;
use wasmer_bus_fuse::api;
use wasmer_bus_fuse::api::FuseService;
use wasmer_bus_fuse::prelude::*;
use super::fuse::FuseServer;
use super::server::DeployServer;
pub async fn main_opts_bus(
opts: OptsBus,
conf: AteConfig,
token_path: String,
auth_url: url::Url,
) -> Result<(), crate::error::BusError> {
info!("wasmer-bus initializing");
// Freeze the opts
let opts = Arc::new(opts);
// Load the session
info!("loading the user session");
let session_user = match main_session_user(None, Some(token_path.clone()), None).await {
Ok(a) => a,
Err(err) => {
warn!("failed to acquire token - {}", err);
return Err(crate::error::BusErrorKind::LoginFailed.into());
}
};
// Build the configuration used to access the chains
let mut conf = conf.clone();
conf.configured_for(opts.configured_for);
conf.log_format.meta = opts.meta_format;
conf.log_format.data = opts.data_format;
conf.recovery_mode = opts.recovery_mode;
conf.compact_mode = opts
.compact_mode
.with_growth_factor(opts.compact_threshold_factor)
.with_growth_size(opts.compact_threshold_size)
.with_timer_value(Duration::from_secs(opts.compact_timer));
// Create the registry
let registry = Arc::new(Registry::new(&conf).await);
// Start the fuse and tok implementations
debug!("listing for wasmer commands");
DeployServer::listen(opts.clone(), registry.clone(), session_user.clone(), conf.clone(), auth_url.clone()).await?;
debug!("listing for fuse commands");
FuseServer::listen(opts.clone(), registry.clone(), session_user.clone(), conf.clone(), auth_url.clone()).await?;
debug!("switching to request serving mode");
wasmer_bus::task::serve().await;
Ok(())
}
fn conv_file_type(kind: ate_files::api::FileKind) -> api::FileType {
let mut ret = api::FileType::default();
match kind {
ate_files::api::FileKind::Directory => {
ret.dir = true;
}
ate_files::api::FileKind::RegularFile => {
ret.file = true;
}
ate_files::api::FileKind::FixedFile => {
ret.file = true;
}
ate_files::api::FileKind::SymLink => {
ret.symlink = true;
}
}
ret
}
fn conv_meta(file: ate_files::attr::FileAttr) -> api::Metadata {
api::Metadata {
ft: conv_file_type(file.kind),
accessed: file.accessed,
created: file.created,
modified: file.updated,
len: file.size,
}
}
| rust | Apache-2.0 | 87635b5b49c4163885ce840f4f1c2f30977f40cc | 2026-01-04T20:14:33.413949Z | false |
wasmerio/ate | https://github.com/wasmerio/ate/blob/87635b5b49c4163885ce840f4f1c2f30977f40cc/wasmer-deploy-cli/src/bus/opened_file.rs | wasmer-deploy-cli/src/bus/opened_file.rs | use super::file_io::*;
use async_trait::async_trait;
use ate_files::prelude::*;
use derivative::*;
use std::sync::Arc;
use std::sync::Mutex;
#[allow(unused_imports, dead_code)]
use tracing::{debug, error, info, trace, warn};
use wasmer_bus_fuse::api;
use wasmer_bus_fuse::prelude::*;
#[derive(Derivative, Clone)]
#[derivative(Debug)]
pub struct OpenedFile {
#[derivative(Debug = "ignore")]
handle: FsResult<Arc<OpenHandle>>,
offset: Arc<Mutex<u64>>,
append: bool,
path: String,
context: RequestContext,
#[derivative(Debug = "ignore")]
accessor: Arc<FileAccessor>,
}
impl OpenedFile {
pub fn new(
file: FsResult<Arc<OpenHandle>>,
offset: Arc<Mutex<u64>>,
context: RequestContext,
append: bool,
path: String,
accessor: Arc<FileAccessor>,
) -> OpenedFile {
OpenedFile {
handle: file,
offset,
context,
append,
path,
accessor,
}
}
pub async fn io(&self) -> Result<Arc<FileIo>, BusError> {
let handle = self.handle.clone().map_err(|_err| BusError::BadRequest)?;
Ok(Arc::new(FileIo::new(
handle,
self.offset.clone(),
self.append,
)))
}
}
#[async_trait]
impl api::OpenedFileSimplified for OpenedFile {
async fn meta(&self) -> FsResult<api::Metadata> {
if let Ok(Some(file)) = self
.accessor
.search(&self.context, self.path.as_str())
.await
{
FsResult::Ok(super::conv_meta(file))
} else {
FsResult::Err(FsError::EntityNotFound)
}
}
async fn unlink(&self) -> FsResult<()> {
let path = std::path::Path::new(&self.path);
let name = path.file_name().ok_or_else(|| FsError::InvalidInput)?;
let parent = path.parent().ok_or_else(|| FsError::InvalidInput)?;
if let Ok(Some(parent)) = self
.accessor
.search(&self.context, parent.to_string_lossy().as_ref())
.await
{
let _ = self
.accessor
.unlink(&self.context, parent.ino, name.to_string_lossy().as_ref())
.await;
Ok(())
} else {
Err(FsError::EntityNotFound)
}
}
async fn set_len(&self, len: u64) -> FsResult<()> {
let file = self.handle.clone()?;
if let Ok(_) = file.spec.fallocate(len).await {
Ok(())
} else {
Err(FsError::IOError)
}
}
async fn io(&self) -> Result<Arc<dyn api::FileIO>, BusError> {
let ret = OpenedFile::io(self).await?;
Ok(ret)
}
}
| rust | Apache-2.0 | 87635b5b49c4163885ce840f4f1c2f30977f40cc | 2026-01-04T20:14:33.413949Z | false |
wasmerio/ate | https://github.com/wasmerio/ate/blob/87635b5b49c4163885ce840f4f1c2f30977f40cc/wasmer-deploy-cli/src/request/cancel_deposit.rs | wasmer-deploy-cli/src/request/cancel_deposit.rs | use serde::*;
use crate::model::*;
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct CancelDepositRequest {
pub owner: Ownership,
}
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct CancelDepositResponse {}
#[derive(Debug, Serialize, Deserialize, Clone)]
pub enum CancelDepositFailed {
AuthenticationFailed,
AlreadyPaid,
InvalidCommodity,
InvalidCoin,
Forbidden,
InternalError(u16),
}
impl<E> From<E> for CancelDepositFailed
where
E: std::error::Error + Sized,
{
fn from(err: E) -> Self {
CancelDepositFailed::InternalError(ate::utils::obscure_error(err))
}
}
impl std::fmt::Display for CancelDepositFailed {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
match self {
CancelDepositFailed::AuthenticationFailed => {
write!(f, "The caller has no authentication to this coin")
}
CancelDepositFailed::AlreadyPaid => {
write!(f, "The deposit has already been paid")
}
CancelDepositFailed::InvalidCommodity => {
write!(f, "THe supplied commodity is not vaild")
}
CancelDepositFailed::InvalidCoin => {
write!(f, "The supplied coin is not valid")
}
CancelDepositFailed::Forbidden => {
write!(f, "This operation is forbidden")
}
CancelDepositFailed::InternalError(a) => {
write!(
f,
"An internal error occured while processing the deposit request (code={})",
a
)
}
}
}
}
| rust | Apache-2.0 | 87635b5b49c4163885ce840f4f1c2f30977f40cc | 2026-01-04T20:14:33.413949Z | false |
wasmerio/ate | https://github.com/wasmerio/ate/blob/87635b5b49c4163885ce840f4f1c2f30977f40cc/wasmer-deploy-cli/src/request/service_find.rs | wasmer-deploy-cli/src/request/service_find.rs | use serde::*;
use crate::model::*;
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct ServiceFindRequest {
pub service_name: Option<String>,
}
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct ServiceFindResponse {
pub services: Vec<AdvertisedService>,
}
#[derive(Debug, Serialize, Deserialize, Clone)]
pub enum ServiceFindFailed {
Forbidden,
InternalError(u16),
}
impl<E> From<E> for ServiceFindFailed
where
E: std::error::Error + Sized,
{
fn from(err: E) -> Self {
ServiceFindFailed::InternalError(ate::utils::obscure_error(err))
}
}
impl std::fmt::Display for ServiceFindFailed {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
match self {
ServiceFindFailed::Forbidden => {
write!(f, "This operation is forbidden")
}
ServiceFindFailed::InternalError(a) => {
write!(
f,
"An internal error occured while processing the service find request (code={})",
a
)
}
}
}
}
| rust | Apache-2.0 | 87635b5b49c4163885ce840f4f1c2f30977f40cc | 2026-01-04T20:14:33.413949Z | false |
wasmerio/ate | https://github.com/wasmerio/ate/blob/87635b5b49c4163885ce840f4f1c2f30977f40cc/wasmer-deploy-cli/src/request/coin_proof.rs | wasmer-deploy-cli/src/request/coin_proof.rs | use crate::model::*;
use ate::crypto::SignedProtectedData;
use serde::*;
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct CoinProofInner {
/// Amount to be deposited into this account
pub amount: Decimal,
/// National currency to be deposited into this account (e.g. aud,eur,gbp,usd,hkd)
pub currency: NationalCurrency,
/// Who has to pay for this invoice
pub email: String,
}
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct CoinProof {
/// Proof that the caller has write access to the account specified
pub inner: SignedProtectedData<CoinProofInner>,
}
| rust | Apache-2.0 | 87635b5b49c4163885ce840f4f1c2f30977f40cc | 2026-01-04T20:14:33.413949Z | false |
wasmerio/ate | https://github.com/wasmerio/ate/blob/87635b5b49c4163885ce840f4f1c2f30977f40cc/wasmer-deploy-cli/src/request/withdraw.rs | wasmer-deploy-cli/src/request/withdraw.rs | use ate::crypto::SignedProtectedData;
use serde::*;
use crate::model::*;
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct WithdrawRequestParams {
pub sender: String,
pub receiver: String,
pub wallet: String,
}
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct WithdrawRequest {
pub coins: Vec<CarvedCoin>,
pub params: SignedProtectedData<WithdrawRequestParams>,
}
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct WithdrawResponse {
pub currency: NationalCurrency,
pub amount_less_fees: Decimal,
pub fees: Decimal,
pub receipt_number: String,
}
#[derive(Debug, Serialize, Deserialize, Clone)]
pub enum WithdrawFailed {
OperatorNotFound,
OperatorBanned,
AccountSuspended,
AuthenticationFailed,
UnsupportedCurrency(NationalCurrency),
NotDeposited,
AlreadyWithdrawn,
TooSmall,
InvalidCommodity,
InvalidCoin,
Forbidden,
InternalError(u16),
}
impl<E> From<E> for WithdrawFailed
where
E: std::error::Error + Sized,
{
fn from(err: E) -> Self {
WithdrawFailed::InternalError(ate::utils::obscure_error(err))
}
}
impl std::fmt::Display for WithdrawFailed {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
match self {
WithdrawFailed::OperatorNotFound => {
write!(f, "The operator could not be found")
}
WithdrawFailed::OperatorBanned => {
write!(f, "The operator is currently banned")
}
WithdrawFailed::AccountSuspended => {
write!(f, "The account is suspended")
}
WithdrawFailed::AuthenticationFailed => {
write!(f, "The calling user failed the proof authentication check")
}
WithdrawFailed::NotDeposited => {
write!(f, "The funds do not exist as the deposit was not completed")
}
WithdrawFailed::AlreadyWithdrawn => {
write!(f, "The funds have already been withdrawn")
}
WithdrawFailed::InvalidCommodity => {
write!(f, "THe supplied commodity is not vaild")
}
WithdrawFailed::InvalidCoin => {
write!(f, "The supplied coin is not valid")
}
WithdrawFailed::TooSmall => {
write!(f, "The withdrawl amount is too small")
}
WithdrawFailed::UnsupportedCurrency(a) => {
write!(f, "The national currency ({}) is not supported", a)
}
WithdrawFailed::Forbidden => {
write!(f, "This operation is forbidden")
}
WithdrawFailed::InternalError(a) => {
write!(
f,
"An internal error occured while processing the withdraw request (code={})",
a
)
}
}
}
}
| rust | Apache-2.0 | 87635b5b49c4163885ce840f4f1c2f30977f40cc | 2026-01-04T20:14:33.413949Z | false |
wasmerio/ate | https://github.com/wasmerio/ate/blob/87635b5b49c4163885ce840f4f1c2f30977f40cc/wasmer-deploy-cli/src/request/coin_carve.rs | wasmer-deploy-cli/src/request/coin_carve.rs | use serde::*;
use ate::prelude::*;
use crate::model::*;
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct CoinCarveRequest {
pub coin: PrimaryKey,
pub owner: Ownership,
pub needed_denomination: Decimal,
pub new_token: EncryptKey,
}
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct CoinCarveResponse {
pub coins: Vec<CarvedCoin>,
}
#[derive(Debug, Serialize, Deserialize, Clone)]
pub enum CoinCarveFailed {
AuthenticationFailed,
InvalidCommodity,
InvalidCoin,
InvalidAmount,
InternalError(u16),
}
impl<E> From<E> for CoinCarveFailed
where
E: std::error::Error + Sized,
{
fn from(err: E) -> Self {
CoinCarveFailed::InternalError(ate::utils::obscure_error(err))
}
}
impl std::fmt::Display for CoinCarveFailed {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
match self {
CoinCarveFailed::AuthenticationFailed => {
write!(f, "The caller has no authentication to this coin")
}
CoinCarveFailed::InvalidCommodity => {
write!(f, "The supplied commodity is not vaild")
}
CoinCarveFailed::InvalidCoin => {
write!(f, "The supplied coin is not valid")
}
CoinCarveFailed::InvalidAmount => {
write!(f, "The coin is not big enough to be carved by this amount of the carvng amount is invalid")
}
CoinCarveFailed::InternalError(a) => {
write!(
f,
"An internal error occured while processing the carve request (code={})",
a
)
}
}
}
}
| rust | Apache-2.0 | 87635b5b49c4163885ce840f4f1c2f30977f40cc | 2026-01-04T20:14:33.413949Z | false |
wasmerio/ate | https://github.com/wasmerio/ate/blob/87635b5b49c4163885ce840f4f1c2f30977f40cc/wasmer-deploy-cli/src/request/coin_rotate.rs | wasmer-deploy-cli/src/request/coin_rotate.rs | use ate::crypto::SignedProtectedData;
use ate::prelude::*;
use serde::*;
use crate::model::*;
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct CoinRotateNotification {
pub operator: String,
pub receipt_number: String,
pub from: String,
pub to: String,
}
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct CoinRotateRequest {
pub coins: Vec<CarvedCoin>,
pub new_token: EncryptKey,
pub notification: Option<SignedProtectedData<CoinRotateNotification>>,
}
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct CoinRotateResponse {
pub new_owners: Vec<Ownership>,
}
#[derive(Debug, Serialize, Deserialize, Clone)]
pub enum CoinRotateFailed {
OperatorNotFound,
OperatorBanned,
AuthenticationFailed,
NoOwnership,
InvalidCommodity,
InvalidCoin,
AccountSuspended,
InternalError(u16),
}
impl<E> From<E> for CoinRotateFailed
where
E: std::error::Error + Sized,
{
fn from(err: E) -> Self {
CoinRotateFailed::InternalError(ate::utils::obscure_error(err))
}
}
impl std::fmt::Display for CoinRotateFailed {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
match self {
CoinRotateFailed::OperatorNotFound => {
write!(f, "The operator could not be found")
}
CoinRotateFailed::OperatorBanned => {
write!(f, "The operator is currently banned")
}
CoinRotateFailed::NoOwnership => {
write!(
f,
"The caller does not have access to one or more of the coins"
)
}
CoinRotateFailed::AuthenticationFailed => {
write!(f, "The caller has no authentication to this coin")
}
CoinRotateFailed::InvalidCommodity => {
write!(f, "The supplied commodity is not vaild")
}
CoinRotateFailed::InvalidCoin => {
write!(f, "The supplied coin is not valid")
}
CoinRotateFailed::AccountSuspended => {
write!(f, "The account is suspended")
}
CoinRotateFailed::InternalError(a) => {
write!(
f,
"An internal error occured while processing the carve request (code={})",
a
)
}
}
}
}
| rust | Apache-2.0 | 87635b5b49c4163885ce840f4f1c2f30977f40cc | 2026-01-04T20:14:33.413949Z | false |
wasmerio/ate | https://github.com/wasmerio/ate/blob/87635b5b49c4163885ce840f4f1c2f30977f40cc/wasmer-deploy-cli/src/request/contract_action.rs | wasmer-deploy-cli/src/request/contract_action.rs | use ate::crypto::*;
use serde::*;
use crate::model::BagOfCoins;
use crate::model::ContractStatus;
use crate::model::Invoice;
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct ContractEntropy {
/// What is this consumption related to
pub related_to: String,
/// Any coins created by this entropy should be
/// encrypted with this key before being returned
pub coin_key: PublicEncryptKey,
/// Amount of compute resources that were consumed
/// (measured in microseconds)
pub compute_used: u64,
/// Amount of download bandwidth that was consumed
/// (measured in bytes)
pub download_bandwidth_used: u64,
/// Amount of upload bandwidth that was consumed
/// (measured in bytes)
pub upload_bandwidth_used: u64,
/// Total amount of storage that is being occupied
/// (measured in bytes)
pub storage_used: u64,
}
#[derive(Debug, Serialize, Deserialize, Clone)]
pub enum ContractAction {
Cancel,
Elevate,
Entropy(ContractEntropy),
}
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct ContractActionRequestParams {
pub service_code: String,
pub consumer_identity: String,
pub action: ContractAction,
}
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct ContractActionRequest {
pub requester_identity: String,
pub action_key: Option<EncryptKey>,
pub params: SignedProtectedData<ContractActionRequestParams>,
}
#[derive(Debug, Serialize, Deserialize, Clone)]
pub enum ContractActionResponse {
ContractTerminated,
Elevated {
broker_key: PublicEncryptedSecureData<EncryptKey>,
},
Entropy {
coins: Option<MultiEncryptedSecureData<BagOfCoins>>,
status: ContractStatus,
invoice: Option<Invoice>,
},
}
#[derive(Debug, Serialize, Deserialize, Clone)]
pub enum ContractActionFailed {
OperatorNotFound,
OperatorBanned,
AccountSuspended,
AuthenticationFailed,
NoMasterKey,
InvalidContractReference(String),
Forbidden,
InternalError(u16),
}
impl<E> From<E> for ContractActionFailed
where
E: std::error::Error + Sized,
{
fn from(err: E) -> Self {
ContractActionFailed::InternalError(ate::utils::obscure_error(err))
}
}
impl std::fmt::Display for ContractActionFailed {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
match self {
ContractActionFailed::OperatorNotFound => {
write!(f, "The operator could not be found")
}
ContractActionFailed::OperatorBanned => {
write!(f, "The operator is currently banned")
}
ContractActionFailed::AccountSuspended => {
write!(f, "The account is suspended")
}
ContractActionFailed::AuthenticationFailed => {
write!(f, "The calling user failed the proof authentication check")
}
ContractActionFailed::InvalidContractReference(reference) => {
write!(f, "The contract does not exist ({})", reference)
}
ContractActionFailed::NoMasterKey => {
write!(
f,
"The authentication server has not been properly initialized"
)
}
ContractActionFailed::Forbidden => {
write!(f, "This operation is forbidden")
}
ContractActionFailed::InternalError(a) => {
write!(
f,
"An internal error occured while attempting to perform an action on the contract (code={})",
a
)
}
}
}
}
| rust | Apache-2.0 | 87635b5b49c4163885ce840f4f1c2f30977f40cc | 2026-01-04T20:14:33.413949Z | false |
wasmerio/ate | https://github.com/wasmerio/ate/blob/87635b5b49c4163885ce840f4f1c2f30977f40cc/wasmer-deploy-cli/src/request/contract_create.rs | wasmer-deploy-cli/src/request/contract_create.rs | use ate::crypto::*;
use ate::prelude::*;
use serde::*;
use std::time::Duration;
use crate::model::Country;
use crate::model::NationalCurrency;
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct ContractCreateRequestParams {
pub service_code: String,
pub consumer_wallet: PrimaryKey,
pub gst_country: Country,
pub broker_unlock_key: EncryptKey,
pub broker_key: PublicEncryptedSecureData<EncryptKey>,
pub limited_duration: Option<Duration>,
pub force: bool,
}
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct ContractCreateRequest {
pub consumer_identity: String,
pub params: SignedProtectedData<ContractCreateRequestParams>,
}
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct ContractCreateResponse {
pub contract_reference: String,
}
#[derive(Debug, Serialize, Deserialize, Clone)]
pub enum ContractCreateFailed {
OperatorNotFound,
OperatorBanned,
AccountSuspended,
AuthenticationFailed,
NoMasterKey,
InvalidService,
UnsupportedCurrency(NationalCurrency),
AlreadyExists(String),
Forbidden,
InternalError(u16),
}
impl<E> From<E> for ContractCreateFailed
where
E: std::error::Error + Sized,
{
fn from(err: E) -> Self {
ContractCreateFailed::InternalError(ate::utils::obscure_error(err))
}
}
impl std::fmt::Display for ContractCreateFailed {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
match self {
ContractCreateFailed::OperatorNotFound => {
write!(f, "The operator could not be found")
}
ContractCreateFailed::OperatorBanned => {
write!(f, "The operator is currently banned")
}
ContractCreateFailed::AccountSuspended => {
write!(f, "The account is suspended")
}
ContractCreateFailed::UnsupportedCurrency(currency) => {
write!(
f,
"The service does not support your currency ({})",
currency
)
}
ContractCreateFailed::AuthenticationFailed => {
write!(f, "The calling user failed the proof authentication check")
}
ContractCreateFailed::NoMasterKey => {
write!(
f,
"The authentication server has not been properly initialized"
)
}
ContractCreateFailed::InvalidService => {
write!(f, "The service was this code could not be found")
}
ContractCreateFailed::AlreadyExists(msg) => {
write!(f, "{}", msg)
}
ContractCreateFailed::Forbidden => {
write!(f, "This operation is forbidden")
}
ContractCreateFailed::InternalError(a) => {
write!(
f,
"An internal error occured while attempting the contract creation (code={})",
a
)
}
}
}
}
| rust | Apache-2.0 | 87635b5b49c4163885ce840f4f1c2f30977f40cc | 2026-01-04T20:14:33.413949Z | false |
wasmerio/ate | https://github.com/wasmerio/ate/blob/87635b5b49c4163885ce840f4f1c2f30977f40cc/wasmer-deploy-cli/src/request/coin_collect.rs | wasmer-deploy-cli/src/request/coin_collect.rs | use ate::prelude::*;
use serde::*;
use crate::model::*;
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct CoinCollectRequest {
pub coin_ancestors: Vec<Ownership>,
}
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct CoinCollectPending {
pub chain: ChainKey,
pub key: PrimaryKey,
pub invoice_number: String,
pub reserve: Decimal,
pub currency: NationalCurrency,
pub pay_url: String,
pub owner: Ownership,
}
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct CoinCollectConfirmation {
pub when: chrono::DateTime<chrono::Utc>,
pub email: String,
pub amount: Decimal,
pub currency: NationalCurrency,
pub invoice_number: String,
pub invoice_id: String,
pub invoice_url: String,
}
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct CoinCollectResponse {
pub cleared_coins: Vec<CarvedCoin>,
pub pending_deposits: Vec<CoinCollectPending>,
pub empty_ancestors: Vec<Ownership>,
pub confirmations: Vec<CoinCollectConfirmation>,
}
#[derive(Debug, Serialize, Deserialize, Clone)]
pub enum CoinCollectFailed {
AuthenticationFailed,
InvalidCommodity,
InvalidCoin,
OperatorBanned,
InternalError(u16),
}
impl<E> From<E> for CoinCollectFailed
where
E: std::error::Error + Sized,
{
fn from(err: E) -> Self {
CoinCollectFailed::InternalError(ate::utils::obscure_error(err))
}
}
impl std::fmt::Display for CoinCollectFailed {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
match self {
CoinCollectFailed::AuthenticationFailed => {
write!(f, "The caller has no authentication to this coin")
}
CoinCollectFailed::InvalidCommodity => {
write!(f, "The supplied commodity is not vaild")
}
CoinCollectFailed::InvalidCoin => {
write!(f, "The supplied coin is not valid")
}
&CoinCollectFailed::OperatorBanned => {
write!(f, "The operator is currently banned")
}
CoinCollectFailed::InternalError(a) => {
write!(
f,
"An internal error occured while processing the coin query request (code={})",
a
)
}
}
}
}
| rust | Apache-2.0 | 87635b5b49c4163885ce840f4f1c2f30977f40cc | 2026-01-04T20:14:33.413949Z | false |
wasmerio/ate | https://github.com/wasmerio/ate/blob/87635b5b49c4163885ce840f4f1c2f30977f40cc/wasmer-deploy-cli/src/request/mod.rs | wasmer-deploy-cli/src/request/mod.rs | mod cancel_deposit;
mod coin_carve;
mod coin_collect;
mod coin_combine;
mod coin_proof;
mod coin_rotate;
mod contract_action;
mod contract_create;
mod deposit;
mod service_find;
mod withdraw;
pub use wasmer_auth::request::*;
pub use cancel_deposit::*;
pub use coin_carve::*;
pub use coin_collect::*;
pub use coin_combine::*;
pub use coin_proof::*;
pub use coin_rotate::*;
pub use contract_action::*;
pub use contract_create::*;
pub use deposit::*;
pub use service_find::*;
pub use withdraw::*; | rust | Apache-2.0 | 87635b5b49c4163885ce840f4f1c2f30977f40cc | 2026-01-04T20:14:33.413949Z | false |
wasmerio/ate | https://github.com/wasmerio/ate/blob/87635b5b49c4163885ce840f4f1c2f30977f40cc/wasmer-deploy-cli/src/request/deposit.rs | wasmer-deploy-cli/src/request/deposit.rs | use serde::*;
use crate::model::*;
use super::CoinProof;
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct DepositRequest {
/// Proof that the caller has write access to the account specified
pub proof: CoinProof,
}
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct DepositCoin {
pub value: Decimal,
pub owner: Ownership,
}
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct DepositResponse {
pub invoice_id: String,
pub invoice_number: String,
pub pay_url: String,
pub qr_code: String,
pub coin_ancestor: Ownership,
}
#[derive(Debug, Serialize, Deserialize, Clone)]
pub enum DepositFailed {
OperatorNotFound,
OperatorBanned,
AccountSuspended,
AuthenticationFailed,
UnsupportedCurrency(NationalCurrency),
Forbidden,
InternalError(u16),
}
impl<E> From<E> for DepositFailed
where
E: std::error::Error + Sized,
{
fn from(err: E) -> Self {
DepositFailed::InternalError(ate::utils::obscure_error(err))
}
}
impl std::fmt::Display for DepositFailed {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
match self {
DepositFailed::OperatorNotFound => {
write!(f, "The operator could not be found")
}
DepositFailed::OperatorBanned => {
write!(f, "The operator is currently banned")
}
DepositFailed::AccountSuspended => {
write!(f, "The account is suspended")
}
DepositFailed::AuthenticationFailed => {
write!(f, "The calling user failed the proof authentication check")
}
DepositFailed::UnsupportedCurrency(a) => {
write!(f, "The national currency ({}) is not supported", a)
}
DepositFailed::Forbidden => {
write!(f, "This operation is forbidden")
}
DepositFailed::InternalError(a) => {
write!(
f,
"An internal error occured while processing the deposit request (code={})",
a
)
}
}
}
}
| rust | Apache-2.0 | 87635b5b49c4163885ce840f4f1c2f30977f40cc | 2026-01-04T20:14:33.413949Z | false |
wasmerio/ate | https://github.com/wasmerio/ate/blob/87635b5b49c4163885ce840f4f1c2f30977f40cc/wasmer-deploy-cli/src/request/coin_combine.rs | wasmer-deploy-cli/src/request/coin_combine.rs | use serde::*;
use crate::model::*;
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct CoinCombineRequest {
pub coins: Vec<CarvedCoin>,
pub new_ownership: Ownership,
}
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct CoinCombineResponse {
pub super_coin: CarvedCoin,
}
#[derive(Debug, Serialize, Deserialize, Clone)]
pub enum CoinCombineFailed {
AuthenticationFailed,
InvalidCommodity,
InvalidCoin,
OperatorBanned,
InvalidRequest(String),
InternalError(u16),
}
impl<E> From<E> for CoinCombineFailed
where
E: std::error::Error + Sized,
{
fn from(err: E) -> Self {
CoinCombineFailed::InternalError(ate::utils::obscure_error(err))
}
}
impl std::fmt::Display for CoinCombineFailed {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
match self {
CoinCombineFailed::AuthenticationFailed => {
write!(f, "The caller has no authentication to this coin")
}
CoinCombineFailed::InvalidCommodity => {
write!(f, "The supplied commodity is not vaild")
}
CoinCombineFailed::InvalidCoin => {
write!(f, "The supplied coin is not valid")
}
CoinCombineFailed::OperatorBanned => {
write!(f, "The operator is currently banned")
}
CoinCombineFailed::InvalidRequest(err) => {
write!(
f,
"The requested coins to be combined were invalid - {}",
err
)
}
CoinCombineFailed::InternalError(a) => {
write!(
f,
"An internal error occured while processing the coin combine request (code={})",
a
)
}
}
}
}
| rust | Apache-2.0 | 87635b5b49c4163885ce840f4f1c2f30977f40cc | 2026-01-04T20:14:33.413949Z | false |
ratatui/crates-tui | https://github.com/ratatui/crates-tui/blob/e1be774ae75fe9711fa13ba808c87e52db98d251/build.rs | build.rs | fn main() -> Result<(), Box<dyn std::error::Error>> {
vergen::EmitBuilder::builder()
.all_build()
.all_git()
.emit()?;
Ok(())
}
| rust | MIT | e1be774ae75fe9711fa13ba808c87e52db98d251 | 2026-01-04T20:19:28.808186Z | false |
ratatui/crates-tui | https://github.com/ratatui/crates-tui/blob/e1be774ae75fe9711fa13ba808c87e52db98d251/src/config.rs | src/config.rs | use std::{env, path::PathBuf, str::FromStr, sync::OnceLock};
use color_eyre::eyre::{Result, eyre};
use directories::ProjectDirs;
use figment::{
Figment,
providers::{Env, Format, Serialized, Toml, Yaml},
};
use ratatui::style::Color;
use serde::{Deserialize, Serialize};
use serde_with::{DisplayFromStr, NoneAsEmptyString, serde_as};
use tracing::level_filters::LevelFilter;
use crate::{cli::Cli, serde_helper::keybindings::KeyBindings};
static CONFIG: OnceLock<Config> = OnceLock::new();
pub const CONFIG_DEFAULT: &str = include_str!("../.config/config.default.toml");
/// A 16-color (base16-style) palette used for theming.
///
/// Each field is (de)serialized as a color string such as `"#191724"`
/// via `DisplayFromStr`.
#[serde_as]
#[derive(Debug, Clone, Copy, Deserialize, Serialize)]
pub struct Base16Palette {
    /// Default Background
    #[serde_as(as = "DisplayFromStr")]
    pub base00: Color,

    /// Lighter Background (Used for status bars, line number and folding marks)
    #[serde_as(as = "DisplayFromStr")]
    pub base01: Color,

    /// Selection Background (Settings where you need to highlight text, such as find results)
    #[serde_as(as = "DisplayFromStr")]
    pub base02: Color,

    /// Comments, Invisibles, Line Highlighting
    #[serde_as(as = "DisplayFromStr")]
    pub base03: Color,

    /// Dark Foreground (Used for status bars)
    #[serde_as(as = "DisplayFromStr")]
    pub base04: Color,

    /// Default Foreground, Caret, Delimiters, Operators
    #[serde_as(as = "DisplayFromStr")]
    pub base05: Color,

    /// Light Foreground (Not often used, could be used for hover states or dividers)
    #[serde_as(as = "DisplayFromStr")]
    pub base06: Color,

    /// Light Background (Probably at most for cursor line background color)
    #[serde_as(as = "DisplayFromStr")]
    pub base07: Color,

    /// Variables, XML Tags, Markup Link Text, Markup Lists, Diff Deleted
    #[serde_as(as = "DisplayFromStr")]
    pub base08: Color,

    /// Integers, Boolean, Constants, XML Attributes, Markup Link Url
    #[serde_as(as = "DisplayFromStr")]
    pub base09: Color,

    /// Classes, Keywords, Storage, Selector, Markup Italic, Diff Changed
    #[serde_as(as = "DisplayFromStr")]
    pub base0a: Color,

    /// Strings, Inherited Class, Markup Code, Diff Inserted
    #[serde_as(as = "DisplayFromStr")]
    pub base0b: Color,

    /// Support, Regular Expressions, Escape Characters, Markup Quotes
    #[serde_as(as = "DisplayFromStr")]
    pub base0c: Color,

    /// Functions, Methods, Attribute IDs, Headings
    #[serde_as(as = "DisplayFromStr")]
    pub base0d: Color,

    /// Keywords, Storage, Selector, Markup Bold, Diff Renamed
    #[serde_as(as = "DisplayFromStr")]
    pub base0e: Color,

    /// Deprecated, Opening/Closing Embedded Language Tags e.g., `<? ?>`
    #[serde_as(as = "DisplayFromStr")]
    pub base0f: Color,
}
impl Default for Base16Palette {
    /// The built-in default palette (Rosé-Pine-style colors).
    fn default() -> Self {
        // Local helper keeps each field to one readable line. Every literal
        // below is a valid hex color, so the parse cannot fail.
        let hex = |code: &str| Color::from_str(code).unwrap();
        Self {
            base00: hex("#191724"),
            base01: hex("#1f1d2e"),
            base02: hex("#26233a"),
            base03: hex("#6e6a86"),
            base04: hex("#908caa"),
            base05: hex("#e0def4"),
            base06: hex("#e0def4"),
            base07: hex("#524f67"),
            base08: hex("#eb6f92"),
            base09: hex("#f6c177"),
            base0a: hex("#ebbcba"),
            base0b: hex("#31748f"),
            base0c: hex("#9ccfd8"),
            base0d: hex("#c4a7e7"),
            base0e: hex("#f6c177"),
            base0f: hex("#524f67"),
        }
    }
}
/// Application configuration.
///
/// This is the main configuration struct for the application.
#[serde_as]
#[derive(Debug, Deserialize, Serialize)]
pub struct Config {
    /// The directory to use for storing application data (logs etc.).
    pub data_dir: PathBuf,

    /// The directory to use for storing application configuration (colors
    /// etc.).
    pub config_home: PathBuf,

    /// The path of the configuration file.
    pub config_file: PathBuf,

    /// The log level to use. Valid values are: error, warn, info, debug, trace,
    /// off. The default is info.
    #[serde_as(as = "NoneAsEmptyString")]
    pub log_level: Option<LevelFilter>,

    /// Event-loop tick rate (presumably ticks per second; default 1.0) —
    /// NOTE(review): confirm the unit against the `events` module.
    pub tick_rate: f64,

    /// Render frame rate in frames per second (default 15.0; see `Cli`).
    pub frame_rate: f64,

    /// How often the held-key buffer is flushed (presumably in seconds) —
    /// NOTE(review): confirm against the `events` module.
    pub key_refresh_rate: f64,

    /// Presumably toggles terminal mouse capture — confirm in tui setup.
    pub enable_mouse: bool,

    /// Presumably toggles bracketed-paste capture — confirm in tui setup.
    pub enable_paste: bool,

    /// Padding used by the search/filter prompt widget.
    pub prompt_padding: u16,

    /// Mode-specific mappings from key sequences to commands.
    pub key_bindings: KeyBindings,

    /// The base16 color palette used for theming.
    pub color: Base16Palette,
}
impl Default for Config {
fn default() -> Self {
let key_bindings: KeyBindings = Default::default();
let rose_pine = Base16Palette::default();
Self {
data_dir: default_data_dir(),
config_home: default_config_dir(),
config_file: default_config_file(),
log_level: None,
tick_rate: 1.0,
frame_rate: 15.0,
key_refresh_rate: 0.5,
enable_mouse: false,
enable_paste: false,
prompt_padding: 1,
key_bindings,
color: rose_pine,
}
}
}
/// Initialize the application configuration.
///
/// This function should be called before any other function in the application.
/// It will initialize the application config from the following sources:
/// - default values
/// - a configuration file
/// - environment variables
/// - command line arguments
pub fn init(cli: &Cli) -> Result<()> {
    let config_file = cli.config_file.clone().unwrap_or_else(default_config_file);
    let color_file = cli.color_file.clone().unwrap_or_else(default_color_file);
    // Later merges take precedence: defaults < bundled config.default.toml
    // < user config file < CRATES_TUI_* env vars < CLI flags.
    let mut config = Figment::new()
        .merge(Serialized::defaults(Config::default()))
        .merge(Toml::string(CONFIG_DEFAULT))
        .merge(Toml::file(config_file))
        .merge(Env::prefixed("CRATES_TUI_"))
        .merge(Serialized::defaults(cli))
        .extract::<Config>()?;

    // The palette comes from a separate YAML file, layered over the
    // built-in default palette.
    let base16 = Figment::new()
        .merge(Serialized::defaults(Base16Palette::default()))
        .merge(Yaml::file(color_file))
        .extract::<Base16Palette>()?;

    config.color = base16;

    // An explicit --data-dir is applied last so it always wins.
    if let Some(data_dir) = cli.data_dir.clone() {
        config.data_dir = data_dir;
    }

    // Store in the process-wide OnceLock; this errors if init() ran twice.
    CONFIG
        .set(config)
        .map_err(|config| eyre!("failed to set config {config:?}"))
}
/// Get the application configuration.
///
/// This function should only be called after [`init()`] has been called.
///
/// # Panics
///
/// This function will panic if [`init()`] has not been called.
pub fn get() -> &'static Config {
    match CONFIG.get() {
        Some(config) => config,
        None => panic!("config not initialized"),
    }
}
/// Returns the path to the default configuration file.
pub fn default_config_file() -> PathBuf {
    let mut path = default_config_dir();
    path.push("config.toml");
    path
}
/// Returns the path to the default base16 color file.
pub fn default_color_file() -> PathBuf {
    default_config_dir().join("color.yaml")
}
/// Returns the directory to use for storing config files.
///
/// Resolution order: the `CRATES_TUI_CONFIG_HOME` environment variable,
/// then the platform-specific project config directory, then `./.config`.
fn default_config_dir() -> PathBuf {
    env::var("CRATES_TUI_CONFIG_HOME")
        .map(PathBuf::from)
        .or_else(|_| project_dirs().map(|dirs| dirs.config_local_dir().to_path_buf()))
        // unwrap_or_else avoids building the fallback path when it is unused
        // (clippy::or_fun_call).
        .unwrap_or_else(|_| PathBuf::from(".").join(".config"))
}
/// Returns the directory to use for storing data files.
///
/// Resolution order: the `CRATES_TUI_DATA_HOME` environment variable,
/// then the platform-specific project data directory, then `./.data`.
pub fn default_data_dir() -> PathBuf {
    env::var("CRATES_TUI_DATA_HOME")
        .map(PathBuf::from)
        .or_else(|_| project_dirs().map(|dirs| dirs.data_local_dir().to_path_buf()))
        // unwrap_or_else avoids building the fallback path when it is unused
        // (clippy::or_fun_call).
        .unwrap_or_else(|_| PathBuf::from(".").join(".data"))
}
/// Returns the project directories.
fn project_dirs() -> Result<ProjectDirs> {
    match ProjectDirs::from("rs", "ratatui", "crates-tui") {
        Some(dirs) => Ok(dirs),
        None => Err(eyre!("user home directory not found")),
    }
}
#[cfg(test)]
mod tests {
    use crate::serde_helper::keybindings::parse_key_sequence;

    use super::*;

    /// Smoke test: a default config, plus one extra key binding, must
    /// serialize to TOML without panicking.
    #[test]
    fn create_config() {
        let mut c = Config::default();
        c.key_bindings.insert(
            crate::app::Mode::PickerShowCrateInfo,
            &parse_key_sequence("q").unwrap(),
            crate::command::Command::Quit,
        );
        println!("{}", toml::to_string_pretty(&c).unwrap());
    }
}
| rust | MIT | e1be774ae75fe9711fa13ba808c87e52db98d251 | 2026-01-04T20:19:28.808186Z | false |
ratatui/crates-tui | https://github.com/ratatui/crates-tui/blob/e1be774ae75fe9711fa13ba808c87e52db98d251/src/app.rs | src/app.rs | use std::sync::{
Arc,
atomic::{AtomicBool, Ordering},
};
use color_eyre::eyre::Result;
use crossterm::event::{Event as CrosstermEvent, KeyEvent};
use ratatui::{DefaultTerminal, prelude::*, widgets::*};
use serde::{Deserialize, Serialize};
use strum::{Display, EnumIs};
use tokio::sync::mpsc::{self, UnboundedReceiver, UnboundedSender};
use tracing::{debug, error, info};
use crate::{
action::Action,
config,
events::{Event, Events},
serde_helper::keybindings::key_event_to_string,
widgets::{
help::{Help, HelpWidget},
popup_message::{PopupMessageState, PopupMessageWidget},
search_filter_prompt::SearchFilterPromptWidget,
search_page::SearchPage,
search_page::SearchPageWidget,
status_bar::StatusBarWidget,
summary::{Summary, SummaryWidget},
tabs::SelectedTab,
},
};
/// The top-level input mode of the application; key bindings are resolved
/// per mode, with `Common` used as a fallback lookup.
#[derive(
    Default, Debug, Display, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize, EnumIs,
)]
#[serde(rename_all = "snake_case")]
pub enum Mode {
    /// Bindings shared by every mode (fallback when a mode has no match).
    Common,
    /// The summary/landing screen (start-up default).
    #[default]
    Summary,
    /// Results picker with the crate-info pane shown.
    PickerShowCrateInfo,
    /// Results picker with the crate-info pane hidden.
    PickerHideCrateInfo,
    /// The search text prompt.
    Search,
    /// The filter text prompt.
    Filter,
    /// A modal popup (error/info message) is open.
    Popup,
    /// The help screen.
    Help,
    /// Terminal state: the main loop exits once this mode is reached.
    Quit,
}
impl Mode {
pub fn is_prompt(&self) -> bool {
self.is_search() || self.is_filter()
}
pub fn is_picker(&self) -> bool {
self.is_picker_hide_crate_info() || self.is_picker_show_crate_info()
}
}
/// Zero-sized marker widget; all rendering state lives in `App`, which is
/// passed in through the `StatefulWidget` implementation.
struct AppWidget;
#[derive(Debug)]
pub struct App {
    /// Receiver end of an asynchronous channel for actions that the app needs
    /// to process.
    rx: UnboundedReceiver<Action>,

    /// Sender end of an asynchronous channel for dispatching actions from
    /// various parts of the app to be handled by the event loop.
    tx: UnboundedSender<Action>,

    /// A thread-safe indicator of whether data is currently being loaded,
    /// allowing different parts of the app to know if it's in a loading state.
    loading_status: Arc<AtomicBool>,

    /// The active mode of the application, which could change how user inputs
    /// and commands are interpreted.
    mode: Mode,

    /// The mode that was active before the most recent mode switch; used by
    /// `SwitchToLastMode` and when closing popups.
    last_mode: Mode,

    /// A list of key events that have been held since the last tick, useful for
    /// interpreting sequences of key presses.
    last_tick_key_events: Vec<KeyEvent>,

    /// frame counter
    frame_count: usize,

    /// State of the summary (landing) page.
    summary: Summary,
    /// State of the search page (query, results, crate info).
    search: SearchPage,
    /// The currently open popup widget and its scroll state, if any.
    popup: Option<(PopupMessageWidget, PopupMessageState)>,
    /// State of the help screen.
    help: Help,
    /// Which header tab is currently highlighted.
    selected_tab: SelectedTab,
}
impl App {
    /// Creates the app, wiring the action channel and the shared loading
    /// flag into the search and summary pages, and queueing an `Init`
    /// action so startup work happens on the first loop iteration.
    pub fn new(query: Option<String>) -> Self {
        let (tx, rx) = mpsc::unbounded_channel();
        let _ = tx.send(Action::Init { query });
        let loading_status = Arc::new(AtomicBool::default());
        let search = SearchPage::new(tx.clone(), loading_status.clone());
        let summary = Summary::new(tx.clone(), loading_status.clone());
        Self {
            rx,
            tx,
            mode: Mode::default(),
            last_mode: Mode::default(),
            loading_status,
            search,
            summary,
            popup: Default::default(),
            last_tick_key_events: Default::default(),
            frame_count: Default::default(),
            help: Default::default(),
            selected_tab: Default::default(),
        }
    }

    /// Runs the main loop of the application, handling events and actions
    #[tokio::main]
    pub async fn run(&mut self, tui: &mut DefaultTerminal) -> Result<()> {
        // uncomment to test error handling
        // panic!("test panic");
        // Err(color_eyre::eyre::eyre!("Error"))?;
        let mut events = Events::new();
        loop {
            // Translate the next terminal/timer event into an action.
            if let Some(e) = events.next().await {
                self.handle_event(e)?.map(|action| self.tx.send(action));
            }
            // Drain every queued action; redraw only on Resize/Render.
            while let Ok(action) = self.rx.try_recv() {
                self.handle_action(action.clone())?;
                if matches!(action, Action::Resize(_, _) | Action::Render) {
                    tui.draw(|frame| self.render(frame))?;
                }
            }
            if self.should_quit() {
                break;
            }
        }
        Ok(())
    }

    /// Handles an event by producing an optional `Action` that the application
    /// should perform in response.
    ///
    /// This method maps incoming events from the terminal user interface to
    /// specific `Action` that represents tasks or operations the
    /// application needs to carry out.
    fn handle_event(&mut self, e: Event) -> Result<Option<Action>> {
        let maybe_action = match e {
            Event::Quit => Some(Action::Quit),
            Event::Tick => Some(Action::Tick),
            Event::KeyRefresh => Some(Action::KeyRefresh),
            Event::Render => Some(Action::Render),
            Event::Crossterm(CrosstermEvent::Resize(x, y)) => Some(Action::Resize(x, y)),
            Event::Crossterm(CrosstermEvent::Key(key)) => self.handle_key_event(key)?,
            _ => None,
        };
        Ok(maybe_action)
    }

    /// Handles a key press: in the prompt modes the key is first fed to the
    /// text input; the configured key bindings are then consulted as well.
    fn handle_key_event(&mut self, key: KeyEvent) -> Result<Option<Action>> {
        debug!("Received key {:?}", key);
        match self.mode {
            Mode::Search => {
                self.search.handle_key(key);
            }
            Mode::Filter => {
                self.search.handle_key(key);
                self.search.handle_filter_prompt_change();
            }
            _ => (),
        };
        Ok(self.handle_key_events_from_config(key))
    }

    /// Evaluates a sequence of key events against user-configured key bindings
    /// to determine if an `Action` should be triggered.
    ///
    /// This method supports user-configurable key sequences by collecting key
    /// events over time and then translating them into actions according to the
    /// current mode.
    fn handle_key_events_from_config(&mut self, key: KeyEvent) -> Option<Action> {
        self.last_tick_key_events.push(key);
        let config = config::get();
        // Look up mode-specific bindings first, then fall back to the
        // bindings shared by every mode (Mode::Common).
        config
            .key_bindings
            .event_to_command(self.mode, &self.last_tick_key_events)
            .or_else(|| {
                config
                    .key_bindings
                    .event_to_command(Mode::Common, &self.last_tick_key_events)
            })
            .map(|command| config.key_bindings.command_to_action(command))
    }

    /// Performs the `Action` by calling on a respective app method.
    ///
    /// Upon receiving an action, this function updates the application state, performs necessary
    /// operations like drawing or resizing the view, or changing the mode. Actions that affect the
    /// navigation within the application, are also handled. Certain actions generate a follow-up
    /// action which will be to be processed in the next iteration of the main event loop.
    fn handle_action(&mut self, action: Action) -> Result<()> {
        // Tick/Render/KeyRefresh fire continuously; skip logging them.
        if action != Action::Tick && action != Action::Render && action != Action::KeyRefresh {
            info!("{action:?}");
        }
        match action {
            Action::Quit => self.quit(),
            Action::KeyRefresh => self.key_refresh_tick(),
            Action::Init { ref query } => self.init(query)?,
            Action::Tick => self.tick(),
            Action::StoreTotalNumberOfCrates(n) => self.store_total_number_of_crates(n),
            Action::ScrollUp => self.scroll_up(),
            Action::ScrollDown => self.scroll_down(),
            Action::ScrollTop
            | Action::ScrollBottom
            | Action::ScrollSearchResultsUp
            | Action::ScrollSearchResultsDown => self.search.handle_action(action.clone()),
            Action::ScrollCrateInfoUp => self.search.crate_info.scroll_previous(),
            Action::ScrollCrateInfoDown => self.search.crate_info.scroll_next(),
            Action::ReloadData => self.search.reload_data(),
            Action::IncrementPage => self.search.increment_page(),
            Action::DecrementPage => self.search.decrement_page(),
            Action::NextSummaryMode => self.summary.next_mode(),
            Action::PreviousSummaryMode => self.summary.previous_mode(),
            Action::NextTab => self.goto_next_tab(),
            Action::PreviousTab => self.goto_previous_tab(),
            Action::SwitchMode(mode) => self.switch_mode(mode),
            Action::SwitchToLastMode => self.switch_to_last_mode(),
            Action::SubmitSearch => self.search.submit_query(),
            Action::ToggleShowCrateInfo => self.search.toggle_show_crate_info(),
            Action::UpdateCurrentSelectionCrateInfo => self.update_current_selection_crate_info(),
            Action::UpdateSearchTableResults => self.search.update_search_table_results(),
            Action::UpdateSummary => self.summary.update(),
            Action::ShowFullCrateInfo => self.show_full_crate_details(),
            Action::ShowErrorPopup(ref err) => self.show_error_popup(err.clone()),
            Action::ShowInfoPopup(ref info) => self.show_info_popup(info.clone()),
            Action::ClosePopup => self.close_popup(),
            Action::ToggleSortBy { reload, forward } => {
                self.search.toggle_sort_by(reload, forward)?
            }
            Action::ClearTaskDetailsHandle(ref id) => self
                .search
                .clear_task_details_handle(uuid::Uuid::parse_str(id)?)?,
            Action::OpenDocsUrlInBrowser => self.open_docs_url_in_browser()?,
            // On the summary screen the crates.io shortcut opens the
            // selected summary item instead of a crate page.
            Action::OpenCratesIOUrlInBrowser if self.mode.is_summary() => {
                self.open_summary_url_in_browser()?
            }
            Action::OpenCratesIOUrlInBrowser => self.open_crates_io_url_in_browser()?,
            Action::CopyCargoAddCommandToClipboard => self.copy_cargo_add_command_to_clipboard()?,
            _ => {}
        }
        // Follow-up actions, queued for the next loop iteration.
        match action {
            Action::ScrollUp | Action::ScrollDown | Action::ScrollTop | Action::ScrollBottom
                if self.mode.is_prompt() || self.mode.is_picker() =>
            {
                let _ = self.tx.send(Action::UpdateCurrentSelectionCrateInfo);
            }
            Action::SubmitSearch => {
                let _ = self.tx.send(Action::ReloadData);
            }
            _ => {}
        };
        Ok(())
    }

    // Render the `AppWidget` as a stateful widget using `self` as the `State`
    fn render(&mut self, frame: &mut Frame) {
        frame.render_stateful_widget(AppWidget, frame.area(), self);
        self.update_frame_count(frame);
        self.update_cursor(frame);
    }
}
impl App {
    /// Per-tick work: refresh the search results table.
    fn tick(&mut self) {
        self.search.update_search_table_results();
    }

    /// Handles `Action::Init`: jump straight into a search when a query was
    /// given on the command line, otherwise request the summary page data.
    fn init(&mut self, query: &Option<String>) -> Result<()> {
        if let Some(query) = query {
            self.search.search = query.clone();
            let _ = self.tx.send(Action::SwitchMode(Mode::Search));
            let _ = self.tx.send(Action::SubmitSearch);
        } else {
            self.summary.request()?;
        }
        Ok(())
    }

    /// Clears the buffered key-sequence (multi-key bindings time out).
    fn key_refresh_tick(&mut self) {
        self.last_tick_key_events.drain(..);
    }

    /// Whether the main loop should exit.
    fn should_quit(&self) -> bool {
        self.mode == Mode::Quit
    }

    fn quit(&mut self) {
        self.mode = Mode::Quit
    }

    /// Routes a scroll-up to whichever component currently has focus.
    fn scroll_up(&mut self) {
        match self.mode {
            Mode::Popup => {
                if let Some((_, popup_state)) = &mut self.popup {
                    popup_state.scroll_up();
                }
            }
            Mode::Summary => self.summary.scroll_previous(),
            Mode::Help => self.help.scroll_up(),
            _ => self.search.scroll_up(),
        }
    }

    /// Routes a scroll-down to whichever component currently has focus.
    fn scroll_down(&mut self) {
        match self.mode {
            Mode::Popup => {
                if let Some((_, popup_state)) = &mut self.popup {
                    popup_state.scroll_down();
                }
            }
            Mode::Summary => self.summary.scroll_next(),
            Mode::Help => self.help.scroll_down(),
            _ => self.search.scroll_down(),
        }
    }

    /// Switches to `mode`, remembering the previous one, and keeps the
    /// search page's input state and the header tab selection in sync.
    fn switch_mode(&mut self, mode: Mode) {
        self.last_mode = self.mode;
        self.mode = mode;
        self.search.mode = mode;
        match self.mode {
            Mode::Search => {
                self.selected_tab.select(SelectedTab::Search);
                self.search.enter_search_insert_mode();
            }
            Mode::Filter => {
                self.selected_tab.select(SelectedTab::Search);
                self.search.enter_filter_insert_mode();
            }
            Mode::Summary => {
                self.search.enter_normal_mode();
                self.selected_tab.select(SelectedTab::Summary);
            }
            Mode::Help => {
                self.search.enter_normal_mode();
                self.help.mode = Some(self.last_mode);
                self.selected_tab.select(SelectedTab::None)
            }
            Mode::PickerShowCrateInfo | Mode::PickerHideCrateInfo => {
                self.search.enter_normal_mode();
                self.selected_tab.select(SelectedTab::Search)
            }
            _ => {
                self.search.enter_normal_mode();
                self.selected_tab.select(SelectedTab::None)
            }
        }
    }

    fn switch_to_last_mode(&mut self) {
        self.switch_mode(self.last_mode);
    }

    /// Cycles the header tabs (only Summary and Search are real tabs).
    fn goto_next_tab(&mut self) {
        match self.mode {
            Mode::Summary => self.switch_mode(Mode::Search),
            Mode::Search => self.switch_mode(Mode::Summary),
            _ => self.switch_mode(Mode::Summary),
        }
    }

    /// Cycles the header tabs in the other direction.
    fn goto_previous_tab(&mut self) {
        match self.mode {
            Mode::Summary => self.switch_mode(Mode::Search),
            Mode::Search => self.switch_mode(Mode::Summary),
            _ => self.switch_mode(Mode::Summary),
        }
    }

    /// Opens an error popup and switches to popup mode.
    fn show_error_popup(&mut self, message: String) {
        error!("Error: {message}");
        self.popup = Some((
            PopupMessageWidget::new("Error".into(), message),
            PopupMessageState::default(),
        ));
        self.switch_mode(Mode::Popup);
    }

    /// Opens an info popup and switches to popup mode.
    fn show_info_popup(&mut self, info: String) {
        info!("Info: {info}");
        self.popup = Some((
            PopupMessageWidget::new("Info".into(), info),
            PopupMessageState::default(),
        ));
        self.switch_mode(Mode::Popup);
    }

    /// Closes the popup and returns to the previous mode, falling back to
    /// Search if the previous mode was itself a popup.
    fn close_popup(&mut self) {
        self.popup = None;
        if self.last_mode.is_popup() {
            self.switch_mode(Mode::Search);
        } else {
            self.switch_mode(self.last_mode);
        }
    }

    /// Refreshes the crate-info pane for the currently selected row.
    fn update_current_selection_crate_info(&mut self) {
        self.search.clear_all_previous_task_details_handles();
        self.search.request_crate_details();
    }

    /// Requests the full (extended) details for the selected crate.
    fn show_full_crate_details(&mut self) {
        self.search.clear_all_previous_task_details_handles();
        self.search.request_full_crate_details();
    }

    fn store_total_number_of_crates(&mut self, n: u64) {
        self.search.total_num_crates = Some(n)
    }

    /// Opens the docs.rs page of the selected crate in the default browser.
    fn open_docs_url_in_browser(&self) -> Result<()> {
        if let Some(crate_response) = self.search.crate_response.lock().unwrap().clone() {
            let name = crate_response.crate_data.name;
            webbrowser::open(&format!("https://docs.rs/{name}/latest"))?;
        }
        Ok(())
    }

    /// Opens the selected summary item's URL in the default browser.
    fn open_summary_url_in_browser(&self) -> Result<()> {
        if let Some(url) = self.summary.url() {
            webbrowser::open(&url)?;
        } else {
            let _ = self.tx.send(Action::ShowErrorPopup(
                "Unable to open URL in browser: No summary data loaded".into(),
            ));
        }
        Ok(())
    }

    /// Opens the crates.io page of the selected crate in the browser.
    fn open_crates_io_url_in_browser(&self) -> Result<()> {
        if let Some(crate_response) = self.search.crate_response.lock().unwrap().clone() {
            let name = crate_response.crate_data.name;
            webbrowser::open(&format!("https://crates.io/crates/{name}"))?;
        }
        Ok(())
    }

    /// Copies `cargo add <crate>` for the selected crate to the clipboard,
    /// reporting success or failure through a popup.
    fn copy_cargo_add_command_to_clipboard(&self) -> Result<()> {
        use copypasta::ClipboardProvider;
        match copypasta::ClipboardContext::new() {
            Ok(mut ctx) => {
                if let Some(crate_response) = self.search.crate_response.lock().unwrap().clone() {
                    let msg = format!("cargo add {}", crate_response.crate_data.name);
                    let _ = match ctx.set_contents(msg.clone()).ok() {
                        Some(_) => self.tx.send(Action::ShowInfoPopup(format!(
                            "Copied to clipboard: `{msg}`"
                        ))),
                        // Fixed user-facing message grammar: was
                        // "Unable to copied to clipboard".
                        None => self.tx.send(Action::ShowErrorPopup(format!(
                            "Unable to copy to clipboard: `{msg}`"
                        ))),
                    };
                } else {
                    let _ = self
                        .tx
                        .send(Action::ShowErrorPopup("No selection made to copy".into()));
                }
            }
            Err(err) => {
                let _ = self.tx.send(Action::ShowErrorPopup(format!(
                    "Unable to create ClipboardContext: {err}"
                )));
            }
        }
        Ok(())
    }

    // Sets the frame count
    fn update_frame_count(&mut self, frame: &mut Frame<'_>) {
        self.frame_count = frame.count();
    }

    // Sets cursor for the prompt
    fn update_cursor(&mut self, frame: &mut Frame<'_>) {
        if self.mode.is_prompt()
            && let Some(cursor_position) = self.search.cursor_position()
        {
            frame.set_cursor_position(cursor_position);
        }
    }

    /// A right-aligned header block showing the currently buffered key
    /// sequence, or `None` when no keys are pending.
    fn events_widget(&self) -> Option<Block<'_>> {
        if self.last_tick_key_events.is_empty() {
            return None;
        }

        let title = format!(
            "{:?}",
            self.last_tick_key_events
                .iter()
                .map(key_event_to_string)
                .collect::<Vec<_>>()
        );
        Some(
            Block::default()
                .title(title)
                .title_position(ratatui::widgets::TitlePosition::Top)
                .title_alignment(ratatui::layout::Alignment::Right),
        )
    }

    /// Whether any background request is currently in flight.
    fn loading(&self) -> bool {
        self.loading_status.load(Ordering::SeqCst)
    }
}
impl StatefulWidget for AppWidget {
    type State = App;

    /// Top-level render: background, one-line header (tabs + pending keys),
    /// the page for the current mode, a loading spinner, and any popup.
    fn render(self, area: Rect, buf: &mut Buffer, state: &mut Self::State) {
        // Background color
        Block::default()
            .bg(config::get().color.base00)
            .render(area, buf);

        use Constraint::*;
        let [header, main] = Layout::vertical([Length(1), Fill(1)]).areas(area);
        let [tabs, events] = Layout::horizontal([Min(15), Fill(1)]).areas(header);
        state.render_tabs(tabs, buf);
        state.events_widget().render(events, buf);

        // While a popup (or quit) is active, keep drawing the page that was
        // visible before it, so the popup appears on top of real content.
        let mode = if matches!(state.mode, Mode::Popup | Mode::Quit) {
            state.last_mode
        } else {
            state.mode
        };
        match mode {
            Mode::Summary => state.render_summary(main, buf),
            Mode::Help => state.render_help(main, buf),
            Mode::Search => state.render_search(main, buf),
            Mode::Filter => state.render_search(main, buf),
            Mode::PickerShowCrateInfo => state.render_search(main, buf),
            Mode::PickerHideCrateInfo => state.render_search(main, buf),
            Mode::Common => {}
            Mode::Popup => {}
            Mode::Quit => {}
        };
        // Spinner overlay while background requests are in flight.
        if state.loading() {
            Line::from(state.spinner())
                .right_aligned()
                .render(main, buf);
        }
        // The popup is drawn last so it sits over everything else.
        if let Some((popup, popup_state)) = &mut state.popup {
            popup.render(area, buf, popup_state);
        }
    }
}
impl App {
    /// Renders the header tab bar.
    fn render_tabs(&self, area: Rect, buf: &mut Buffer) {
        use strum::IntoEnumIterator;
        let titles = SelectedTab::iter().map(|tab| tab.title());
        let highlight_style = SelectedTab::highlight_style();
        let selected_tab_index = self.selected_tab as usize;
        Tabs::new(titles)
            .highlight_style(highlight_style)
            .select(selected_tab_index)
            .padding("", "")
            .divider(" ")
            .render(area, buf);
    }

    /// Renders the summary page with a one-line status bar below it.
    fn render_summary(&mut self, area: Rect, buf: &mut Buffer) {
        let [main, status_bar] =
            Layout::vertical([Constraint::Fill(0), Constraint::Length(1)]).areas(area);
        SummaryWidget.render(main, buf, &mut self.summary);
        self.render_status_bar(status_bar, buf);
    }

    /// Renders the help page with a one-line status bar below it.
    fn render_help(&mut self, area: Rect, buf: &mut Buffer) {
        let [main, status_bar] =
            Layout::vertical([Constraint::Fill(0), Constraint::Length(1)]).areas(area);
        HelpWidget.render(main, buf, &mut self.help);
        self.render_status_bar(status_bar, buf);
    }

    /// Renders the search page; the prompt area collapses to zero height
    /// when no text prompt is active.
    fn render_search(&mut self, area: Rect, buf: &mut Buffer) {
        let prompt_height = if self.mode.is_prompt() && self.search.is_prompt() {
            5
        } else {
            0
        };
        let [main, prompt, status_bar] = Layout::vertical([
            Constraint::Min(0),
            Constraint::Length(prompt_height),
            Constraint::Length(1),
        ])
        .areas(area);

        SearchPageWidget.render(main, buf, &mut self.search);
        self.render_prompt(prompt, buf);
        self.render_status_bar(status_bar, buf);
    }

    /// Renders the search/filter input prompt.
    fn render_prompt(&mut self, area: Rect, buf: &mut Buffer) {
        let p = SearchFilterPromptWidget::new(
            self.mode,
            self.search.sort.clone(),
            &self.search.input,
            self.search.search_mode,
        );
        p.render(area, buf, &mut self.search.prompt);
    }

    /// Renders the bottom status bar.
    fn render_status_bar(&mut self, area: Rect, buf: &mut Buffer) {
        let s = StatusBarWidget::new(
            self.mode,
            self.search.sort.clone(),
            self.search.input.value().to_string(),
        );
        s.render(area, buf);
    }

    /// Braille spinner frame selected by the current frame count.
    fn spinner(&self) -> String {
        let spinner = ['⠋', '⠙', '⠹', '⠸', '⠼', '⠴', '⠦', '⠧', '⠇', '⠏'];
        let index = self.frame_count % spinner.len();
        let symbol = spinner[index];
        symbol.into()
    }
}
| rust | MIT | e1be774ae75fe9711fa13ba808c87e52db98d251 | 2026-01-04T20:19:28.808186Z | false |
ratatui/crates-tui | https://github.com/ratatui/crates-tui/blob/e1be774ae75fe9711fa13ba808c87e52db98d251/src/errors.rs | src/errors.rs | use color_eyre::eyre::Result;
use cfg_if::cfg_if;
/// Installs the process-wide panic and error hooks: verbose `better_panic`
/// reports in debug builds, user-friendly `human_panic` messages in release
/// builds, and `color_eyre` error reports in both.
pub fn install_hooks() -> Result<()> {
    cfg_if! {
        if #[cfg(debug_assertions)] {
            install_better_panic();
        } else {
            human_panic::setup_panic!();
        }
    }
    color_eyre::install()
}
// Configures `better_panic` for full, chronological backtraces.
// `dead_code` is allowed because this is only referenced in debug builds.
#[allow(dead_code)]
fn install_better_panic() {
    better_panic::Settings::auto()
        .most_recent_first(false)
        .verbosity(better_panic::Verbosity::Full)
        .install()
}
| rust | MIT | e1be774ae75fe9711fa13ba808c87e52db98d251 | 2026-01-04T20:19:28.808186Z | false |
ratatui/crates-tui | https://github.com/ratatui/crates-tui/blob/e1be774ae75fe9711fa13ba808c87e52db98d251/src/command.rs | src/command.rs | use serde::{Deserialize, Serialize};
use strum::Display;
use crate::app::Mode;
/// A user-triggerable command. Key bindings in the config map key sequences
/// to these, and `KeyBindings::command_to_action` turns a matched command
/// into an `Action` for the event loop.
#[derive(Debug, Display, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub enum Command {
    Quit,
    NextTab,
    PreviousTab,
    ClosePopup,
    SwitchMode(Mode),
    SwitchToLastMode,
    IncrementPage,
    DecrementPage,
    NextSummaryMode,
    PreviousSummaryMode,
    ToggleSortBy { reload: bool, forward: bool },
    ScrollBottom,
    ScrollTop,
    ScrollDown,
    ScrollUp,
    ScrollCrateInfoDown,
    ScrollCrateInfoUp,
    ScrollSearchResultsDown,
    ScrollSearchResultsUp,
    SubmitSearch,
    ReloadData,
    ToggleShowCrateInfo,
    CopyCargoAddCommandToClipboard,
    OpenDocsUrlInBrowser,
    OpenCratesIOUrlInBrowser,
}
/// Commands available in help mode.
pub const HELP_COMMANDS: &[Command] = &[Command::SwitchToLastMode];

/// Commands available while the results picker is active.
pub const PICKER_COMMANDS: &[Command] = &[
    Command::SwitchMode(Mode::Help),
    Command::SwitchMode(Mode::Summary),
    Command::SwitchMode(Mode::Search),
    Command::SwitchMode(Mode::Filter),
    Command::ScrollUp,
    Command::ScrollDown,
    Command::ScrollCrateInfoUp,
    Command::ScrollCrateInfoDown,
    Command::ToggleSortBy {
        reload: true,
        forward: true,
    },
    Command::ToggleSortBy {
        reload: true,
        forward: false,
    },
    Command::ToggleSortBy {
        reload: false,
        forward: true,
    },
    Command::ToggleSortBy {
        reload: false,
        forward: false,
    },
    Command::IncrementPage,
    Command::DecrementPage,
    Command::ReloadData,
    Command::ToggleShowCrateInfo,
    Command::OpenDocsUrlInBrowser,
    Command::OpenCratesIOUrlInBrowser,
    Command::CopyCargoAddCommandToClipboard,
];

/// Commands available on the summary screen.
pub const SUMMARY_COMMANDS: &[Command] = &[
    Command::Quit,
    Command::ScrollDown,
    Command::ScrollUp,
    Command::PreviousSummaryMode,
    Command::NextSummaryMode,
    Command::SwitchMode(Mode::Help),
    Command::SwitchMode(Mode::Search),
    Command::SwitchMode(Mode::Filter),
];

/// Commands available while the search prompt is focused.
///
/// Fix: the original list repeated `SwitchMode(PickerHideCrateInfo)`,
/// `ScrollSearchResultsUp` and `ScrollSearchResultsDown`; each command is
/// now listed exactly once.
pub const SEARCH_COMMANDS: &[Command] = &[
    Command::SwitchMode(Mode::PickerHideCrateInfo),
    Command::SubmitSearch,
    Command::ToggleSortBy {
        reload: false,
        forward: true,
    },
    Command::ToggleSortBy {
        reload: false,
        forward: false,
    },
    Command::ToggleSortBy {
        reload: true,
        forward: true,
    },
    Command::ToggleSortBy {
        reload: true,
        forward: false,
    },
    Command::ScrollSearchResultsUp,
    Command::ScrollSearchResultsDown,
];

/// Every per-mode command list — presumably consumed when building the help
/// screen; confirm against the help widget.
pub const ALL_COMMANDS: &[(Mode, &[Command])] = &[
    (Mode::Help, HELP_COMMANDS),
    (Mode::PickerHideCrateInfo, PICKER_COMMANDS),
    (Mode::Summary, SUMMARY_COMMANDS),
    (Mode::Search, SEARCH_COMMANDS),
];
| rust | MIT | e1be774ae75fe9711fa13ba808c87e52db98d251 | 2026-01-04T20:19:28.808186Z | false |
ratatui/crates-tui | https://github.com/ratatui/crates-tui/blob/e1be774ae75fe9711fa13ba808c87e52db98d251/src/crates_io_api_helper.rs | src/crates_io_api_helper.rs | use std::sync::{Arc, Mutex, atomic::AtomicBool};
use crates_io_api::CratesQuery;
use tokio::sync::mpsc::UnboundedSender;
use crate::action::Action;
use color_eyre::Result;
/// Represents the parameters needed for fetching crates asynchronously.
pub struct SearchParameters {
    /// The search query string.
    pub search: String,
    /// Page number to request.
    pub page: u64,
    /// Number of results per page.
    pub page_size: u64,
    /// Shared output slot for the fetched crates.
    pub crates: Arc<Mutex<Vec<crates_io_api::Crate>>>,
    /// Shared output slot for the fetched versions.
    pub versions: Arc<Mutex<Vec<crates_io_api::Version>>>,
    // NOTE(review): not read or written in this module; presumably managed
    // by the callers that spawn the request.
    pub loading_status: Arc<AtomicBool>,
    /// Sort order for the query.
    pub sort: crates_io_api::Sort,
    /// Channel for sending follow-up actions back to the app.
    pub tx: UnboundedSender<Action>,
}
/// Performs the actual search, and sends the result back through the
/// sender.
///
/// # Errors
///
/// Returns a formatted message when the client cannot be built or the
/// request fails.
pub async fn request_search_results(params: &SearchParameters) -> Result<(), String> {
    // Fetch crates using the created client with the error handling in one place.
    let client = create_client()?;
    let query = create_query(params);
    let (crates, versions, total) = fetch_crates_and_metadata(client, query).await?;
    update_state_with_fetched_crates(crates, versions, total, params);
    Ok(())
}
/// Helper function to create client and fetch crates, wrapping both actions
/// into a result pattern.
fn create_client() -> Result<crates_io_api::AsyncClient, String> {
    // Attempt to create the API client. The first argument identifies the
    // app to crates.io; the 1000 ms duration is presumably the minimum
    // interval between requests — confirm against crates_io_api docs.
    crates_io_api::AsyncClient::new(
        "crates-tui (crates-tui@kdheepak.com)",
        std::time::Duration::from_millis(1000),
    )
    .map_err(|err| format!("API Client Error: {err:#?}"))
}
/// Builds a crates.io query from the search parameters.
fn create_query(params: &SearchParameters) -> CratesQuery {
    // Form the query and fetch the crates, passing along any errors.
    crates_io_api::CratesQueryBuilder::default()
        .search(&params.search)
        .page(params.page)
        .page_size(params.page_size)
        .sort(params.sort.clone())
        .build()
}
/// Runs the crates query and splits the response into
/// `(crates, versions, total-result-count)`.
async fn fetch_crates_and_metadata(
    client: crates_io_api::AsyncClient,
    query: crates_io_api::CratesQuery,
) -> Result<(Vec<crates_io_api::Crate>, Vec<crates_io_api::Version>, u64), String> {
    match client.crates(query).await {
        Ok(page) => Ok((page.crates, page.versions, page.meta.total)),
        Err(err) => Err(format!("API Client Error: {err:#?}")),
    }
}
/// Handles the result after fetching crates and sending corresponding
/// actions.
fn update_state_with_fetched_crates(
    crates: Vec<crates_io_api::Crate>,
    versions: Vec<crates_io_api::Version>,
    total: u64,
    params: &SearchParameters,
) {
    // Lock and update the shared state container
    let mut app_crates = params.crates.lock().unwrap();
    app_crates.clear();
    app_crates.extend(crates);

    let mut app_versions = params.versions.lock().unwrap();
    app_versions.clear();
    app_versions.extend(versions);

    // After a successful fetch, send relevant actions based on the result
    if app_crates.is_empty() {
        let _ = params.tx.send(Action::ShowErrorPopup(format!(
            "Could not find any crates with query `{}`.",
            params.search
        )));
    } else {
        // Store the total count and refresh the table; ScrollDown
        // presumably moves the selection onto the first result — confirm
        // against the search page's scroll handling.
        let _ = params.tx.send(Action::StoreTotalNumberOfCrates(total));
        let _ = params.tx.send(Action::Tick);
        let _ = params.tx.send(Action::ScrollDown);
    }
}
// Performs the async fetch of crate details and stores the response in the
// shared `crate_info` slot.
pub async fn request_crate_details(
    crate_name: &str,
    crate_info: Arc<Mutex<Option<crates_io_api::CrateResponse>>>,
) -> Result<(), String> {
    let client = create_client()?;
    let crate_data = client
        .get_crate(crate_name)
        .await
        .map_err(|err| format!("Error fetching crate details: {err:#?}"))?;
    *crate_info.lock().unwrap() = Some(crate_data);
    Ok(())
}
// Performs the async fetch of the *full* crate details (extended data, with
// downloads excluded via `full_crate(_, false)`) and stores the response in
// the shared `full_crate_info` slot.
pub async fn request_full_crate_details(
    crate_name: &str,
    full_crate_info: Arc<Mutex<Option<crates_io_api::FullCrate>>>,
) -> Result<(), String> {
    let client = create_client()?;
    let full_crate_data = client
        .full_crate(crate_name, false)
        .await
        .map_err(|err| format!("Error fetching crate details: {err:#?}"))?;
    *full_crate_info.lock().unwrap() = Some(full_crate_data);
    Ok(())
}
/// Fetches the crates.io summary data and stores it in the shared
/// `summary` slot.
///
/// # Errors
///
/// Returns a formatted message when the client cannot be built or the
/// request fails.
pub async fn request_summary(
    summary: Arc<Mutex<Option<crates_io_api::Summary>>>,
) -> Result<(), String> {
    let client = create_client()?;
    let summary_data = client
        .summary()
        .await
        // Fixed copy-pasted error message: this fetches the summary, not
        // crate details.
        .map_err(|err| format!("Error fetching summary: {err:#?}"))?;
    *summary.lock().unwrap() = Some(summary_data);
    Ok(())
}
| rust | MIT | e1be774ae75fe9711fa13ba808c87e52db98d251 | 2026-01-04T20:19:28.808186Z | false |
ratatui/crates-tui | https://github.com/ratatui/crates-tui/blob/e1be774ae75fe9711fa13ba808c87e52db98d251/src/cli.rs | src/cli.rs | use std::path::PathBuf;
use clap::{
Parser,
builder::{Styles, styling::AnsiColor},
};
use serde::Serialize;
use serde_with::{NoneAsEmptyString, serde_as, skip_serializing_none};
use tracing::level_filters::LevelFilter;
/// Builds the long version string shown by `--version`: package version,
/// optional git describe suffix, build date, and the crate authors.
pub fn version() -> String {
    // Vergen substitutes this placeholder when git metadata was unavailable
    // at build time; in that case we omit the describe suffix entirely.
    let git_describe = match env!("VERGEN_GIT_DESCRIBE") {
        "VERGEN_IDEMPOTENT_OUTPUT" => String::new(),
        describe => format!("-{describe}"),
    };
    let version_message = format!(
        "{}{} ({})",
        env!("CARGO_PKG_VERSION"),
        git_describe,
        env!("VERGEN_BUILD_DATE"),
    );
    let author = clap::crate_authors!();
    format!(
        "\
{version_message}
Authors: {author}"
    )
}
/// ANSI styling applied to clap's generated `--help` output.
const HELP_STYLES: Styles = Styles::styled()
    .header(AnsiColor::Blue.on_default().bold())
    .usage(AnsiColor::Blue.on_default().bold())
    .literal(AnsiColor::White.on_default())
    .placeholder(AnsiColor::Green.on_default());
/// Command line arguments.
///
/// Implements Serialize so that we can use it as a source for Figment
/// configuration.
#[serde_as]
#[skip_serializing_none]
#[derive(Debug, Default, Parser, Serialize)]
#[command(author, version = version(), about, long_about = None, styles = HELP_STYLES)]
pub struct Cli {
    /// Initial Query
    #[arg(value_name = "QUERY")]
    pub query: Option<String>,
    /// Print default configuration
    #[arg(long)]
    pub print_default_config: bool,
    /// A path to a crates-tui configuration file.
    #[arg(
        short,
        long,
        value_name = "FILE",
        default_value = get_default_config_path()
    )]
    pub config_file: Option<PathBuf>,
    /// A path to a base16 color file.
    #[arg(long, value_name = "FILE", default_value = get_default_color_file())]
    pub color_file: Option<PathBuf>,
    /// Frame rate, i.e. number of frames per second
    #[arg(short, long, value_name = "FLOAT", default_value_t = 15.0)]
    pub frame_rate: f64,
    /// The directory to use for storing application data.
    #[arg(long, value_name = "DIR", default_value = get_default_data_dir())]
    pub data_dir: Option<PathBuf>,
    /// The log level to use. Valid values are: error, warn, info, debug, trace, off.
    ///
    /// [default: info]
    #[arg(long, value_name = "LEVEL", alias = "log")]
    // Serialize None as "" so an unset level can round-trip through Figment.
    #[serde_as(as = "NoneAsEmptyString")]
    pub log_level: Option<LevelFilter>,
}
/// Default config file path as a string, for use in clap's `default_value`.
fn get_default_config_path() -> String {
    let path = crate::config::default_config_file();
    path.to_string_lossy().into_owned()
}
/// Default base16 color file path as a string, for clap's `default_value`.
fn get_default_color_file() -> String {
    let path = crate::config::default_color_file();
    path.to_string_lossy().into_owned()
}
/// Default application data directory as a string, for clap's `default_value`.
fn get_default_data_dir() -> String {
    let path = crate::config::default_data_dir();
    path.to_string_lossy().into_owned()
}
/// Parses the process's command line arguments into a [`Cli`].
pub fn parse() -> Cli {
    Cli::parse()
}
| rust | MIT | e1be774ae75fe9711fa13ba808c87e52db98d251 | 2026-01-04T20:19:28.808186Z | false |
ratatui/crates-tui | https://github.com/ratatui/crates-tui/blob/e1be774ae75fe9711fa13ba808c87e52db98d251/src/serde_helper.rs | src/serde_helper.rs | pub mod keybindings {
use std::collections::HashMap;
use color_eyre::eyre::Result;
use crossterm::event::{KeyCode, KeyEvent, KeyModifiers};
use derive_deref::{Deref, DerefMut};
use itertools::Itertools;
use serde::{Deserialize, Serialize, Serializer, de::Deserializer};
use crate::{action::Action, app::Mode, command::Command};
/// Per-mode key bindings: for each [`Mode`], a sequence of key events maps to
/// a single [`Command`].
#[derive(Clone, Debug, Default, Deref, DerefMut)]
pub struct KeyBindings(pub HashMap<Mode, HashMap<Vec<KeyEvent>, Command>>);
impl KeyBindings {
    /// Converts a user-configurable [`Command`] into its runtime [`Action`].
    /// Every command maps 1:1; `ToggleSortBy` carries its flags through.
    pub fn command_to_action(&self, command: Command) -> Action {
        match command {
            Command::Quit => Action::Quit,
            Command::NextTab => Action::NextTab,
            Command::PreviousTab => Action::PreviousTab,
            Command::ClosePopup => Action::ClosePopup,
            Command::SwitchMode(m) => Action::SwitchMode(m),
            Command::SwitchToLastMode => Action::SwitchToLastMode,
            Command::IncrementPage => Action::IncrementPage,
            Command::DecrementPage => Action::DecrementPage,
            Command::NextSummaryMode => Action::NextSummaryMode,
            Command::PreviousSummaryMode => Action::PreviousSummaryMode,
            Command::ToggleSortBy { reload, forward } => {
                Action::ToggleSortBy { reload, forward }
            }
            Command::ScrollBottom => Action::ScrollBottom,
            Command::ScrollTop => Action::ScrollTop,
            Command::ScrollDown => Action::ScrollDown,
            Command::ScrollUp => Action::ScrollUp,
            Command::ScrollCrateInfoDown => Action::ScrollCrateInfoDown,
            Command::ScrollCrateInfoUp => Action::ScrollCrateInfoUp,
            Command::ScrollSearchResultsDown => Action::ScrollSearchResultsDown,
            Command::ScrollSearchResultsUp => Action::ScrollSearchResultsUp,
            Command::SubmitSearch => Action::SubmitSearch,
            Command::ReloadData => Action::ReloadData,
            Command::ToggleShowCrateInfo => Action::ToggleShowCrateInfo,
            Command::CopyCargoAddCommandToClipboard => Action::CopyCargoAddCommandToClipboard,
            Command::OpenDocsUrlInBrowser => Action::OpenDocsUrlInBrowser,
            Command::OpenCratesIOUrlInBrowser => Action::OpenCratesIOUrlInBrowser,
        }
    }
    /// Registers `command` under the key-event sequence for `mode`,
    /// replacing any existing binding for the same sequence.
    #[allow(dead_code)]
    pub fn insert(&mut self, mode: Mode, key_events: &[KeyEvent], command: Command) {
        // Convert the slice of `KeyEvent`(s) to a `Vec`.
        let key_events_vec = key_events.to_vec();
        // Retrieve or create the inner `HashMap` corresponding to the mode.
        let bindings_for_mode = self.0.entry(mode).or_default();
        // Insert the `Command` into the inner `HashMap` using the key events `Vec` as
        // the key.
        bindings_for_mode.insert(key_events_vec, command);
    }
    /// Looks up the command bound to `key_events` in `mode`.
    ///
    /// On a miss, recursively retries with the first event dropped, so the
    /// longest matching *suffix* of the buffered events wins. Returns `None`
    /// when no suffix matches.
    pub fn event_to_command(&self, mode: Mode, key_events: &[KeyEvent]) -> Option<Command> {
        if key_events.is_empty() {
            None
        } else if let Some(Some(command)) = self.0.get(&mode).map(|kb| kb.get(key_events)) {
            Some(*command)
        } else {
            self.event_to_command(mode, &key_events[1..])
        }
    }
    /// All key-event sequences bound to `command` in `mode` (possibly empty).
    pub fn get_keybindings_for_command(
        &self,
        mode: Mode,
        command: Command,
    ) -> Vec<Vec<KeyEvent>> {
        let bindings_for_mode = self.0.get(&mode).cloned().unwrap_or_default();
        bindings_for_mode
            .into_iter()
            .filter(|(_, v)| *v == command)
            .map(|(k, _)| k)
            .collect_vec()
    }
    /// Human-readable renderings (e.g. "Ctrl-c") of every binding for
    /// `command` in `mode`; one string per bound sequence.
    pub fn get_config_for_command(&self, mode: Mode, command: Command) -> Vec<String> {
        self.get_keybindings_for_command(mode, command)
            .iter()
            .map(|key_events| {
                key_events
                    .iter()
                    .map(key_event_to_string)
                    .collect_vec()
                    .join("")
            })
            .collect_vec()
    }
}
impl<'de> Deserialize<'de> for KeyBindings {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let parsed_map = HashMap::<Mode, HashMap<String, Command>>::deserialize(deserializer)?;
let keybindings = parsed_map
.into_iter()
.map(|(mode, inner_map)| {
let converted_inner_map = inner_map
.into_iter()
.map(|(key_str, cmd)| (parse_key_sequence(&key_str).unwrap(), cmd))
.collect();
(mode, converted_inner_map)
})
.collect();
Ok(KeyBindings(keybindings))
}
}
impl Serialize for KeyBindings {
    /// Serializes to `mode -> { "<key><key>…" -> command }`, the inverse of
    /// the `Deserialize` impl above.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        let serialized_map: HashMap<Mode, HashMap<String, Command>> = self
            .0
            .iter()
            .map(|(mode, key_event_map)| {
                let string_event_map: HashMap<String, Command> = key_event_map
                    .iter()
                    .map(|(key_events, command)| {
                        // Render each event as "<name>" and concatenate.
                        let key_string: String = key_events
                            .iter()
                            .map(|key_event| format!("<{}>", key_event_to_string(key_event)))
                            .collect();
                        (key_string, *command)
                    })
                    .collect();
                (*mode, string_event_map)
            })
            .collect();
        serialized_map.serialize(serializer)
    }
}
/// Parses a single key description like "ctrl-a" into a `KeyEvent`.
fn parse_key_event(raw: &str) -> Result<KeyEvent, String> {
    // Peel off any leading modifier prefixes, then parse the key name itself.
    let (key_part, modifiers) = extract_modifiers(raw);
    parse_key_code_with_modifiers(key_part, modifiers)
}
/// Strips leading `ctrl-` / `alt-` / `shift-` prefixes (case-insensitive, in
/// any order and repetition) from `raw`, accumulating them into a
/// `KeyModifiers` set. Returns the unconsumed remainder and the modifiers.
fn extract_modifiers(raw: &str) -> (&str, KeyModifiers) {
    // Case-insensitive ASCII prefix strip; returns the remainder on a match.
    // Avoids the previous `to_lowercase()` call, which allocated a fresh
    // String on every loop iteration.
    fn strip_ci<'a>(s: &'a str, prefix: &str) -> Option<&'a str> {
        let head = s.get(..prefix.len())?;
        head.eq_ignore_ascii_case(prefix)
            .then(|| &s[prefix.len()..])
    }
    let mut modifiers = KeyModifiers::empty();
    let mut current = raw;
    loop {
        if let Some(rest) = strip_ci(current, "ctrl-") {
            modifiers.insert(KeyModifiers::CONTROL);
            current = rest;
        } else if let Some(rest) = strip_ci(current, "alt-") {
            modifiers.insert(KeyModifiers::ALT);
            current = rest;
        } else if let Some(rest) = strip_ci(current, "shift-") {
            modifiers.insert(KeyModifiers::SHIFT);
            current = rest;
        } else {
            // No known prefix left — the rest is the key name.
            break;
        }
    }
    (current, modifiers)
}
// FIXME - seems excessively verbose. Use strum to simplify?
/// Parses a bare key name (after modifiers were stripped) into a `KeyEvent`.
///
/// `modifiers` is mutable because some keys imply a modifier: `backtab` adds
/// SHIFT, and a shifted single character is upper-cased.
fn parse_key_code_with_modifiers(
    raw: &str,
    mut modifiers: KeyModifiers,
) -> Result<KeyEvent, String> {
    let c = match raw.to_lowercase().as_str() {
        "esc" => KeyCode::Esc,
        "enter" => KeyCode::Enter,
        "left" => KeyCode::Left,
        "right" => KeyCode::Right,
        "up" => KeyCode::Up,
        "down" => KeyCode::Down,
        "home" => KeyCode::Home,
        "end" => KeyCode::End,
        "pageup" => KeyCode::PageUp,
        "pagedown" => KeyCode::PageDown,
        "backtab" => {
            // BackTab is Shift+Tab by definition; force the modifier on.
            modifiers.insert(KeyModifiers::SHIFT);
            KeyCode::BackTab
        }
        "backspace" => KeyCode::Backspace,
        "delete" => KeyCode::Delete,
        "insert" => KeyCode::Insert,
        "f1" => KeyCode::F(1),
        "f2" => KeyCode::F(2),
        "f3" => KeyCode::F(3),
        "f4" => KeyCode::F(4),
        "f5" => KeyCode::F(5),
        "f6" => KeyCode::F(6),
        "f7" => KeyCode::F(7),
        "f8" => KeyCode::F(8),
        "f9" => KeyCode::F(9),
        "f10" => KeyCode::F(10),
        "f11" => KeyCode::F(11),
        "f12" => KeyCode::F(12),
        "space" => KeyCode::Char(' '),
        // Both spellings accepted because '-' itself is the prefix separator.
        "hyphen" => KeyCode::Char('-'),
        "minus" => KeyCode::Char('-'),
        "tab" => KeyCode::Tab,
        c if c.len() == 1 => {
            // Single character key; take it from `raw` to keep original case,
            // then upper-case it when SHIFT was requested.
            let mut c = raw.chars().next().unwrap();
            if modifiers.contains(KeyModifiers::SHIFT) {
                c = c.to_ascii_uppercase();
            }
            KeyCode::Char(c)
        }
        _ => return Err(format!("Unable to parse {raw}")),
    };
    Ok(KeyEvent::new(c, modifiers))
}
/// Renders a `KeyEvent` as the config-file spelling, e.g. "Ctrl-Shift-a".
/// Keys with no textual form (CapsLock, media keys, …) render as "".
pub fn key_event_to_string(key_event: &KeyEvent) -> String {
    // Backing storage for the two dynamically-formatted cases below, so the
    // match can uniformly yield a `&str`.
    let char;
    let key_code = match key_event.code {
        KeyCode::Backspace => "Backspace",
        KeyCode::Enter => "Enter",
        KeyCode::Left => "Left",
        KeyCode::Right => "Right",
        KeyCode::Up => "Up",
        KeyCode::Down => "Down",
        KeyCode::Home => "Home",
        KeyCode::End => "End",
        KeyCode::PageUp => "PageUp",
        KeyCode::PageDown => "PageDown",
        KeyCode::Tab => "Tab",
        KeyCode::BackTab => "Backtab",
        KeyCode::Delete => "Delete",
        KeyCode::Insert => "Insert",
        KeyCode::F(c) => {
            char = format!("F({c})");
            &char
        }
        KeyCode::Char(' ') => "Space",
        KeyCode::Char(c) => {
            char = c.to_string();
            &char
        }
        KeyCode::Esc => "Esc",
        // Keys below have no stable textual spelling in the config format.
        KeyCode::Null => "",
        KeyCode::CapsLock => "",
        KeyCode::Menu => "",
        KeyCode::ScrollLock => "",
        KeyCode::Media(_) => "",
        KeyCode::NumLock => "",
        KeyCode::PrintScreen => "",
        KeyCode::Pause => "",
        KeyCode::KeypadBegin => "",
        KeyCode::Modifier(_) => "",
    };
    // Prefix modifiers in a fixed Ctrl, Shift, Alt order.
    let mut modifiers = Vec::with_capacity(3);
    if key_event.modifiers.intersects(KeyModifiers::CONTROL) {
        modifiers.push("Ctrl");
    }
    if key_event.modifiers.intersects(KeyModifiers::SHIFT) {
        modifiers.push("Shift");
    }
    if key_event.modifiers.intersects(KeyModifiers::ALT) {
        modifiers.push("Alt");
    }
    let mut key = modifiers.join("-");
    if !key.is_empty() {
        key.push('-');
    }
    key.push_str(key_code);
    key
}
/// Parses a key-sequence string like `<ctrl-a><b>` (angle brackets optional
/// for a single key) into the corresponding `KeyEvent`s.
///
/// Errors when `<`/`>` counts are unbalanced or any individual key fails to
/// parse.
pub fn parse_key_sequence(raw: &str) -> Result<Vec<KeyEvent>, String> {
    if raw.chars().filter(|c| *c == '>').count() != raw.chars().filter(|c| *c == '<').count() {
        return Err(format!("Unable to parse `{raw}`"));
    }
    // Single-key form: strip one optional surrounding `<` and `>`.
    // NOTE: this previously called `strip_prefix('>')` — a typo for
    // `strip_suffix` that the bracket-stripping closure below happened to
    // compensate for. Fixed to state the intent directly.
    let raw = if !raw.contains("><") {
        let raw = raw.strip_prefix('<').unwrap_or(raw);
        raw.strip_suffix('>').unwrap_or(raw)
    } else {
        raw
    };
    // Splitting on "><" leaves a dangling `<` on the first element and a
    // dangling `>` on the last; strip whichever is present.
    let sequences = raw
        .split("><")
        .map(|seq| {
            seq.strip_prefix('<')
                .or_else(|| seq.strip_suffix('>'))
                .unwrap_or(seq)
        })
        .collect::<Vec<_>>();
    sequences.into_iter().map(parse_key_event).collect()
}
}
| rust | MIT | e1be774ae75fe9711fa13ba808c87e52db98d251 | 2026-01-04T20:19:28.808186Z | false |
ratatui/crates-tui | https://github.com/ratatui/crates-tui/blob/e1be774ae75fe9711fa13ba808c87e52db98d251/src/widgets.rs | src/widgets.rs | pub mod crate_info_table;
pub mod help;
pub mod popup_message;
pub mod search_filter_prompt;
pub mod search_page;
pub mod search_results;
pub mod status_bar;
pub mod summary;
pub mod tabs;
| rust | MIT | e1be774ae75fe9711fa13ba808c87e52db98d251 | 2026-01-04T20:19:28.808186Z | false |
ratatui/crates-tui | https://github.com/ratatui/crates-tui/blob/e1be774ae75fe9711fa13ba808c87e52db98d251/src/main.rs | src/main.rs | mod action;
mod app;
mod cli;
mod command;
mod config;
mod crates_io_api_helper;
mod errors;
mod events;
mod logging;
mod serde_helper;
mod widgets;
use app::App;
fn main() -> color_eyre::Result<()> {
    let cli = cli::parse();
    // Config must be initialized before logging: logging reads the data
    // directory and log level from the global config.
    config::init(&cli)?;
    logging::init()?;
    errors::install_hooks()?;
    // `--print-default-config` dumps the effective config as TOML and exits
    // without starting the TUI.
    if cli.print_default_config {
        println!("{}", toml::to_string_pretty(config::get())?);
        return Ok(());
    }
    let mut app = App::new(cli.query);
    ratatui::run(|tui| app.run(tui))
}
| rust | MIT | e1be774ae75fe9711fa13ba808c87e52db98d251 | 2026-01-04T20:19:28.808186Z | false |
ratatui/crates-tui | https://github.com/ratatui/crates-tui/blob/e1be774ae75fe9711fa13ba808c87e52db98d251/src/events.rs | src/events.rs | use core::fmt;
use std::{pin::Pin, time::Duration};
use crossterm::event::{Event as CrosstermEvent, *};
use futures::{Stream, StreamExt};
use serde::{Deserialize, Serialize};
use tokio::time::interval;
use tokio_stream::{StreamMap, wrappers::IntervalStream};
use crate::config;
/// Multiplexes all application event sources (ticks, key refresh, render,
/// terminal input) into a single polled stream.
pub struct Events {
    // Keyed stream set; `next()` yields from whichever stream fires first.
    streams: StreamMap<StreamName, Pin<Box<dyn Stream<Item = Event>>>>,
}
impl fmt::Debug for Events {
    // Manual impl because boxed trait-object streams are not `Debug`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("Events").finish_non_exhaustive()
    }
}
/// Identifies each source stream inside the `StreamMap`.
#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)]
enum StreamName {
    Ticks,
    KeyRefresh,
    Render,
    Crossterm,
}
/// Application-level events produced by the combined stream.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub enum Event {
    Init,
    Quit,
    Error,
    Closed,
    Tick,
    KeyRefresh,
    Render,
    Crossterm(CrosstermEvent),
}
impl Events {
    /// Builds the full event source set: ticks, key refresh, render frames,
    /// and crossterm terminal input.
    pub fn new() -> Self {
        let mut streams = StreamMap::new();
        streams.insert(StreamName::Ticks, tick_stream());
        streams.insert(StreamName::KeyRefresh, key_refresh_stream());
        streams.insert(StreamName::Render, render_stream());
        streams.insert(StreamName::Crossterm, crossterm_stream());
        Self { streams }
    }
    /// Next event from whichever source fires first; `None` once every
    /// source stream has ended.
    pub async fn next(&mut self) -> Option<Event> {
        self.streams.next().await.map(|(_name, event)| event)
    }
}
/// Emits `Event::Tick` at the configured `tick_rate` (ticks per second).
fn tick_stream() -> Pin<Box<dyn Stream<Item = Event>>> {
    let period = Duration::from_secs_f64(config::get().tick_rate.recip());
    Box::pin(IntervalStream::new(interval(period)).map(|_| Event::Tick))
}
/// Emits `Event::KeyRefresh` at the configured `key_refresh_rate`.
fn key_refresh_stream() -> Pin<Box<dyn Stream<Item = Event>>> {
    let period = Duration::from_secs_f64(config::get().key_refresh_rate.recip());
    Box::pin(IntervalStream::new(interval(period)).map(|_| Event::KeyRefresh))
}
/// Emits `Event::Render` at the configured `frame_rate` (frames per second).
fn render_stream() -> Pin<Box<dyn Stream<Item = Event>>> {
    let period = Duration::from_secs_f64(config::get().frame_rate.recip());
    Box::pin(IntervalStream::new(interval(period)).map(|_| Event::Render))
}
/// Terminal input events from crossterm wrapped as app `Event`s; stream read
/// errors surface as `Event::Error` rather than terminating the stream.
fn crossterm_stream() -> Pin<Box<dyn Stream<Item = Event>>> {
    Box::pin(EventStream::new().fuse().filter_map(|event| async move {
        match event {
            // Drop key *release* events; press and repeat pass through.
            Ok(CrosstermEvent::Key(key)) if key.kind == KeyEventKind::Release => None,
            Ok(event) => Some(Event::Crossterm(event)),
            Err(_) => Some(Event::Error),
        }
    }))
}
| rust | MIT | e1be774ae75fe9711fa13ba808c87e52db98d251 | 2026-01-04T20:19:28.808186Z | false |
ratatui/crates-tui | https://github.com/ratatui/crates-tui/blob/e1be774ae75fe9711fa13ba808c87e52db98d251/src/logging.rs | src/logging.rs | use color_eyre::eyre::Result;
use tracing::level_filters::LevelFilter;
use tracing_error::ErrorLayer;
use tracing_subscriber::{
self, prelude::__tracing_subscriber_SubscriberExt, util::SubscriberInitExt,
};
use crate::config;
/// Initializes tracing: writes plain-text (no ANSI) logs with file/line info
/// to `<data_dir>/<crate-name>.log`, plus an `ErrorLayer` for span traces.
///
/// Must run after `config::init` — reads the data dir and log level from the
/// global config. Note: `File::create` truncates any existing log file.
pub fn init() -> Result<()> {
    let config = config::get();
    let directory = config.data_dir.clone();
    std::fs::create_dir_all(directory.clone())?;
    let log_file = format!("{}.log", env!("CARGO_PKG_NAME"));
    let log_path = directory.join(log_file);
    let log_file = std::fs::File::create(log_path)?;
    let file_subscriber = tracing_subscriber::fmt::layer()
        .with_file(true)
        .with_line_number(true)
        .with_writer(log_file)
        .with_target(false)
        .with_ansi(false);
    tracing_subscriber::registry()
        .with(file_subscriber)
        .with(ErrorLayer::default())
        .with(
            // Silence noisy dependencies, then apply the configured level
            // (OFF when none was configured).
            tracing_subscriber::filter::EnvFilter::from_default_env()
                .add_directive("tokio_util=off".parse().unwrap())
                .add_directive("hyper=off".parse().unwrap())
                .add_directive("reqwest=off".parse().unwrap())
                .add_directive(config.log_level.unwrap_or(LevelFilter::OFF).into()),
        )
        .init();
    Ok(())
}
/// Similar to the `std::dbg!` macro, but generates `tracing` events rather
/// than printing to stdout.
///
/// By default, the verbosity level for the generated events is `DEBUG`, but
/// this can be customized.
///
/// Originally from https://github.com/tokio-rs/tracing/blob/baeba47cdaac9ed32d5ef3f6f1d7b0cc71ffdbdf/tracing-macros/src/lib.rs#L27
#[macro_export]
macro_rules! trace_dbg {
    // Fully-specified form: custom target and level. The `match` binds the
    // expression once so it is evaluated a single time and passed through.
    (target: $target:expr, level: $level:expr, $ex:expr) => {{
        match $ex {
            value => {
                tracing::event!(target: $target, $level, ?value, stringify!($ex));
                value
            }
        }
    }};
    // Custom level, default target (the calling module path).
    (level: $level:expr, $ex:expr) => {
        trace_dbg!(target: module_path!(), level: $level, $ex)
    };
    // Custom target, default DEBUG level.
    (target: $target:expr, $ex:expr) => {
        trace_dbg!(target: $target, level: tracing::Level::DEBUG, $ex)
    };
    // Bare form: module-path target, DEBUG level.
    ($ex:expr) => {
        trace_dbg!(level: tracing::Level::DEBUG, $ex)
    };
}
| rust | MIT | e1be774ae75fe9711fa13ba808c87e52db98d251 | 2026-01-04T20:19:28.808186Z | false |
ratatui/crates-tui | https://github.com/ratatui/crates-tui/blob/e1be774ae75fe9711fa13ba808c87e52db98d251/src/action.rs | src/action.rs | use serde::{Deserialize, Serialize};
use strum::Display;
use crate::app::Mode;
/// Every runtime action the app can perform; produced from events and key
/// bindings (see `KeyBindings::command_to_action`) and dispatched by the app.
#[derive(Debug, Display, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub enum Action {
    // Lifecycle / timing
    Tick,
    Render,
    KeyRefresh,
    Resize(u16, u16),
    Suspend,
    Resume,
    Quit,
    Init { query: Option<String> },
    Refresh,
    // Navigation and popups
    NextTab,
    PreviousTab,
    ShowErrorPopup(String),
    ShowInfoPopup(String),
    ClosePopup,
    Help,
    GetCrates,
    SwitchMode(Mode),
    SwitchToLastMode,
    IncrementPage,
    DecrementPage,
    NextSummaryMode,
    PreviousSummaryMode,
    ToggleSortBy { reload: bool, forward: bool },
    // Scrolling
    ScrollBottom,
    ScrollTop,
    ScrollDown,
    ScrollUp,
    ScrollCrateInfoDown,
    ScrollCrateInfoUp,
    ScrollSearchResultsDown,
    ScrollSearchResultsUp,
    // Search / data updates
    SubmitSearch,
    UpdateSearchTableResults,
    UpdateSummary,
    UpdateCurrentSelectionCrateInfo,
    UpdateCurrentSelectionSummary,
    ReloadData,
    ToggleShowCrateInfo,
    StoreTotalNumberOfCrates(u64),
    ClearTaskDetailsHandle(String),
    // External integrations
    CopyCargoAddCommandToClipboard,
    OpenDocsUrlInBrowser,
    OpenCratesIOUrlInBrowser,
    ShowFullCrateInfo,
}
| rust | MIT | e1be774ae75fe9711fa13ba808c87e52db98d251 | 2026-01-04T20:19:28.808186Z | false |
ratatui/crates-tui | https://github.com/ratatui/crates-tui/blob/e1be774ae75fe9711fa13ba808c87e52db98d251/src/widgets/tabs.rs | src/widgets/tabs.rs | use ratatui::{prelude::*, widgets::*};
use strum::{Display, EnumIter, FromRepr};
use crate::config;
/// The currently selected top-level tab; `None` renders nothing.
#[derive(Debug, Default, Clone, Copy, Display, FromRepr, EnumIter)]
pub enum SelectedTab {
    #[default]
    Summary,
    Search,
    None,
}
impl SelectedTab {
    /// Replaces the current selection.
    pub fn select(&mut self, selected_tab: SelectedTab) {
        *self = selected_tab
    }
    /// Style for the active tab title (inverted: base00 on base0a, bold).
    pub fn highlight_style() -> Style {
        Style::default()
            .fg(config::get().color.base00)
            .bg(config::get().color.base0a)
            .bold()
    }
}
impl Widget for &SelectedTab {
    // Dispatches to the per-tab render; `None` draws nothing.
    fn render(self, area: Rect, buf: &mut Buffer) {
        match self {
            SelectedTab::Summary => self.render_tab_summary(area, buf),
            SelectedTab::Search => self.render_tab_search(area, buf),
            SelectedTab::None => (),
        }
    }
}
impl SelectedTab {
    /// Tab title line, padded with spaces; empty for `None`.
    pub fn title(&self) -> Line<'static> {
        match self {
            SelectedTab::None => "".into(),
            _ => format!(" {self} ")
                .fg(config::get().color.base0d)
                .bg(config::get().color.base00)
                .into(),
        }
    }
    fn render_tab_summary(&self, area: Rect, buf: &mut Buffer) {
        Paragraph::new("Summary")
            .block(self.block())
            .render(area, buf)
    }
    fn render_tab_search(&self, area: Rect, buf: &mut Buffer) {
        Paragraph::new("Search")
            .block(self.block())
            .render(area, buf)
    }
    /// Shared bordered block used by both tab bodies.
    fn block(&self) -> Block<'static> {
        Block::default()
            .borders(Borders::ALL)
            .border_set(symbols::border::PLAIN)
            .padding(Padding::horizontal(1))
            .border_style(config::get().color.base03)
    }
}
| rust | MIT | e1be774ae75fe9711fa13ba808c87e52db98d251 | 2026-01-04T20:19:28.808186Z | false |
ratatui/crates-tui | https://github.com/ratatui/crates-tui/blob/e1be774ae75fe9711fa13ba808c87e52db98d251/src/widgets/status_bar.rs | src/widgets/status_bar.rs | use ratatui::{prelude::*, widgets::*};
use crate::{app::Mode, command::Command, config};
/// Bottom status bar: shows the current input text, the active sort order,
/// and mode-specific key hints.
pub struct StatusBarWidget {
    // Current search/filter input text.
    text: String,
    mode: Mode,
    sort: crates_io_api::Sort,
}
impl StatusBarWidget {
    pub fn new(mode: Mode, sort: crates_io_api::Sort, text: String) -> Self {
        Self { text, mode, sort }
    }
}
impl Widget for StatusBarWidget {
    fn render(self, area: Rect, buf: &mut Buffer) {
        self.status().render(area, buf);
    }
}
impl StatusBarWidget {
fn input_text(&self) -> Line<'_> {
if self.mode.is_picker() {
Line::from(vec![
self.text.clone().into(),
" (".into(),
format!("{:?}", self.sort.clone()).fg(config::get().color.base0d),
")".into(),
])
} else {
"".into()
}
}
fn status(&self) -> Block<'_> {
let line = if self.mode.is_filter() {
let help = config::get()
.key_bindings
.get_config_for_command(self.mode, Command::SwitchMode(Mode::Help))
.into_iter()
.next()
.unwrap_or_default();
vec![
"Enter".bold(),
" to submit, ".into(),
help.bold(),
" for help".into(),
]
} else if self.mode.is_search() {
let toggle_sort = config::get()
.key_bindings
.get_config_for_command(
Mode::Search,
Command::ToggleSortBy {
reload: false,
forward: true,
},
)
.into_iter()
.next()
.unwrap_or_default();
let help = config::get()
.key_bindings
.get_config_for_command(self.mode, Command::SwitchMode(Mode::Help))
.into_iter()
.next()
.unwrap_or_default();
vec![
toggle_sort.bold(),
" to toggle sort, ".into(),
"Enter".bold(),
" to submit, ".into(),
help.bold(),
" for help".into(),
]
} else if self.mode.is_summary() {
let help = config::get()
.key_bindings
.get_config_for_command(self.mode, Command::SwitchMode(Mode::Help))
.into_iter()
.next()
.unwrap_or_default();
let open_in_browser = config::get()
.key_bindings
.get_config_for_command(self.mode, Command::OpenCratesIOUrlInBrowser)
.into_iter()
.next()
.unwrap_or_default();
let search = config::get()
.key_bindings
.get_config_for_command(Mode::Common, Command::NextTab)
.into_iter()
.next()
.unwrap_or_default();
vec![
open_in_browser.bold(),
" to open in browser, ".into(),
search.bold(),
" to enter search, ".into(),
help.bold(),
" for help".into(),
]
} else if self.mode.is_help() {
vec!["ESC".bold(), " to return".into()]
} else {
let search = config::get()
.key_bindings
.get_config_for_command(self.mode, Command::SwitchMode(Mode::Search))
.into_iter()
.next()
.unwrap_or_default();
let filter = config::get()
.key_bindings
.get_config_for_command(self.mode, Command::SwitchMode(Mode::Filter))
.into_iter()
.next()
.unwrap_or_default();
let help = config::get()
.key_bindings
.get_config_for_command(self.mode, Command::SwitchMode(Mode::Help))
.into_iter()
.next()
.unwrap_or_default();
vec![
search.bold(),
" to search, ".into(),
filter.bold(),
" to filter, ".into(),
help.bold(),
" for help".into(),
]
};
let border_color = match self.mode {
Mode::Search => config::get().color.base0a,
Mode::Filter => config::get().color.base0b,
_ => config::get().color.base06,
};
Block::default()
.title(Line::from(line).right_aligned())
.title(self.input_text().left_aligned())
.fg(config::get().color.base05)
.border_style(border_color)
}
}
| rust | MIT | e1be774ae75fe9711fa13ba808c87e52db98d251 | 2026-01-04T20:19:28.808186Z | false |
ratatui/crates-tui | https://github.com/ratatui/crates-tui/blob/e1be774ae75fe9711fa13ba808c87e52db98d251/src/widgets/search_results.rs | src/widgets/search_results.rs | use crates_io_api::Crate;
use itertools::Itertools;
use num_format::{Locale, ToFormattedString};
use ratatui::{prelude::*, widgets::*};
use unicode_width::UnicodeWidthStr;
use crate::config;
/// Search results list state: the fetched crates plus table/scrollbar
/// positions. Scrolling wraps around at both ends.
#[derive(Debug, Default)]
pub struct SearchResults {
    pub crates: Vec<crates_io_api::Crate>,
    pub table_state: TableState,
    pub scrollbar_state: ScrollbarState,
}
impl SearchResults {
    /// Name of the currently selected crate, if a non-empty-named crate is
    /// selected.
    pub fn selected_crate_name(&self) -> Option<String> {
        self.selected()
            .and_then(|index| self.crates.get(index))
            .filter(|krate| !krate.name.is_empty())
            .map(|krate| krate.name.clone())
    }
    pub fn selected(&self) -> Option<usize> {
        self.table_state.selected()
    }
    /// Updates the scrollbar's total content length.
    pub fn content_length(&mut self, content_length: usize) {
        self.scrollbar_state = self.scrollbar_state.content_length(content_length)
    }
    pub fn select(&mut self, index: Option<usize>) {
        self.table_state.select(index)
    }
    /// Moves the selection down one row, wrapping to the top past the end.
    pub fn scroll_next(&mut self) {
        // `.max(1)` guards the modulo against an empty list.
        let wrap_index = self.crates.len().max(1);
        let next = self
            .table_state
            .selected()
            .map_or(0, |i| (i + 1) % wrap_index);
        self.scroll_to(next);
    }
    /// Moves the selection up one row, wrapping to the bottom past the top.
    pub fn scroll_previous(&mut self) {
        let last = self.crates.len().saturating_sub(1);
        let wrap_index = self.crates.len().max(1);
        // Adding `last` (= len - 1) mod len is equivalent to subtracting 1.
        let previous = self
            .table_state
            .selected()
            .map_or(last, |i| (i + last) % wrap_index);
        self.scroll_to(previous);
    }
    pub fn scroll_to_top(&mut self) {
        self.scroll_to(0);
    }
    pub fn scroll_to_bottom(&mut self) {
        let bottom = self.crates.len().saturating_sub(1);
        self.scroll_to(bottom);
    }
    // Applies a selection, keeping table and scrollbar state in sync;
    // clears the selection when there are no crates.
    fn scroll_to(&mut self, index: usize) {
        if self.crates.is_empty() {
            self.table_state.select(None)
        } else {
            self.table_state.select(Some(index));
            self.scrollbar_state = self.scrollbar_state.position(index);
        }
    }
}
/// Renders the search-results table; `highlight` switches the selection
/// marker between a solid block and a bullet.
pub struct SearchResultsWidget {
    highlight: bool,
}
impl SearchResultsWidget {
    pub fn new(highlight: bool) -> Self {
        Self { highlight }
    }
}
impl StatefulWidget for SearchResultsWidget {
    type State = SearchResults;
    fn render(self, area: Rect, buf: &mut Buffer, state: &mut Self::State) {
        use Constraint::*;
        const TABLE_HEADER_HEIGHT: u16 = 3;
        const COLUMN_SPACING: u16 = 3;
        // Reserve a 1-cell column on the right for the scrollbar, offset
        // below the table header.
        let [table_area, scrollbar_area] = Layout::horizontal([Fill(1), Length(1)]).areas(area);
        let [_, scrollbar_area] =
            Layout::vertical([Length(TABLE_HEADER_HEIGHT), Fill(1)]).areas(scrollbar_area);
        Scrollbar::default()
            .track_symbol(Some(" "))
            .thumb_symbol("▐")
            .begin_symbol(None)
            .end_symbol(None)
            .track_style(config::get().color.base06)
            .render(scrollbar_area, buf, &mut state.scrollbar_state);
        let highlight_symbol = if self.highlight {
            " █ "
        } else {
            " \u{2022} "
        };
        let column_widths = [Max(20), Fill(1), Max(11)];
        // Emulate the table layout calculations using Layout so we can render the vertical borders
        // in the space between the columns and can wrap the description field based on the actual
        // width of the description column
        let highlight_symbol_width = highlight_symbol.width() as u16;
        let [_highlight_column, table_columns] =
            Layout::horizontal([Length(highlight_symbol_width), Fill(1)]).areas(table_area);
        let column_layout = Layout::horizontal(column_widths).spacing(COLUMN_SPACING);
        let [_name_column, description_column, _downloads_column] =
            column_layout.areas(table_columns);
        let spacers: [Rect; 4] = column_layout.spacers(table_columns);
        // Pads a single line with an empty line above and below (3-row cells).
        let vertical_pad = |line| Text::from(vec!["".into(), line, "".into()]);
        let header_cells = ["Name", "Description", "Downloads"]
            .map(|h| h.bold().into())
            .map(vertical_pad);
        let header = Row::new(header_cells)
            .fg(config::get().color.base05)
            .bg(config::get().color.base00)
            .height(TABLE_HEADER_HEIGHT);
        let description_column_width = description_column.width as usize;
        let selected_index = state.selected().unwrap_or_default();
        let rows = state
            .crates
            .iter()
            .enumerate()
            .map(|(index, krate)| {
                row_from_crate(krate, description_column_width, index, selected_index)
            })
            .collect_vec();
        let table = Table::new(rows, column_widths)
            .header(header)
            .column_spacing(COLUMN_SPACING)
            .highlight_symbol(vertical_pad(highlight_symbol.into()))
            .row_highlight_style(config::get().color.base05)
            .highlight_spacing(HighlightSpacing::Always);
        StatefulWidget::render(table, table_area, buf, &mut state.table_state);
        render_table_borders(state, spacers, buf);
    }
}
/// Builds one table row for a crate: name, wrapped description, and
/// right-aligned formatted download count. Row height follows the wrapped
/// description; alternating/selected backgrounds come from `bg_color`.
fn row_from_crate(
    krate: &Crate,
    description_column_width: usize,
    index: usize,
    selected_index: usize,
) -> Row<'_> {
    // Wrap the description to the actual column width computed by the caller.
    let mut description = textwrap::wrap(
        &krate.description.clone().unwrap_or_default(),
        description_column_width,
    )
    .iter()
    .map(|s| Line::from(s.to_string()))
    .collect_vec();
    // Blank line above and below so single-line cells match this height.
    description.insert(0, "".into());
    description.push("".into());
    let vertical_padded = |line| Text::from(vec!["".into(), line, "".into()]);
    let crate_name = Line::from(krate.name.clone());
    let downloads = Line::from(krate.downloads.to_formatted_string(&Locale::en)).right_aligned();
    let description_height = description.len() as u16;
    Row::new([
        vertical_padded(crate_name),
        Text::from(description),
        vertical_padded(downloads),
    ])
    .height(description_height)
    .fg(config::get().color.base05)
    .bg(bg_color(index, selected_index))
}
/// Row background: a distinct color for the selected row, otherwise zebra
/// striping by row parity.
fn bg_color(index: usize, selected_index: usize) -> Color {
    match (index == selected_index, index % 2) {
        (true, _) => config::get().color.base02,
        (false, 0) => config::get().color.base00,
        (false, _) => config::get().color.base01,
    }
}
fn render_table_borders(state: &mut SearchResults, spacers: [Rect; 4], buf: &mut Buffer) {
// only render margins when there's items in the table
if !state.crates.is_empty() {
// don't render margin for the first column
for space in spacers.iter().skip(1).copied() {
Text::from(
std::iter::once(" ".into())
.chain(std::iter::once(" ".into()))
.chain(std::iter::once(" ".into()))
.chain(std::iter::repeat_n(
" │".fg(config::get().color.base0f),
space.height as usize,
))
.map(Line::from)
.collect_vec(),
)
.render(space, buf);
}
}
}
| rust | MIT | e1be774ae75fe9711fa13ba808c87e52db98d251 | 2026-01-04T20:19:28.808186Z | false |
ratatui/crates-tui | https://github.com/ratatui/crates-tui/blob/e1be774ae75fe9711fa13ba808c87e52db98d251/src/widgets/help.rs | src/widgets/help.rs | use itertools::Itertools;
use ratatui::{prelude::*, widgets::*};
use crate::{
app::Mode,
command::{ALL_COMMANDS, Command},
config,
};
/// Help screen state: the table selection plus the mode whose bindings
/// should be pre-selected on the first render (cleared afterwards).
#[derive(Default, Debug, Clone)]
pub struct Help {
    pub state: TableState,
    pub mode: Option<Mode>,
}
impl Help {
    pub fn new(state: TableState, mode: Option<Mode>) -> Self {
        Self { state, mode }
    }
    /// Moves the selection up one row; clamps at the top, starts at 0 when
    /// nothing was selected.
    pub fn scroll_up(&mut self) {
        let index = match self.state.selected() {
            Some(current) => current.saturating_sub(1),
            None => 0,
        };
        self.state.select(Some(index));
    }
    /// Moves the selection down one row; starts at 0 when nothing was
    /// selected (bounds are clamped elsewhere, at render time).
    pub fn scroll_down(&mut self) {
        let index = match self.state.selected() {
            Some(current) => current.saturating_add(1),
            None => 0,
        };
        self.state.select(Some(index));
    }
}
/// Renders the help table (mode / keys / command) for all configured
/// bindings.
pub struct HelpWidget;
const HIGHLIGHT_SYMBOL: &str = "█ ";
impl StatefulWidget for &HelpWidget {
    type State = Help;
    fn render(self, area: Rect, buf: &mut Buffer, state: &mut Self::State) {
        use Constraint::*;
        // Center the table: bottom 90% of the height, middle 85% of the width.
        let [_, area] = Layout::vertical([Min(0), Percentage(90)]).areas(area);
        let [_, area, _] = Layout::horizontal([Min(0), Percentage(85), Min(0)]).areas(area);
        let all_key_bindings = all_key_bindings();
        // Jump to the rows for the originating mode on first render.
        select_by_mode(state, &all_key_bindings);
        let widths = [Max(10), Max(10), Min(0)];
        let header = Row::new(["Mode", "Keys", "Command"].map(|h| Line::from(h.bold())))
            .fg(config::get().color.base05)
            .bg(config::get().color.base00);
        let table = Table::new(into_rows(&all_key_bindings), widths)
            .header(header)
            .column_spacing(5)
            .highlight_symbol(HIGHLIGHT_SYMBOL)
            .row_highlight_style(config::get().color.base05)
            .highlight_spacing(HighlightSpacing::Always);
        StatefulWidget::render(table, area, buf, &mut state.state);
    }
}
/// Collects one `(mode, command, keys)` entry for every command in every
/// mode, with the command's bindings joined by ", ".
fn all_key_bindings() -> Vec<(Mode, Command, String)> {
    let mut bindings = Vec::new();
    for (mode, commands) in ALL_COMMANDS.iter() {
        for command in commands.iter() {
            let keys = key_bindings_for_command(*mode, *command).join(", ");
            bindings.push((*mode, *command, keys));
        }
    }
    bindings
}
/// Returns the key bindings for a specific command and mode
fn key_bindings_for_command(mode: Mode, command: Command) -> Vec<String> {
    config::get()
        .key_bindings
        .get_config_for_command(mode, command)
}
/// updates the selected index based on the current mode
///
/// Only changes the selected index for the first render
fn select_by_mode(state: &mut Help, rows: &[(Mode, Command, String)]) {
    if let Some(mode) = state.mode {
        tracing::debug!("{:?}", mode);
        // First row belonging to the originating mode (0 when none found).
        let selected = rows
            .iter()
            .find_position(|(m, _, _)| *m == mode)
            .map(|(index, _)| index)
            .unwrap_or_default();
        *state.state.selected_mut() = Some(selected);
        // Offset slightly above the selection — presumably to keep a couple
        // of context rows visible above it (TODO confirm intent).
        *state.state.offset_mut() = selected.saturating_sub(2);
        // Reset the mode after the first render - let the user scroll
        state.mode = None;
    };
    // ensure the selected index is within the bounds
    *state.state.selected_mut() = Some(
        state
            .state
            .selected()
            .unwrap_or_default()
            .min(rows.len().saturating_sub(1)),
    );
}
/// Converts `(mode, command, keys)` tuples into styled table rows.
fn into_rows(rows: &[(Mode, Command, String)]) -> impl Iterator<Item = Row<'_>> {
    rows.iter().map(|(mode, command, keys)| {
        // Mode column is dimmed; keys and command use the default styling.
        let mode_cell = Line::styled(format!("{mode} "), Color::DarkGray);
        let keys_cell = Line::raw(keys.to_string());
        let command_cell = Line::raw(format!("{command:?} "));
        Row::new([mode_cell, keys_cell, command_cell])
            .fg(config::get().color.base05)
            .bg(config::get().color.base00)
    })
}
| rust | MIT | e1be774ae75fe9711fa13ba808c87e52db98d251 | 2026-01-04T20:19:28.808186Z | false |
ratatui/crates-tui | https://github.com/ratatui/crates-tui/blob/e1be774ae75fe9711fa13ba808c87e52db98d251/src/widgets/summary.rs | src/widgets/summary.rs | use color_eyre::Result;
use std::sync::{
Arc, Mutex,
atomic::{AtomicBool, Ordering},
};
use itertools::Itertools;
use ratatui::{layout::Flex, prelude::*, widgets::*};
use strum::{Display, EnumIs, EnumIter, FromRepr};
use tokio::sync::mpsc::UnboundedSender;
use crate::{action::Action, config, crates_io_api_helper};
/// The tab currently focused on the summary screen; each variant maps to one
/// of the lists returned by the crates.io summary endpoint.
#[derive(Default, Debug, Clone, Copy, EnumIs, FromRepr, Display, EnumIter)]
pub enum SummaryMode {
    /// The `new_crates` list (default tab).
    #[default]
    NewCrates,
    /// The `most_downloaded` list.
    MostDownloaded,
    /// The `just_updated` list.
    JustUpdated,
    /// The `most_recently_downloaded` list.
    MostRecentlyDownloaded,
    /// The `popular_keywords` list.
    PopularKeywords,
    /// The `popular_categories` list.
    PopularCategories,
}
// Marker rendered next to the highlighted list/table row.
const HIGHLIGHT_SYMBOL: &str = " █ ";
impl SummaryMode {
    /// Moves to the previous tab; stays on the current tab when already at
    /// the first one (`from_repr(0 - 1)` never happens thanks to
    /// `saturating_sub`).
    fn previous(&mut self) {
        let current_index: usize = *self as usize;
        let previous_index = current_index.saturating_sub(1);
        *self = Self::from_repr(previous_index).unwrap_or(*self)
    }

    /// Moves to the next tab; stays on the current tab when already at the
    /// last one (`from_repr` returns `None` past the last discriminant).
    fn next(&mut self) {
        let current_index = *self as usize;
        let next_index = current_index.saturating_add(1);
        *self = Self::from_repr(next_index).unwrap_or(*self)
    }

    /// Base crates.io URL for items shown in this mode; the caller appends
    /// the item's name, keyword id, or category slug.
    fn url_prefix(&self) -> String {
        match self {
            // All four crate lists link to the same crate pages.
            SummaryMode::NewCrates
            | SummaryMode::MostDownloaded
            | SummaryMode::JustUpdated
            | SummaryMode::MostRecentlyDownloaded => "https://crates.io/crates/",
            SummaryMode::PopularKeywords => "https://crates.io/keywords/",
            SummaryMode::PopularCategories => "https://crates.io/categories/",
        }
        .into()
    }
}
/// State for the summary screen: one list per [`SummaryMode`] tab plus the
/// data fetched from crates.io.
#[derive(Debug, Clone)]
pub struct Summary {
    /// One `ListState` per `SummaryMode` variant, indexed by `mode as usize`.
    pub state: [ListState; 6],
    /// Last selected index per tab, restored when switching back to a tab.
    pub last_selection: [usize; 6],
    /// The currently focused tab.
    pub mode: SummaryMode,
    /// Snapshot of `data` taken by `update()`, used for rendering.
    pub summary_data: Option<crates_io_api::Summary>,
    /// A thread-safe shared container holding the detailed information about
    /// the currently selected crate; this can be `None` if no crate is
    /// selected.
    pub data: Arc<Mutex<Option<crates_io_api::Summary>>>,
    /// Sender end of an asynchronous channel for dispatching actions from
    /// various parts of the app to be handled by the event loop.
    tx: UnboundedSender<Action>,
    /// A thread-safe indicator of whether data is currently being loaded,
    /// allowing different parts of the app to know if it's in a loading state.
    loading_status: Arc<AtomicBool>,
}
impl Summary {
    /// Creates an empty summary screen wired to the app's action channel and
    /// shared loading flag.
    pub fn new(tx: UnboundedSender<Action>, loading_status: Arc<AtomicBool>) -> Self {
        Self {
            tx,
            loading_status,
            state: Default::default(),
            last_selection: Default::default(),
            mode: Default::default(),
            summary_data: Default::default(),
            data: Default::default(),
        }
    }

    /// The currently focused tab.
    pub fn mode(&self) -> SummaryMode {
        self.mode
    }

    /// crates.io URL for the currently selected item, or `None` when no
    /// summary data has been loaded yet.
    //
    // NOTE(review): the list lookups below index without a bounds check;
    // this relies on the selection being clamped to the list length by the
    // render path (`render_list`) — confirm that holds on resize/reload.
    pub fn url(&self) -> Option<String> {
        let prefix = self.mode.url_prefix();
        if let Some(ref summary) = self.summary_data {
            let state = self.get_state(self.mode);
            let i = state.selected().unwrap_or_default().saturating_sub(1); // starting index for list is 1 because we render empty line as the 0th element
            tracing::debug!("i = {i}");
            let suffix = match self.mode {
                SummaryMode::NewCrates => summary.new_crates[i].name.clone(),
                SummaryMode::MostDownloaded => summary.most_downloaded[i].name.clone(),
                SummaryMode::JustUpdated => summary.just_updated[i].name.clone(),
                SummaryMode::MostRecentlyDownloaded => {
                    summary.most_recently_downloaded[i].name.clone()
                }
                SummaryMode::PopularKeywords => summary.popular_keywords[i].id.clone(),
                SummaryMode::PopularCategories => summary.popular_categories[i].slug.clone(),
            };
            Some(format!("{prefix}{suffix}"))
        } else {
            None
        }
    }

    /// Mutable list state for the given tab.
    pub fn get_state_mut(&mut self, mode: SummaryMode) -> &mut ListState {
        &mut self.state[mode as usize]
    }

    /// Shared list state for the given tab.
    pub fn get_state(&self, mode: SummaryMode) -> &ListState {
        &self.state[mode as usize]
    }

    /// Selected index for the given tab, clamped to at least 1.
    pub fn selected(&self, mode: SummaryMode) -> Option<usize> {
        self.get_state(mode).selected().map(|i| i.max(1)) // never let index go to 0 because we render an empty line as a the first element
    }

    /// Moves the selection in the current tab one row up (saturating at 0).
    pub fn scroll_previous(&mut self) {
        let state = self.get_state_mut(self.mode);
        let i = state.selected().map_or(0, |i| i.saturating_sub(1));
        state.select(Some(i));
    }

    /// Moves the selection in the current tab one row down; the render path
    /// clamps it to the list length.
    pub fn scroll_next(&mut self) {
        let state = self.get_state_mut(self.mode);
        let i = state.selected().map_or(0, |i| i.saturating_add(1));
        state.select(Some(i));
    }

    /// Remembers the current tab's selection so it can be restored when the
    /// user returns to this tab.
    pub fn save_state(&mut self) {
        if let Some(i) = self.get_state(self.mode).selected() {
            self.last_selection[self.mode as usize] = i
        }
    }

    /// Switches to the next tab, clearing the old tab's highlight and
    /// restoring the new tab's last selection.
    pub fn next_mode(&mut self) {
        self.save_state();
        let old_state = self.get_state_mut(self.mode);
        *old_state.selected_mut() = None;
        self.mode.next();
        let i = self.last_selection[self.mode as usize];
        let new_state = self.get_state_mut(self.mode);
        *new_state.selected_mut() = Some(i);
    }

    /// Switches to the previous tab, clearing the old tab's highlight and
    /// restoring the new tab's last selection.
    pub fn previous_mode(&mut self) {
        self.save_state();
        let old_state = self.get_state_mut(self.mode);
        *old_state.selected_mut() = None;
        self.mode.previous();
        let i = self.last_selection[self.mode as usize];
        let new_state = self.get_state_mut(self.mode);
        *new_state.selected_mut() = Some(i);
    }

    /// Spawns a background task that fetches the crates.io summary into
    /// `self.data`, toggling the shared loading flag around the request and
    /// notifying the event loop when done.
    pub fn request(&self) -> Result<()> {
        let tx = self.tx.clone();
        let loading_status = self.loading_status.clone();
        let summary = self.data.clone();
        tokio::spawn(async move {
            loading_status.store(true, Ordering::SeqCst);
            if let Err(error_message) = crates_io_api_helper::request_summary(summary).await {
                let _ = tx.send(Action::ShowErrorPopup(error_message));
            }
            loading_status.store(false, Ordering::SeqCst);
            let _ = tx.send(Action::UpdateSummary);
            let _ = tx.send(Action::ScrollDown);
        });
        Ok(())
    }

    /// Copies the shared `data` slot into `summary_data` for rendering.
    pub fn update(&mut self) {
        if let Some(summary) = self.data.lock().unwrap().clone() {
            self.summary_data = Some(summary);
        } else {
            self.summary_data = None;
        }
    }
}
// List construction for each tab.
//
// All six builders call `self.summary_data.as_ref().unwrap()`: they must only
// be invoked when summary data is present, which the widget guarantees by
// returning early from `render` when `summary_data` is `None`.
// Each list is prefixed with one empty line, which is why selected indices
// start at 1 elsewhere in this file.
impl Summary {
    // Currently always borderless; the `_selected` hook is kept for future
    // per-selection styling.
    fn borders(&self, _selected: bool) -> Borders {
        Borders::NONE
    }

    /// List for the "New Crates" tab.
    fn new_crates(&self) -> List<'static> {
        let selected = self.mode.is_new_crates();
        let borders = self.borders(selected);
        let items = std::iter::once(Text::from(Line::raw("")))
            .chain(
                self.summary_data
                    .as_ref()
                    .unwrap()
                    .new_crates
                    .iter()
                    .map(|item| {
                        Text::from(vec![
                            Line::styled(item.name.clone(), config::get().color.base05),
                            Line::raw(""),
                        ])
                    }),
            )
            .collect_vec();
        list_builder(items, "New Crates", selected, borders)
    }

    /// List for the "Most Downloaded" tab.
    fn most_downloaded(&self) -> List<'static> {
        let selected = self.mode.is_most_downloaded();
        let borders = self.borders(selected);
        let items = std::iter::once(Text::from(Line::raw("")))
            .chain(
                self.summary_data
                    .as_ref()
                    .unwrap()
                    .most_downloaded
                    .iter()
                    .map(|item| {
                        Text::from(vec![
                            Line::styled(item.name.clone(), config::get().color.base05),
                            Line::raw(""),
                        ])
                    }),
            )
            .collect_vec();
        list_builder(items, "Most Downloaded", selected, borders)
    }

    /// List for the "Just Updated" tab; shows the crate name plus its
    /// current max version.
    fn just_updated(&self) -> List<'static> {
        let selected = self.mode.is_just_updated();
        let borders = self.borders(selected);
        let items = std::iter::once(Text::from(Line::raw("")))
            .chain(
                self.summary_data
                    .as_ref()
                    .unwrap()
                    .just_updated
                    .iter()
                    .map(|item| {
                        Text::from(vec![
                            Line::from(vec![
                                item.name.clone().fg(config::get().color.base05),
                                " ".into(),
                                Span::styled(
                                    format!("v{}", item.max_version),
                                    Style::default().fg(config::get().color.base05),
                                ),
                            ]),
                            Line::raw(""),
                        ])
                    }),
            )
            .collect_vec();
        list_builder(items, "Just Updated", selected, borders)
    }

    /// List for the "Most Recently Downloaded" tab.
    fn most_recently_downloaded(&self) -> List<'static> {
        let selected = self.mode.is_most_recently_downloaded();
        let borders = self.borders(selected);
        let items = std::iter::once(Text::from(Line::raw("")))
            .chain(
                self.summary_data
                    .as_ref()
                    .unwrap()
                    .most_recently_downloaded
                    .iter()
                    .map(|item| {
                        Text::from(vec![
                            Line::styled(item.name.clone(), config::get().color.base05),
                            Line::raw(""),
                        ])
                    }),
            )
            .collect_vec();
        list_builder(items, "Most Recently Downloaded", selected, borders)
    }

    /// List for the "Popular Keywords" tab.
    fn popular_keywords(&self) -> List<'static> {
        let selected = self.mode.is_popular_keywords();
        let borders = self.borders(selected);
        let items = std::iter::once(Text::from(Line::raw("")))
            .chain(
                self.summary_data
                    .as_ref()
                    .unwrap()
                    .popular_keywords
                    .iter()
                    .map(|item| {
                        Text::from(vec![
                            Line::styled(item.keyword.clone(), config::get().color.base05),
                            Line::raw(""),
                        ])
                    }),
            )
            .collect_vec();
        list_builder(items, "Popular Keywords", selected, borders)
    }

    /// List for the "Popular Categories" tab.
    fn popular_categories(&self) -> List<'static> {
        let selected = self.mode.is_popular_categories();
        let borders = self.borders(selected);
        let items = std::iter::once(Text::from(Line::raw("")))
            .chain(
                self.summary_data
                    .as_ref()
                    .unwrap()
                    .popular_categories
                    .iter()
                    .map(|item| {
                        Text::from(vec![
                            Line::styled(item.category.clone(), config::get().color.base05),
                            Line::raw(""),
                        ])
                    }),
            )
            .collect_vec();
        list_builder(items, "Popular Categories", selected, borders)
    }
}
/// Assembles a styled list widget shared by all summary tabs.
///
/// The title of the selected tab is rendered inverted (dark text on the
/// accent background); unselected titles use the accent foreground only.
fn list_builder<'a>(
    items: Vec<Text<'a>>,
    title: &'a str,
    selected: bool,
    borders: Borders,
) -> List<'a> {
    let title_style = match selected {
        true => Style::default()
            .fg(config::get().color.base00)
            .bg(config::get().color.base0a)
            .bold(),
        false => Style::default().fg(config::get().color.base0d).bold(),
    };
    // Pad the title with a space on each side so it doesn't touch the border.
    let title_line = Line::from(vec![" ".into(), title.into(), " ".into()]);
    let block = Block::default()
        .borders(borders)
        .title(title_line)
        .title_style(title_style)
        .title_alignment(Alignment::Left);
    List::new(items)
        .block(block)
        .highlight_symbol(HIGHLIGHT_SYMBOL)
        .highlight_style(config::get().color.base05)
        .highlight_spacing(HighlightSpacing::Always)
}
/// Stateless renderer for the summary screen; all state lives in [`Summary`].
pub struct SummaryWidget;
impl SummaryWidget {
    /// Renders one tab's list into `area`, first clamping that tab's
    /// selection to the list length so a stale index can't point past the
    /// end after a reload.
    fn render_list(
        &self,
        area: Rect,
        buf: &mut Buffer,
        list: List,
        mode: SummaryMode,
        state: &mut Summary,
    ) {
        *(state.get_state_mut(mode).selected_mut()) = state
            .selected(mode)
            .map(|i| i.min(list.len().saturating_sub(1)));
        StatefulWidget::render(list, area, buf, state.get_state_mut(mode));
    }
}
impl StatefulWidget for &SummaryWidget {
    type State = Summary;

    /// Lays the six summary lists out in a 2x3 grid and renders each one.
    /// Does nothing until the summary data has been fetched.
    fn render(self, area: Rect, buf: &mut Buffer, state: &mut Self::State) {
        // The list builders below unwrap `summary_data`, so bail out early
        // when there is nothing to show yet.
        if state.summary_data.is_none() {
            return;
        }
        use Constraint::*;
        // Center the grid: 90% of the height, 85% of the width.
        let [_, area] = Layout::vertical([Min(0), Percentage(90)]).areas(area);
        let [_, area, _] = Layout::horizontal([Min(0), Percentage(85), Min(0)]).areas(area);
        let [top, bottom] = Layout::vertical([Percentage(50), Percentage(50)])
            .spacing(1)
            .areas(area);
        // Top row: new crates, most downloaded, just updated.
        let [new_crates, most_downloaded, just_updated] =
            Layout::horizontal([Percentage(30), Percentage(30), Percentage(30)])
                .flex(Flex::Center)
                .spacing(2)
                .areas(top);
        let list = state.new_crates();
        self.render_list(new_crates, buf, list, SummaryMode::NewCrates, state);
        let list = state.most_downloaded();
        self.render_list(
            most_downloaded,
            buf,
            list,
            SummaryMode::MostDownloaded,
            state,
        );
        let list = state.just_updated();
        self.render_list(just_updated, buf, list, SummaryMode::JustUpdated, state);
        // Bottom row: most recently downloaded, popular keywords, popular
        // categories.
        let [
            most_recently_downloaded,
            popular_keywords,
            popular_categories,
        ] = Layout::horizontal([Percentage(30), Percentage(30), Percentage(30)])
            .flex(Flex::Center)
            .spacing(2)
            .areas(bottom);
        let list = state.most_recently_downloaded();
        self.render_list(
            most_recently_downloaded,
            buf,
            list,
            SummaryMode::MostRecentlyDownloaded,
            state,
        );
        let list = state.popular_categories();
        self.render_list(
            popular_categories,
            buf,
            list,
            SummaryMode::PopularCategories,
            state,
        );
        let list = state.popular_keywords();
        self.render_list(
            popular_keywords,
            buf,
            list,
            SummaryMode::PopularKeywords,
            state,
        );
    }
}
| rust | MIT | e1be774ae75fe9711fa13ba808c87e52db98d251 | 2026-01-04T20:19:28.808186Z | false |
ratatui/crates-tui | https://github.com/ratatui/crates-tui/blob/e1be774ae75fe9711fa13ba808c87e52db98d251/src/widgets/search_filter_prompt.rs | src/widgets/search_filter_prompt.rs | use ratatui::{layout::Constraint::*, layout::Position, prelude::*, widgets::*};
use crate::{app::Mode, config};
use super::search_page::SearchMode;
/// State for the search/filter prompt: where the terminal cursor should be
/// placed, if the prompt is focused.
#[derive(Default, Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct SearchFilterPrompt {
    /// Screen position for the cursor; `None` while the prompt is unfocused.
    cursor_position: Option<Position>,
}
impl SearchFilterPrompt {
    /// The cursor position computed during the last render, if any.
    pub fn cursor_position(&self) -> Option<Position> {
        self.cursor_position
    }
}
/// Renderer for the search/filter input line at the bottom of the screen.
pub struct SearchFilterPromptWidget<'a> {
    /// Overall app mode; controls the border color.
    mode: Mode,
    /// Current sort order, shown as a hint next to the input.
    sort: crates_io_api::Sort,
    /// The text input being edited.
    input: &'a tui_input::Input,
    /// Vertical inset applied when the prompt is focused (bordered).
    vertical_margin: u16,
    /// Horizontal inset applied when the prompt is focused (bordered).
    horizontal_margin: u16,
    /// Whether the prompt is in search/filter (focused) or results mode.
    search_mode: SearchMode,
}
impl<'a> SearchFilterPromptWidget<'a> {
    /// Creates the prompt widget with the default margins (2, 2).
    pub fn new(
        mode: Mode,
        sort: crates_io_api::Sort,
        input: &'a tui_input::Input,
        search_mode: SearchMode,
    ) -> Self {
        Self {
            mode,
            sort,
            input,
            vertical_margin: 2,
            horizontal_margin: 2,
            search_mode,
        }
    }
}
impl StatefulWidget for SearchFilterPromptWidget<'_> {
    type State = SearchFilterPrompt;

    /// Renders the input (left 75%) with the sort hint on the right when
    /// focused, then records where the terminal cursor should go.
    fn render(self, area: Rect, buf: &mut Buffer, state: &mut Self::State) {
        let [input, meta] = Layout::horizontal([Percentage(75), Fill(0)]).areas(area);
        self.input_block().render(area, buf);
        if self.search_mode.is_focused() {
            self.sort_by_info().render(meta.inner(self.margin()), buf);
        }
        self.input_text(input.width as usize)
            .render(input.inner(self.margin()), buf);
        self.update_cursor_state(area, state);
    }
}
impl SearchFilterPromptWidget<'_> {
    /// Block drawn around the prompt: a full border while the prompt is
    /// focused, borderless otherwise, with the border color reflecting the
    /// current app mode.
    fn input_block(&self) -> Block<'_> {
        let borders = if self.search_mode.is_focused() {
            Borders::ALL
        } else {
            Borders::NONE
        };
        let border_color = match self.mode {
            Mode::Search => config::get().color.base0a,
            Mode::Filter => config::get().color.base0b,
            _ => config::get().color.base06,
        };
        Block::default()
            .borders(borders)
            .fg(config::get().color.base05)
            .border_style(border_color)
    }

    /// Right-aligned "Sort By: …" hint shown while the prompt is focused.
    fn sort_by_info(&self) -> impl Widget {
        Paragraph::new(Line::from(vec![
            "Sort By: ".into(),
            // `format!` borrows its arguments, so no clone is needed.
            format!("{:?}", self.sort).fg(config::get().color.base0d),
        ]))
        .right_aligned()
    }

    /// The input's text, horizontally scrolled so the cursor stays visible.
    /// When unfocused, the submitted query is shown with the sort order in
    /// parentheses; on the summary/help screens nothing is shown.
    fn input_text(&self, width: usize) -> impl Widget + '_ {
        let scroll = self.input.cursor().saturating_sub(width.saturating_sub(4));
        let text = if self.search_mode.is_focused() {
            Line::from(vec![self.input.value().into()])
        } else if self.mode.is_summary() || self.mode.is_help() {
            Line::from(vec![])
        } else {
            Line::from(vec![
                self.input.value().into(),
                " (".into(),
                format!("{:?}", self.sort).fg(config::get().color.base0d),
                ")".into(),
            ])
        };
        Paragraph::new(text).scroll((0, scroll as u16))
    }

    /// Stores the terminal cursor position in the state when the prompt is
    /// focused (capped to the input column width), clears it otherwise.
    fn update_cursor_state(&self, area: Rect, state: &mut SearchFilterPrompt) {
        let width = ((area.width as f64 * 0.75) as u16).saturating_sub(2);
        if self.search_mode.is_focused() {
            let margin = self.margin();
            state.cursor_position = Some(Position::new(
                (area.x + margin.horizontal + self.input.cursor() as u16).min(width),
                area.y + margin.vertical,
            ));
        } else {
            state.cursor_position = None
        }
    }

    /// Inner margin for the prompt contents: the configured margins while
    /// bordered (focused), zero otherwise.
    fn margin(&self) -> Margin {
        if self.search_mode.is_focused() {
            Margin::new(self.horizontal_margin, self.vertical_margin)
        } else {
            Margin::default()
        }
    }
}
| rust | MIT | e1be774ae75fe9711fa13ba808c87e52db98d251 | 2026-01-04T20:19:28.808186Z | false |
ratatui/crates-tui | https://github.com/ratatui/crates-tui/blob/e1be774ae75fe9711fa13ba808c87e52db98d251/src/widgets/popup_message.rs | src/widgets/popup_message.rs | use itertools::Itertools;
use ratatui::{layout::Flex, prelude::*, widgets::*};
/// Scroll state for the popup message; the render clamps the offset to the
/// wrapped line count.
#[derive(Debug, Default, Clone, Copy)]
pub struct PopupMessageState {
    // Zero-based index of the first visible line.
    scroll: usize,
}

impl PopupMessageState {
    /// Scrolls one line up, stopping at the top.
    pub fn scroll_up(&mut self) {
        self.scroll = self.scroll.checked_sub(1).unwrap_or(0);
    }

    /// Scrolls one line down (saturating at the maximum offset).
    pub fn scroll_down(&mut self) {
        self.scroll = self.scroll.checked_add(1).unwrap_or(usize::MAX);
    }

    /// Jumps back to the first line.
    pub fn scroll_top(&mut self) {
        self.scroll = 0;
    }
}
/// A centered popup showing a titled, scrollable message (e.g. an error).
#[derive(Debug, Clone)]
pub struct PopupMessageWidget {
    // Text shown in the popup's top border.
    title: String,
    // Body text; wrapped at render time.
    message: String,
}

impl PopupMessageWidget {
    /// Creates a popup with the given title and body text.
    pub fn new(title: String, message: String) -> Self {
        PopupMessageWidget { title, message }
    }
}
impl StatefulWidget for &PopupMessageWidget {
    type State = PopupMessageState;

    /// Renders the popup centered in `area`, sized to the wrapped message,
    /// clamping the scroll offset to the number of wrapped lines.
    fn render(self, area: Rect, buf: &mut Buffer, state: &mut Self::State) {
        // Center horizontally at half the available width.
        let [center] = Layout::horizontal([Constraint::Percentage(50)])
            .flex(Flex::Center)
            .areas(area);
        // Wrapped only to count lines for sizing/clamping; the Paragraph
        // below does its own wrapping when rendering.
        let message = textwrap::wrap(&self.message, center.width as usize)
            .iter()
            .map(|s| Line::from(s.to_string()))
            .collect_vec();
        let line_count = message.len();
        // +3 leaves room for the border and title rows.
        let [center] = Layout::vertical([Constraint::Length(line_count as u16 + 3)])
            .flex(Flex::Center)
            .areas(center);
        state.scroll = state.scroll.min(line_count.saturating_sub(1));
        let instruction = Line::from(vec!["Esc".bold(), " to close".into()]).right_aligned();
        let block = Block::bordered()
            .border_style(Color::DarkGray)
            .title(self.title.clone())
            .title_bottom(instruction);
        // Clear what's underneath so the popup doesn't blend with the page.
        Clear.render(center, buf);
        Paragraph::new(self.message.clone())
            .block(block)
            .wrap(Wrap { trim: false })
            .scroll((state.scroll as u16, 0))
            .render(center, buf);
    }
}
| rust | MIT | e1be774ae75fe9711fa13ba808c87e52db98d251 | 2026-01-04T20:19:28.808186Z | false |
ratatui/crates-tui | https://github.com/ratatui/crates-tui/blob/e1be774ae75fe9711fa13ba808c87e52db98d251/src/widgets/search_page.rs | src/widgets/search_page.rs | use color_eyre::Result;
use std::{
collections::HashMap,
sync::{
Arc, Mutex,
atomic::{AtomicBool, Ordering},
},
};
use strum::EnumIs;
use tracing::info;
use crossterm::event::{Event as CrosstermEvent, KeyEvent};
use itertools::Itertools;
use ratatui::prelude::*;
use ratatui::{layout::Position, widgets::StatefulWidget};
use tokio::{sync::mpsc::UnboundedSender, task::JoinHandle};
use tui_input::{Input, backend::crossterm::EventHandler};
use crate::{
action::Action,
app::Mode,
crates_io_api_helper,
widgets::{search_filter_prompt::SearchFilterPrompt, search_results::SearchResults},
};
use super::{
crate_info_table::{CrateInfo, CrateInfoTableWidget},
search_results::SearchResultsWidget,
};
/// Sub-mode of the search page: typing into a prompt, or browsing results
/// with the crate-info pane hidden or shown.
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq, Hash, EnumIs)]
pub enum SearchMode {
    /// Typing into the search prompt (default).
    #[default]
    Search,
    /// Typing into the filter prompt.
    Filter,
    /// Browsing results without the crate-info pane.
    ResultsHideCrate,
    /// Browsing results with the crate-info pane visible.
    ResultsShowCrate,
}
impl SearchMode {
    /// True while the user is typing into the search or filter prompt.
    pub fn is_focused(&self) -> bool {
        *self == SearchMode::Search || *self == SearchMode::Filter
    }

    /// Flips between showing and hiding the crate-info pane; does nothing
    /// while a prompt is focused.
    pub fn toggle_show_crate_info(&mut self) {
        if *self == SearchMode::ResultsShowCrate {
            *self = SearchMode::ResultsHideCrate;
        } else if *self == SearchMode::ResultsHideCrate {
            *self = SearchMode::ResultsShowCrate;
        }
    }

    /// True when the crate-info pane should be rendered.
    pub fn should_show_crate_info(&self) -> bool {
        *self == SearchMode::ResultsShowCrate
    }
}
/// State for the search page: the query, filter, results table, pagination,
/// and the async plumbing used to talk to crates.io.
#[derive(Debug)]
pub struct SearchPage {
    /// Overall app mode this page is rendered in.
    pub mode: Mode,
    /// Sub-mode: prompt focused vs. browsing results.
    pub search_mode: SearchMode,
    /// Table state for the crate-info pane.
    pub crate_info: CrateInfo,
    /// A string for the current search input by the user, submitted to
    /// crates.io as a query
    pub search: String,
    /// A string for the current filter input by the user, used only locally
    /// for filtering for the list of crates in the current view.
    pub filter: String,
    /// A table component designed to handle the listing and selection of crates
    /// within the terminal UI.
    pub results: SearchResults,
    /// An input handler component for managing raw user input into textual
    /// form.
    pub input: tui_input::Input,
    /// A prompt displaying the current search or filter query, if any, that the
    /// user can interact with.
    pub prompt: SearchFilterPrompt,
    /// The current page number being displayed or interacted with in the UI.
    pub page: u64,
    /// The number of crates displayed per page in the UI.
    pub page_size: u64,
    /// Sort preference for search results
    pub sort: crates_io_api::Sort,
    /// The total number of crates fetchable from crates.io, which may not be
    /// known initially and can be used for UI elements like pagination.
    pub total_num_crates: Option<u64>,
    /// A thread-safe, shared vector holding the list of crates fetched from
    /// crates.io, wrapped in a mutex to control concurrent access.
    pub crates: Arc<Mutex<Vec<crates_io_api::Crate>>>,
    /// A thread-safe, shared vector holding the list of version fetched from
    /// crates.io, wrapped in a mutex to control concurrent access.
    pub versions: Arc<Mutex<Vec<crates_io_api::Version>>>,
    /// A thread-safe shared container holding the detailed information about
    /// the currently selected crate; this can be `None` if no crate is
    /// selected.
    pub full_crate_info: Arc<Mutex<Option<crates_io_api::FullCrate>>>,
    /// A thread-safe shared container holding the detailed information about
    /// the currently selected crate; this can be `None` if no crate is
    /// selected.
    pub crate_response: Arc<Mutex<Option<crates_io_api::CrateResponse>>>,
    /// Join handles for in-flight detail requests, keyed by request id so
    /// they can be aborted when superseded.
    pub last_task_details_handle: HashMap<uuid::Uuid, JoinHandle<()>>,
    /// Sender end of an asynchronous channel for dispatching actions from
    /// various parts of the app to be handled by the event loop.
    tx: UnboundedSender<Action>,
    /// A thread-safe indicator of whether data is currently being loaded,
    /// allowing different parts of the app to know if it's in a loading state.
    loading_status: Arc<AtomicBool>,
}
impl SearchPage {
    /// Creates an empty search page wired to the app's action channel and
    /// shared loading flag. Page numbering starts at 1 with 25 results per
    /// page, sorted by relevance.
    pub fn new(tx: UnboundedSender<Action>, loading_status: Arc<AtomicBool>) -> Self {
        Self {
            mode: Default::default(),
            search_mode: Default::default(),
            search: String::new(),
            filter: String::new(),
            results: SearchResults::default(),
            input: Input::default(),
            prompt: SearchFilterPrompt::default(),
            page: 1,
            page_size: 25,
            sort: crates_io_api::Sort::Relevance,
            total_num_crates: None,
            crates: Default::default(),
            versions: Default::default(),
            full_crate_info: Default::default(),
            crate_info: Default::default(),
            crate_response: Default::default(),
            last_task_details_handle: Default::default(),
            tx,
            loading_status,
        }
    }

    /// Dispatches scroll-related actions to the results table; everything
    /// else is ignored here.
    pub fn handle_action(&mut self, action: Action) {
        match action {
            Action::ScrollTop => self.results.scroll_to_top(),
            Action::ScrollBottom => self.results.scroll_to_bottom(),
            Action::ScrollSearchResultsUp => self.scroll_up(),
            Action::ScrollSearchResultsDown => self.scroll_down(),
            _ => {}
        }
    }

    /// Re-filters the fetched crates into the results table: a crate is kept
    /// when every whitespace-separated filter word appears in its name or
    /// description (case-insensitive).
    pub fn update_search_table_results(&mut self) {
        self.results.content_length(self.results.crates.len());
        let filter = self.filter.clone();
        let filter_words = filter.split_whitespace().collect::<Vec<_>>();
        let crates: Vec<_> = self
            .crates
            .lock()
            .unwrap()
            .iter()
            .filter(|c| {
                filter_words.iter().all(|word| {
                    c.name.to_lowercase().contains(word)
                        || c.description
                            .clone()
                            .unwrap_or_default()
                            .to_lowercase()
                            .contains(word)
                })
            })
            .cloned()
            .collect_vec();
        self.results.crates = crates;
    }

    /// Moves the results selection one row up.
    pub fn scroll_up(&mut self) {
        self.results.scroll_previous();
    }

    /// Moves the results selection one row down.
    pub fn scroll_down(&mut self) {
        self.results.scroll_next();
    }

    /// Feeds a key event into the text input.
    pub fn handle_key(&mut self, key: KeyEvent) {
        self.input.handle_event(&CrosstermEvent::Key(key));
    }

    /// Applies the input's current value as the local filter and clears the
    /// table selection.
    pub fn handle_filter_prompt_change(&mut self) {
        self.filter = self.input.value().into();
        self.results.select(None);
    }

    /// Terminal cursor position for the prompt, if it is focused.
    pub fn cursor_position(&self) -> Option<Position> {
        self.prompt.cursor_position()
    }

    /// Advances to the next results page (when one exists) and reloads.
    //
    // NOTE(review): `(n / page_size) + 1` counts one extra page whenever `n`
    // is an exact multiple of `page_size` (e.g. 50 results at 25 per page
    // gives 3 pages); `n.div_ceil(page_size)` may be what's intended —
    // confirm. The same formula is used in `page_number_status`.
    pub fn increment_page(&mut self) {
        if let Some(n) = self.total_num_crates {
            let max_page_size = (n / self.page_size) + 1;
            if self.page < max_page_size {
                self.page = self.page.saturating_add(1).min(max_page_size);
                self.reload_data();
            }
        }
    }

    /// Goes back to the previous results page (never below 1) and reloads.
    pub fn decrement_page(&mut self) {
        let min_page_size = 1;
        if self.page > min_page_size {
            self.page = self.page.saturating_sub(1).max(min_page_size);
            self.reload_data();
        }
    }

    /// Aborts and forgets the in-flight detail request with the given id.
    /// Always returns `Ok(())`; the `Result` is kept for the caller's `?`.
    pub fn clear_task_details_handle(&mut self, id: uuid::Uuid) -> Result<()> {
        if let Some((_, handle)) = self.last_task_details_handle.remove_entry(&id) {
            handle.abort()
        }
        Ok(())
    }

    /// True while the user is typing into the search or filter prompt.
    pub fn is_prompt(&self) -> bool {
        self.search_mode.is_focused()
    }

    /// Clears the shared crate-details slot and aborts every in-flight
    /// detail request.
    pub fn clear_all_previous_task_details_handles(&mut self) {
        *self.full_crate_info.lock().unwrap() = None;
        for (_, v) in self.last_task_details_handle.iter() {
            v.abort()
        }
        self.last_task_details_handle.clear()
    }

    /// Submits the prompt's contents as a new search query, resetting the
    /// local filter and switching back to the results view.
    pub fn submit_query(&mut self) {
        self.clear_all_previous_task_details_handles();
        self.filter.clear();
        self.search = self.input.value().into();
        let _ = self.tx.send(Action::SwitchMode(Mode::PickerHideCrateInfo));
    }

    /// Reloads the list of crates based on the current search parameters,
    /// updating the application state accordingly. This involves fetching
    /// data asynchronously from the crates.io API and updating various parts of
    /// the application state, such as the crates listing, current crate
    /// info, and loading status.
    pub fn reload_data(&mut self) {
        self.prepare_reload();
        let search_params = self.create_search_parameters();
        self.request_search_results(search_params);
    }

    /// Clears current search results and resets the UI to prepare for new data.
    pub fn prepare_reload(&mut self) {
        self.results.select(None);
        *self.full_crate_info.lock().unwrap() = None;
        *self.crate_response.lock().unwrap() = None;
    }

    /// Creates the parameters required for the search task.
    pub fn create_search_parameters(&self) -> crates_io_api_helper::SearchParameters {
        crates_io_api_helper::SearchParameters {
            search: self.search.clone(),
            page: self.page.clamp(1, u64::MAX),
            page_size: self.page_size,
            crates: self.crates.clone(),
            versions: self.versions.clone(),
            loading_status: self.loading_status.clone(),
            sort: self.sort.clone(),
            tx: self.tx.clone(),
        }
    }

    /// Spawns an asynchronous task to fetch crate data from crates.io.
    /// Errors surface as an error popup; the table is refreshed either way.
    pub fn request_search_results(&self, params: crates_io_api_helper::SearchParameters) {
        tokio::spawn(async move {
            params.loading_status.store(true, Ordering::SeqCst);
            if let Err(error_message) = crates_io_api_helper::request_search_results(&params).await
            {
                let _ = params.tx.send(Action::ShowErrorPopup(error_message));
            }
            let _ = params.tx.send(Action::UpdateSearchTableResults);
            params.loading_status.store(false, Ordering::SeqCst);
        });
    }

    /// Spawns an asynchronous task to fetch crate details from crates.io based
    /// on currently selected crate. The task is tracked by a fresh UUID so it
    /// can be aborted if superseded.
    pub fn request_crate_details(&mut self) {
        if self.results.crates.is_empty() {
            return;
        }
        if let Some(crate_name) = self.results.selected_crate_name() {
            let tx = self.tx.clone();
            let crate_response = self.crate_response.clone();
            let loading_status = self.loading_status.clone();
            // Spawn the async work to fetch crate details.
            let uuid = uuid::Uuid::new_v4();
            let last_task_details_handle = tokio::spawn(async move {
                info!("Requesting details for {crate_name}: {uuid}");
                loading_status.store(true, Ordering::SeqCst);
                if let Err(error_message) =
                    crates_io_api_helper::request_crate_details(&crate_name, crate_response).await
                {
                    let _ = tx.send(Action::ShowErrorPopup(error_message));
                };
                loading_status.store(false, Ordering::SeqCst);
                info!("Retrieved details for {crate_name}: {uuid}");
                let _ = tx.send(Action::ClearTaskDetailsHandle(uuid.to_string()));
            });
            self.last_task_details_handle
                .insert(uuid, last_task_details_handle);
        }
    }

    /// Spawns an asynchronous task to fetch crate details from crates.io based
    /// on currently selected crate. Same tracking scheme as
    /// `request_crate_details`, but fills `full_crate_info`.
    pub fn request_full_crate_details(&mut self) {
        if self.results.crates.is_empty() {
            return;
        }
        if let Some(crate_name) = self.results.selected_crate_name() {
            let tx = self.tx.clone();
            let full_crate_info = self.full_crate_info.clone();
            let loading_status = self.loading_status.clone();
            // Spawn the async work to fetch crate details.
            let uuid = uuid::Uuid::new_v4();
            let last_task_details_handle = tokio::spawn(async move {
                info!("Requesting details for {crate_name}: {uuid}");
                loading_status.store(true, Ordering::SeqCst);
                if let Err(error_message) =
                    crates_io_api_helper::request_full_crate_details(&crate_name, full_crate_info)
                        .await
                {
                    let _ = tx.send(Action::ShowErrorPopup(error_message));
                };
                loading_status.store(false, Ordering::SeqCst);
                info!("Retrieved details for {crate_name}: {uuid}");
                let _ = tx.send(Action::ClearTaskDetailsHandle(uuid.to_string()));
            });
            self.last_task_details_handle
                .insert(uuid, last_task_details_handle);
        }
    }

    /// Status line like "12/345 Results" for the current selection.
    pub fn results_status(&self) -> String {
        let selected = self.selected_with_page_context();
        let ncrates = self.total_num_crates.unwrap_or_default();
        format!("{selected}/{ncrates} Results")
    }

    /// 1-based global position of the selected row across all pages; 0 when
    /// nothing is selected.
    pub fn selected_with_page_context(&self) -> u64 {
        self.results.selected().map_or(0, |n| {
            (self.page.saturating_sub(1) * self.page_size) + n as u64 + 1
        })
    }

    /// Status line like "Page: 2/14". See the NOTE on `increment_page` about
    /// the page-count formula.
    pub fn page_number_status(&self) -> String {
        let max_page_size = (self.total_num_crates.unwrap_or_default() / self.page_size) + 1;
        format!("Page: {}/{}", self.page, max_page_size)
    }

    /// Leaves the prompt and returns to browsing results, selecting the
    /// first row when nothing was selected.
    pub fn enter_normal_mode(&mut self) {
        self.search_mode = SearchMode::ResultsHideCrate;
        if !self.results.crates.is_empty() && self.results.selected().is_none() {
            self.results.select(Some(0))
        }
    }

    /// Focuses the filter prompt, pre-filling it with the current filter.
    pub fn enter_filter_insert_mode(&mut self) {
        self.search_mode = SearchMode::Filter;
        self.input = self.input.clone().with_value(self.filter.clone());
    }

    /// Focuses the search prompt, pre-filling it with the current query.
    pub fn enter_search_insert_mode(&mut self) {
        self.search_mode = SearchMode::Search;
        self.input = self.input.clone().with_value(self.search.clone());
    }

    /// Toggles the crate-info pane, fetching details when it opens and
    /// aborting any in-flight requests when it closes.
    pub fn toggle_show_crate_info(&mut self) {
        self.search_mode.toggle_show_crate_info();
        if self.search_mode.should_show_crate_info() {
            self.request_crate_details()
        } else {
            self.clear_all_previous_task_details_handles();
        }
    }

    // Cycles the sort order forward through the fixed cycle
    // Relevance -> Downloads -> RecentDownloads -> RecentUpdates ->
    // NewlyAdded -> Alphabetical -> Relevance.
    fn toggle_sort_by_forward(&mut self) {
        use crates_io_api::Sort as S;
        self.sort = match self.sort {
            S::Alphabetical => S::Relevance,
            S::Relevance => S::Downloads,
            S::Downloads => S::RecentDownloads,
            S::RecentDownloads => S::RecentUpdates,
            S::RecentUpdates => S::NewlyAdded,
            S::NewlyAdded => S::Alphabetical,
        };
    }

    // Cycles the sort order backward (exact inverse of the forward cycle).
    fn toggle_sort_by_backward(&mut self) {
        use crates_io_api::Sort as S;
        self.sort = match self.sort {
            S::Relevance => S::Alphabetical,
            S::Downloads => S::Relevance,
            S::RecentDownloads => S::Downloads,
            S::RecentUpdates => S::RecentDownloads,
            S::NewlyAdded => S::RecentUpdates,
            S::Alphabetical => S::NewlyAdded,
        };
    }

    /// Cycles the sort order in the given direction; when `reload` is set,
    /// asks the event loop to refetch results with the new order.
    pub fn toggle_sort_by(&mut self, reload: bool, forward: bool) -> Result<()> {
        if forward {
            self.toggle_sort_by_forward()
        } else {
            self.toggle_sort_by_backward()
        };
        if reload {
            self.tx.send(Action::ReloadData)?;
        }
        Ok(())
    }

    /// True while the results picker has focus.
    fn is_focused(&self) -> bool {
        self.mode.is_picker()
    }
}
/// Stateless renderer for the search page; all state lives in [`SearchPage`].
pub struct SearchPageWidget;
impl SearchPageWidget {
    /// Renders the crate-info table when a crate response has been fetched;
    /// does nothing otherwise.
    fn render_crate_info(&self, area: Rect, buf: &mut Buffer, state: &mut SearchPage) {
        if let Some(ci) = state.crate_response.lock().unwrap().clone() {
            CrateInfoTableWidget::new(ci).render(area, buf, &mut state.crate_info);
        }
    }
}
impl StatefulWidget for SearchPageWidget {
    type State = SearchPage;

    /// Renders the results table (with the crate-info pane below it when
    /// toggled on) plus the page-number and result-count status lines.
    fn render(
        self,
        area: ratatui::prelude::Rect,
        buf: &mut ratatui::prelude::Buffer,
        state: &mut Self::State,
    ) {
        // When the crate-info pane is shown, reserve up to 15 rows at the
        // bottom for it and shrink the results area accordingly.
        let area = if state.search_mode.is_results_show_crate() {
            let [area, info] =
                Layout::vertical([Constraint::Min(0), Constraint::Max(15)]).areas(area);
            self.render_crate_info(info, buf, state);
            area
        } else {
            area
        };
        SearchResultsWidget::new(!state.is_prompt() && state.is_focused()).render(
            area,
            buf,
            &mut state.results,
        );
        // Page number bottom-left, result count bottom-right, both inset
        // inside the results area.
        Line::from(state.page_number_status())
            .left_aligned()
            .render(
                area.inner(Margin {
                    horizontal: 1,
                    vertical: 2,
                }),
                buf,
            );
        Line::from(state.results_status()).right_aligned().render(
            area.inner(Margin {
                horizontal: 1,
                vertical: 2,
            }),
            buf,
        );
    }
}
| rust | MIT | e1be774ae75fe9711fa13ba808c87e52db98d251 | 2026-01-04T20:19:28.808186Z | false |
ratatui/crates-tui | https://github.com/ratatui/crates-tui/blob/e1be774ae75fe9711fa13ba808c87e52db98d251/src/widgets/crate_info_table.rs | src/widgets/crate_info_table.rs | use itertools::Itertools;
use ratatui::{prelude::*, widgets::*};
use crate::config;
/// Selection state for the crate-details table.
#[derive(Debug, Default)]
pub struct CrateInfo {
    // Row selection/scroll state of the details table.
    crate_info: TableState,
}

impl CrateInfo {
    /// Moves the selection one row up, saturating at the first row.
    pub fn scroll_previous(&mut self) {
        let next = match self.crate_info.selected() {
            Some(row) => row.saturating_sub(1),
            None => 0,
        };
        self.crate_info.select(Some(next));
    }

    /// Moves the selection one row down; the render clamps it to the last
    /// row of the table.
    pub fn scroll_next(&mut self) {
        let next = match self.crate_info.selected() {
            Some(row) => row.saturating_add(1),
            None => 0,
        };
        self.crate_info.select(Some(next));
    }
}
/// Renders a key/value table of details for a single crate.
pub struct CrateInfoTableWidget {
    /// The crates.io response to display.
    crate_info: crates_io_api::CrateResponse,
}
impl CrateInfoTableWidget {
    /// Wraps a crates.io response for rendering.
    pub fn new(crate_info: crates_io_api::CrateResponse) -> Self {
        Self { crate_info }
    }
}
impl StatefulWidget for CrateInfoTableWidget {
    type State = CrateInfo;

    /// Builds and renders a two-column key/value table from the crate
    /// response. Fixed rows (name, timestamps, max version, keywords) come
    /// first; optional fields are appended only when present. The selection
    /// is clamped to the last row before rendering.
    fn render(self, area: Rect, buf: &mut Buffer, state: &mut Self::State) {
        let ci = self.crate_info.clone();
        let created_at = ci
            .crate_data
            .created_at
            .format("%Y-%m-%d %H:%M:%S")
            .to_string();
        let updated_at = ci
            .crate_data
            .updated_at
            .format("%Y-%m-%d %H:%M:%S")
            .to_string();
        let mut rows = [
            ["Name", &ci.crate_data.name],
            ["Created At", &created_at],
            ["Updated At", &updated_at],
            ["Max Version", &ci.crate_data.max_version],
        ]
        .iter()
        .map(|row| {
            let cells = row.iter().map(|cell| Cell::from(*cell));
            Row::new(cells)
        })
        .collect_vec();
        // NOTE(review): mapping each keyword to a `Line` before `join`ing
        // relies on `Line`'s `Display`; joining the raw strings directly
        // would be simpler — confirm the output is identical.
        let keywords = self
            .crate_info
            .keywords
            .iter()
            .map(|k| k.keyword.clone())
            .map(Line::from)
            .join(", ");
        // Wrap to 75% of the width (the value column's approximate share)
        // and size the row to the wrapped line count.
        let keywords = textwrap::wrap(&keywords, (area.width as f64 * 0.75) as usize)
            .iter()
            .map(|s| Line::from(s.to_string()))
            .collect_vec();
        let height = keywords.len();
        rows.push(
            Row::new(vec![
                Cell::from("Keywords"),
                Cell::from(Text::from(keywords)),
            ])
            .height(height as u16),
        );
        if let Some(description) = self.crate_info.crate_data.description {
            // assume description is wrapped in 75%
            let desc = textwrap::wrap(&description, (area.width as f64 * 0.75) as usize)
                .iter()
                .map(|s| Line::from(s.to_string()))
                .collect_vec();
            let height = desc.len();
            rows.push(
                Row::new(vec![
                    Cell::from("Description"),
                    Cell::from(Text::from(desc)),
                ])
                .height(height as u16),
            );
        }
        if let Some(homepage) = self.crate_info.crate_data.homepage {
            rows.push(Row::new(vec![Cell::from("Homepage"), Cell::from(homepage)]));
        }
        if let Some(repository) = self.crate_info.crate_data.repository {
            rows.push(Row::new(vec![
                Cell::from("Repository"),
                Cell::from(repository),
            ]));
        }
        if let Some(recent_downloads) = self.crate_info.crate_data.recent_downloads {
            rows.push(Row::new(vec![
                Cell::from("Recent Downloads"),
                Cell::from(recent_downloads.to_string()),
            ]));
        }
        if let Some(max_stable_version) = self.crate_info.crate_data.max_stable_version {
            rows.push(Row::new(vec![
                Cell::from("Max Stable Version"),
                Cell::from(max_stable_version),
            ]));
        }
        let selected_max = rows.len().saturating_sub(1);
        let widths = [Constraint::Fill(1), Constraint::Fill(4)];
        let table_widget = Table::new(rows, widths)
            .style(
                Style::default()
                    .fg(config::get().color.base05)
                    .bg(config::get().color.base00),
            )
            .block(Block::default().borders(Borders::ALL))
            .highlight_symbol("\u{2022} ")
            .row_highlight_style(config::get().color.base05)
            .highlight_spacing(HighlightSpacing::Always);
        // Clamp (or initialize) the selection so it never points past the
        // last row after the row count changes.
        if let Some(i) = state.crate_info.selected() {
            state.crate_info.select(Some(i.min(selected_max)));
        } else {
            state.crate_info.select(Some(0));
        }
        StatefulWidget::render(table_widget, area, buf, &mut state.crate_info);
    }
}
| rust | MIT | e1be774ae75fe9711fa13ba808c87e52db98d251 | 2026-01-04T20:19:28.808186Z | false |
catppuccin/rust | https://github.com/catppuccin/rust/blob/e4f47b6a524d8d9d3a262e3f3534955a01d77e93/build.rs | build.rs | //! Build script for the Catppuccin crate.
//! This script uses the palette JSON file from the catppuccin/palette github repo
//! in order to populate the `FlavorColors` struct as well as implement the various
//! iteration & indexing primitives offered by the crate.
use std::{
collections::HashMap,
env,
error::Error,
fs::File,
io::{BufReader, BufWriter, Write},
path::PathBuf,
};
use itertools::Itertools;
use proc_macro2::TokenStream;
use quote::{format_ident, quote};
use serde::Deserialize;
/// Mirror of the top level of `src/palette.json`: a version string plus one
/// entry per flavor, keyed by flavor identifier ("latte", "frappe", ...).
#[derive(Debug, Deserialize)]
struct Palette {
    #[allow(dead_code)]
    version: String,
    // Flavor names are JSON object keys, so they are captured via `flatten`
    // into a map rather than named fields.
    #[serde(flatten)]
    flavors: HashMap<String, Flavor>,
}
/// One flavor's worth of palette data.
#[derive(Debug, Deserialize)]
struct Flavor {
    emoji: char,
    order: u32,
    dark: bool,
    colors: HashMap<String, Color>,
    #[serde(rename = "ansiColors")]
    ansi_colors: HashMap<String, AnsiColorPair>,
}
/// A single named color with RGB and HSL representations.
#[derive(Debug, Deserialize)]
struct Color {
    name: String,
    order: u32,
    rgb: Rgb,
    hsl: Hsl,
    accent: bool,
}
/// Red/green/blue channels, 0-255 each.
#[derive(Debug, Deserialize)]
struct Rgb {
    r: u8,
    g: u8,
    b: u8,
}
/// Hue/saturation/lightness channels.
#[derive(Debug, Deserialize)]
struct Hsl {
    h: f64,
    s: f64,
    l: f64,
}
/// A normal/bright pair of ANSI colors sharing a base name.
#[derive(Debug, Deserialize)]
struct AnsiColorPair {
    name: String,
    order: u32,
    normal: AnsiColor,
    bright: AnsiColor,
}
/// One concrete ANSI color and its terminal code.
#[derive(Debug, Deserialize)]
struct AnsiColor {
    name: String,
    rgb: Rgb,
    hsl: Hsl,
    code: u8,
}
fn main() -> Result<(), Box<dyn Error>> {
let out_dir = PathBuf::from(&env::var("OUT_DIR")?);
let codegen_path = out_dir.join("generated_palette.rs");
let mut code_writer = BufWriter::new(File::create(codegen_path)?);
let palette: Palette =
serde_json::from_reader(BufReader::new(File::open("src/palette.json")?))?;
let sample_flavor = palette
.flavors
.values()
.next()
.expect("at least one flavor");
let flavor_tokens = [
// Colors
make_flavor_colors_struct_tokens(sample_flavor),
make_flavor_colors_all_impl_tokens(sample_flavor),
// ANSI Colors
make_flavor_ansi_colors_struct_tokens(sample_flavor),
make_flavor_ansi_colors_all_impl_tokens(sample_flavor),
// ANSI Color Pairs
make_flavor_ansi_color_pairs_struct_tokens(sample_flavor),
make_flavor_ansi_color_pairs_all_impl_tokens(sample_flavor),
];
let color_tokens = [
make_color_name_enum_tokens(sample_flavor),
make_color_name_index_impl_tokens(sample_flavor),
make_color_name_display_impl_tokens(sample_flavor),
make_color_name_identifier_impl_tokens(sample_flavor),
make_color_name_fromstr_impl_tokens(sample_flavor),
];
let ansi_color_tokens = [
make_ansi_color_name_enum_tokens(sample_flavor),
make_ansi_color_name_index_impl_tokens(sample_flavor),
make_ansi_color_name_display_impl_tokens(sample_flavor),
make_ansi_color_name_identifier_impl_tokens(sample_flavor),
make_ansi_color_name_fromstr_impl_tokens(sample_flavor),
];
let ansi_color_pair_tokens = [
make_ansi_color_pair_name_enum_tokens(sample_flavor),
make_ansi_color_pair_name_index_impl_tokens(sample_flavor),
make_ansi_color_pair_name_display_impl_tokens(sample_flavor),
make_ansi_color_pair_name_identifier_impl_tokens(sample_flavor),
make_ansi_color_pair_name_fromstr_impl_tokens(sample_flavor),
];
let palette_tokens = [make_palette_const_tokens(&palette)];
let ast = syn::parse2(
[
&flavor_tokens[..],
&color_tokens[..],
&ansi_color_tokens[..],
&ansi_color_pair_tokens[..],
&palette_tokens[..],
]
.concat()
.into_iter()
.collect(),
)?;
let code = prettyplease::unparse(&ast);
write!(&mut code_writer, "{code}")?;
Ok(())
}
/// Render an HTML `<img>` tag pointing at the palette-circle asset named
/// `filename` (without extension), used in generated rustdoc comments.
///
/// The asset URL is pinned to a specific catppuccin/catppuccin revision so
/// the docs stay stable even if upstream assets move.
fn palette_circle(filename: &str) -> String {
    // Bug fix: `filename` was never interpolated into the URL, so every
    // doc image pointed at the same (nonexistent) asset.
    format!(
        r#"<img width="23" height="23" src="https://raw.githubusercontent.com/catppuccin/catppuccin/95aae3360eb88fc1b6a89398be08ec6deae0bc9a/assets/palette/circles/{filename}.png">"#
    )
}
/// Concatenate the preview circles for `color_key` across all four flavors.
fn color_palette_circles(color_key: &str) -> String {
    let flavors = ["latte", "frappe", "macchiato", "mocha"];
    flavors
        .iter()
        .map(|flavor| palette_circle(&format!("{flavor}_{color_key}")))
        .collect()
}
/// Concatenate the ANSI preview circles for `color_key` across all four flavors.
fn ansi_color_palette_circles(color_key: &str) -> String {
    let flavors = ["latte", "frappe", "macchiato", "mocha"];
    flavors
        .iter()
        .map(|flavor| palette_circle(&format!("ansi/{flavor}_ansi_{color_key}")))
        .collect()
}
/// Upper-case the first character of `s`, leaving the rest unchanged.
/// Returns an empty string for empty input.
fn titlecase<S: AsRef<str>>(s: S) -> String {
    let mut chars = s.as_ref().chars();
    match chars.next() {
        None => String::new(),
        Some(first) => first.to_uppercase().to_string() + chars.as_str(),
    }
}
/// Drop every ASCII space from `s` (e.g. "Bright Black" -> "BrightBlack").
fn remove_whitespace(s: &str) -> String {
    s.chars().filter(|&c| c != ' ').collect()
}
/// Iterate the flavors sorted by their `order` field (canonical palette order).
fn flavors_in_order(palette: &Palette) -> std::vec::IntoIter<(&String, &Flavor)> {
    let mut flavors: Vec<_> = palette.flavors.iter().collect();
    // Stable sort, equivalent to itertools' `sorted_by` on `order`.
    flavors.sort_by_key(|(_, flavor)| flavor.order);
    flavors.into_iter()
}
/// Iterate a flavor's colors sorted by their `order` field.
fn colors_in_order(flavor: &Flavor) -> std::vec::IntoIter<(&String, &Color)> {
    let mut colors: Vec<_> = flavor.colors.iter().collect();
    // Stable sort, equivalent to itertools' `sorted_by` on `order`.
    colors.sort_by_key(|(_, color)| color.order);
    colors.into_iter()
}
/// Iterate a flavor's ANSI color pairs sorted by their `order` field, also
/// yielding snake_case identifiers derived from the normal and bright color
/// names (e.g. "Bright Black" -> "bright_black").
fn ansi_color_pairs_in_order(
    flavor: &Flavor,
) -> std::vec::IntoIter<(&String, &AnsiColorPair, String, String)> {
    flavor
        .ansi_colors
        .iter()
        .map(|(ident, pair)| {
            (
                ident,
                pair,
                pair.normal.name.to_lowercase().replace(' ', "_"),
                pair.bright.name.to_lowercase().replace(' ', "_"),
            )
        })
        .sorted_by(|(_, a, _, _), (_, b, _, _)| a.order.cmp(&b.order))
}
/// Flatten every pair into its normal and bright halves and iterate them
/// sorted by ANSI code (0..=15), keyed by a snake_case identifier derived
/// from the color name.
fn ansi_colors_in_order(flavor: &Flavor) -> std::vec::IntoIter<(String, &AnsiColor)> {
    let mut colors: Vec<(String, &AnsiColor)> = flavor
        .ansi_colors
        .values()
        .flat_map(|pair| [&pair.normal, &pair.bright])
        .map(|color| (color.name.to_lowercase().replace(' ', "_"), color))
        .collect();
    // Stable sort, equivalent to itertools' `sorted_by` on `code`.
    colors.sort_by_key(|(_, color)| color.code);
    colors.into_iter()
}
/// Generate the `FlavorColors` struct: one `Color` field per palette color,
/// each documented with preview circle images for all four flavors.
fn make_flavor_colors_struct_tokens(sample_flavor: &Flavor) -> TokenStream {
    let colors = colors_in_order(sample_flavor).map(|(k, _)| {
        let ident = format_ident!("{k}");
        let color_img = format!(" {}", color_palette_circles(k));
        quote! {
            #[doc = #color_img]
            pub #ident: Color
        }
    });
    quote! {
        /// All of the colors for a particular flavor of Catppuccin.
        /// Obtained via [`Flavor::colors`].
        #[derive(Clone, Copy, Debug, PartialEq)]
        #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
        pub struct FlavorColors {
            #(#colors),*
        }
    }
}
/// Generate the `FlavorAnsiColors` struct (one field per ANSI color, in code
/// order) together with the `AnsiColor` value type it contains.
fn make_flavor_ansi_colors_struct_tokens(sample_flavor: &Flavor) -> TokenStream {
    let colors = ansi_colors_in_order(sample_flavor).map(|(k, _)| {
        let ident = format_ident!("{k}");
        let color_img = format!(" {}", ansi_color_palette_circles(&k));
        quote! {
            #[doc = #color_img]
            pub #ident: AnsiColor
        }
    });
    quote! {
        /// All of the ANSI colors for a particular flavor of Catppuccin.
        /// Obtained via [`Flavor::ansi_colors`].
        #[derive(Clone, Copy, Debug, PartialEq)]
        #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
        pub struct FlavorAnsiColors {
            #(#colors),*
        }
        /// A single ANSI color.
        #[derive(Clone, Copy, Debug, PartialEq)]
        #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
        pub struct AnsiColor {
            /// The [`AnsiColorName`] for this color.
            pub name: AnsiColorName,
            /// The color represented as a six-digit hex string with a leading hash (#).
            pub hex: Hex,
            /// The color represented as individual red, green, and blue channels.
            pub rgb: Rgb,
            /// The color represented as individual hue, saturation, and lightness channels.
            pub hsl: Hsl,
            /// The color's ANSI code.
            pub code: u8,
        }
    }
}
/// Generate the `FlavorAnsiColorPairs` struct (one field per normal/bright
/// pair) together with the `AnsiColorPair` value type it contains.
fn make_flavor_ansi_color_pairs_struct_tokens(sample_flavor: &Flavor) -> TokenStream {
    let color_pairs = ansi_color_pairs_in_order(sample_flavor).map(|(k, ..)| {
        let ident = format_ident!("{k}");
        let doc = format!("The normal and bright {k} ANSI color pair.");
        quote! {
            #[doc = #doc]
            pub #ident: AnsiColorPair
        }
    });
    quote! {
        /// All of the ANSI color pairs for a particular flavor of Catppuccin.
        #[derive(Clone, Copy, Debug, PartialEq)]
        #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
        pub struct FlavorAnsiColorPairs {
            #(#color_pairs),*
        }
        /// A pair of ANSI colors - normal and bright.
        #[derive(Clone, Copy, Debug, PartialEq)]
        #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
        pub struct AnsiColorPair {
            /// The [`AnsiColorPairName`] for this color.
            pub name: AnsiColorPairName,
            /// Order of the ANSI color in the palette spec.
            pub order: u32,
            /// The normal color.
            pub normal: AnsiColor,
            /// The bright color.
            pub bright: AnsiColor,
        }
    }
}
/// Generate `FlavorColors::all_colors`, returning references to every color
/// in canonical palette order.
fn make_flavor_colors_all_impl_tokens(sample_flavor: &Flavor) -> TokenStream {
    // Derive the array length from the palette data instead of hard-coding 26,
    // so a palette update can't silently disagree with the generated signature.
    let count = sample_flavor.colors.len();
    let items = colors_in_order(sample_flavor).map(|(identifier, _)| {
        let ident = format_ident!("{identifier}");
        quote! { &self.#ident }
    });
    quote! {
        impl FlavorColors {
            /// Get an array of the colors in the flavor.
            #[must_use]
            pub const fn all_colors(&self) -> [&Color; #count] {
                [
                    #(#items),*
                ]
            }
        }
    }
}
/// Generate `FlavorAnsiColors::all_ansi_colors` plus the const conversion
/// from the 16 flat ANSI colors into 8 normal/bright `AnsiColorPair`s.
fn make_flavor_ansi_colors_all_impl_tokens(sample_flavor: &Flavor) -> TokenStream {
    let ansi_colors = ansi_colors_in_order(sample_flavor).map(|(identifier, _)| {
        let ident = format_ident!("{identifier}");
        quote! { &self.#ident }
    });
    // Each pair entry references the matching normal/bright fields by the
    // snake_case identifiers yielded by `ansi_color_pairs_in_order`.
    let ansi_color_pairs = ansi_color_pairs_in_order(sample_flavor).map(
        |(identifier, color_pair, normal_identifier, bright_identifier)| {
            make_ansi_color_pair_entry(
                identifier,
                color_pair,
                &normal_identifier,
                &bright_identifier,
            )
        },
    );
    quote! {
        impl FlavorAnsiColors {
            /// Get an array of the ANSI colors in the flavor.
            #[must_use]
            pub const fn all_ansi_colors(&self) -> [&AnsiColor; 16] {
                [
                    #(#ansi_colors),*
                ]
            }
            /// Convert the 16 ANSI colors to 8 ANSI color pairs.
            #[must_use]
            #[allow(clippy::too_many_lines, clippy::unreadable_literal)]
            pub const fn to_ansi_color_pairs(&self) -> FlavorAnsiColorPairs {
                FlavorAnsiColorPairs {
                    #(#ansi_color_pairs),*
                }
            }
        }
    }
}
/// Generate `FlavorAnsiColorPairs::all_ansi_color_pairs`.
fn make_flavor_ansi_color_pairs_all_impl_tokens(sample_flavor: &Flavor) -> TokenStream {
    let items = ansi_color_pairs_in_order(sample_flavor).map(|(identifier, ..)| {
        let ident = format_ident!("{identifier}");
        quote! { &self.#ident }
    });
    quote! {
        impl FlavorAnsiColorPairs {
            /// Get an array of the ANSI color pairs in the flavor.
            #[must_use]
            pub const fn all_ansi_color_pairs(&self) -> [&AnsiColorPair; 8] {
                [
                    #(#items),*
                ]
            }
        }
    }
}
/// Generate the `ColorName` enum, one UpperCamelCase variant per color.
fn make_color_name_enum_tokens(sample_flavor: &Flavor) -> TokenStream {
    let variants = colors_in_order(sample_flavor).map(|(name, _)| {
        let ident = format_ident!("{}", titlecase(name));
        let circles = format!(" {}", color_palette_circles(name));
        quote! {
            #[doc = #circles]
            #ident
        }
    });
    quote! {
        /// Enum of all named Catppuccin colors. Can be used to index into a [`FlavorColors`].
        #[derive(Copy, Clone, Eq, PartialEq, Debug)]
        #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
        pub enum ColorName {
            #(#variants),*
        }
    }
}
/// Generate the `AnsiColorName` enum, one variant per flat ANSI color
/// (whitespace stripped from the display name, e.g. `BrightBlack`).
fn make_ansi_color_name_enum_tokens(sample_flavor: &Flavor) -> TokenStream {
    let variants = ansi_colors_in_order(sample_flavor).map(|(identifier, color)| {
        let name = remove_whitespace(&color.name);
        let ident = format_ident!("{name}");
        let circles = format!(" {}", ansi_color_palette_circles(&identifier));
        quote! {
            #[doc = #circles]
            #ident
        }
    });
    quote! {
        /// Enum of all named ANSI colors. Can be used to index into a [`FlavorAnsiColors`]
        #[derive(Copy, Clone, Eq, PartialEq, Debug)]
        #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
        pub enum AnsiColorName {
            #(#variants),*
        }
    }
}
/// Generate the `AnsiColorPairName` enum, one variant per normal/bright pair.
fn make_ansi_color_pair_name_enum_tokens(sample_flavor: &Flavor) -> TokenStream {
    let variants = ansi_color_pairs_in_order(sample_flavor).map(|(name, ..)| {
        let ident = format_ident!("{}", titlecase(name));
        let circles = format!(" {}", ansi_color_palette_circles(name));
        quote! {
            #[doc = #circles]
            #ident
        }
    });
    quote! {
        /// Enum of all ANSI color pairs. Can be used to index into a [`FlavorAnsiColorPairs`].
        #[derive(Copy, Clone, Eq, PartialEq, Debug)]
        #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
        pub enum AnsiColorPairName {
            #(#variants),*
        }
    }
}
/// Generate `Index<ColorName> for FlavorColors` plus a const `get_color`
/// getter with identical match arms.
fn make_color_name_index_impl_tokens(sample_flavor: &Flavor) -> TokenStream {
    let first = colors_in_order(sample_flavor).map(|(identifier, _)| {
        let variant = format_ident!("{}", titlecase(identifier));
        let ident = format_ident!("{}", identifier);
        quote! {
            ColorName::#variant => &self.#ident
        }
    });
    // The same arms are emitted twice (Index impl and const getter); the lazy
    // map iterator is cloneable, so clone it rather than rebuilding.
    let second = first.clone();
    quote! {
        impl Index<ColorName> for FlavorColors {
            type Output = Color;
            fn index(&self, index: ColorName) -> &Self::Output {
                match index {
                    #(#first),*
                }
            }
        }
        impl FlavorColors {
            /// Get a color by name.
            ///
            /// This is equivalent to using the index operator, but can also be used in
            /// const contexts.
            #[must_use]
            pub const fn get_color(&self, name: ColorName) -> &Color {
                match name {
                    #(#second),*
                }
            }
        }
    }
}
/// Generate `Index<AnsiColorName> for FlavorAnsiColors` plus a const
/// `get_ansi_color` getter with identical match arms.
fn make_ansi_color_name_index_impl_tokens(sample_flavor: &Flavor) -> TokenStream {
    let first = ansi_colors_in_order(sample_flavor).map(|(identifier, color)| {
        let variant = format_ident!("{}", remove_whitespace(&color.name));
        let ident = format_ident!("{}", identifier);
        quote! {
            AnsiColorName::#variant => &self.#ident
        }
    });
    let second = first.clone();
    quote! {
        impl Index<AnsiColorName> for FlavorAnsiColors {
            type Output = AnsiColor;
            fn index(&self, index: AnsiColorName) -> &Self::Output {
                match index {
                    #(#first),*
                }
            }
        }
        impl FlavorAnsiColors {
            /// Get an ANSI color by name.
            ///
            /// This is equivalent to using the index operator, but can also be used in
            /// const contexts.
            #[must_use]
            pub const fn get_ansi_color(&self, name: AnsiColorName) -> &AnsiColor {
                match name {
                    #(#second),*
                }
            }
        }
    }
}
/// Generate `Index<AnsiColorPairName> for FlavorAnsiColorPairs` plus a const
/// `get_ansi_color_pair` getter with identical match arms.
fn make_ansi_color_pair_name_index_impl_tokens(sample_flavor: &Flavor) -> TokenStream {
    let first = ansi_color_pairs_in_order(sample_flavor).map(|(identifier, ..)| {
        let variant = format_ident!("{}", titlecase(identifier));
        let ident = format_ident!("{}", identifier);
        quote! {
            AnsiColorPairName::#variant => &self.#ident
        }
    });
    let second = first.clone();
    quote! {
        impl Index<AnsiColorPairName> for FlavorAnsiColorPairs {
            type Output = AnsiColorPair;
            fn index(&self, index: AnsiColorPairName) -> &Self::Output {
                match index {
                    #(#first),*
                }
            }
        }
        impl FlavorAnsiColorPairs {
            /// Get an ANSI color pair by name.
            ///
            /// This is equivalent to using the index operator, but can also be used in
            /// const contexts.
            #[must_use]
            pub const fn get_ansi_color_pair(&self, name: AnsiColorPairName) -> &AnsiColorPair {
                match name {
                    #(#second),*
                }
            }
        }
    }
}
/// Generate `Display for ColorName`, writing the human-readable name
/// from the palette JSON (e.g. "Surface 0").
fn make_color_name_display_impl_tokens(sample_flavor: &Flavor) -> TokenStream {
    let match_arms = colors_in_order(sample_flavor).map(|(identifier, color)| {
        let variant = format_ident!("{}", titlecase(identifier));
        let name = &color.name;
        quote! {
            Self::#variant => write!(f, #name)
        }
    });
    quote! {
        impl core::fmt::Display for ColorName {
            fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
                match self {
                    #(#match_arms),*
                }
            }
        }
    }
}
/// Generate `Display for AnsiColorName`, writing the human-readable name
/// (e.g. "Bright Black").
fn make_ansi_color_name_display_impl_tokens(sample_flavor: &Flavor) -> TokenStream {
    let match_arms = ansi_colors_in_order(sample_flavor).map(|(_, color)| {
        let name = &color.name;
        let variant = format_ident!("{}", remove_whitespace(name));
        quote! {
            Self::#variant => write!(f, #name)
        }
    });
    quote! {
        impl core::fmt::Display for AnsiColorName {
            fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
                match self {
                    #(#match_arms),*
                }
            }
        }
    }
}
/// Generate `Display for AnsiColorPairName`, writing the titlecased pair name.
fn make_ansi_color_pair_name_display_impl_tokens(sample_flavor: &Flavor) -> TokenStream {
    let match_arms = ansi_color_pairs_in_order(sample_flavor).map(|(identifier, ..)| {
        let name = titlecase(identifier);
        let variant = format_ident!("{name}");
        quote! {
            Self::#variant => write!(f, #name)
        }
    });
    quote! {
        impl core::fmt::Display for AnsiColorPairName {
            fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
                match self {
                    #(#match_arms),*
                }
            }
        }
    }
}
/// Generate `ColorName::identifier`, mapping each variant back to its
/// lowercase machine-readable key (the inverse of `FromStr`).
fn make_color_name_identifier_impl_tokens(sample_flavor: &Flavor) -> TokenStream {
    let match_arms = colors_in_order(sample_flavor).map(|(identifier, _)| {
        let variant = format_ident!("{}", titlecase(identifier));
        quote! {
            Self::#variant => #identifier
        }
    });
    quote! {
        impl ColorName {
            /// Get the color's identifier; the lowercase key used to identify the color.
            /// This differs from `to_string` in that it's intended for machine usage
            /// rather than presentation.
            ///
            /// Example:
            ///
            /// ```rust
            /// let surface0 = catppuccin::PALETTE.latte.colors.surface0;
            /// assert_eq!(surface0.name.to_string(), "Surface 0");
            /// assert_eq!(surface0.name.identifier(), "surface0");
            /// ```
            #[must_use]
            pub const fn identifier(&self) -> &'static str {
                match self {
                    #(#match_arms),*
                }
            }
        }
    }
}
/// Generate `AnsiColorName::identifier` (e.g. `BrightBlack` -> "bright_black").
fn make_ansi_color_name_identifier_impl_tokens(sample_flavor: &Flavor) -> TokenStream {
    let match_arms = ansi_colors_in_order(sample_flavor).map(|(identifier, color)| {
        let variant = format_ident!("{}", remove_whitespace(&color.name));
        quote! {
            Self::#variant => #identifier
        }
    });
    quote! {
        impl AnsiColorName {
            /// Get the ANSI color's identifier; the lowercase key used to identify the color.
            /// This differs from `to_string` in that it's intended for machine usage
            /// rather than presentation.
            ///
            /// Example:
            ///
            /// ```rust
            /// let bright_black = catppuccin::PALETTE.latte.ansi_colors.bright_black;
            /// assert_eq!(bright_black.name.to_string(), "Bright Black");
            /// assert_eq!(bright_black.name.identifier(), "bright_black");
            /// ```
            #[must_use]
            pub const fn identifier(&self) -> &'static str {
                match self {
                    #(#match_arms),*
                }
            }
        }
    }
}
/// Generate `AnsiColorPairName::identifier` (e.g. `Black` -> "black").
fn make_ansi_color_pair_name_identifier_impl_tokens(sample_flavor: &Flavor) -> TokenStream {
    let match_arms = ansi_color_pairs_in_order(sample_flavor).map(|(identifier, ..)| {
        let variant = format_ident!("{}", titlecase(identifier));
        quote! {
            Self::#variant => #identifier
        }
    });
    quote! {
        impl AnsiColorPairName {
            /// Get the ANSI color pair's identifier; the lowercase key used to identify the color.
            /// This differs from `to_string` in that it's intended for machine usage
            /// rather than presentation.
            ///
            /// Example:
            ///
            /// ```rust
            /// let black_ansi_pair = catppuccin::PALETTE.latte.ansi_colors.all_pairs().black;
            /// assert_eq!(black_ansi_pair.name.to_string(), "Black");
            /// assert_eq!(black_ansi_pair.name.identifier(), "black");
            /// assert_eq!(black_ansi_pair.normal.name.to_string(), "Black");
            /// assert_eq!(black_ansi_pair.normal.name.identifier(), "black");
            /// assert_eq!(black_ansi_pair.bright.name.to_string(), "Bright Black");
            /// assert_eq!(black_ansi_pair.bright.name.identifier(), "bright_black");
            /// ```
            #[must_use]
            pub const fn identifier(&self) -> &'static str {
                match self {
                    #(#match_arms),*
                }
            }
        }
    }
}
/// Generate `FromStr for ColorName`, parsing the lowercase identifier
/// (the inverse of `identifier()`).
fn make_color_name_fromstr_impl_tokens(sample_flavor: &Flavor) -> TokenStream {
    let match_arms = colors_in_order(sample_flavor)
        .map(|(identifier, _)| {
            let variant = format_ident!("{}", titlecase(identifier));
            quote! {
                #identifier => Ok(Self::#variant)
            }
        })
        .collect::<Vec<_>>();
    quote! {
        impl core::str::FromStr for ColorName {
            type Err = ParseColorNameError;
            fn from_str(s: &str) -> Result<Self, Self::Err> {
                match s {
                    #(#match_arms),*,
                    _ => Err(ParseColorNameError),
                }
            }
        }
    }
}
/// Generate `FromStr for AnsiColorName`, parsing the snake_case identifier.
fn make_ansi_color_name_fromstr_impl_tokens(sample_flavor: &Flavor) -> TokenStream {
    let match_arms = ansi_colors_in_order(sample_flavor)
        .map(|(identifier, color)| {
            let variant = format_ident!("{}", remove_whitespace(&color.name));
            quote! {
                #identifier => Ok(Self::#variant)
            }
        })
        .collect::<Vec<_>>();
    quote! {
        impl core::str::FromStr for AnsiColorName {
            type Err = ParseColorNameError;
            fn from_str(s: &str) -> Result<Self, Self::Err> {
                match s {
                    #(#match_arms),*,
                    _ => Err(ParseColorNameError),
                }
            }
        }
    }
}
/// Generate `FromStr for AnsiColorPairName`, parsing the lowercase pair key.
fn make_ansi_color_pair_name_fromstr_impl_tokens(sample_flavor: &Flavor) -> TokenStream {
    let match_arms = ansi_color_pairs_in_order(sample_flavor)
        .map(|(identifier, ..)| {
            let variant = format_ident!("{}", titlecase(identifier));
            quote! {
                #identifier => Ok(Self::#variant)
            }
        })
        .collect::<Vec<_>>();
    quote! {
        impl core::str::FromStr for AnsiColorPairName {
            type Err = ParseColorNameError;
            fn from_str(s: &str) -> Result<Self, Self::Err> {
                match s {
                    #(#match_arms),*,
                    _ => Err(ParseColorNameError),
                }
            }
        }
    }
}
/// Generate the `PALETTE` constant with every flavor fully populated,
/// in canonical flavor order.
fn make_palette_const_tokens(palette: &Palette) -> TokenStream {
    let flavors =
        flavors_in_order(palette).map(|(identifier, flavor)| make_flavor_entry(identifier, flavor));
    let tokens = quote! {
        /// The Catppuccin palette. This constant will generally be your entrypoint
        /// into using the crate.
        #[allow(clippy::unreadable_literal)]
        pub const PALETTE: Palette = Palette {
            #(#flavors),*
        };
    };
    tokens
}
/// Generate one `identifier: Flavor { ... }` field initializer for the
/// `PALETTE` constant, including all colors and flat ANSI colors.
fn make_flavor_entry(identifier: &str, flavor: &Flavor) -> TokenStream {
    let Flavor {
        emoji, order, dark, ..
    } = flavor;
    let colors =
        colors_in_order(flavor).map(|(identifier, color)| make_color_entry(identifier, color));
    let ansi_colors = ansi_colors_in_order(flavor)
        .map(|(identifier, ansi_color_pair)| make_ansi_color_entry(&identifier, ansi_color_pair));
    let flavorname_variant = format_ident!("{}", titlecase(identifier));
    let ident = format_ident!("{}", identifier);
    quote! {
        #ident: Flavor {
            name: FlavorName::#flavorname_variant,
            emoji: #emoji,
            order: #order,
            dark: #dark,
            colors: FlavorColors {
                #(#colors),*
            },
            ansi_colors: FlavorAnsiColors {
                #(#ansi_colors),*
            }
        }
    }
}
/// Generate one `identifier: Color { ... }` field initializer.
fn make_color_entry(identifier: &str, color: &Color) -> TokenStream {
    let ident = format_ident!("{}", identifier);
    let colorname_variant = format_ident!("{}", titlecase(identifier));
    let Color {
        order,
        accent,
        rgb: Rgb { r, g, b },
        hsl: Hsl { h, s, l },
        ..
    } = color;
    // `Hex` is a newtype over `Rgb`, so the same RGB tokens feed both fields.
    let rgb = quote! { Rgb { r: #r, g: #g, b: #b } };
    let hsl = quote! { Hsl { h: #h, s: #s, l: #l } };
    quote! {
        #ident: Color {
            name: ColorName::#colorname_variant,
            order: #order,
            accent: #accent,
            hex: Hex(#rgb),
            rgb: #rgb,
            hsl: #hsl,
        }
    }
}
/// Generate one `identifier: AnsiColor { ... }` field initializer.
fn make_ansi_color_entry(identifier: &str, ansi_color: &AnsiColor) -> TokenStream {
    let ident = format_ident!("{identifier}")
;
    let AnsiColor {
        name,
        code,
        rgb: Rgb { r, g, b },
        hsl: Hsl { h, s, l },
    } = ansi_color;
    let name_variant = format_ident!("{}", remove_whitespace(name));
    let rgb = quote! { Rgb { r: #r, g: #g, b: #b } };
    let hsl = quote! { Hsl { h: #h, s: #s, l: #l } };
    quote! {
        #ident: AnsiColor {
            name: AnsiColorName::#name_variant,
            hex: Hex(#rgb),
            rgb: #rgb,
            hsl: #hsl,
            code: #code,
        }
    }
}
/// Generate one `identifier: AnsiColorPair { ... }` field initializer that
/// references the flat `self.<normal>` / `self.<bright>` ANSI color fields.
fn make_ansi_color_pair_entry(
    identifier: &str,
    ansi_color_pair: &AnsiColorPair,
    normal_identifier: &str,
    bright_identifier: &str,
) -> TokenStream {
    let identifier = format_ident!("{}", identifier);
    // NOTE(review): uses `name` from palette.json directly as the enum
    // variant (no titlecasing, unlike the enum generator) — assumes the JSON
    // `name` is already in variant case; confirm against palette.json.
    let name_identifier = format_ident!("{}", ansi_color_pair.name);
    let order = ansi_color_pair.order;
    let normal_identifier = format_ident!("{}", normal_identifier);
    let bright_identifier = format_ident!("{}", bright_identifier);
    quote! {
        #identifier: AnsiColorPair {
            name: AnsiColorPairName::#name_identifier,
            order: #order,
            normal: self.#normal_identifier,
            bright: self.#bright_identifier,
        }
    }
}
| rust | MIT | e4f47b6a524d8d9d3a262e3f3534955a01d77e93 | 2026-01-04T20:19:15.369728Z | false |
catppuccin/rust | https://github.com/catppuccin/rust/blob/e4f47b6a524d8d9d3a262e3f3534955a01d77e93/src/lib.rs | src/lib.rs | //! 🦀 Soothing pastel theme for Rust.
//!
//! # Usage
//!
//! Add Catppuccin to your project's `Cargo.toml`:
//!
//! ```console
//! $ cargo add catppuccin
//! ```
//!
//! # Example
//!
//! ```rust
//! struct Button {
//! text: String,
//! background_color: String,
//! };
//!
//! fn confirm(text: String) -> Button {
//! Button {
//! text,
//! background_color: catppuccin::PALETTE.mocha.colors.green.hex.to_string(),
//! }
//! }
//! ```
//!
//! More examples can be found
//! [here](https://github.com/catppuccin/rust/tree/main/examples).
//!
//! # Optional Features
//!
//! ## ANSI string painting
//!
//! Enable the `ansi-term` feature to add the
//! [`Color::ansi_paint`](Color::ansi_paint) method.
//! This adds [ansi-term](https://crates.io/crates/ansi_term) as a dependency.
//!
//! Example: [`examples/term_grid.rs`](https://github.com/catppuccin/rust/blob/main/examples/term_grid.rs)
//!
//! ### Bevy
//!
//! Enable the `bevy` feature to enable the conversion of Catppuccin colors to
//! [`bevy::prelude::Color`] instances.
//! This adds [bevy](https://crates.io/crates/bevy) as a dependency.
//!
//! Example: [`examples/bevy.rs`](https://github.com/catppuccin/rust/blob/main/examples/bevy.rs)
//!
//! ### CSS colors
//!
//! Enable the `css-colors` feature to enable the conversion of Catppuccin colors to
//! [`css_colors::RGB`] instances.
//! This adds [css-colors](https://crates.io/crates/css-colors) as a dependency.
//!
//! Example: [`examples/css.rs`](https://github.com/catppuccin/rust/blob/main/examples/css.rs)
//!
//! ### Iced
//!
//! Enable the `iced` feature to enable the conversion of Catppuccin colors to
//! [`iced::Color`] instances.
//! This adds [iced](https://crates.io/crates/iced) as a dependency.
//!
//! Example: [`examples/iced.rs`](https://github.com/catppuccin/rust/blob/main/examples/iced.rs)
//!
//! ### Ratatui
//!
//! Enable the `ratatui` feature to enable the conversion of Catppuccin colors to
//! [`ratatui::style::Color`] instances.
//! This adds [ratatui](https://crates.io/crates/ratatui) as a dependency.
//!
//! Example: [`examples/ratatui.rs`](https://github.com/catppuccin/rust/blob/main/examples/ratatui.rs)
//!
//! ### Serde
//!
//! Enable the `serde` feature to enable the serialization of Catppuccin's palette,
//! flavor, and color types.
//! This adds [serde](https://crates.io/crates/serde) as a dependency.
//!
//! Example: [`examples/serde.rs`](https://github.com/catppuccin/rust/blob/main/examples/serde.rs)
#![no_std]
extern crate alloc;
use core::{fmt, marker::PhantomData, ops::Index, str::FromStr};
include!(concat!(env!("OUT_DIR"), "/generated_palette.rs"));
/// The top-level type that encompasses the Catppuccin palette data structure.
/// Primarily used via the [`PALETTE`] constant.
///
/// Can be iterated over, in which case the flavors are yielded in the canonical order:
/// Latte, Frappé, Macchiato, Mocha.
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct Palette {
    /// The light flavor.
    pub latte: Flavor,
    /// The lightest dark flavor.
    pub frappe: Flavor,
    /// The medium dark flavor.
    pub macchiato: Flavor,
    /// The darkest dark flavor.
    pub mocha: Flavor,
}
/// Enum of all four flavors of Catppuccin. Can be used to index [`Palette`].
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub enum FlavorName {
    /// The light flavor.
    Latte,
    /// The lightest dark flavor.
    // Serialized with the accented display name rather than the ASCII variant name.
    #[cfg_attr(feature = "serde", serde(rename = "Frappé"))]
    Frappe,
    /// The medium dark flavor.
    Macchiato,
    /// The darkest dark flavor.
    Mocha,
}
/// An iterator over flavors in the palette.
/// Obtained via [`Palette::iter()`].
pub struct FlavorIterator<'a> {
    // Index of the next flavor to yield.
    current: usize,
    // Carries the borrow lifetime; no data is stored here.
    phantom: PhantomData<&'a ()>,
}
/// Color represented as individual red, green, and blue channels.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct Rgb {
    /// Red channel.
    pub r: u8,
    /// Green channel.
    pub g: u8,
    /// Blue channel.
    pub b: u8,
}
/// Color represented as 6-digit hexadecimal.
// Newtype over `Rgb`; its `Display` impl renders the "#rrggbb" form.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub struct Hex(Rgb);
/// Color represented as individual hue (0-359), saturation (0-1), and lightness (0-1) channels.
#[derive(Clone, Copy, Debug, PartialEq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct Hsl {
    /// Hue channel.
    pub h: f64,
    /// Saturation channel.
    pub s: f64,
    /// Lightness channel.
    pub l: f64,
}
/// A single color in the Catppuccin palette.
#[derive(Clone, Copy, Debug, PartialEq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct Color {
    /// The [`ColorName`] for this color.
    pub name: ColorName,
    /// Order of the color in the palette spec.
    pub order: u32,
    /// Whether the color is considered an accent color.
    /// Accent colors are the first 14 colors in the palette, also called
    /// the analogous colours. The remaining 12 non-accent colors are also
    /// referred to as the monochromatic colors.
    pub accent: bool,
    /// The color represented as a six-digit hex string with a leading hash (#).
    pub hex: Hex,
    /// The color represented as individual red, green, and blue channels.
    pub rgb: Rgb,
    /// The color represented as individual hue, saturation, and lightness channels.
    pub hsl: Hsl,
}
/// A flavor is a collection of colors. Catppuccin has four flavors; Latte,
/// Frappé, Macchiato, and Mocha.
///
/// Can be iterated over, in which case the colors are yielded in order.
#[derive(Clone, Copy, Debug, PartialEq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct Flavor {
    /// The name of the flavor.
    pub name: FlavorName,
    /// Emoji associated with the flavor. Requires Unicode 13.0 (2020) or later to render.
    pub emoji: char,
    /// Order of the flavor in the palette spec.
    pub order: u32,
    /// Whether this flavor is dark or light oriented. Latte is light, the other
    /// three flavors are dark.
    pub dark: bool,
    /// The colors in the flavor.
    pub colors: FlavorColors,
    /// The ANSI colors in the flavor.
    pub ansi_colors: FlavorAnsiColors,
}
/// An iterator over colors in a flavor.
/// Obtained via [`Flavor::into_iter()`](struct.Flavor.html#method.into_iter) or [`FlavorColors::iter()`].
pub struct ColorIterator<'a> {
    // Borrowed color set being iterated.
    colors: &'a FlavorColors,
    // Index of the next color to yield.
    current: usize,
}
/// An iterator over the ANSI colors in a flavor.
///
/// Defaults to ascending order by ANSI code 0 -> 16.
/// Obtained via [`FlavorAnsiColors::into_iter()`](struct.FlavorAnsiColors.html#method.into_iter) or [`FlavorAnsiColors::iter()`].
pub struct AnsiColorIterator<'a> {
    // Borrowed ANSI color set being iterated.
    ansi_colors: &'a FlavorAnsiColors,
    // Index of the next color to yield.
    current: usize,
}
/// An iterator over the ANSI color pairs in a flavor.
/// Obtained via [`FlavorAnsiColorPairs::into_iter()`](struct.FlavorAnsiColorPairs.html#method.into_iter) or [`FlavorAnsiColorPairs::iter()`].
pub struct AnsiColorPairsIterator<'a> {
    // Borrowed pair set being iterated.
    ansi_color_pairs: &'a FlavorAnsiColorPairs,
    // Index of the next pair to yield.
    current: usize,
}
impl Palette {
    /// Get an array of the flavors in the palette.
    #[must_use]
    pub const fn all_flavors(&self) -> [&Flavor; 4] {
        // Canonical order: Latte, Frappé, Macchiato, Mocha.
        [&self.latte, &self.frappe, &self.macchiato, &self.mocha]
    }
    /// Create an iterator over the flavors in the palette.
    #[must_use]
    pub const fn iter(&self) -> FlavorIterator<'_> {
        FlavorIterator {
            current: 0,
            phantom: PhantomData,
        }
    }
}
impl Index<FlavorName> for Palette {
    type Output = Flavor;

    /// Look up a flavor by its [`FlavorName`].
    ///
    /// Delegates to [`Palette::get_flavor`] so the name-to-field mapping
    /// lives in exactly one place instead of two identical `match`es.
    fn index(&self, index: FlavorName) -> &Self::Output {
        self.get_flavor(index)
    }
}
impl Palette {
    /// Get a flavor by name.
    ///
    /// This is equivalent to using the index operator, but can also be used in
    /// const contexts.
    #[must_use]
    pub const fn get_flavor(&self, name: FlavorName) -> &Flavor {
        match name {
            FlavorName::Latte => &self.latte,
            FlavorName::Frappe => &self.frappe,
            FlavorName::Macchiato => &self.macchiato,
            FlavorName::Mocha => &self.mocha,
        }
    }
}
impl fmt::Display for Hex {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let Rgb { r, g, b } = self.0;
write!(f, "#{r:02x}{g:02x}{b:02x}")
}
}
#[cfg(feature = "serde")]
mod _hex {
    use serde::{Deserialize, Deserializer, Serialize, Serializer};

    use crate::{Hex, Rgb};

    use alloc::string::String;
    use alloc::string::ToString;

    impl Serialize for Hex {
        /// Serializes as the `#rrggbb` string produced by the `Display` impl.
        fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
        where
            S: Serializer,
        {
            // `collect_str` streams the `Display` output straight into the
            // serializer — same bytes as `serialize_str(&self.to_string())`.
            serializer.collect_str(self)
        }
    }

    impl<'de> Deserialize<'de> for Hex {
        /// Deserializes from a hex string, with or without leading `#`s.
        fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
        where
            D: Deserializer<'de>,
        {
            let raw: String = Deserialize::deserialize(deserializer)?;
            let value = u32::from_str_radix(raw.trim_start_matches('#'), 16)
                .map_err(serde::de::Error::custom)?;
            // Big-endian bytes of the u32; the top byte is discarded, exactly
            // like the previous shift-and-mask implementation.
            let [_, r, g, b] = value.to_be_bytes();
            Ok(Self(Rgb { r, g, b }))
        }
    }
}
impl fmt::Display for FlavorName {
    /// Writes the human-facing flavor name (note the accent in "Frappé").
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let label = match self {
            Self::Latte => "Latte",
            Self::Frappe => "Frappé",
            Self::Macchiato => "Macchiato",
            Self::Mocha => "Mocha",
        };
        f.write_str(label)
    }
}
/// Error type for parsing a [`FlavorName`] from a string.
#[derive(Debug, PartialEq, Eq)]
pub struct ParseFlavorNameError;

impl core::error::Error for ParseFlavorNameError {}

impl core::fmt::Display for ParseFlavorNameError {
    /// Writes the fixed diagnostic listing every accepted identifier.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.write_str(
            "invalid flavor identifier, expected one of: latte, frappe, frappé, macchiato, mocha",
        )
    }
}
impl FromStr for FlavorName {
    type Err = ParseFlavorNameError;

    /// Parses a machine identifier (case-sensitive); both the ASCII "frappe"
    /// and the accented "frappé" map to [`Self::Frappe`].
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s {
            "mocha" => Ok(Self::Mocha),
            "macchiato" => Ok(Self::Macchiato),
            "frappe" | "frappé" => Ok(Self::Frappe),
            "latte" => Ok(Self::Latte),
            _ => Err(ParseFlavorNameError),
        }
    }
}
impl FlavorName {
    /// Get the flavor's identifier; the lowercase key used to identify the flavor.
    /// This differs from `to_string` in that it's intended for machine usage
    /// rather than presentation.
    ///
    /// Example:
    ///
    /// ```rust
    /// let frappe = catppuccin::PALETTE.frappe;
    /// assert_eq!(frappe.name.to_string(), "Frappé");
    /// assert_eq!(frappe.name.identifier(), "frappe");
    /// ```
    #[must_use]
    pub const fn identifier(&self) -> &'static str {
        match self {
            Self::Latte => "latte",
            Self::Frappe => "frappe", // identifiers are plain ASCII, no accent
            Self::Macchiato => "macchiato",
            Self::Mocha => "mocha",
        }
    }
}
impl FlavorColors {
    /// Create an iterator over the colors in the flavor.
    #[must_use]
    pub const fn iter(&self) -> ColorIterator<'_> {
        // Start at index 0; the iterator advances through `all_colors()`.
        ColorIterator {
            colors: self,
            current: 0,
        }
    }
}
impl FlavorAnsiColors {
    /// Create an iterator over the ANSI colors in the flavor.
    #[must_use]
    pub const fn iter(&self) -> AnsiColorIterator<'_> {
        AnsiColorIterator {
            ansi_colors: self,
            current: 0,
        }
    }
    /// Get the ANSI color pairs (each normal color with its bright variant).
    #[must_use]
    pub const fn all_pairs(&self) -> FlavorAnsiColorPairs {
        self.to_ansi_color_pairs()
    }
}
impl FlavorAnsiColorPairs {
    /// Create an iterator over the ANSI color pairs in the flavor.
    #[must_use]
    pub const fn iter(&self) -> AnsiColorPairsIterator<'_> {
        AnsiColorPairsIterator {
            ansi_color_pairs: self,
            current: 0,
        }
    }
}
impl<'a> Iterator for FlavorIterator<'a> {
    type Item = &'a Flavor;

    /// Yields flavors in palette-spec order: Latte, Frappé, Macchiato, Mocha.
    fn next(&mut self) -> Option<Self::Item> {
        // Single bounds-checked lookup instead of calling `all_flavors()`
        // twice (once for the length check, once for the index).
        let flavor = PALETTE.all_flavors().get(self.current).copied()?;
        self.current += 1;
        Some(flavor)
    }
}
impl<'a> Iterator for ColorIterator<'a> {
    type Item = &'a Color;

    /// Yields each color of the flavor in `all_colors()` order.
    fn next(&mut self) -> Option<Self::Item> {
        // Single bounds-checked lookup instead of a separate length check
        // plus an indexing call into a second `all_colors()` result.
        let color = self.colors.all_colors().get(self.current).copied()?;
        self.current += 1;
        Some(color)
    }
}
impl<'a> Iterator for AnsiColorIterator<'a> {
    type Item = &'a AnsiColor;

    /// Yields each ANSI color of the flavor in `all_ansi_colors()` order.
    fn next(&mut self) -> Option<Self::Item> {
        // Single bounds-checked lookup instead of calling
        // `all_ansi_colors()` twice per iteration.
        let color = self.ansi_colors.all_ansi_colors().get(self.current).copied()?;
        self.current += 1;
        Some(color)
    }
}
impl<'a> Iterator for AnsiColorPairsIterator<'a> {
    type Item = &'a AnsiColorPair;

    /// Yields each normal/bright ANSI color pair of the flavor.
    fn next(&mut self) -> Option<Self::Item> {
        // Single bounds-checked lookup instead of calling
        // `all_ansi_color_pairs()` twice per iteration.
        let pair = self
            .ansi_color_pairs
            .all_ansi_color_pairs()
            .get(self.current)
            .copied()?;
        self.current += 1;
        Some(pair)
    }
}
impl<'a> IntoIterator for &'a Palette {
    type Item = &'a Flavor;
    type IntoIter = FlavorIterator<'a>;
    /// Delegates to [`Palette::iter`], enabling `for flavor in &PALETTE`.
    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}
impl<'a> IntoIterator for &'a FlavorColors {
    type Item = &'a Color;
    type IntoIter = ColorIterator<'a>;
    /// Delegates to [`FlavorColors::iter`].
    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}
impl<'a> IntoIterator for &'a FlavorAnsiColors {
    type Item = &'a AnsiColor;
    type IntoIter = AnsiColorIterator<'a>;
    /// Delegates to [`FlavorAnsiColors::iter`].
    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}
impl<'a> IntoIterator for &'a FlavorAnsiColorPairs {
    type Item = &'a AnsiColorPair;
    type IntoIter = AnsiColorPairsIterator<'a>;
    /// Delegates to [`FlavorAnsiColorPairs::iter`].
    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}
impl Flavor {
    /// Create an iterator over the colors in the flavor.
    #[must_use]
    pub const fn iter(&self) -> ColorIterator<'_> {
        self.colors.iter()
    }
    /// Equivalent to [`<flavor>.name.identifier()`](FlavorName::identifier).
    #[must_use]
    pub const fn identifier(&self) -> &'static str {
        self.name.identifier()
    }
}
impl<'a> IntoIterator for &'a Flavor {
    type Item = &'a Color;
    type IntoIter = ColorIterator<'a>;

    /// Delegates to [`Flavor::iter`] (which itself iterates `self.colors`).
    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}
/// Error type for parsing a [`ColorName`] from a string.
#[derive(Debug, PartialEq, Eq)]
pub struct ParseColorNameError;

impl core::error::Error for ParseColorNameError {}

impl core::fmt::Display for ParseColorNameError {
    /// Writes the fixed diagnostic message.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.write_str("invalid color identifier")
    }
}
impl Index<ColorName> for Flavor {
    type Output = Color;

    /// Look up a color by name, forwarding to the flavor's color table.
    fn index(&self, index: ColorName) -> &Self::Output {
        &self.colors[index]
    }
}
impl Flavor {
    /// Get a color by name.
    ///
    /// This is equivalent to using the index operator, but can also be used in
    /// const contexts. Delegates to `FlavorColors::get_color`.
    #[must_use]
    pub const fn get_color(&self, name: ColorName) -> &Color {
        self.colors.get_color(name)
    }
}
impl Color {
    /// Equivalent to [`<color>.name.identifier()`](ColorName::identifier).
    #[must_use]
    pub const fn identifier(&self) -> &'static str {
        self.name.identifier()
    }
}
impl From<(u8, u8, u8)> for Rgb {
fn from((r, g, b): (u8, u8, u8)) -> Self {
Self { r, g, b }
}
}
impl From<(u8, u8, u8)> for Hex {
fn from((r, g, b): (u8, u8, u8)) -> Self {
Self(Rgb { r, g, b })
}
}
impl From<(f64, f64, f64)> for Hsl {
fn from((h, s, l): (f64, f64, f64)) -> Self {
Self { h, s, l }
}
}
/// Integration with the `ansi_term` crate (behind the `ansi-term` feature).
#[cfg(feature = "ansi-term")]
mod ansi_term {
    use crate::{AnsiColor, Color};
    use alloc::borrow::ToOwned;

    impl Color {
        /// Paints the given input with a color à la [ansi_term](https://docs.rs/ansi_term/latest/ansi_term/)
        pub fn ansi_paint<'a, I, S: 'a + ToOwned + ?Sized>(
            &self,
            input: I,
        ) -> ansi_term::ANSIGenericString<'a, S>
        where
            I: Into<alloc::borrow::Cow<'a, S>>,
            <S as ToOwned>::Owned: core::fmt::Debug,
        {
            // Uses the stored 8-bit RGB channels as a truecolor escape.
            ansi_term::Color::RGB(self.rgb.r, self.rgb.g, self.rgb.b).paint(input)
        }
    }

    impl AnsiColor {
        /// Paints the given input with a color à la [ansi_term](https://docs.rs/ansi_term/latest/ansi_term/)
        pub fn ansi_paint<'a, I, S: 'a + ToOwned + ?Sized>(
            &self,
            input: I,
        ) -> ansi_term::ANSIGenericString<'a, S>
        where
            I: Into<alloc::borrow::Cow<'a, S>>,
            <S as ToOwned>::Owned: core::fmt::Debug,
        {
            ansi_term::Color::RGB(self.rgb.r, self.rgb.g, self.rgb.b).paint(input)
        }
    }
}

/// Integration with the `bevy` game engine (behind the `bevy` feature).
#[cfg(feature = "bevy")]
mod bevy {
    use crate::{AnsiColor, Color};

    impl From<Color> for bevy::prelude::Color {
        // NOTE(review): built from the stored HSL; assumes bevy's `hsl`
        // expects the same h/s/l ranges as the stored values — confirm.
        fn from(value: Color) -> Self {
            #[allow(clippy::cast_possible_truncation)]
            Self::hsl(value.hsl.h as f32, value.hsl.s as f32, value.hsl.l as f32)
        }
    }

    impl From<AnsiColor> for bevy::prelude::Color {
        fn from(value: AnsiColor) -> Self {
            #[allow(clippy::cast_possible_truncation)]
            Self::hsl(value.hsl.h as f32, value.hsl.s as f32, value.hsl.l as f32)
        }
    }
}

/// Integration with the `css-colors` crate (behind the `css-colors` feature).
#[cfg(feature = "css-colors")]
mod css_colors {
    use crate::{AnsiColor, Color};

    impl From<Color> for css_colors::RGB {
        fn from(value: Color) -> Self {
            Self {
                r: css_colors::Ratio::from_u8(value.rgb.r),
                g: css_colors::Ratio::from_u8(value.rgb.g),
                b: css_colors::Ratio::from_u8(value.rgb.b),
            }
        }
    }

    impl From<AnsiColor> for css_colors::RGB {
        fn from(value: AnsiColor) -> Self {
            Self {
                r: css_colors::Ratio::from_u8(value.rgb.r),
                g: css_colors::Ratio::from_u8(value.rgb.g),
                b: css_colors::Ratio::from_u8(value.rgb.b),
            }
        }
    }

    impl From<Color> for css_colors::HSL {
        // NOTE(review): `h as u16` drops any fractional degrees; s/l are
        // presumably 0..=1 fractions to match `Ratio::from_f32` — confirm.
        fn from(value: Color) -> Self {
            #[allow(clippy::cast_possible_truncation, clippy::cast_sign_loss)]
            Self {
                h: css_colors::Angle::new(value.hsl.h as u16),
                s: css_colors::Ratio::from_f32(value.hsl.s as f32),
                l: css_colors::Ratio::from_f32(value.hsl.l as f32),
            }
        }
    }

    impl From<AnsiColor> for css_colors::HSL {
        fn from(value: AnsiColor) -> Self {
            #[allow(clippy::cast_possible_truncation, clippy::cast_sign_loss)]
            Self {
                h: css_colors::Angle::new(value.hsl.h as u16),
                s: css_colors::Ratio::from_f32(value.hsl.s as f32),
                l: css_colors::Ratio::from_f32(value.hsl.l as f32),
            }
        }
    }
}

/// Integration with the `iced` GUI toolkit (behind the `iced` feature).
#[cfg(feature = "iced")]
mod iced {
    use crate::{AnsiColor, Color};

    impl From<Color> for iced::Color {
        fn from(value: Color) -> Self {
            Self::from_rgb8(value.rgb.r, value.rgb.g, value.rgb.b)
        }
    }

    impl From<AnsiColor> for iced::Color {
        fn from(value: AnsiColor) -> Self {
            Self::from_rgb8(value.rgb.r, value.rgb.g, value.rgb.b)
        }
    }
}

/// Integration with the `ratatui` TUI library (behind the `ratatui` feature).
#[cfg(feature = "ratatui")]
mod ratatui {
    use crate::{AnsiColor, Color};

    impl From<Color> for ratatui::style::Color {
        fn from(value: Color) -> Self {
            Self::Rgb(value.rgb.r, value.rgb.g, value.rgb.b)
        }
    }

    impl From<AnsiColor> for ratatui::style::Color {
        fn from(value: AnsiColor) -> Self {
            Self::Rgb(value.rgb.r, value.rgb.g, value.rgb.b)
        }
    }
}
| rust | MIT | e4f47b6a524d8d9d3a262e3f3534955a01d77e93 | 2026-01-04T20:19:15.369728Z | false |
catppuccin/rust | https://github.com/catppuccin/rust/blob/e4f47b6a524d8d9d3a262e3f3534955a01d77e93/examples/serde.rs | examples/serde.rs | //! Example demonstrating integration with the `serde` crate.
fn main() {
    // Pretty-print the entire palette as JSON.
    let json = serde_json::to_string_pretty(&catppuccin::PALETTE)
        .expect("palette can be serialized");
    println!("{json}");
}
| rust | MIT | e4f47b6a524d8d9d3a262e3f3534955a01d77e93 | 2026-01-04T20:19:15.369728Z | false |
catppuccin/rust | https://github.com/catppuccin/rust/blob/e4f47b6a524d8d9d3a262e3f3534955a01d77e93/examples/css.rs | examples/css.rs | //! Example demonstrating integration with the `css-colors` crate.
use css_colors::{percent, Color};
fn main() {
    let teal = catppuccin::PALETTE.mocha.colors.teal;
    // Convert into the css-colors RGB type and print its CSS form.
    let rgb: css_colors::RGB = teal.into();
    println!("RGB: {}", rgb.to_css());
    let hsl = rgb.to_hsl();
    println!("HSL: {}", hsl.to_css());
    // css-colors can also manipulate colors, e.g. lighten by a percentage.
    let lighter = hsl.lighten(percent(20));
    println!("20% lighter: {lighter}");
    // ANSI colors convert through the same From impls.
    let ansi_normal_magenta = catppuccin::PALETTE.mocha.ansi_colors.magenta;
    let ansi_bright_magenta = catppuccin::PALETTE.mocha.ansi_colors.bright_magenta;
    let ansi_magenta_normal_rgb: css_colors::RGB = ansi_normal_magenta.into();
    let ansi_magenta_bright_rgb: css_colors::RGB = ansi_bright_magenta.into();
    println!("ANSI Magenta RGB: {}", ansi_magenta_normal_rgb.to_css());
    println!(
        "ANSI Bright Magenta RGB: {}",
        ansi_magenta_bright_rgb.to_css()
    );
}
| rust | MIT | e4f47b6a524d8d9d3a262e3f3534955a01d77e93 | 2026-01-04T20:19:15.369728Z | false |
catppuccin/rust | https://github.com/catppuccin/rust/blob/e4f47b6a524d8d9d3a262e3f3534955a01d77e93/examples/bevy.rs | examples/bevy.rs | //! Example demonstrating integration with the `bevy` crate.
use bevy::prelude::*;
use catppuccin::PALETTE;
fn main() {
    // Build the app, register the scene-setup system, and start the loop.
    let mut app = App::new();
    app.add_plugins(DefaultPlugins);
    app.add_systems(Startup, setup);
    app.run();
}
/// Set up a simple 3D scene — ground circle, cube with a sphere on top,
/// a point light, and a camera — tinted with catppuccin mocha colors.
fn setup(
    mut commands: Commands,
    mut meshes: ResMut<Assets<Mesh>>,
    mut materials: ResMut<Assets<StandardMaterial>>,
) {
    // circular base with catppuccin mocha surface color
    commands.spawn((
        Mesh3d(meshes.add(Circle::new(4.0))),
        MeshMaterial3d(materials.add(StandardMaterial::from_color(PALETTE.mocha.colors.surface0))),
        Transform::from_rotation(Quat::from_rotation_x(-std::f32::consts::FRAC_PI_2)),
    ));
    // cube with catppuccin mocha green color
    commands.spawn((
        Mesh3d(meshes.add(Cuboid::new(1.0, 1.0, 1.0))),
        MeshMaterial3d(materials.add(StandardMaterial::from_color(PALETTE.mocha.colors.green))),
        Transform::from_xyz(0.0, 0.5, 0.0),
    ));
    // Sphere on top of cube with catppuccin mocha red color
    commands.spawn((
        Mesh3d(meshes.add(Sphere::new(0.5))),
        MeshMaterial3d(materials.add(StandardMaterial::from_color(PALETTE.mocha.colors.red))),
        Transform::from_xyz(0.0, 1.5, 0.0),
    ));
    // light (with shadows enabled so the sphere casts onto the cube)
    commands.spawn((
        PointLight {
            shadows_enabled: true,
            ..default()
        },
        Transform::from_xyz(4.0, 8.0, 4.0),
    ));
    // camera, positioned to look at the scene origin
    commands.spawn((
        Camera3d::default(),
        Transform::from_xyz(-2.5, 4.5, 9.0).looking_at(Vec3::ZERO, Vec3::Y),
    ));
}
| rust | MIT | e4f47b6a524d8d9d3a262e3f3534955a01d77e93 | 2026-01-04T20:19:15.369728Z | false |
catppuccin/rust | https://github.com/catppuccin/rust/blob/e4f47b6a524d8d9d3a262e3f3534955a01d77e93/examples/ratatui.rs | examples/ratatui.rs | //! Example demonstrating integration with the `ratatui` crate.
use std::io::{self, stdout};
use catppuccin::PALETTE;
use ratatui::{
backend::CrosstermBackend,
layout::Rect,
style::Stylize as _,
text::{Line, Span},
widgets::{Paragraph, Widget},
Terminal, TerminalOptions, Viewport,
};
fn main() -> io::Result<()> {
    // Inline viewport of height 0: all output is produced via
    // `insert_before`, scrolling the terminal like normal print output.
    let mut terminal = Terminal::with_options(
        CrosstermBackend::new(stdout()),
        TerminalOptions {
            viewport: Viewport::Inline(0),
        },
    )?;
    for flavor in &PALETTE {
        terminal.insert_before(8, |buf| {
            // Accent colors on one row...
            let analogous: Vec<Span> = flavor
                .colors
                .into_iter()
                .filter(|c| c.accent)
                .map(|c| "██".fg(*c)) // fg accepts any type that implements Into<Color>
                .collect::<Vec<Span>>();
            // ...and the monochromatic (non-accent) colors on the next.
            let monochromatic: Vec<Span> = flavor
                .colors
                .into_iter()
                .filter(|c| !c.accent)
                .map(|c| "██".fg(*c)) // fg accepts any type that implements Into<Color>
                .collect();
            // ANSI codes 0-7 are the normal colors, 8-15 the bright variants.
            let ansi_normals: Vec<Span> = flavor
                .ansi_colors
                .into_iter()
                .filter(|c| c.code < 8)
                .map(|c| "██".fg(*c)) // fg accepts any type that implements Into<Color>
                .collect::<Vec<Span>>();
            let ansi_brights: Vec<Span> = flavor
                .ansi_colors
                .into_iter()
                .filter(|c| c.code >= 8)
                .map(|c| "██".fg(*c)) // fg accepts any type that implements Into<Color>
                .collect::<Vec<Span>>();
            let width = buf.area.width;
            Paragraph::new(flavor.name.to_string()).render(Rect::new(0, 0, width, 1), buf);
            Paragraph::new(Line::from(analogous)).render(Rect::new(0, 1, width, 1), buf);
            Paragraph::new(Line::from(monochromatic)).render(Rect::new(0, 2, width, 1), buf);
            Paragraph::new(format!("{} ANSI", flavor.name)).render(Rect::new(0, 4, width, 1), buf);
            Paragraph::new(Line::from(ansi_normals)).render(Rect::new(0, 5, width, 1), buf);
            Paragraph::new(Line::from(ansi_brights)).render(Rect::new(0, 6, width, 1), buf);
        })?;
    }
    Ok(())
}
| rust | MIT | e4f47b6a524d8d9d3a262e3f3534955a01d77e93 | 2026-01-04T20:19:15.369728Z | false |
catppuccin/rust | https://github.com/catppuccin/rust/blob/e4f47b6a524d8d9d3a262e3f3534955a01d77e93/examples/term_grid.rs | examples/term_grid.rs | //! Example demonstrating integration with the `ansi_term` crate.
use catppuccin::PALETTE;
/// Convert a catppuccin color into the equivalent `ansi_term` RGB colour.
const fn ansi_term_color(color: &catppuccin::Color) -> ansi_term::Colour {
    let rgb = &color.rgb;
    ansi_term::Colour::RGB(rgb.r, rgb.g, rgb.b)
}

/// Convert a catppuccin ANSI color into the equivalent `ansi_term` RGB colour.
const fn ansi_term_ansi_color(color: &catppuccin::AnsiColor) -> ansi_term::Colour {
    let rgb = &color.rgb;
    ansi_term::Colour::RGB(rgb.r, rgb.g, rgb.b)
}
fn main() {
    for flavor in &PALETTE {
        // Flavor heading, underlined and bold, noting light vs dark.
        let heading = format!(
            "{} ({})",
            flavor.name,
            if flavor.dark { "dark" } else { "light" }
        );
        println!(
            "{}\n",
            ansi_term::Style::new().underline().bold().paint(heading)
        );
        // One row per color: swatch, name, hex, rgb, hsl.
        for color in flavor {
            let name = format!(
                "{}{}",
                color.name,
                if color.accent { " (accent)" } else { "" }
            );
            let rgb = format!(
                "rgb({:3}, {:3}, {:3})",
                color.rgb.r, color.rgb.g, color.rgb.b
            );
            let hsl = format!(
                "hsl({:3.0}, {:5.3}, {:5.3})",
                color.hsl.h, color.hsl.s, color.hsl.l
            );
            println!(
                "{} {:18} → {:6} {:18} {:18}",
                ansi_term_color(color).reverse().paint(" "),
                name,
                color.hex,
                rgb,
                hsl,
            );
        }
        println!();
        // Same layout for the flavor's 16 ANSI colors.
        println!(
            "{}\n",
            ansi_term::Style::new()
                .underline()
                .bold()
                .paint(format!("{} ANSI", flavor.name))
        );
        for ansi_color in &flavor.ansi_colors {
            let rgb = format!(
                "rgb({:3}, {:3}, {:3})",
                ansi_color.rgb.r, ansi_color.rgb.g, ansi_color.rgb.b
            );
            let hsl = format!(
                "hsl({:3.0}, {:5.3}, {:5.3})",
                ansi_color.hsl.h, ansi_color.hsl.s, ansi_color.hsl.l
            );
            println!(
                "{} {:15} → {:6} {:18} {:18}",
                ansi_term_ansi_color(ansi_color).reverse().paint(" "),
                ansi_color.name.to_string(),
                ansi_color.hex,
                rgb,
                hsl,
            );
        }
        println!();
        // And the normal/bright pairs, two swatches per row.
        println!(
            "{}\n",
            ansi_term::Style::new()
                .underline()
                .bold()
                .paint(format!("{} ANSI Pairs", flavor.name))
        );
        for ansi_color_pair in &flavor.ansi_colors.to_ansi_color_pairs() {
            println!(
                "{}{} {}",
                ansi_term_ansi_color(&ansi_color_pair.normal)
                    .reverse()
                    .paint(" "),
                ansi_term_ansi_color(&ansi_color_pair.bright)
                    .reverse()
                    .paint(" "),
                ansi_color_pair.name,
            );
        }
        println!();
    }
}
| rust | MIT | e4f47b6a524d8d9d3a262e3f3534955a01d77e93 | 2026-01-04T20:19:15.369728Z | false |
catppuccin/rust | https://github.com/catppuccin/rust/blob/e4f47b6a524d8d9d3a262e3f3534955a01d77e93/examples/simple.rs | examples/simple.rs | //! Simple example showing how to get colors from the Catppuccin palette.
use catppuccin::{AnsiColor, ColorName, Rgb, PALETTE};
fn main() {
    let latte_teal = PALETTE.latte.colors.teal;
    let Rgb { r, g, b } = latte_teal.rgb;
    println!(
        "Latte's {} is {}, which is rgb({r}, {g}, {b})",
        latte_teal.name, latte_teal.hex
    );
    // you can also get a color by its name, from `FlavorColors` or `Flavor`:
    let mocha = &PALETTE.mocha;
    let mocha_teal = mocha.colors[ColorName::Teal];
    let mocha_mauve = mocha[ColorName::Mauve];
    let Rgb { r, g, b } = mocha_teal.rgb;
    println!(
        "Mocha's {} is {}, which is rgb({r}, {g}, {b})",
        mocha_teal.name, mocha_teal.hex
    );
    println!("Mocha's {} is {}", mocha_mauve.name, mocha_mauve.hex);
    println!();
    // iterate over the 16 ANSI colors (i.e. Black, Red, ..., Bright Black, Bright Red, ...)
    println!("Mocha's ANSI colors in code order:");
    // Destructure each AnsiColor directly in the loop pattern.
    for AnsiColor {
        name,
        rgb,
        hsl,
        code,
        hex,
    } in &mocha.ansi_colors
    {
        println!(
            "Mocha ANSI [{:2}] {:15} → {:6} {:3?} {:19?}",
            code,
            name.to_string(),
            hex,
            rgb,
            hsl,
        );
    }
    println!();
    // iterate over the 16 ANSI colors in 8 pairs (i.e. Black, Bright Black, Red, Bright Red, ...)
    println!("Mocha's ANSI color pairs:");
    for pair in &mocha.ansi_colors.all_pairs() {
        println!(
            "[{:2}] {:7} / [{:2}] {}",
            pair.normal.code,
            pair.normal.name.to_string(),
            pair.bright.code,
            pair.bright.name
        );
    }
}
| rust | MIT | e4f47b6a524d8d9d3a262e3f3534955a01d77e93 | 2026-01-04T20:19:15.369728Z | false |
catppuccin/rust | https://github.com/catppuccin/rust/blob/e4f47b6a524d8d9d3a262e3f3534955a01d77e93/examples/iced.rs | examples/iced.rs | //! Example demonstrating integration with the `iced` crate.
use iced::{
application,
daemon::Appearance,
widget::{button, column, container, text},
Alignment::Center,
Element,
Length::Fill,
Result,
};
/// Latte flavor colors used throughout the UI.
const COLORS: catppuccin::FlavorColors = catppuccin::PALETTE.latte.colors;
/// Application state: a single counter value.
#[derive(Default)]
struct Counter {
    value: i64,
}
/// Messages emitted by the counter buttons.
#[derive(Clone, Copy, Debug)]
enum Message {
    Increment,
    Decrement,
}
impl Counter {
    /// Apply a message to the counter state.
    const fn update(&mut self, message: Message) {
        match message {
            Message::Increment => {
                self.value += 1;
            }
            Message::Decrement => {
                self.value -= 1;
            }
        }
    }
    /// Build the widget tree for the current state.
    fn view(&self) -> Element<'_, Message> {
        // Convert the catppuccin colors once; the style closures capture them.
        let green: iced::Color = COLORS.green.into();
        let red: iced::Color = COLORS.red.into();
        container(
            column![
                button(text("+").size(50).center())
                    .style(move |_, _| button::Style {
                        background: Some(green.into()),
                        text_color: COLORS.crust.into(),
                        ..Default::default()
                    })
                    .width(60)
                    .on_press(Message::Increment),
                text(self.value).size(50),
                button(text("-").size(50).center())
                    .style(move |_, _| button::Style {
                        background: Some(red.into()),
                        text_color: COLORS.crust.into(),
                        ..Default::default()
                    })
                    .width(60)
                    .on_press(Message::Decrement),
            ]
            .align_x(Center)
            .spacing(10),
        )
        .padding(20)
        .center_x(Fill)
        .center_y(Fill)
        .into()
    }
}
/// Run the app with catppuccin base/text colors as the global style.
fn main() -> Result {
    application("Counter", Counter::update, Counter::view)
        .style(move |_, _| Appearance {
            background_color: COLORS.base.into(),
            text_color: COLORS.text.into(),
        })
        .run()
}
| rust | MIT | e4f47b6a524d8d9d3a262e3f3534955a01d77e93 | 2026-01-04T20:19:15.369728Z | false |
catppuccin/rust | https://github.com/catppuccin/rust/blob/e4f47b6a524d8d9d3a262e3f3534955a01d77e93/examples/custom_flavor.rs | examples/custom_flavor.rs | //! Example demonstrating how to make a custom flavor.
//! Two options are provided; setting colors one-by-one, or using a helper macro.
use catppuccin::{Color, Flavor, FlavorColors};
/// Build an OLED-friendly "Americano" flavor by darkening Mocha's base colors,
/// overriding hex/rgb/hsl for each field one by one.
fn americano_simple() -> Flavor {
    let mut oled = catppuccin::PALETTE.mocha;
    oled.colors.base.hex = (0, 0, 0).into();
    oled.colors.base.rgb = (0, 0, 0).into();
    oled.colors.base.hsl = (0.0, 0.0, 0.0).into();
    oled.colors.mantle.hex = (10, 10, 10).into();
    oled.colors.mantle.rgb = (10, 10, 10).into();
    oled.colors.mantle.hsl = (0.0, 0.0, 0.04).into();
    // Fixed: crust was (0, 0, 0), which contradicted its own HSL lightness
    // (0.08 ≈ 20/255) and the macro-based variant in `main`, which uses
    // (20, 20, 20) for crust.
    oled.colors.crust.hex = (20, 20, 20).into();
    oled.colors.crust.rgb = (20, 20, 20).into();
    oled.colors.crust.hsl = (0.0, 0.0, 0.08).into();
    oled
}
/// Build a [`Flavor`] from a base flavor, overriding selected colors.
///
/// Each entry has the form `color_key: rgb_tuple, hsl_tuple,`; the RGB tuple
/// seeds both `hex` and `rgb`, the HSL tuple seeds `hsl`, and every other
/// field is taken from `$base` via struct-update syntax.
macro_rules! custom_flavor {
    ($base:expr, $($color_key:ident: $rgb:expr, $hsl:expr,)*) => {
        Flavor {
            colors: FlavorColors {
                $($color_key: Color {
                    hex: $rgb.into(),
                    rgb: $rgb.into(),
                    hsl: $hsl.into(),
                    ..$base.colors.$color_key
                },)*
                ..$base.colors
            },
            ..$base
        }
    };
}
/// Print the key colors of a flavor (output format is fixed).
fn use_flavor(flavor: &Flavor) {
    let colors = &flavor.colors;
    println!("bg: {}", colors.base.hex);
    println!("bg2: {}", colors.mantle.hex);
    println!("fg: {}", colors.text.hex);
    println!("accent: {}", colors.mauve.hex);
}
fn main() {
    // Variant 1: mutate a copy of Mocha field by field.
    println!("The simple way:");
    let flavor = americano_simple();
    use_flavor(&flavor);
    println!();
    // Variant 2: the `custom_flavor!` macro with struct-update syntax.
    println!("Or with a macro:");
    let flavor = custom_flavor!(catppuccin::PALETTE.mocha,
        base: (0, 0, 0), (0.0, 0.0, 0.0),
        mantle: (10, 10, 10), (0.0, 0.0, 0.04),
        crust: (20, 20, 20), (0.0, 0.0, 0.08),
    );
    use_flavor(&flavor);
}
| rust | MIT | e4f47b6a524d8d9d3a262e3f3534955a01d77e93 | 2026-01-04T20:19:15.369728Z | false |
pi22by7/In-Memoria | https://github.com/pi22by7/In-Memoria/blob/9cbed0d63a52d61fcd66d0d651b8c4d7f787ba78/rust-core/build.rs | rust-core/build.rs | extern crate napi_build;
fn main() {
    // Generates the N-API glue that exposes this crate to Node.js at build time.
    napi_build::setup();
}
pi22by7/In-Memoria | https://github.com/pi22by7/In-Memoria/blob/9cbed0d63a52d61fcd66d0d651b8c4d7f787ba78/rust-core/src/lib.rs | rust-core/src/lib.rs | #![deny(clippy::all)]
// Note: napi-bindings are excluded during tests (not(test)) to avoid linker errors
// when running `cargo test`. NAPI bindings require Node.js runtime which isn't
// available in the test environment. Integration tests should use the built library.
#[cfg(all(feature = "napi-bindings", not(test)))]
use napi_derive::napi;
// Core modules
pub mod types;
pub mod parsing;
pub mod extractors;
pub mod analysis;
pub mod patterns;
// Legacy modules (will be removed in future versions)
// pattern_learning has been fully ported to the patterns module
// Re-export core types and main structs for easy access
pub use types::*;
pub use analysis::{SemanticAnalyzer, ComplexityAnalyzer, RelationshipLearner, FrameworkDetector, BlueprintAnalyzer};
pub use parsing::{ParserManager, TreeWalker, FallbackExtractor};
pub use patterns::{
PatternLearningEngine, NamingPatternAnalyzer, StructuralPatternAnalyzer,
ImplementationPatternAnalyzer, ApproachPredictor
};
// Legacy re-exports (for backwards compatibility)
pub use parsing::ParserManager as AstParser; // Backwards compatibility alias
pub use patterns::PatternLearner;
pub use patterns::PatternLearningEngine as LegacyPatternLearner;
/// Smoke-test entry point exported to Node.js; returns a fixed banner string
/// confirming that the native core loaded successfully.
#[cfg(all(feature = "napi-bindings", not(test)))]
#[napi]
pub fn init_core() -> String {
    "In Memoria Rust Core initialized".to_string()
}
| rust | MIT | 9cbed0d63a52d61fcd66d0d651b8c4d7f787ba78 | 2026-01-04T20:19:30.317431Z | false |
pi22by7/In-Memoria | https://github.com/pi22by7/In-Memoria/blob/9cbed0d63a52d61fcd66d0d651b8c4d7f787ba78/rust-core/src/parsing/tree_walker.rs | rust-core/src/parsing/tree_walker.rs | //! Generic tree traversal utilities for tree-sitter ASTs
use tree_sitter::Node;
/// Generic tree walker that visits all nodes in a tree-sitter AST.
pub struct TreeWalker {
    /// Maximum depth to traverse (prevents infinite recursion / stack overflow).
    pub max_depth: usize,
}
impl Default for TreeWalker {
    fn default() -> Self {
        Self {
            max_depth: 100, // Reasonable default to prevent stack overflow
        }
    }
}
impl TreeWalker {
    /// Create a new tree walker with the specified maximum traversal depth.
    pub fn new(max_depth: usize) -> Self {
        Self { max_depth }
    }

    /// Walk all nodes in the tree (pre-order), calling `visitor` on each.
    ///
    /// Returns the first error produced by `visitor`, or an error if the
    /// tree is deeper than `max_depth`.
    pub fn walk<F>(&self, node: Node<'_>, visitor: &mut F) -> Result<(), String>
    where
        F: FnMut(Node<'_>) -> Result<(), String>,
    {
        self.walk_recursive(node, visitor, 0)
    }

    /// Recursive helper for [`TreeWalker::walk`] that tracks the current depth.
    fn walk_recursive<F>(
        &self,
        node: Node<'_>,
        visitor: &mut F,
        depth: usize,
    ) -> Result<(), String>
    where
        F: FnMut(Node<'_>) -> Result<(), String>,
    {
        // Bail out before recursing further to avoid a stack overflow on
        // pathologically deep trees.
        if depth > self.max_depth {
            return Err(format!("Maximum tree depth ({}) exceeded", self.max_depth));
        }
        // Visit the current node, then all of its children.
        visitor(node)?;
        let mut cursor = node.walk();
        for child in node.children(&mut cursor) {
            self.walk_recursive(child, visitor, depth + 1)?;
        }
        Ok(())
    }

    /// Walk the tree and collect every `Some` value produced by `collector`.
    pub fn collect<T, F>(&self, node: Node<'_>, mut collector: F) -> Result<Vec<T>, String>
    where
        F: FnMut(Node<'_>) -> Option<T>,
    {
        let mut results = Vec::new();
        self.walk(node, &mut |node| {
            if let Some(result) = collector(node) {
                results.push(result);
            }
            Ok(())
        })?;
        Ok(results)
    }

    /// Find the first node whose kind equals `target_kind`.
    ///
    /// Note: the returned string is the matched node's kind, which by
    /// construction equals `target_kind`; callers mainly use this as an
    /// existence check. The full tree is still traversed after a match,
    /// because `walk` has no early-exit mechanism.
    pub fn find_first_by_kind(&self, node: Node<'_>, target_kind: &str) -> Result<Option<String>, String> {
        let mut found = None;
        self.walk(node, &mut |node| {
            if found.is_none() && node.kind() == target_kind {
                found = Some(node.kind().to_string());
            }
            Ok(())
        })?;
        Ok(found)
    }

    /// Collect the kind string of every node matching `target_kind`
    /// (anywhere in the subtree, not just direct children).
    pub fn find_all_by_kind(&self, node: Node<'_>, target_kind: &str) -> Result<Vec<String>, String> {
        let mut results = Vec::new();
        self.walk(node, &mut |node| {
            if node.kind() == target_kind {
                results.push(node.kind().to_string());
            }
            Ok(())
        })?;
        Ok(results)
    }

    /// Count all nodes in the subtree matching `predicate`.
    pub fn count<F>(&self, node: Node<'_>, mut predicate: F) -> Result<usize, String>
    where
        F: FnMut(Node<'_>) -> bool,
    {
        let mut count = 0;
        self.walk(node, &mut |node| {
            if predicate(node) {
                count += 1;
            }
            Ok(())
        })?;
        Ok(count)
    }

    /// Get all direct children of `node` with the given kind.
    pub fn get_children_by_kind<'a>(&self, node: Node<'a>, kind: &str) -> Vec<Node<'a>> {
        let mut cursor = node.walk();
        // Filter lazily instead of collecting all children into an
        // intermediate Vec first.
        node.children(&mut cursor)
            .filter(|child| child.kind() == kind)
            .collect()
    }

    /// Get the first direct child of `node` with the given kind.
    pub fn get_first_child_by_kind<'a>(&self, node: Node<'a>, kind: &str) -> Option<Node<'a>> {
        let mut cursor = node.walk();
        // Short-circuits on the first match instead of materializing every
        // child into a Vec first.
        node.children(&mut cursor)
            .find(|child| child.kind() == kind)
    }

    /// Extract the source text covered by `node`, if its byte range lies
    /// within `source` on valid char boundaries.
    pub fn extract_node_text<'a>(&self, node: Node<'_>, source: &'a str) -> Option<&'a str> {
        source.get(node.start_byte()..node.end_byte())
    }

    /// Check whether `node` or any of its descendants is an error node.
    pub fn has_errors(&self, node: Node<'_>) -> bool {
        Self::node_has_errors(node)
    }

    /// Recursive helper for [`TreeWalker::has_errors`].
    fn node_has_errors(node: Node<'_>) -> bool {
        if node.is_error() {
            return true;
        }
        let mut cursor = node.walk();
        for child in node.children(&mut cursor) {
            if Self::node_has_errors(child) {
                return true;
            }
        }
        false
    }

    /// Format the node's span as 1-based `"line:col-line:col"`.
    pub fn get_position_info(&self, node: Node<'_>) -> String {
        format!(
            "{}:{}-{}:{}",
            node.start_position().row + 1,
            node.start_position().column + 1,
            node.end_position().row + 1,
            node.end_position().column + 1
        )
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::parsing::ParserManager;

    /// Parse a small JavaScript snippet shared by most of the tests below.
    fn create_test_tree() -> tree_sitter::Tree {
        let mut manager = ParserManager::new().unwrap();
        let code = "function test() { const x = 42; return x; }";
        manager.parse(code, "javascript").unwrap()
    }

    #[test]
    fn test_tree_walker_creation() {
        let walker = TreeWalker::default();
        assert_eq!(walker.max_depth, 100);
        let custom_walker = TreeWalker::new(50);
        assert_eq!(custom_walker.max_depth, 50);
    }

    #[test]
    fn test_walk_all_nodes() {
        let tree = create_test_tree();
        let walker = TreeWalker::default();
        let mut node_count = 0;
        let result = walker.walk(tree.root_node(), &mut |_node| {
            node_count += 1;
            Ok(())
        });
        assert!(result.is_ok());
        assert!(node_count > 0);
    }

    #[test]
    fn test_collect_nodes() {
        let tree = create_test_tree();
        let walker = TreeWalker::default();
        let node_kinds: Result<Vec<String>, _> = walker.collect(tree.root_node(), |node| {
            Some(node.kind().to_string())
        });
        assert!(node_kinds.is_ok());
        let kinds = node_kinds.unwrap();
        assert!(kinds.contains(&"program".to_string()));
        assert!(kinds.contains(&"function_declaration".to_string()));
    }

    #[test]
    fn test_find_first_node() {
        let tree = create_test_tree();
        let walker = TreeWalker::default();
        let function_node = walker.find_first_by_kind(tree.root_node(), "function_declaration");
        assert!(function_node.is_ok());
        assert!(function_node.unwrap().is_some());
    }

    #[test]
    fn test_find_all_nodes() {
        let tree = create_test_tree();
        let walker = TreeWalker::default();
        let identifier_nodes = walker.find_all_by_kind(tree.root_node(), "identifier");
        assert!(identifier_nodes.is_ok());
        let nodes = identifier_nodes.unwrap();
        assert!(!nodes.is_empty());
    }

    #[test]
    fn test_count_nodes() {
        let tree = create_test_tree();
        let walker = TreeWalker::default();
        let identifier_count = walker.count(tree.root_node(), |node| {
            node.kind() == "identifier"
        });
        assert!(identifier_count.is_ok());
        assert!(identifier_count.unwrap() > 0);
    }

    #[test]
    fn test_get_children_by_kind() {
        let tree = create_test_tree();
        let walker = TreeWalker::default();
        let root = tree.root_node();
        // Get function declarations as direct children of program
        let functions = walker.get_children_by_kind(root, "function_declaration");
        assert!(!functions.is_empty());
    }

    #[test]
    fn test_get_first_child_by_kind() {
        let tree = create_test_tree();
        let walker = TreeWalker::default();
        let root = tree.root_node();
        let first_function = walker.get_first_child_by_kind(root, "function_declaration");
        assert!(first_function.is_some());
    }

    #[test]
    fn test_extract_node_text() {
        let tree = create_test_tree();
        let walker = TreeWalker::default();
        let code = "function test() { const x = 42; return x; }";
        let function_node = walker.get_first_child_by_kind(tree.root_node(), "function_declaration");
        assert!(function_node.is_some());
        let text = walker.extract_node_text(function_node.unwrap(), code);
        assert!(text.is_some());
        assert!(text.unwrap().contains("function test"));
    }

    #[test]
    fn test_has_errors() {
        let walker = TreeWalker::default();
        // Test with valid code
        let tree = create_test_tree();
        assert!(!walker.has_errors(tree.root_node()));
        // Test with invalid code
        let mut manager = ParserManager::new().unwrap();
        let invalid_code = "function {{{ invalid";
        let invalid_tree = manager.parse(invalid_code, "javascript").unwrap();
        assert!(walker.has_errors(invalid_tree.root_node()));
    }

    #[test]
    fn test_position_info() {
        let tree = create_test_tree();
        let walker = TreeWalker::default();
        let root = tree.root_node();
        let position = walker.get_position_info(root);
        assert!(position.contains("1:1")); // Should start at line 1, column 1
    }

    #[test]
    fn test_walk_error_handling() {
        let tree = create_test_tree();
        let walker = TreeWalker::default();
        // Test that error in visitor function is properly propagated
        let result = walker.walk(tree.root_node(), &mut |_node| {
            Err("Test error".to_string())
        });
        assert!(result.is_err());
        assert_eq!(result.unwrap_err(), "Test error");
    }

    #[test]
    fn test_max_depth_limit() {
        let tree = create_test_tree();
        let walker = TreeWalker::new(1); // Very shallow depth
        let mut count = 0;
        let result = walker.walk(tree.root_node(), &mut |_node| {
            count += 1;
            Ok(())
        });
        // Should hit depth limit with nested nodes
        assert!(result.is_err() || count <= 10); // Either error or very few nodes processed
    }

    #[test]
    fn test_empty_tree_handling() {
        let mut manager = ParserManager::new().unwrap();
        let empty_code = "";
        let empty_tree = manager.parse(empty_code, "javascript").unwrap();
        let walker = TreeWalker::default();
        let mut count = 0;
        let result = walker.walk(empty_tree.root_node(), &mut |_node| {
            count += 1;
            Ok(())
        });
        assert!(result.is_ok());
        assert!(count > 0); // Should at least have the root program node
    }
}
pi22by7/In-Memoria | https://github.com/pi22by7/In-Memoria/blob/9cbed0d63a52d61fcd66d0d651b8c4d7f787ba78/rust-core/src/parsing/manager.rs | rust-core/src/parsing/manager.rs | //! Parser management and initialization for multiple languages
#[cfg(feature = "napi-bindings")]
use napi_derive::napi;
use crate::types::{ParseError, AstNode, Symbol, ParseResult};
use std::collections::HashMap;
use tree_sitter::{Language, Parser, Tree, Query, QueryCursor, Node, StreamingIterator};
// Import tree-sitter language constants
use tree_sitter_javascript::LANGUAGE as tree_sitter_javascript;
use tree_sitter_python::LANGUAGE as tree_sitter_python;
use tree_sitter_rust::LANGUAGE as tree_sitter_rust;
use tree_sitter_typescript::LANGUAGE_TYPESCRIPT as tree_sitter_typescript;
// Import new tree-sitter languages
use tree_sitter_sequel::LANGUAGE as tree_sitter_sql;
use tree_sitter_go::LANGUAGE as tree_sitter_go;
use tree_sitter_java::LANGUAGE as tree_sitter_java;
use tree_sitter_c::LANGUAGE as tree_sitter_c;
use tree_sitter_cpp::LANGUAGE as tree_sitter_cpp;
use tree_sitter_c_sharp::LANGUAGE as tree_sitter_csharp;
use tree_sitter_php::LANGUAGE_PHP as tree_sitter_php;
use tree_sitter_svelte_ng::LANGUAGE as tree_sitter_svelte;
/// Manages tree-sitter parsers for different programming languages
#[cfg_attr(feature = "napi-bindings", napi)]
pub struct ParserManager {
parsers: HashMap<String, Parser>,
queries: HashMap<String, Query>,
}
impl ParserManager {
/// Create a new parser manager with all supported languages initialized
pub fn new() -> Result<Self, ParseError> {
let mut manager = ParserManager {
parsers: HashMap::new(),
queries: HashMap::new(),
};
manager.initialize_parsers()?;
manager.initialize_queries()?;
Ok(manager)
}
/// NAPI constructor
#[cfg(feature = "napi-bindings")]
pub fn napi_new() -> Result<Self, ParseError> {
Self::new()
}
/// Initialize parsers for all supported languages
fn initialize_parsers(&mut self) -> Result<(), ParseError> {
// TypeScript parser
let mut ts_parser = Parser::new();
ts_parser
.set_language(&tree_sitter_typescript.into())
.map_err(|e| {
ParseError::from_reason(format!("Failed to set TypeScript language: {}", e))
})?;
self.parsers.insert("typescript".to_string(), ts_parser);
// JavaScript parser
let mut js_parser = Parser::new();
js_parser
.set_language(&tree_sitter_javascript.into())
.map_err(|e| {
ParseError::from_reason(format!("Failed to set JavaScript language: {}", e))
})?;
self.parsers.insert("javascript".to_string(), js_parser);
// Rust parser
let mut rust_parser = Parser::new();
rust_parser.set_language(&tree_sitter_rust.into()).map_err(|e| {
ParseError::from_reason(format!("Failed to set Rust language: {}", e))
})?;
self.parsers.insert("rust".to_string(), rust_parser);
// Python parser
let mut python_parser = Parser::new();
python_parser
.set_language(&tree_sitter_python.into())
.map_err(|e| {
ParseError::from_reason(format!("Failed to set Python language: {}", e))
})?;
self.parsers.insert("python".to_string(), python_parser);
// SQL parser
let mut sql_parser = Parser::new();
sql_parser.set_language(&tree_sitter_sql.into()).map_err(|e| {
ParseError::from_reason(format!("Failed to set SQL language: {}", e))
})?;
self.parsers.insert("sql".to_string(), sql_parser);
// Go parser
let mut go_parser = Parser::new();
go_parser.set_language(&tree_sitter_go.into()).map_err(|e| {
ParseError::from_reason(format!("Failed to set Go language: {}", e))
})?;
self.parsers.insert("go".to_string(), go_parser);
// Java parser
let mut java_parser = Parser::new();
java_parser.set_language(&tree_sitter_java.into()).map_err(|e| {
ParseError::from_reason(format!("Failed to set Java language: {}", e))
})?;
self.parsers.insert("java".to_string(), java_parser);
// C parser
let mut c_parser = Parser::new();
c_parser.set_language(&tree_sitter_c.into()).map_err(|e| {
ParseError::from_reason(format!("Failed to set C language: {}", e))
})?;
self.parsers.insert("c".to_string(), c_parser);
// C++ parser
let mut cpp_parser = Parser::new();
cpp_parser.set_language(&tree_sitter_cpp.into()).map_err(|e| {
ParseError::from_reason(format!("Failed to set C++ language: {}", e))
})?;
self.parsers.insert("cpp".to_string(), cpp_parser);
// C# parser
let mut csharp_parser = Parser::new();
csharp_parser.set_language(&tree_sitter_csharp.into()).map_err(|e| {
ParseError::from_reason(format!("Failed to set C# language: {}", e))
})?;
self.parsers.insert("csharp".to_string(), csharp_parser);
// Svelte parser (using svelte-ng)
let mut svelte_parser = Parser::new();
svelte_parser.set_language(&tree_sitter_svelte.into()).map_err(|e| {
ParseError::from_reason(format!("Failed to set Svelte language: {}", e))
})?;
self.parsers.insert("svelte".to_string(), svelte_parser);
// PHP parser
let mut php_parser = Parser::new();
php_parser.set_language(&tree_sitter_php.into()).map_err(|e| {
ParseError::from_reason(format!("Failed to set PHP language: {}", e))
})?;
self.parsers.insert("php".to_string(), php_parser);
Ok(())
}
/// Initialize common queries for different languages
fn initialize_queries(&mut self) -> Result<(), ParseError> {
// Initialize common queries for different languages
let languages = ["typescript", "javascript", "rust", "python", "sql", "go", "java", "c", "cpp", "csharp", "svelte"];
for lang in &languages {
let lang_obj = self.get_tree_sitter_language(lang)?;
// Function/method query
let function_query = match *lang {
"typescript" | "javascript" => "(function_declaration) @function",
"rust" => "(function_item) @function",
"python" => "(function_definition) @function",
_ => continue,
};
if let Ok(query) = Query::new(&lang_obj, function_query) {
self.queries.insert(format!("{}_functions", lang), query);
}
}
Ok(())
}
/// Parse code with the appropriate language parser
pub fn parse(&mut self, code: &str, language: &str) -> Result<Tree, ParseError> {
let parser = self.parsers.get_mut(language).ok_or_else(|| {
ParseError::from_reason(format!("Unsupported language: {}", language))
})?;
parser
.parse(code, None)
.ok_or_else(|| ParseError::from_reason("Failed to parse code"))
}
/// Get available languages
pub fn available_languages(&self) -> Vec<String> {
self.parsers.keys().cloned().collect()
}
/// Check if a language is supported
pub fn supports_language(&self, language: &str) -> bool {
self.parsers.contains_key(language)
}
/// Get tree-sitter language object for a given language string
pub fn get_tree_sitter_language(&self, language: &str) -> Result<Language, ParseError> {
match language {
"typescript" => Ok(tree_sitter_typescript.into()),
"javascript" => Ok(tree_sitter_javascript.into()),
"rust" => Ok(tree_sitter_rust.into()),
"python" => Ok(tree_sitter_python.into()),
"sql" => Ok(tree_sitter_sql.into()),
"go" => Ok(tree_sitter_go.into()),
"java" => Ok(tree_sitter_java.into()),
"c" => Ok(tree_sitter_c.into()),
"cpp" => Ok(tree_sitter_cpp.into()),
"csharp" => Ok(tree_sitter_csharp.into()),
"svelte" => Ok(tree_sitter_svelte.into()),
"php" => Ok(tree_sitter_php.into()),
_ => Err(ParseError::from_reason(format!(
"Unsupported language: {}",
language
))),
}
}
// AstParser compatibility methods
/// Parse code and return full AST result with symbols and errors
pub fn parse_code(
&mut self,
code: String,
language: String,
) -> Result<ParseResult, ParseError> {
let parser = self.parsers.get_mut(&language).ok_or_else(|| {
ParseError::from_reason(format!("Unsupported language: {}", language))
})?;
let tree = parser
.parse(&code, None)
.ok_or_else(|| ParseError::from_reason("Failed to parse code"))?;
let ast_tree = self.convert_tree_to_ast(&tree, &code)?;
let symbols = self.extract_symbols(&tree, &code, &language)?;
let errors = Self::extract_errors(&tree, &code);
Ok(ParseResult {
language,
tree: ast_tree,
errors,
symbols,
})
}
/// Query AST with tree-sitter query syntax
pub fn query_ast(
&mut self,
code: String,
language: String,
query_string: String,
) -> Result<Vec<AstNode>, ParseError> {
let parser = self.parsers.get_mut(&language).ok_or_else(|| {
ParseError::from_reason(format!("Unsupported language: {}", language))
})?;
let tree = parser
.parse(&code, None)
.ok_or_else(|| ParseError::from_reason("Failed to parse code"))?;
let lang = self.get_tree_sitter_language(&language)?;
let query = Query::new(&lang, &query_string)
.map_err(|e| ParseError::from_reason(format!("Invalid query: {}", e)))?;
let mut cursor = QueryCursor::new();
let mut matches = cursor.matches(&query, tree.root_node(), code.as_bytes());
let mut results = Vec::new();
while let Some(m) = matches.next() {
for capture in m.captures {
let node_ast = Self::convert_node_to_ast(capture.node, &code)?;
results.push(node_ast);
}
}
Ok(results)
}
/// Get symbols from parsed code
pub fn get_symbols(
&mut self,
code: String,
language: String,
) -> Result<Vec<Symbol>, ParseError> {
let parser = self.parsers.get_mut(&language).ok_or_else(|| {
ParseError::from_reason(format!("Unsupported language: {}", language))
})?;
let tree = parser
.parse(&code, None)
.ok_or_else(|| ParseError::from_reason("Failed to parse code"))?;
self.extract_symbols(&tree, &code, &language)
}
/// Get AST node at specific position
pub fn get_node_at_position(
&mut self,
code: String,
language: String,
line: u32,
column: u32,
) -> Result<Option<AstNode>, ParseError> {
let parser = self.parsers.get_mut(&language).ok_or_else(|| {
ParseError::from_reason(format!("Unsupported language: {}", language))
})?;
let tree = parser
.parse(&code, None)
.ok_or_else(|| ParseError::from_reason("Failed to parse code"))?;
let point = tree_sitter::Point::new(line as usize, column as usize);
let node = tree.root_node().descendant_for_point_range(point, point);
if let Some(node) = node {
let ast_node = Self::convert_node_to_ast(node, &code)?;
Ok(Some(ast_node))
} else {
Ok(None)
}
}
/// Analyze code complexity
pub fn analyze_complexity(
&mut self,
code: String,
language: String,
) -> Result<HashMap<String, u32>, ParseError> {
let parser = self.parsers.get_mut(&language).ok_or_else(|| {
ParseError::from_reason(format!("Unsupported language: {}", language))
})?;
let tree = parser
.parse(&code, None)
.ok_or_else(|| ParseError::from_reason("Failed to parse code"))?;
let mut complexity = HashMap::new();
// Calculate various complexity metrics
complexity.insert(
"cyclomatic".to_string(),
Self::calculate_cyclomatic_complexity(&tree),
);
complexity.insert(
"cognitive".to_string(),
Self::calculate_cognitive_complexity(&tree),
);
complexity.insert(
"nesting_depth".to_string(),
Self::calculate_max_nesting_depth(&tree),
);
complexity.insert("function_count".to_string(), Self::count_functions(&tree));
complexity.insert("class_count".to_string(), Self::count_classes(&tree));
Ok(complexity)
}
// Helper methods for AST conversion and analysis
fn convert_tree_to_ast(&self, tree: &Tree, code: &str) -> Result<AstNode, ParseError> {
Self::convert_node_to_ast(tree.root_node(), code)
}
fn convert_node_to_ast(node: Node, code: &str) -> Result<AstNode, ParseError> {
let start_pos = node.start_position();
let end_pos = node.end_position();
let text = code
.get(node.start_byte()..node.end_byte())
.unwrap_or("")
.to_string();
let mut children = Vec::new();
let mut cursor = node.walk();
for child in node.children(&mut cursor) {
if !child.is_error() {
children.push(Self::convert_node_to_ast(child, code)?);
}
}
Ok(AstNode {
node_type: node.kind().to_string(),
text,
start_line: start_pos.row as u32,
end_line: end_pos.row as u32,
start_column: start_pos.column as u32,
end_column: end_pos.column as u32,
children,
})
}
fn extract_symbols(
&self,
tree: &Tree,
code: &str,
language: &str,
) -> Result<Vec<Symbol>, ParseError> {
let mut symbols = Vec::new();
Self::walk_for_symbols(tree.root_node(), code, language, &mut symbols, "global")?;
Ok(symbols)
}
fn walk_for_symbols(
node: Node,
code: &str,
_language: &str,
symbols: &mut Vec<Symbol>,
scope: &str,
) -> Result<(), ParseError> {
match node.kind() {
"function_declaration" | "function_definition" | "function_item" => {
if let Some(name) = Self::extract_function_name(node, code) {
symbols.push(Symbol {
name,
symbol_type: "function".to_string(),
line: node.start_position().row as u32,
column: node.start_position().column as u32,
scope: scope.to_string(),
});
}
}
"class_declaration" | "class_definition" | "struct_item" | "enum_item" => {
if let Some(name) = Self::extract_type_name(node, code) {
symbols.push(Symbol {
name: name.clone(),
symbol_type: Self::get_symbol_type(node.kind()).to_string(),
line: node.start_position().row as u32,
column: node.start_position().column as u32,
scope: scope.to_string(),
});
// Walk children with this class/struct as new scope
let mut cursor = node.walk();
for child in node.children(&mut cursor) {
Self::walk_for_symbols(child, code, _language, symbols, &name)?;
}
return Ok(());
}
}
"variable_declaration" | "let_declaration" | "const_declaration" => {
if let Some(name) = Self::extract_variable_name(node, code) {
symbols.push(Symbol {
name,
symbol_type: "variable".to_string(),
line: node.start_position().row as u32,
column: node.start_position().column as u32,
scope: scope.to_string(),
});
}
}
_ => {}
}
// Continue walking children
let mut cursor = node.walk();
for child in node.children(&mut cursor) {
Self::walk_for_symbols(child, code, _language, symbols, scope)?;
}
Ok(())
}
fn extract_function_name(node: Node, code: &str) -> Option<String> {
let mut cursor = node.walk();
for child in node.children(&mut cursor) {
if child.kind() == "identifier" {
return code
.get(child.start_byte()..child.end_byte())
.map(|s| s.to_string());
}
}
None
}
fn extract_type_name(node: Node, code: &str) -> Option<String> {
let mut cursor = node.walk();
for child in node.children(&mut cursor) {
if child.kind() == "identifier" || child.kind() == "type_identifier" {
return code
.get(child.start_byte()..child.end_byte())
.map(|s| s.to_string());
}
}
None
}
fn extract_variable_name(node: Node, code: &str) -> Option<String> {
let mut cursor = node.walk();
for child in node.children(&mut cursor) {
if child.kind() == "identifier" {
return code
.get(child.start_byte()..child.end_byte())
.map(|s| s.to_string());
}
}
None
}
fn get_symbol_type(node_kind: &str) -> &str {
match node_kind {
"class_declaration" | "class_definition" => "class",
"struct_item" => "struct",
"enum_item" => "enum",
"interface_declaration" => "interface",
"type_alias_declaration" => "type",
_ => "unknown",
}
}
fn extract_errors(tree: &Tree, _code: &str) -> Vec<String> {
let mut errors = Vec::new();
Self::walk_for_errors(tree.root_node(), &mut errors);
errors
}
fn walk_for_errors(node: Node, errors: &mut Vec<String>) {
if node.is_error() {
errors.push(format!(
"Parse error at line {}, column {}: {}",
node.start_position().row + 1,
node.start_position().column + 1,
node.kind()
));
}
if node.is_missing() {
errors.push(format!(
"Missing node at line {}, column {}: expected {}",
node.start_position().row + 1,
node.start_position().column + 1,
node.kind()
));
}
let mut cursor = node.walk();
for child in node.children(&mut cursor) {
Self::walk_for_errors(child, errors);
}
}
// Complexity calculation methods
fn calculate_cyclomatic_complexity(tree: &Tree) -> u32 {
let mut complexity = 1; // Base complexity
Self::walk_for_complexity(tree.root_node(), &mut complexity);
complexity
}
fn walk_for_complexity(node: Node, complexity: &mut u32) {
match node.kind() {
"if_statement"
| "while_statement"
| "for_statement"
| "switch_statement"
| "case_clause"
| "catch_clause"
| "conditional_expression" => {
*complexity += 1;
}
_ => {}
}
let mut cursor = node.walk();
for child in node.children(&mut cursor) {
Self::walk_for_complexity(child, complexity);
}
}
fn calculate_cognitive_complexity(tree: &Tree) -> u32 {
let mut complexity = 0;
Self::walk_for_cognitive_complexity(tree.root_node(), &mut complexity, 0);
complexity
}
fn walk_for_cognitive_complexity(node: Node, complexity: &mut u32, nesting_level: u32) {
let increment = match node.kind() {
"if_statement" | "switch_statement" | "for_statement" | "while_statement"
| "do_statement" => nesting_level + 1,
"catch_clause" => nesting_level + 1,
"conditional_expression" => 1,
"break_statement" | "continue_statement" => 1,
_ => 0,
};
*complexity += increment;
let new_nesting = if matches!(
node.kind(),
"if_statement"
| "switch_statement"
| "for_statement"
| "while_statement"
| "do_statement"
| "catch_clause"
) {
nesting_level + 1
} else {
nesting_level
};
let mut cursor = node.walk();
for child in node.children(&mut cursor) {
Self::walk_for_cognitive_complexity(child, complexity, new_nesting);
}
}
fn calculate_max_nesting_depth(tree: &Tree) -> u32 {
Self::walk_for_nesting_depth(tree.root_node(), 0)
}
fn walk_for_nesting_depth(node: Node, current_depth: u32) -> u32 {
let mut max_depth = current_depth;
let is_nesting_node = matches!(
node.kind(),
"if_statement"
| "while_statement"
| "for_statement"
| "switch_statement"
| "function_declaration"
| "class_declaration"
);
let new_depth = if is_nesting_node {
current_depth + 1
} else {
current_depth
};
max_depth = max_depth.max(new_depth);
let mut cursor = node.walk();
for child in node.children(&mut cursor) {
let child_max = Self::walk_for_nesting_depth(child, new_depth);
max_depth = max_depth.max(child_max);
}
max_depth
}
fn count_functions(tree: &Tree) -> u32 {
let mut count = 0;
Self::walk_for_function_count(tree.root_node(), &mut count);
count
}
fn walk_for_function_count(node: Node, count: &mut u32) {
if matches!(
node.kind(),
"function_declaration"
| "function_definition"
| "function_item"
| "method_definition"
| "arrow_function"
) {
*count += 1;
}
let mut cursor = node.walk();
for child in node.children(&mut cursor) {
Self::walk_for_function_count(child, count);
}
}
fn count_classes(tree: &Tree) -> u32 {
let mut count = 0;
Self::walk_for_class_count(tree.root_node(), &mut count);
count
}
fn walk_for_class_count(node: Node, count: &mut u32) {
if matches!(
node.kind(),
"class_declaration" | "class_definition" | "struct_item" | "enum_item"
) {
*count += 1;
}
let mut cursor = node.walk();
for child in node.children(&mut cursor) {
Self::walk_for_class_count(child, count);
}
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_parser_manager_creation() {
let manager = ParserManager::new();
assert!(manager.is_ok());
let manager = manager.unwrap();
assert!(!manager.parsers.is_empty());
}
#[test]
fn test_available_languages() {
let manager = ParserManager::new().unwrap();
let languages = manager.available_languages();
assert!(languages.contains(&"typescript".to_string()));
assert!(languages.contains(&"javascript".to_string()));
assert!(languages.contains(&"rust".to_string()));
assert!(languages.contains(&"python".to_string()));
assert!(languages.contains(&"sql".to_string()));
assert!(languages.contains(&"go".to_string()));
assert!(languages.contains(&"java".to_string()));
assert!(languages.contains(&"c".to_string()));
assert!(languages.contains(&"cpp".to_string()));
assert!(languages.contains(&"csharp".to_string()));
assert!(languages.contains(&"svelte".to_string()));
}
#[test]
fn test_supports_language() {
let manager = ParserManager::new().unwrap();
assert!(manager.supports_language("typescript"));
assert!(manager.supports_language("javascript"));
assert!(manager.supports_language("rust"));
assert!(manager.supports_language("python"));
assert!(manager.supports_language("sql"));
assert!(manager.supports_language("go"));
assert!(manager.supports_language("java"));
assert!(manager.supports_language("c"));
assert!(manager.supports_language("cpp"));
assert!(manager.supports_language("csharp"));
assert!(manager.supports_language("svelte"));
assert!(!manager.supports_language("unknown"));
assert!(!manager.supports_language(""));
}
#[test]
fn test_get_tree_sitter_language() {
let manager = ParserManager::new().unwrap();
// Test all supported languages
assert!(manager.get_tree_sitter_language("typescript").is_ok());
assert!(manager.get_tree_sitter_language("javascript").is_ok());
assert!(manager.get_tree_sitter_language("rust").is_ok());
assert!(manager.get_tree_sitter_language("python").is_ok());
assert!(manager.get_tree_sitter_language("sql").is_ok());
assert!(manager.get_tree_sitter_language("go").is_ok());
assert!(manager.get_tree_sitter_language("java").is_ok());
assert!(manager.get_tree_sitter_language("c").is_ok());
assert!(manager.get_tree_sitter_language("cpp").is_ok());
assert!(manager.get_tree_sitter_language("csharp").is_ok());
assert!(manager.get_tree_sitter_language("svelte").is_ok());
assert!(manager.get_tree_sitter_language("php").is_ok());
// Test unsupported language
assert!(manager.get_tree_sitter_language("unknown").is_err());
}
#[test]
fn test_parse_simple_code() {
let mut manager = ParserManager::new().unwrap();
// Test TypeScript parsing
let ts_code = "function test() { return 42; }";
let ts_result = manager.parse(ts_code, "typescript");
assert!(ts_result.is_ok());
let tree = ts_result.unwrap();
assert_eq!(tree.root_node().kind(), "program");
assert!(tree.root_node().child_count() > 0);
}
#[test]
fn test_parse_javascript() {
let mut manager = ParserManager::new().unwrap();
let js_code = "const x = 5;";
let result = manager.parse(js_code, "javascript");
assert!(result.is_ok());
let tree = result.unwrap();
assert_eq!(tree.root_node().kind(), "program");
}
#[test]
fn test_parse_rust() {
let mut manager = ParserManager::new().unwrap();
let rust_code = "fn main() { println!(\"Hello\"); }";
let result = manager.parse(rust_code, "rust");
assert!(result.is_ok());
let tree = result.unwrap();
assert_eq!(tree.root_node().kind(), "source_file");
}
#[test]
fn test_parse_python() {
let mut manager = ParserManager::new().unwrap();
let python_code = "def hello():\n return 'world'";
let result = manager.parse(python_code, "python");
assert!(result.is_ok());
let tree = result.unwrap();
assert_eq!(tree.root_node().kind(), "module");
}
#[test]
fn test_parse_sql() {
let mut manager = ParserManager::new().unwrap();
let sql_code = "SELECT * FROM users WHERE id = 1;";
let result = manager.parse(sql_code, "sql");
assert!(result.is_ok());
let tree = result.unwrap();
assert_eq!(tree.root_node().kind(), "program");
}
#[test]
fn test_parse_go() {
let mut manager = ParserManager::new().unwrap();
let go_code = "package main\n\nfunc main() {\n println(\"Hello\")\n}";
let result = manager.parse(go_code, "go");
assert!(result.is_ok());
let tree = result.unwrap();
assert_eq!(tree.root_node().kind(), "source_file");
}
#[test]
fn test_parse_unsupported_language() {
let mut manager = ParserManager::new().unwrap();
let result = manager.parse("some code", "unknown");
assert!(result.is_err());
}
#[test]
fn test_parse_invalid_code() {
let mut manager = ParserManager::new().unwrap();
// Test with syntactically invalid JavaScript
let invalid_js = "function {{{ invalid syntax";
let result = manager.parse(invalid_js, "javascript");
// Tree-sitter should still parse this (with error nodes), not fail entirely
assert!(result.is_ok());
let tree = result.unwrap();
assert_eq!(tree.root_node().kind(), "program");
}
#[test]
fn test_all_languages_initialized() {
let manager = ParserManager::new().unwrap();
// Verify all expected languages are present
let expected_languages = vec![
"typescript", "javascript", "rust", "python", "sql",
"go", "java", "c", "cpp", "csharp", "svelte", "php"
];
for lang in expected_languages {
assert!(manager.supports_language(lang), "Language {} should be supported", lang);
}
// Should have exactly these languages
assert_eq!(manager.available_languages().len(), 12);
}
}
| rust | MIT | 9cbed0d63a52d61fcd66d0d651b8c4d7f787ba78 | 2026-01-04T20:19:30.317431Z | false |
pi22by7/In-Memoria | https://github.com/pi22by7/In-Memoria/blob/9cbed0d63a52d61fcd66d0d651b8c4d7f787ba78/rust-core/src/parsing/utils.rs | rust-core/src/parsing/utils.rs | //! Utility functions for name extraction and tree-sitter node handling
use tree_sitter::Node;
/// Utilities for extracting names and identifiers from tree-sitter nodes
pub struct NameExtractor;
impl NameExtractor {
/// Extract name from a tree-sitter node by finding identifier children
pub fn extract_name_from_node(node: Node<'_>, content: &str) -> Result<String, String> {
// Try to find identifier node recursively
if let Some(name) = Self::find_identifier_recursive(node, content) {
return Ok(name);
}
Ok(String::new())
}
/// Find identifier recursively in the node tree
pub fn find_identifier_recursive(node: Node<'_>, content: &str) -> Option<String> {
Self::find_identifier_recursive_impl(node, content)
}
/// Internal implementation of recursive identifier finding
fn find_identifier_recursive_impl(node: Node<'_>, content: &str) -> Option<String> {
// Check if this node is an identifier
match node.kind() {
"identifier" | "property_identifier" | "type_identifier" => {
let start_byte = node.start_byte();
let end_byte = node.end_byte();
if let Some(name) = content.get(start_byte..end_byte) {
return Some(name.to_string());
}
}
_ => {}
}
// Search children recursively (but limit depth to avoid infinite recursion)
let mut cursor = node.walk();
for child in node.children(&mut cursor) {
if let Some(name) = Self::find_identifier_recursive_impl(child, content) {
return Some(name);
}
}
None
}
/// Find a child node by its kind
pub fn find_child_by_kind<'a>(node: Node<'a>, kind: &str) -> Option<Node<'a>> {
let mut cursor = node.walk();
for child in node.children(&mut cursor) {
if child.kind() == kind {
return Some(child);
}
// Also search recursively in children
if let Some(found) = Self::find_child_by_kind(child, kind) {
return Some(found);
}
}
None
}
/// Collect all identifiers from a node and its children
pub fn collect_identifiers_from_node(node: Node<'_>, content: &str) -> Vec<String> {
let mut identifiers = Vec::new();
Self::collect_identifiers_recursive(node, content, &mut identifiers);
identifiers
}
/// Recursively collect identifiers
fn collect_identifiers_recursive(node: Node<'_>, content: &str, identifiers: &mut Vec<String>) {
if node.kind() == "identifier" {
let start_byte = node.start_byte();
let end_byte = node.end_byte();
if let Some(name) = content.get(start_byte..end_byte) {
identifiers.push(name.to_string());
}
}
let mut cursor = node.walk();
for child in node.children(&mut cursor) {
Self::collect_identifiers_recursive(child, content, identifiers);
}
}
/// Extract text content from a node
pub fn extract_node_text<'a>(node: Node<'_>, content: &'a str) -> Option<&'a str> {
content.get(node.start_byte()..node.end_byte())
}
/// Check if a node represents a named construct (has identifier children)
pub fn is_named_construct(node: Node<'_>) -> bool {
let mut cursor = node.walk();
for child in node.children(&mut cursor) {
if matches!(child.kind(), "identifier" | "property_identifier" | "type_identifier") {
return true;
}
}
false
}
/// Get the line and column position of a node
pub fn get_position_info(node: Node<'_>) -> (u32, u32, u32, u32) {
(
node.start_position().row as u32 + 1,
node.start_position().column as u32 + 1,
node.end_position().row as u32 + 1,
node.end_position().column as u32 + 1,
)
}
/// Check if a node or any of its children have errors
pub fn has_syntax_errors(node: Node<'_>) -> bool {
if node.is_error() || node.is_missing() {
return true;
}
let mut cursor = node.walk();
for child in node.children(&mut cursor) {
if Self::has_syntax_errors(child) {
return true;
}
}
false
}
/// Count the number of children of a specific kind
pub fn count_children_by_kind(node: Node<'_>, kind: &str) -> usize {
let mut cursor = node.walk();
node.children(&mut cursor)
.filter(|child| child.kind() == kind)
.count()
}
/// Get all direct children of a node
pub fn get_direct_children(node: Node<'_>) -> Vec<Node<'_>> {
let mut cursor = node.walk();
node.children(&mut cursor).collect()
}
/// Check if a string is a valid programming language identifier
pub fn is_valid_identifier(name: &str) -> bool {
!name.is_empty()
&& name.chars().next().is_some_and(|c| c.is_alphabetic() || c == '_')
&& name.chars().all(|c| c.is_alphanumeric() || c == '_')
}
/// Sanitize an extracted name by removing invalid characters
pub fn sanitize_name(name: &str) -> String {
name.chars()
.filter(|c| c.is_alphanumeric() || *c == '_')
.collect()
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::parsing::ParserManager;
fn create_test_tree_and_code() -> (tree_sitter::Tree, String) {
let mut manager = ParserManager::new().unwrap();
let code = "function calculateTotal(price, tax) { return price + tax; }".to_string();
let tree = manager.parse(&code, "javascript").unwrap();
(tree, code)
}
#[test]
fn test_extract_name_from_node() {
let (tree, code) = create_test_tree_and_code();
let root = tree.root_node();
// Find the function declaration
let mut cursor = root.walk();
let function_node = root.children(&mut cursor)
.find(|child| child.kind() == "function_declaration")
.unwrap();
let name = NameExtractor::extract_name_from_node(function_node, &code);
assert!(name.is_ok());
assert_eq!(name.unwrap(), "calculateTotal");
}
#[test]
fn test_find_identifier_recursive() {
let (tree, code) = create_test_tree_and_code();
let root = tree.root_node();
let identifier = NameExtractor::find_identifier_recursive(root, &code);
assert!(identifier.is_some());
assert_eq!(identifier.unwrap(), "calculateTotal");
}
#[test]
fn test_find_child_by_kind() {
let (tree, _code) = create_test_tree_and_code();
let root = tree.root_node();
let function_node = NameExtractor::find_child_by_kind(root, "function_declaration");
assert!(function_node.is_some());
assert_eq!(function_node.unwrap().kind(), "function_declaration");
let nonexistent = NameExtractor::find_child_by_kind(root, "nonexistent_kind");
assert!(nonexistent.is_none());
}
#[test]
fn test_collect_identifiers_from_node() {
    let (tree, code) = create_test_tree_and_code();
    let root = tree.root_node();
    let identifiers = NameExtractor::collect_identifiers_from_node(root, &code);
    // Should find function name and parameter names
    assert!(identifiers.contains(&"calculateTotal".to_string()));
    assert!(identifiers.contains(&"price".to_string()));
    assert!(identifiers.contains(&"tax".to_string()));
}

#[test]
fn test_extract_node_text() {
    // Extracting the root node's text should round-trip the entire source.
    let (tree, code) = create_test_tree_and_code();
    let root = tree.root_node();
    let text = NameExtractor::extract_node_text(root, &code);
    assert!(text.is_some());
    assert_eq!(text.unwrap(), code);
}

#[test]
fn test_is_named_construct() {
    let (tree, _code) = create_test_tree_and_code();
    let root = tree.root_node();
    // Root program node should have named constructs
    assert!(NameExtractor::is_named_construct(root));
    // Find function declaration which should also be a named construct
    let function_node = NameExtractor::find_child_by_kind(root, "function_declaration");
    assert!(function_node.is_some());
    assert!(NameExtractor::is_named_construct(function_node.unwrap()));
}

#[test]
fn test_get_position_info() {
    // Positions are exposed 1-based: (start_row, start_col, end_row, end_col).
    let (tree, _code) = create_test_tree_and_code();
    let root = tree.root_node();
    let (start_row, start_col, end_row, end_col) = NameExtractor::get_position_info(root);
    // Root should start at line 1, column 1
    assert_eq!(start_row, 1);
    assert_eq!(start_col, 1);
    assert!(end_row >= start_row);
    assert!(end_col >= start_col);
}

#[test]
fn test_has_syntax_errors() {
    let mut manager = ParserManager::new().unwrap();
    // Valid code should have no errors
    let valid_code = "function test() { return 42; }";
    let valid_tree = manager.parse(valid_code, "javascript").unwrap();
    assert!(!NameExtractor::has_syntax_errors(valid_tree.root_node()));
    // Invalid code should have errors
    let invalid_code = "function {{{ invalid syntax";
    let invalid_tree = manager.parse(invalid_code, "javascript").unwrap();
    assert!(NameExtractor::has_syntax_errors(invalid_tree.root_node()));
}

#[test]
fn test_count_children_by_kind() {
    // The fixture contains exactly one function declaration at the top level.
    let (tree, _code) = create_test_tree_and_code();
    let root = tree.root_node();
    let function_count = NameExtractor::count_children_by_kind(root, "function_declaration");
    assert_eq!(function_count, 1);
    let nonexistent_count = NameExtractor::count_children_by_kind(root, "nonexistent");
    assert_eq!(nonexistent_count, 0);
}

#[test]
fn test_get_direct_children() {
    let (tree, _code) = create_test_tree_and_code();
    let root = tree.root_node();
    let children = NameExtractor::get_direct_children(root);
    assert!(!children.is_empty());
    // Should have a function declaration as a child
    let has_function = children.iter().any(|child| child.kind() == "function_declaration");
    assert!(has_function);
}

#[test]
fn test_is_valid_identifier() {
    // Valid identifiers
    assert!(NameExtractor::is_valid_identifier("hello"));
    assert!(NameExtractor::is_valid_identifier("_private"));
    assert!(NameExtractor::is_valid_identifier("camelCase"));
    assert!(NameExtractor::is_valid_identifier("snake_case"));
    assert!(NameExtractor::is_valid_identifier("PascalCase"));
    assert!(NameExtractor::is_valid_identifier("a123"));
    assert!(NameExtractor::is_valid_identifier("_"));
    // Invalid identifiers
    assert!(!NameExtractor::is_valid_identifier(""));
    assert!(!NameExtractor::is_valid_identifier("123abc"));
    assert!(!NameExtractor::is_valid_identifier("hello-world"));
    assert!(!NameExtractor::is_valid_identifier("hello.world"));
    assert!(!NameExtractor::is_valid_identifier("hello world"));
    assert!(!NameExtractor::is_valid_identifier("@special"));
}

#[test]
fn test_sanitize_name() {
    // Sanitizing strips characters that are not valid in identifiers
    // (dash, dot, space, symbols) while keeping underscores and digits.
    assert_eq!(NameExtractor::sanitize_name("hello"), "hello");
    assert_eq!(NameExtractor::sanitize_name("hello-world"), "helloworld");
    assert_eq!(NameExtractor::sanitize_name("hello.world"), "helloworld");
    assert_eq!(NameExtractor::sanitize_name("hello world"), "helloworld");
    assert_eq!(NameExtractor::sanitize_name("hello123"), "hello123");
    assert_eq!(NameExtractor::sanitize_name("hello_world"), "hello_world");
    assert_eq!(NameExtractor::sanitize_name("@#$%"), "");
}

#[test]
fn test_complex_javascript_structure() {
    let mut manager = ParserManager::new().unwrap();
    let complex_code = r#"
class Calculator {
constructor(name) {
this.name = name;
}
add(a, b) {
return a + b;
}
}
"#;
    let tree = manager.parse(complex_code, "javascript").unwrap();
    let root = tree.root_node();
    let identifiers = NameExtractor::collect_identifiers_from_node(root, complex_code);
    // Should find class name, constructor, method name, and parameter names
    assert!(identifiers.contains(&"Calculator".to_string()));
    assert!(identifiers.contains(&"constructor".to_string()));
    assert!(identifiers.contains(&"name".to_string()));
    assert!(identifiers.contains(&"add".to_string()));
    assert!(identifiers.contains(&"a".to_string()));
    assert!(identifiers.contains(&"b".to_string()));
}

#[test]
fn test_empty_node_handling() {
    // Empty input must not panic: name extraction yields an empty string
    // and no identifiers are collected.
    let mut manager = ParserManager::new().unwrap();
    let empty_code = "";
    let tree = manager.parse(empty_code, "javascript").unwrap();
    let root = tree.root_node();
    let name = NameExtractor::extract_name_from_node(root, empty_code);
    assert!(name.is_ok());
    assert_eq!(name.unwrap(), "");
    let identifiers = NameExtractor::collect_identifiers_from_node(root, empty_code);
    assert_eq!(identifiers.len(), 0);
}
#[test]
fn test_typescript_types() {
let mut manager = ParserManager::new().unwrap();
let ts_code = "interface UserService { getName(): string; }";
let tree = manager.parse(ts_code, "typescript").unwrap();
let root = tree.root_node();
let identifiers = NameExtractor::collect_identifiers_from_node(root, ts_code);
// Should find interface name and method name
assert!(identifiers.contains(&"UserService".to_string()));
assert!(identifiers.contains(&"getName".to_string()));
}
} | rust | MIT | 9cbed0d63a52d61fcd66d0d651b8c4d7f787ba78 | 2026-01-04T20:19:30.317431Z | false |
pi22by7/In-Memoria | https://github.com/pi22by7/In-Memoria/blob/9cbed0d63a52d61fcd66d0d651b8c4d7f787ba78/rust-core/src/parsing/mod.rs | rust-core/src/parsing/mod.rs | pub mod manager;
pub mod tree_walker;
pub mod fallback;
pub mod utils;
pub use manager::*;
pub use tree_walker::*;
pub use fallback::*;
pub use utils::*; | rust | MIT | 9cbed0d63a52d61fcd66d0d651b8c4d7f787ba78 | 2026-01-04T20:19:30.317431Z | false |
pi22by7/In-Memoria | https://github.com/pi22by7/In-Memoria/blob/9cbed0d63a52d61fcd66d0d651b8c4d7f787ba78/rust-core/src/parsing/fallback.rs | rust-core/src/parsing/fallback.rs | //! Fallback pattern-based extraction when tree-sitter parsing fails
use crate::types::{SemanticConcept, LineRange};
use std::collections::HashMap;
use std::path::Path;
/// Fallback extractor for when tree-sitter parsing fails
///
/// Stateless: all extraction is line-oriented string matching on `&self`,
/// so construction is free. Results carry a lower confidence (0.7) than
/// tree-sitter based extraction.
pub struct FallbackExtractor;

impl FallbackExtractor {
    /// Create a new fallback extractor
    pub fn new() -> Self {
        Self
    }
    /// Extract concepts using regex patterns when tree-sitter fails
    ///
    /// Scans `content` line by line for function/class/interface declarations
    /// and returns them as lower-confidence `SemanticConcept`s. When nothing
    /// matches, a single file-level concept (named after the file stem) is
    /// emitted so callers always receive at least one result.
    pub fn extract_concepts(&self, file_path: &str, content: &str) -> Vec<SemanticConcept> {
        let mut concepts = Vec::new();
        // Monotonic counter shared by all concept kinds so ids stay unique
        // within a single file.
        let mut concept_id = 1;
        // Parse line by line looking for functions, classes, and interfaces
        for (line_num, line) in content.lines().enumerate() {
            let line = line.trim();
            // Try to extract function names
            if let Some(name) = self.extract_function_name(line) {
                concepts.push(self.create_fallback_concept(
                    &format!("fallback_fn_{}", concept_id),
                    name,
                    "function",
                    file_path,
                    line_num + 1, // report 1-indexed line numbers
                ));
                concept_id += 1;
            }
            // Try to extract class names
            if let Some(name) = self.extract_class_name(line) {
                concepts.push(self.create_fallback_concept(
                    &format!("fallback_class_{}", concept_id),
                    name,
                    "class",
                    file_path,
                    line_num + 1,
                ));
                concept_id += 1;
            }
            // Try to extract interface names
            if let Some(name) = self.extract_interface_name(line) {
                concepts.push(self.create_fallback_concept(
                    &format!("fallback_interface_{}", concept_id),
                    name,
                    "interface",
                    file_path,
                    line_num + 1,
                ));
                concept_id += 1;
            }
        }
        // If no concepts found, create a generic file concept
        if concepts.is_empty() {
            let file_name = Path::new(file_path)
                .file_stem()
                .and_then(|s| s.to_str())
                .unwrap_or("unknown");
            concepts.push(self.create_fallback_concept(
                "fallback_file_1",
                file_name.to_string(),
                "file",
                file_path,
                1,
            ));
        }
        concepts
    }
/// Create a fallback concept with lower confidence
fn create_fallback_concept(
&self,
id: &str,
name: String,
concept_type: &str,
file_path: &str,
line: usize,
) -> SemanticConcept {
let mut relationships = HashMap::new();
relationships.insert("extraction_method".to_string(), "fallback".to_string());
let mut metadata = HashMap::new();
metadata.insert("source".to_string(), "regex_fallback".to_string());
metadata.insert(
"confidence_reason".to_string(),
"tree_sitter_failed".to_string(),
);
SemanticConcept {
id: id.to_string(),
name,
concept_type: concept_type.to_string(),
confidence: 0.7, // Lower confidence for fallback extraction
file_path: file_path.to_string(),
line_range: LineRange {
start: line as u32,
end: line as u32,
},
relationships,
metadata,
}
}
/// Extract function names using regex patterns
fn extract_function_name(&self, line: &str) -> Option<String> {
// TypeScript/JavaScript function patterns
if line.contains("function ") {
if let Some(start) = line.find("function ") {
let after_function = &line[start + 9..];
if let Some(end) = after_function.find('(') {
let name = after_function[..end].trim();
if !name.is_empty() && self.is_valid_identifier(name) {
return Some(name.to_string());
}
}
}
}
// Arrow function patterns: const funcName = () =>
if line.contains("=>") {
if let Some(equals_pos) = line.find('=') {
let before_equals = &line[..equals_pos].trim();
if let Some(name_start) = before_equals.rfind(char::is_whitespace) {
let name = before_equals[name_start..].trim();
if !name.is_empty() && self.is_valid_identifier(name) {
return Some(name.to_string());
}
} else {
// Handle case like "const funcName ="
if let Some(const_pos) = before_equals.find("const ") {
let name = before_equals[const_pos + 6..].trim();
if !name.is_empty() && self.is_valid_identifier(name) {
return Some(name.to_string());
}
}
}
}
}
// Rust function patterns
if line.contains("fn ") {
if let Some(start) = line.find("fn ") {
let after_fn = &line[start + 3..];
if let Some(end) = after_fn.find('(') {
let name = after_fn[..end].trim();
if !name.is_empty() && self.is_valid_identifier(name) {
return Some(name.to_string());
}
}
}
}
// Python function patterns
if line.trim_start().starts_with("def ") {
if let Some(start) = line.find("def ") {
let after_def = &line[start + 4..];
if let Some(end) = after_def.find('(') {
let name = after_def[..end].trim();
if !name.is_empty() && self.is_valid_identifier(name) {
return Some(name.to_string());
}
}
}
}
None
}
/// Extract class names using regex patterns
fn extract_class_name(&self, line: &str) -> Option<String> {
if line.contains("class ") {
if let Some(start) = line.find("class ") {
let after_class = &line[start + 6..];
let end = after_class
.find(char::is_whitespace)
.or_else(|| after_class.find('{'))
.or_else(|| after_class.find('('))
.unwrap_or(after_class.len());
let name = after_class[..end].trim();
if !name.is_empty() && self.is_valid_identifier(name) {
return Some(name.to_string());
}
}
}
// Rust struct patterns
if line.contains("struct ") {
if let Some(start) = line.find("struct ") {
let after_struct = &line[start + 7..];
let end = after_struct
.find(char::is_whitespace)
.or_else(|| after_struct.find('{'))
.or_else(|| after_struct.find('<'))
.unwrap_or(after_struct.len());
let name = after_struct[..end].trim();
if !name.is_empty() && self.is_valid_identifier(name) {
return Some(name.to_string());
}
}
}
None
}
/// Extract interface names using regex patterns
fn extract_interface_name(&self, line: &str) -> Option<String> {
if line.contains("interface ") {
if let Some(start) = line.find("interface ") {
let after_interface = &line[start + 10..];
let end = after_interface
.find(char::is_whitespace)
.or_else(|| after_interface.find('{'))
.or_else(|| after_interface.find('<'))
.unwrap_or(after_interface.len());
let name = after_interface[..end].trim();
if !name.is_empty() && self.is_valid_identifier(name) {
return Some(name.to_string());
}
}
}
None
}
/// Check if a string is a valid programming language identifier
fn is_valid_identifier(&self, name: &str) -> bool {
!name.is_empty()
&& name.chars().next().is_some_and(|c| c.is_alphabetic() || c == '_')
&& name.chars().all(|c| c.is_alphanumeric() || c == '_')
}
}
impl Default for FallbackExtractor {
    /// Equivalent to [`FallbackExtractor::new`]; the extractor is stateless.
    fn default() -> Self {
        Self::new()
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_extract_javascript_function() {
        let extractor = FallbackExtractor::new();
        let code = "function calculateTotal() { return 42; }";
        let concepts = extractor.extract_concepts("test.js", code);
        assert_eq!(concepts.len(), 1);
        assert_eq!(concepts[0].name, "calculateTotal");
        assert_eq!(concepts[0].concept_type, "function");
        assert_eq!(concepts[0].confidence, 0.7);
    }

    #[test]
    fn test_extract_arrow_function() {
        let extractor = FallbackExtractor::new();
        let code = "const handleClick = () => { console.log('clicked'); }";
        let concepts = extractor.extract_concepts("test.js", code);
        assert_eq!(concepts.len(), 1);
        assert_eq!(concepts[0].name, "handleClick");
        assert_eq!(concepts[0].concept_type, "function");
    }

    #[test]
    fn test_extract_typescript_class() {
        // NOTE(review): expects the bare method `getName() {` to be extracted
        // as a function, but the extractor only matches keyword-prefixed
        // declarations (`function `/`fn `/`def `/arrow) — verify this passes.
        let extractor = FallbackExtractor::new();
        let code = "export class UserService { getName() { return 'test'; } }";
        let concepts = extractor.extract_concepts("test.ts", code);
        assert_eq!(concepts.len(), 2); // Class + method
        let class_concept = concepts.iter().find(|c| c.concept_type == "class").unwrap();
        assert_eq!(class_concept.name, "UserService");
        let function_concept = concepts.iter().find(|c| c.concept_type == "function").unwrap();
        assert_eq!(function_concept.name, "getName");
    }

    #[test]
    fn test_extract_rust_function() {
        let extractor = FallbackExtractor::new();
        let code = "fn calculate_total() -> i32 { 42 }";
        let concepts = extractor.extract_concepts("test.rs", code);
        assert_eq!(concepts.len(), 1);
        assert_eq!(concepts[0].name, "calculate_total");
        assert_eq!(concepts[0].concept_type, "function");
    }

    #[test]
    fn test_extract_rust_struct() {
        let extractor = FallbackExtractor::new();
        let code = "pub struct User { name: String }";
        let concepts = extractor.extract_concepts("test.rs", code);
        assert_eq!(concepts.len(), 1);
        assert_eq!(concepts[0].name, "User");
        assert_eq!(concepts[0].concept_type, "class"); // Mapped as class
    }

    #[test]
    fn test_extract_python_function() {
        let extractor = FallbackExtractor::new();
        let code = "def process_data(data):\n    return data.strip()";
        let concepts = extractor.extract_concepts("test.py", code);
        assert_eq!(concepts.len(), 1);
        assert_eq!(concepts[0].name, "process_data");
        assert_eq!(concepts[0].concept_type, "function");
    }

    #[test]
    fn test_extract_interface() {
        let extractor = FallbackExtractor::new();
        let code = "interface IUserService { getName(): string; }";
        let concepts = extractor.extract_concepts("test.ts", code);
        assert_eq!(concepts.len(), 1);
        assert_eq!(concepts[0].name, "IUserService");
        assert_eq!(concepts[0].concept_type, "interface");
    }

    #[test]
    fn test_extract_multiple_concepts() {
        // NOTE(review): `function_concepts >= 2` expects the bare method
        // `add(a, b)` to be extracted; the extractor only matches
        // keyword-prefixed declarations — verify this expectation.
        let extractor = FallbackExtractor::new();
        let code = r#"
class Calculator {
add(a, b) { return a + b; }
}
function multiply(x, y) {
return x * y;
}
interface MathOperations {
calculate(): number;
}
"#;
        let concepts = extractor.extract_concepts("test.ts", code);
        assert!(concepts.len() >= 3);
        let class_concepts = concepts.iter().filter(|c| c.concept_type == "class").count();
        let function_concepts = concepts.iter().filter(|c| c.concept_type == "function").count();
        let interface_concepts = concepts.iter().filter(|c| c.concept_type == "interface").count();
        assert!(class_concepts >= 1);
        assert!(function_concepts >= 2); // add + multiply
        assert!(interface_concepts >= 1);
    }

    #[test]
    fn test_empty_content() {
        // No matches at all: a single file-level concept named after the
        // file stem is synthesized.
        let extractor = FallbackExtractor::new();
        let concepts = extractor.extract_concepts("empty.js", "");
        assert_eq!(concepts.len(), 1);
        assert_eq!(concepts[0].name, "empty");
        assert_eq!(concepts[0].concept_type, "file");
    }

    #[test]
    fn test_no_concepts_found() {
        let extractor = FallbackExtractor::new();
        let code = "const x = 42;\nconsole.log('hello');";
        let concepts = extractor.extract_concepts("simple.js", code);
        assert_eq!(concepts.len(), 1);
        assert_eq!(concepts[0].name, "simple");
        assert_eq!(concepts[0].concept_type, "file");
    }

    #[test]
    fn test_invalid_identifiers() {
        let extractor = FallbackExtractor::new();
        // Should not extract invalid identifiers
        assert!(!extractor.is_valid_identifier(""));
        assert!(!extractor.is_valid_identifier("123abc")); // Starts with number
        assert!(!extractor.is_valid_identifier("hello-world")); // Contains dash
        assert!(!extractor.is_valid_identifier("hello.world")); // Contains dot
        // Should extract valid identifiers
        assert!(extractor.is_valid_identifier("hello"));
        assert!(extractor.is_valid_identifier("_private"));
        assert!(extractor.is_valid_identifier("camelCase"));
        assert!(extractor.is_valid_identifier("snake_case"));
        assert!(extractor.is_valid_identifier("PascalCase"));
        assert!(extractor.is_valid_identifier("a123"));
    }

    #[test]
    fn test_concept_metadata() {
        // Fallback concepts must be tagged so consumers can tell them apart
        // from tree-sitter extractions.
        let extractor = FallbackExtractor::new();
        let code = "function test() { return 42; }";
        let concepts = extractor.extract_concepts("test.js", code);
        assert_eq!(concepts.len(), 1);
        let concept = &concepts[0];
        assert_eq!(concept.relationships.get("extraction_method"), Some(&"fallback".to_string()));
        assert_eq!(concept.metadata.get("source"), Some(&"regex_fallback".to_string()));
        assert_eq!(concept.metadata.get("confidence_reason"), Some(&"tree_sitter_failed".to_string()));
    }

    #[test]
    fn test_line_numbers() {
        let extractor = FallbackExtractor::new();
        // Blank separator lines are load-bearing: the assertions below pin
        // the declarations to lines 2, 4, and 6 (1-indexed).
        let code = r#"
function first() {}

class Second {}

function third() {}
"#;
        let concepts = extractor.extract_concepts("test.js", code);
        // Find concepts by name and check their line numbers
        let first_fn = concepts.iter().find(|c| c.name == "first").unwrap();
        assert_eq!(first_fn.line_range.start, 2); // Second line (1-indexed)
        let second_class = concepts.iter().find(|c| c.name == "Second").unwrap();
        assert_eq!(second_class.line_range.start, 4); // Fourth line
        let third_fn = concepts.iter().find(|c| c.name == "third").unwrap();
        assert_eq!(third_fn.line_range.start, 6); // Sixth line
    }

    #[test]
    fn test_edge_case_patterns() {
        let extractor = FallbackExtractor::new();
        // Edge case: function keyword in comment should not be extracted
        let code_with_comment = "// This function does something\nfunction realFunction() {}";
        let concepts = extractor.extract_concepts("test.js", code_with_comment);
        assert_eq!(concepts.len(), 1);
        assert_eq!(concepts[0].name, "realFunction");
        // Edge case: function keyword in string should not be extracted
        let code_with_string = r#"const msg = "function in string"; function actualFunction() {}"#;
        let string_concepts = extractor.extract_concepts("test.js", code_with_string);
        assert_eq!(string_concepts.len(), 1);
        assert_eq!(string_concepts[0].name, "actualFunction");
    }
} | rust | MIT | 9cbed0d63a52d61fcd66d0d651b8c4d7f787ba78 | 2026-01-04T20:19:30.317431Z | false |
pi22by7/In-Memoria | https://github.com/pi22by7/In-Memoria/blob/9cbed0d63a52d61fcd66d0d651b8c4d7f787ba78/rust-core/src/patterns/implementation.rs | rust-core/src/patterns/implementation.rs | //! Implementation pattern recognition for design patterns
#[cfg(feature = "napi-bindings")]
use napi_derive::napi;
use crate::patterns::types::{Pattern, PatternExample, ImplementationPattern, PatternExtractor};
use crate::types::{ParseError, LineRange, SemanticConcept};
use std::collections::HashMap;
use regex::Regex;
use walkdir::WalkDir;
use std::fs;
/// Analyzer for detecting implementation patterns (design patterns)
#[cfg_attr(feature = "napi-bindings", napi)]
pub struct ImplementationPatternAnalyzer {
    // Patterns confirmed by `analyze_concepts`, keyed by pattern name
    // (e.g. "Singleton").
    patterns: HashMap<String, ImplementationPattern>,
    // Static detection signatures keyed by pattern name; populated once by
    // `initialize_pattern_signatures` during construction.
    pattern_signatures: HashMap<String, PatternSignature>,
}

/// Static description of how to recognize one design pattern.
#[derive(Debug, Clone)]
struct PatternSignature {
    // Method names whose presence strongly indicates the pattern.
    required_methods: Vec<String>,
    // Method names that add confidence but are not required.
    optional_methods: Vec<String>,
    // Keywords matched against concept names/types/metadata.
    class_characteristics: Vec<String>,
    // Regex source strings matched against raw code.
    code_patterns: Vec<String>,
    // Minimum accumulated confidence for the pattern to be reported.
    confidence_threshold: f64,
}

/// One detected occurrence of a pattern, before conversion to `Pattern`.
#[derive(Debug, Clone)]
struct PatternMatch {
    pattern_name: String,
    confidence: f64,
    // Human-readable strings describing what matched.
    evidence: Vec<String>,
    location: String,
}
#[cfg_attr(feature = "napi-bindings", napi)]
impl ImplementationPatternAnalyzer {
#[cfg_attr(feature = "napi-bindings", napi(constructor))]
pub fn new() -> Self {
let mut analyzer = ImplementationPatternAnalyzer {
patterns: HashMap::new(),
pattern_signatures: HashMap::new(),
};
analyzer.initialize_pattern_signatures();
analyzer
}
    /// Initialize signatures for common design patterns
    ///
    /// Pure data: registers nine built-in signatures (Singleton, Factory,
    /// Observer, Builder, Strategy, DependencyInjection, Decorator, Command,
    /// Adapter). Thresholds are tuned per pattern; how the evidence is scored
    /// against them lives in `detect_patterns_in_concepts`.
    fn initialize_pattern_signatures(&mut self) {
        // Singleton Pattern
        self.pattern_signatures.insert("Singleton".to_string(), PatternSignature {
            required_methods: vec!["getInstance".to_string()],
            optional_methods: vec!["constructor".to_string(), "__construct".to_string()],
            class_characteristics: vec!["static_instance".to_string(), "private_constructor".to_string()],
            code_patterns: vec![
                r"private\s+static\s+\w*instance".to_string(),
                r"getInstance\(\)".to_string(),
                r"private\s+\w*\(\)".to_string(), // private constructor
            ],
            confidence_threshold: 0.7,
        });
        // Factory Pattern
        self.pattern_signatures.insert("Factory".to_string(), PatternSignature {
            required_methods: vec!["create".to_string(), "make".to_string(), "build".to_string()],
            optional_methods: vec!["factory".to_string()],
            class_characteristics: vec!["creator".to_string(), "product".to_string()],
            code_patterns: vec![
                r"create\w*\(\)".to_string(),
                r"make\w*\(\)".to_string(),
                r"Factory".to_string(),
            ],
            confidence_threshold: 0.6,
        });
        // Observer Pattern
        self.pattern_signatures.insert("Observer".to_string(), PatternSignature {
            required_methods: vec!["notify".to_string(), "update".to_string(), "subscribe".to_string()],
            optional_methods: vec!["unsubscribe".to_string(), "addListener".to_string(), "removeListener".to_string()],
            class_characteristics: vec!["subject".to_string(), "observer".to_string(), "listeners".to_string()],
            code_patterns: vec![
                r"notify\w*\(\)".to_string(),
                r"update\(\)".to_string(),
                r"subscribe\(\)".to_string(),
                r"addEventListener".to_string(),
            ],
            confidence_threshold: 0.7,
        });
        // Builder Pattern
        self.pattern_signatures.insert("Builder".to_string(), PatternSignature {
            required_methods: vec!["build".to_string(), "with".to_string(), "set".to_string()],
            optional_methods: vec!["create".to_string(), "builder".to_string()],
            class_characteristics: vec!["builder".to_string(), "director".to_string()],
            code_patterns: vec![
                r"\.with\w+\(".to_string(),
                r"\.set\w+\(".to_string(),
                r"\.build\(\)".to_string(),
                r"Builder".to_string(),
            ],
            confidence_threshold: 0.6,
        });
        // Strategy Pattern
        self.pattern_signatures.insert("Strategy".to_string(), PatternSignature {
            required_methods: vec!["execute".to_string(), "apply".to_string(), "process".to_string()],
            optional_methods: vec!["strategy".to_string(), "algorithm".to_string()],
            class_characteristics: vec!["strategy".to_string(), "context".to_string()],
            code_patterns: vec![
                r"execute\(\)".to_string(),
                r"Strategy".to_string(),
                r"setStrategy\(".to_string(),
            ],
            confidence_threshold: 0.6,
        });
        // Dependency Injection Pattern
        self.pattern_signatures.insert("DependencyInjection".to_string(), PatternSignature {
            required_methods: vec!["inject".to_string(), "provide".to_string(), "register".to_string()],
            optional_methods: vec!["bind".to_string(), "container".to_string()],
            class_characteristics: vec!["injector".to_string(), "container".to_string(), "provider".to_string()],
            code_patterns: vec![
                r"@inject".to_string(),
                r"@Injectable".to_string(),
                r"container\.get\(".to_string(),
                r"DI".to_string(),
            ],
            confidence_threshold: 0.7,
        });
        // Decorator Pattern
        self.pattern_signatures.insert("Decorator".to_string(), PatternSignature {
            required_methods: vec!["wrap".to_string(), "decorate".to_string()],
            optional_methods: vec!["unwrap".to_string()],
            class_characteristics: vec!["decorator".to_string(), "wrapper".to_string()],
            code_patterns: vec![
                r"@\w+".to_string(), // Decorator syntax
                r"Decorator".to_string(),
                r"wrap\(".to_string(),
            ],
            confidence_threshold: 0.6,
        });
        // Command Pattern
        self.pattern_signatures.insert("Command".to_string(), PatternSignature {
            required_methods: vec!["execute".to_string(), "undo".to_string()],
            optional_methods: vec!["redo".to_string(), "command".to_string()],
            class_characteristics: vec!["command".to_string(), "invoker".to_string(), "receiver".to_string()],
            code_patterns: vec![
                r"execute\(\)".to_string(),
                r"undo\(\)".to_string(),
                r"Command".to_string(),
            ],
            confidence_threshold: 0.7,
        });
        // Adapter Pattern
        self.pattern_signatures.insert("Adapter".to_string(), PatternSignature {
            required_methods: vec!["adapt".to_string(), "convert".to_string()],
            optional_methods: vec!["wrap".to_string()],
            class_characteristics: vec!["adapter".to_string(), "adaptee".to_string()],
            code_patterns: vec![
                r"Adapter".to_string(),
                r"adapt\(".to_string(),
            ],
            confidence_threshold: 0.6,
        });
    }
    /// Analyze semantic concepts for implementation patterns
    ///
    /// Runs signature matching over `concepts`, keeps matches that meet their
    /// pattern's confidence threshold (0.5 when the signature is unknown),
    /// converts them into public `Pattern` values, and caches each detection
    /// in `self.patterns` for later queries.
    pub fn analyze_concepts(&mut self, concepts: &[SemanticConcept]) -> Result<Vec<Pattern>, ParseError> {
        let mut detected_patterns = Vec::new();
        let pattern_matches = self.detect_patterns_in_concepts(concepts)?;
        for pattern_match in pattern_matches {
            // Re-check against the per-pattern threshold (defensive: the
            // detector already applied it once).
            if pattern_match.confidence >= self.pattern_signatures
                .get(&pattern_match.pattern_name)
                .map(|s| s.confidence_threshold)
                .unwrap_or(0.5) {
                let examples = self.create_examples_for_pattern(&pattern_match, concepts);
                let pattern = Pattern {
                    id: format!("implementation_{}", pattern_match.pattern_name.to_lowercase()),
                    pattern_type: "implementation".to_string(),
                    description: format!(
                        "{} pattern detected with {:.1}% confidence",
                        pattern_match.pattern_name,
                        pattern_match.confidence * 100.0
                    ),
                    // Frequency is the number of evidence strings, not of
                    // distinct occurrences in the codebase.
                    frequency: pattern_match.evidence.len() as u32,
                    confidence: pattern_match.confidence,
                    examples,
                    contexts: vec!["design_pattern".to_string()],
                };
                detected_patterns.push(pattern);
                // Store in internal patterns
                let impl_pattern = ImplementationPattern {
                    pattern_type: pattern_match.pattern_name.clone(),
                    frequency: pattern_match.evidence.len() as u32,
                    code_signatures: pattern_match.evidence.clone(),
                    confidence: pattern_match.confidence,
                };
                self.patterns.insert(pattern_match.pattern_name.clone(), impl_pattern);
            }
        }
        Ok(detected_patterns)
    }
/// Detect anti-patterns in implementation
pub fn detect_antipatterns(&self, concepts: &[SemanticConcept]) -> Vec<String> {
let mut antipatterns = Vec::new();
antipatterns.extend(self.detect_god_object_antipattern(concepts));
antipatterns.extend(self.detect_spaghetti_code_antipattern(concepts));
antipatterns.extend(self.detect_copy_paste_antipattern(concepts));
antipatterns.extend(self.detect_magic_number_antipattern(concepts));
antipatterns.extend(self.detect_long_parameter_list_antipattern(concepts));
antipatterns
}
/// Generate recommendations based on detected patterns
pub fn generate_recommendations(&self, concepts: &[SemanticConcept]) -> Vec<String> {
let mut recommendations = Vec::new();
// Check for missing patterns that could be beneficial
if self.should_suggest_singleton(concepts) {
recommendations.push("Consider using Singleton pattern for global state management".to_string());
}
if self.should_suggest_factory(concepts) {
recommendations.push("Consider using Factory pattern for object creation complexity".to_string());
}
if self.should_suggest_observer(concepts) {
recommendations.push("Consider using Observer pattern for event handling".to_string());
}
if self.should_suggest_strategy(concepts) {
recommendations.push("Consider using Strategy pattern to reduce conditional complexity".to_string());
}
if self.should_suggest_dependency_injection(concepts) {
recommendations.push("Consider using Dependency Injection for better testability".to_string());
}
// Anti-pattern recommendations
let antipatterns = self.detect_antipatterns(concepts);
if !antipatterns.is_empty() {
recommendations.push("Address detected anti-patterns to improve code quality".to_string());
}
if recommendations.is_empty() {
recommendations.push("Implementation patterns look good! Consider documenting design decisions".to_string());
}
recommendations
}
    /// Analyze code files for pattern signatures
    ///
    /// Walks `path` recursively, reads every file with a recognized source
    /// extension, and runs regex-based signature detection on its contents.
    /// Walk errors and unreadable files are silently skipped.
    pub fn analyze_code_files(&mut self, path: &str) -> Result<Vec<Pattern>, ParseError> {
        let mut detected_patterns = Vec::new();
        for entry in WalkDir::new(path).into_iter().filter_map(|e| e.ok()) {
            if entry.file_type().is_file() {
                let file_path = entry.path();
                if let Some(extension) = file_path.extension().and_then(|s| s.to_str()) {
                    // Only common source-code extensions are analyzed.
                    if matches!(extension.to_lowercase().as_str(), "js" | "ts" | "jsx" | "tsx" | "rs" | "py" | "java" | "cs" | "cpp" | "c") {
                        if let Ok(content) = fs::read_to_string(file_path) {
                            let patterns = self.detect_patterns_in_code(&content, file_path.to_string_lossy().as_ref())?;
                            detected_patterns.extend(patterns);
                        }
                    }
                }
            }
        }
        Ok(detected_patterns)
    }
    /// Detect patterns in concepts using semantic analysis
    ///
    /// Scores every registered signature against the concept list. The
    /// confidence budget: up to 0.4 for required-method coverage, up to 0.15
    /// for optional methods, up to 0.3 for class characteristics, and a flat
    /// 0.3 when the pattern name appears in concept/file names. A match is
    /// kept only when the total meets the signature's threshold.
    fn detect_patterns_in_concepts(&self, concepts: &[SemanticConcept]) -> Result<Vec<PatternMatch>, ParseError> {
        let mut pattern_matches = Vec::new();
        for (pattern_name, signature) in &self.pattern_signatures {
            let mut evidence = Vec::new();
            let mut confidence_scores = Vec::new();
            // Check for required methods in concepts
            let method_matches = self.find_method_matches(concepts, &signature.required_methods);
            evidence.extend(method_matches.iter().map(|m| format!("Method: {}", m)));
            if !method_matches.is_empty() {
                confidence_scores.push(0.4 * (method_matches.len() as f64 / signature.required_methods.len() as f64));
            }
            // Check for optional methods (bonus confidence)
            if !signature.optional_methods.is_empty() {
                let optional_matches = self.find_method_matches(concepts, &signature.optional_methods);
                evidence.extend(optional_matches.iter().map(|m| format!("Optional Method: {}", m)));
                if !optional_matches.is_empty() {
                    // Optional methods provide a confidence boost but aren't required
                    confidence_scores.push(0.15 * (optional_matches.len() as f64 / signature.optional_methods.len() as f64));
                }
            }
            // Check for class characteristics
            let class_matches = self.find_class_characteristic_matches(concepts, &signature.class_characteristics);
            evidence.extend(class_matches.iter().map(|c| format!("Characteristic: {}", c)));
            if !class_matches.is_empty() {
                confidence_scores.push(0.3 * (class_matches.len() as f64 / signature.class_characteristics.len() as f64));
            }
            // Check naming patterns
            let naming_matches = self.find_naming_pattern_matches(concepts, pattern_name);
            evidence.extend(naming_matches.iter().map(|n| format!("Naming: {}", n)));
            if !naming_matches.is_empty() {
                confidence_scores.push(0.3);
            }
            let total_confidence: f64 = confidence_scores.iter().sum();
            if total_confidence >= signature.confidence_threshold && !evidence.is_empty() {
                pattern_matches.push(PatternMatch {
                    pattern_name: pattern_name.clone(),
                    confidence: total_confidence,
                    evidence,
                    // No single source location: evidence spans many concepts.
                    location: "multiple_concepts".to_string(),
                });
            }
        }
        Ok(pattern_matches)
    }
    /// Detect patterns in code using regex patterns
    ///
    /// Scores each signature against the raw file text: +0.2 per regex with
    /// at least one hit and +0.2 per required method name found as a
    /// substring. NOTE(review): confidence is uncapped here (can exceed 1.0)
    /// and the example line_range is a 1..1 placeholder — confirm intended.
    fn detect_patterns_in_code(&self, code: &str, file_path: &str) -> Result<Vec<Pattern>, ParseError> {
        let mut detected_patterns = Vec::new();
        for (pattern_name, signature) in &self.pattern_signatures {
            let mut evidence = Vec::new();
            let mut confidence = 0.0;
            // Check code patterns using regex
            for code_pattern in &signature.code_patterns {
                // Invalid regexes are silently skipped.
                if let Ok(regex) = Regex::new(code_pattern) {
                    let matches: Vec<_> = regex.find_iter(code).collect();
                    if !matches.is_empty() {
                        evidence.extend(matches.iter().map(|m| format!("Code pattern: {}", m.as_str())));
                        confidence += 0.2;
                    }
                }
            }
            // Check for method names in the code
            for method in &signature.required_methods {
                if code.contains(method) {
                    evidence.push(format!("Method found: {}", method));
                    confidence += 0.2;
                }
            }
            if confidence >= signature.confidence_threshold && !evidence.is_empty() {
                let examples = vec![PatternExample {
                    code: evidence.join(", "),
                    file_path: file_path.to_string(),
                    line_range: LineRange { start: 1, end: 1 },
                }];
                detected_patterns.push(Pattern {
                    id: format!("implementation_{}", pattern_name.to_lowercase()),
                    pattern_type: "implementation".to_string(),
                    description: format!("{} pattern detected in code", pattern_name),
                    frequency: evidence.len() as u32,
                    confidence,
                    examples,
                    contexts: vec!["code_analysis".to_string()],
                });
            }
        }
        Ok(detected_patterns)
    }
/// Find method matches in concepts
fn find_method_matches(&self, concepts: &[SemanticConcept], required_methods: &[String]) -> Vec<String> {
let mut matches = Vec::new();
for concept in concepts {
if concept.concept_type == "method" || concept.concept_type == "function" {
for required_method in required_methods {
if concept.name.to_lowercase().contains(&required_method.to_lowercase()) ||
self.is_method_variant(&concept.name, required_method) {
matches.push(concept.name.clone());
break;
}
}
}
}
matches
}
/// Find class characteristic matches
fn find_class_characteristic_matches(&self, concepts: &[SemanticConcept], characteristics: &[String]) -> Vec<String> {
let mut matches = Vec::new();
for concept in concepts {
for characteristic in characteristics {
if concept.name.to_lowercase().contains(&characteristic.to_lowercase()) ||
concept.concept_type.to_lowercase().contains(&characteristic.to_lowercase()) ||
concept.metadata.values().any(|v| v.to_lowercase().contains(&characteristic.to_lowercase())) {
matches.push(characteristic.clone());
}
}
}
matches
}
/// Collect the names of concepts whose name or file path contains the
/// pattern name (case-insensitive).
fn find_naming_pattern_matches(&self, concepts: &[SemanticConcept], pattern_name: &str) -> Vec<String> {
    let needle = pattern_name.to_lowercase();
    concepts
        .iter()
        .filter(|c| {
            c.name.to_lowercase().contains(&needle)
                || c.file_path.to_lowercase().contains(&needle)
        })
        .map(|c| c.name.clone())
        .collect()
}
/// Decide whether `method_name` is a recognizable variant of
/// `required_method` (e.g. "make" or "new" count as "create").
/// Comparison is case-insensitive; unknown methods fall back to a plain
/// substring test.
fn is_method_variant(&self, method_name: &str, required_method: &str) -> bool {
    let name = method_name.to_lowercase();
    let required = required_method.to_lowercase();
    let any_of = |needles: &[&str]| needles.iter().any(|n| name.contains(n));
    match required.as_str() {
        "getinstance" => any_of(&["getinstance", "instance"]),
        "create" => any_of(&["create", "new", "make"]),
        "notify" => any_of(&["notify", "emit", "trigger"]),
        "update" => any_of(&["update", "refresh", "change"]),
        "build" => any_of(&["build", "construct", "assemble"]),
        _ => name.contains(&required),
    }
}
/// Create examples for detected patterns
fn create_examples_for_pattern(&self, pattern_match: &PatternMatch, concepts: &[SemanticConcept]) -> Vec<PatternExample> {
let mut examples = Vec::new();
// Find concepts that contributed to this pattern match
for concept in concepts.iter().take(3) { // Limit to first 3 examples
if concept.name.to_lowercase().contains(&pattern_match.pattern_name.to_lowercase()) ||
pattern_match.evidence.iter().any(|e| e.contains(&concept.name)) {
examples.push(PatternExample {
code: format!("{} {}", concept.concept_type, concept.name),
file_path: concept.file_path.clone(),
line_range: concept.line_range.clone(),
});
}
}
if examples.is_empty() {
// Fallback example
examples.push(PatternExample {
code: pattern_match.evidence.first().cloned().unwrap_or_else(|| "Pattern detected".to_string()),
file_path: pattern_match.location.clone(),
line_range: LineRange { start: 1, end: 1 },
});
}
examples
}
// Anti-pattern detection methods
fn detect_god_object_antipattern(&self, concepts: &[SemanticConcept]) -> Vec<String> {
let mut antipatterns = Vec::new();
let mut class_method_counts: HashMap<String, usize> = HashMap::new();
for concept in concepts {
if concept.concept_type == "class" {
let method_count = concepts.iter()
.filter(|c| c.concept_type == "method" && c.file_path == concept.file_path)
.count();
class_method_counts.insert(concept.name.clone(), method_count);
if method_count > 20 {
antipatterns.push(format!(
"God Object anti-pattern: Class '{}' has {} methods ({}:{})",
concept.name, method_count, concept.file_path, concept.line_range.start
));
}
}
}
antipatterns
}
/// Flag functions/methods with an excessive number of dependencies
/// (more than 15 recorded relationships).
fn detect_spaghetti_code_antipattern(&self, concepts: &[SemanticConcept]) -> Vec<String> {
    concepts
        .iter()
        .filter(|c| c.concept_type == "function" || c.concept_type == "method")
        .filter(|c| c.relationships.len() > 15)
        .map(|c| {
            format!(
                "Spaghetti Code: Function '{}' has {} dependencies ({}:{})",
                c.name,
                c.relationships.len(),
                c.file_path,
                c.line_range.start
            )
        })
        .collect()
}
/// Heuristically flag copy-paste: more than three concepts sharing the
/// same normalized base name (digits/common suffixes stripped) that is
/// itself longer than three characters.
fn detect_copy_paste_antipattern(&self, concepts: &[SemanticConcept]) -> Vec<String> {
    // Bucket concepts by their normalized base name.
    let mut groups: HashMap<String, Vec<&SemanticConcept>> = HashMap::new();
    for concept in concepts {
        groups
            .entry(self.extract_name_base(&concept.name))
            .or_default()
            .push(concept);
    }
    // Large buckets of similarly named concepts suggest duplication.
    let mut antipatterns = Vec::new();
    for (base, members) in groups {
        if members.len() > 3 && base.len() > 3 {
            let names: Vec<String> = members.iter().map(|c| c.name.clone()).collect();
            antipatterns.push(format!(
                "Potential Copy-Paste: {} similar functions found: {}",
                members.len(),
                names.join(", ")
            ));
        }
    }
    antipatterns
}
/// Flag "constant" concepts whose name is purely numeric characters
/// ('0'-'9'-like digits, '.', '_') and therefore carries no meaning.
///
/// This is a simplified check — in practice you'd analyze the actual
/// code rather than the concept name.
fn detect_magic_number_antipattern(&self, concepts: &[SemanticConcept]) -> Vec<String> {
    let mut antipatterns = Vec::new();
    for concept in concepts {
        // Guard against empty names: `all()` over an empty iterator is
        // vacuously true and would falsely flag them.
        if concept.concept_type == "constant"
            && !concept.name.is_empty()
            && concept.name.chars().all(|c| c.is_numeric() || c == '.' || c == '_')
        {
            antipatterns.push(format!(
                "Magic Number: Constant '{}' should have a descriptive name ({}:{})",
                concept.name, concept.file_path, concept.line_range.start
            ));
        }
    }
    antipatterns
}
/// Flag callables whose metadata records more than five parameters.
/// Concepts without a parseable "parameters" entry are ignored.
fn detect_long_parameter_list_antipattern(&self, concepts: &[SemanticConcept]) -> Vec<String> {
    let mut findings = Vec::new();
    for concept in concepts {
        let is_callable = concept.concept_type == "function" || concept.concept_type == "method";
        if !is_callable {
            continue;
        }
        let param_count = concept
            .metadata
            .get("parameters")
            .and_then(|p| p.parse::<usize>().ok());
        if let Some(count) = param_count {
            if count > 5 {
                findings.push(format!(
                    "Long Parameter List: Function '{}' has {} parameters ({}:{})",
                    concept.name, count, concept.file_path, concept.line_range.start
                ));
            }
        }
    }
    findings
}
// Recommendation helper methods
/// Suggest Singleton when global-state naming (config/settings/global)
/// is present and no Singleton pattern has been recorded yet.
fn should_suggest_singleton(&self, concepts: &[SemanticConcept]) -> bool {
    if self.patterns.contains_key("Singleton") {
        return false;
    }
    concepts.iter().any(|c| {
        let name = c.name.to_lowercase();
        name.contains("config") || name.contains("settings") || name.contains("global")
    })
}
/// Suggest Factory when object creation looks complex (more than three
/// constructor/new-style concepts) and no Factory pattern is recorded.
fn should_suggest_factory(&self, concepts: &[SemanticConcept]) -> bool {
    if self.patterns.contains_key("Factory") {
        return false;
    }
    let creation_sites = concepts
        .iter()
        .filter(|c| c.concept_type == "constructor" || c.name.to_lowercase().contains("new"))
        .count();
    creation_sites > 3
}
/// Suggest Observer when event-handling names (event/listener/callback)
/// appear and no Observer pattern has been recorded yet.
fn should_suggest_observer(&self, concepts: &[SemanticConcept]) -> bool {
    if self.patterns.contains_key("Observer") {
        return false;
    }
    concepts.iter().any(|c| {
        let name = c.name.to_lowercase();
        name.contains("event") || name.contains("listener") || name.contains("callback")
    })
}
/// Suggest Strategy when some concept's recorded body shows heavy
/// conditional complexity (over five "if"s or multiple "switch"es) and
/// no Strategy pattern has been recorded yet.
fn should_suggest_strategy(&self, concepts: &[SemanticConcept]) -> bool {
    if self.patterns.contains_key("Strategy") {
        return false;
    }
    concepts.iter().any(|c| match c.metadata.get("body") {
        Some(body) => body.matches("if").count() > 5 || body.matches("switch").count() > 1,
        None => false,
    })
}
/// Suggest Dependency Injection when more than two concepts are tightly
/// coupled (over eight relationships each) and no DI pattern is recorded.
fn should_suggest_dependency_injection(&self, concepts: &[SemanticConcept]) -> bool {
    if self.patterns.contains_key("DependencyInjection") {
        return false;
    }
    let tightly_coupled = concepts.iter().filter(|c| c.relationships.len() > 8).count();
    tightly_coupled > 2
}
/// Normalize a name for similarity grouping: lowercase it, strip
/// trailing digits (e.g. "handler2" -> "handler"), then strip one
/// well-known suffix if present.
fn extract_name_base(&self, name: &str) -> String {
    let lowered = name.to_lowercase();
    let without_digits = lowered.trim_end_matches(|c: char| c.is_numeric());
    const SUFFIXES: [&str; 5] = ["test", "impl", "service", "controller", "handler"];
    for suffix in SUFFIXES {
        if let Some(stripped) = without_digits.strip_suffix(suffix) {
            return stripped.to_string();
        }
    }
    without_digits.to_string()
}
}
/// Adapter to the generic `PatternExtractor` trait.
impl PatternExtractor for ImplementationPatternAnalyzer {
    fn extract_patterns(&self, path: &str) -> Result<Vec<Pattern>, ParseError> {
        // `analyze_code_files` needs `&mut self`, but the trait only
        // provides `&self`, so the work runs on a clone of the analyzer.
        let mut analyzer = self.clone();
        analyzer.analyze_code_files(path)
    }
}
// Manual `Clone`: field-by-field deep clone of the learned maps.
// NOTE(review): both fields are themselves cloned here, so
// `#[derive(Clone)]` on the struct would likely be equivalent — confirm.
impl Clone for ImplementationPatternAnalyzer {
    fn clone(&self) -> Self {
        ImplementationPatternAnalyzer {
            patterns: self.patterns.clone(),
            pattern_signatures: self.pattern_signatures.clone(),
        }
    }
}
/// `Default` delegates to `new()`, which installs the built-in
/// pattern signatures.
impl Default for ImplementationPatternAnalyzer {
    fn default() -> Self {
        Self::new()
    }
}
#[cfg(test)]
mod tests {
use super::*;
use std::collections::HashMap;
/// Build a minimal `SemanticConcept` fixture for the tests below, with
/// fixed confidence (0.8), a 1–10 line range, and empty maps.
fn create_test_concept(name: &str, concept_type: &str, file_path: &str) -> SemanticConcept {
    SemanticConcept {
        id: format!("test_{}", name),
        name: name.to_string(),
        concept_type: concept_type.to_string(),
        confidence: 0.8,
        file_path: file_path.to_string(),
        line_range: LineRange { start: 1, end: 10 },
        relationships: HashMap::new(),
        metadata: HashMap::new(),
    }
}
// The constructor must pre-register the built-in pattern signatures.
#[test]
fn test_implementation_pattern_analyzer_creation() {
    let analyzer = ImplementationPatternAnalyzer::new();
    assert!(!analyzer.pattern_signatures.is_empty());
    assert!(analyzer.pattern_signatures.contains_key("Singleton"));
    assert!(analyzer.pattern_signatures.contains_key("Factory"));
    assert!(analyzer.pattern_signatures.contains_key("Observer"));
}
// A getInstance method plus a singleton-named class should be reported
// as an "implementation" pattern with non-zero confidence.
#[test]
fn test_singleton_pattern_detection() {
    let mut analyzer = ImplementationPatternAnalyzer::new();
    let concepts = vec![
        create_test_concept("getInstance", "method", "Singleton.js"),
        create_test_concept("SingletonClass", "class", "Singleton.js"),
    ];
    let patterns = analyzer.analyze_concepts(&concepts).unwrap();
    let singleton_pattern = patterns.iter().find(|p| p.id.contains("singleton"));
    assert!(singleton_pattern.is_some());
    if let Some(pattern) = singleton_pattern {
        assert_eq!(pattern.pattern_type, "implementation");
        assert!(pattern.confidence > 0.0);
    }
}
// A create* method plus a *Factory class should trigger Factory detection.
#[test]
fn test_factory_pattern_detection() {
    let mut analyzer = ImplementationPatternAnalyzer::new();
    let concepts = vec![
        create_test_concept("createUser", "method", "UserFactory.js"),
        create_test_concept("UserFactory", "class", "UserFactory.js"),
    ];
    let patterns = analyzer.analyze_concepts(&concepts).unwrap();
    let factory_pattern = patterns.iter().find(|p| p.id.contains("factory"));
    assert!(factory_pattern.is_some());
}
// notify/subscribe/update methods should trigger Observer detection.
#[test]
fn test_observer_pattern_detection() {
    let mut analyzer = ImplementationPatternAnalyzer::new();
    let concepts = vec![
        create_test_concept("notify", "method", "Observable.js"),
        create_test_concept("subscribe", "method", "Observable.js"),
        create_test_concept("update", "method", "Observer.js"),
    ];
    let patterns = analyzer.analyze_concepts(&concepts).unwrap();
    let observer_pattern = patterns.iter().find(|p| p.id.contains("observer"));
    assert!(observer_pattern.is_some());
}
#[test]
fn test_builder_pattern_detection() {
let mut analyzer = ImplementationPatternAnalyzer::new();
let concepts = vec![
create_test_concept("withName", "method", "UserBuilder.js"),
create_test_concept("setAge", "method", "UserBuilder.js"),
create_test_concept("build", "method", "UserBuilder.js"),
];
let patterns = analyzer.analyze_concepts(&concepts).unwrap();
| rust | MIT | 9cbed0d63a52d61fcd66d0d651b8c4d7f787ba78 | 2026-01-04T20:19:30.317431Z | true |
pi22by7/In-Memoria | https://github.com/pi22by7/In-Memoria/blob/9cbed0d63a52d61fcd66d0d651b8c4d7f787ba78/rust-core/src/patterns/prediction.rs | rust-core/src/patterns/prediction.rs | //! Approach prediction based on learned patterns and context
#[cfg(feature = "napi-bindings")]
use napi_derive::napi;
use crate::patterns::types::{ApproachPrediction, ProblemComplexity, GeneratedApproach, Pattern};
use crate::types::{ParseError, SemanticConcept};
use std::collections::{HashMap, HashSet};
use serde_json::{Value, from_str};
/// Predictor for suggesting coding approaches based on patterns and context
#[cfg_attr(feature = "napi-bindings", napi)]
pub struct ApproachPredictor {
    // Patterns learned from prior analyses, keyed by pattern id.
    learned_patterns: HashMap<String, Pattern>,
    // Built-in architecture templates, keyed by short template id.
    approach_templates: HashMap<String, ApproachTemplate>,
    // Relative weight of each context factor in decision making.
    context_weights: HashMap<String, f64>,
    // Previously observed problem/approach outcomes used for learning.
    historical_approaches: Vec<HistoricalApproach>,
}
// One entry in the built-in architecture-template catalog.
#[derive(Debug, Clone)]
struct ApproachTemplate {
    // Human-readable name (e.g. "Microservices Architecture").
    name: String,
    // One-line description surfaced in predictions.
    description: String,
    // Problem complexities this template is suited for.
    complexity_suitability: Vec<ProblemComplexity>,
    // Patterns that must be available for the template to score well.
    required_patterns: Vec<String>,
    // Patterns that boost confidence when available.
    preferred_patterns: Vec<String>,
    // Technologies commonly associated with the template.
    technologies: Vec<String>,
    // Static starting confidence before context adjustments.
    confidence_base: f64,
    // Fields accessed by update_templates_from_history
    // Mutable confidence, adjusted as historical outcomes are learned.
    confidence: f64,
    // Patterns accumulated from successful historical approaches.
    patterns: Vec<String>,
}
// One previously observed problem/approach outcome, used for learning.
#[derive(Debug, Clone)]
struct HistoricalApproach {
    // Free-text description of the problem that was solved.
    problem_description: String,
    // Free-text description of the approach that was applied.
    approach_taken: String,
    // Pattern names used by the approach (may be empty when unknown).
    patterns_used: Vec<String>,
    // Outcome score; 0.5 is treated as neutral by the learner.
    success_rating: f64,
    // Complexity classification of the original problem.
    complexity: ProblemComplexity,
    // Additional key/value context captured with the record.
    context: HashMap<String, String>,
}
// Parsed decision context; see `parse_context_data` for defaults and
// `calculate_context_multiplier` for how each field is consumed.
#[derive(Debug, Clone)]
struct ProblemContext {
    // Problem domain (e.g. "enterprise", "prototype"); default "general".
    domain: String,
    // Expected system scale ("small" / "medium" / "large").
    scale: String,
    // Performance requirement level (e.g. "standard", "high").
    performance_requirements: String,
    // Maintainability requirement level (e.g. "high", "low").
    maintainability_requirements: String,
    // Team size bucket (e.g. "small", "large").
    team_size: String,
    // Delivery timeline (e.g. "urgent", "months", "long_term").
    timeline: String,
    // Pattern names already present in the project.
    existing_patterns: Vec<String>,
    // Technology names in use (used to infer further patterns).
    technologies: Vec<String>,
}
#[cfg_attr(feature = "napi-bindings", napi)]
impl ApproachPredictor {
/// Create a predictor pre-loaded with the built-in approach templates
/// and context-factor weights; it starts with no learned patterns and
/// no historical approaches.
#[cfg_attr(feature = "napi-bindings", napi(constructor))]
pub fn new() -> Self {
    let mut predictor = ApproachPredictor {
        learned_patterns: HashMap::new(),
        approach_templates: HashMap::new(),
        context_weights: HashMap::new(),
        historical_approaches: Vec::new(),
    };
    // Populate the static template catalog and decision weights.
    predictor.initialize_approach_templates();
    predictor.initialize_context_weights();
    predictor
}
/// Predict the best approach for a given problem description
///
/// `context_data` is an optional JSON object (see `parse_context_data`
/// for the recognized keys); when absent, defaults are used. Returns an
/// error only when `context_data` is present but not valid JSON.
#[cfg_attr(feature = "napi-bindings", napi)]
pub fn predict_approach(&self, problem_description: String, context_data: Option<String>) -> Result<ApproachPrediction, ParseError> {
    // Derive complexity from the text, parse the optional JSON context,
    // score every suitable template, then keep the best candidate.
    let complexity = self.analyze_problem_complexity(&problem_description);
    let context = self.parse_context_data(context_data.as_deref())?;
    let available_patterns = self.extract_available_patterns(&context);
    let candidates = self.generate_approach_candidates(&problem_description, &complexity, &context, &available_patterns);
    let best_approach = self.select_best_approach(candidates, &context);
    Ok(ApproachPrediction {
        approach: best_approach.description.clone(),
        confidence: best_approach.confidence,
        reasoning: self.generate_reasoning(&best_approach, &complexity, &context),
        patterns: self.extract_recommended_patterns(&best_approach),
        complexity: complexity.to_string(),
    })
}
/// Learn from historical approach data
///
/// `approach_data` must be a JSON array of approach records; returns
/// `Ok(true)` when an array was processed, `Ok(false)` for any other
/// JSON shape, and `Err` when the payload is not valid JSON.
pub fn learn_from_approaches(&mut self, approach_data: &str) -> Result<bool, ParseError> {
    let historical_data: Value = from_str(approach_data)
        .map_err(|e| ParseError::from_reason(format!("Failed to parse approach data: {}", e)))?;
    if let Some(approaches) = historical_data.as_array() {
        let mut added = false;
        for approach_value in approaches {
            if let Ok(historical_approach) = self.parse_historical_approach(approach_value) {
                self.historical_approaches.push(historical_approach);
                added = true;
            }
        }
        // Update approach templates once after ingesting the batch.
        // (Previously this ran inside the loop, re-processing the entire
        // history for every record and compounding the confidence
        // adjustments quadratically.)
        if added {
            self.update_templates_from_history();
        }
        Ok(true)
    } else {
        Ok(false)
    }
}
/// Update predictor with new pattern information; existing entries with
/// the same id are replaced.
pub fn update_patterns(&mut self, patterns: Vec<Pattern>) {
    self.learned_patterns
        .extend(patterns.into_iter().map(|p| (p.id.clone(), p)));
    // New patterns may change which templates are viable.
    self.recalculate_template_confidence();
}
/// Predict approach based on existing codebase analysis
///
/// Unlike `predict_approach`, the context and pattern inventory are
/// derived from the supplied semantic concepts rather than caller JSON,
/// so the recommendation aligns with the codebase's existing patterns.
pub fn predict_from_codebase(&self, concepts: &[SemanticConcept], problem_description: &str) -> Result<ApproachPrediction, ParseError> {
    let context = self.analyze_codebase_context(concepts);
    let existing_patterns = self.identify_existing_patterns(concepts);
    let complexity = self.analyze_problem_complexity(problem_description);
    // Generate candidates that align with existing codebase patterns
    let candidates = self.generate_contextual_candidates(problem_description, &complexity, &context, &existing_patterns);
    let best_approach = self.select_best_approach(candidates, &context);
    Ok(ApproachPrediction {
        approach: best_approach.description.clone(),
        confidence: best_approach.confidence,
        reasoning: self.generate_contextual_reasoning(&best_approach, &existing_patterns, &context),
        patterns: existing_patterns,
        complexity: complexity.to_string(),
    })
}
/// Generate multiple approach alternatives
///
/// Returns up to `count` predictions ordered by descending confidence.
/// Errors only if `context_data` is present but not valid JSON.
pub fn generate_alternatives(&self, problem_description: &str, context_data: Option<&str>, count: usize) -> Result<Vec<ApproachPrediction>, ParseError> {
    let complexity = self.analyze_problem_complexity(problem_description);
    let context = self.parse_context_data(context_data)?;
    let available_patterns = self.extract_available_patterns(&context);
    let mut candidates = self.generate_approach_candidates(problem_description, &complexity, &context, &available_patterns);
    // Sort by confidence and take top N
    candidates.sort_by(|a, b| b.confidence.partial_cmp(&a.confidence).unwrap_or(std::cmp::Ordering::Equal));
    candidates.truncate(count);
    // Converting a candidate is infallible, so collect straight into a
    // Vec (previously every item was wrapped in Ok and collected into a
    // Result for no reason).
    let alternatives = candidates
        .into_iter()
        .map(|approach| ApproachPrediction {
            approach: approach.description.clone(),
            confidence: approach.confidence,
            reasoning: self.generate_reasoning(&approach, &complexity, &context),
            patterns: self.extract_recommended_patterns(&approach),
            complexity: complexity.to_string(),
        })
        .collect();
    Ok(alternatives)
}
/// Initialize approach templates
///
/// Installs the built-in catalog of six architecture templates. Each
/// entry mirrors `confidence_base` into the mutable `confidence` field
/// and `required_patterns` into `patterns`; those copies are what
/// `update_templates_from_history` adjusts over time.
fn initialize_approach_templates(&mut self) {
    // Microservices Architecture
    self.approach_templates.insert("microservices".to_string(), ApproachTemplate {
        name: "Microservices Architecture".to_string(),
        description: "Decompose into loosely coupled, independently deployable services".to_string(),
        complexity_suitability: vec![ProblemComplexity::Medium, ProblemComplexity::High],
        required_patterns: vec!["service_boundaries".to_string(), "api_gateway".to_string()],
        preferred_patterns: vec!["event_driven".to_string(), "database_per_service".to_string()],
        technologies: vec!["docker".to_string(), "kubernetes".to_string(), "rest_api".to_string()],
        confidence_base: 0.8,
        confidence: 0.8,
        patterns: vec!["service_boundaries".to_string(), "api_gateway".to_string()],
    });
    // Monolithic Architecture
    self.approach_templates.insert("monolith".to_string(), ApproachTemplate {
        name: "Modular Monolith".to_string(),
        description: "Single deployable unit with clear internal module boundaries".to_string(),
        complexity_suitability: vec![ProblemComplexity::Low, ProblemComplexity::Medium],
        required_patterns: vec!["layered_architecture".to_string()],
        preferred_patterns: vec!["dependency_injection".to_string(), "domain_driven_design".to_string()],
        technologies: vec!["mvc".to_string(), "orm".to_string()],
        confidence_base: 0.7,
        confidence: 0.7,
        patterns: vec!["layered_architecture".to_string()],
    });
    // Event-Driven Architecture
    self.approach_templates.insert("event_driven".to_string(), ApproachTemplate {
        name: "Event-Driven Architecture".to_string(),
        description: "Asynchronous communication through events and message queues".to_string(),
        complexity_suitability: vec![ProblemComplexity::Medium, ProblemComplexity::High],
        required_patterns: vec!["event_sourcing".to_string(), "publisher_subscriber".to_string()],
        preferred_patterns: vec!["saga_pattern".to_string(), "cqrs".to_string()],
        technologies: vec!["message_queue".to_string(), "event_store".to_string()],
        confidence_base: 0.75,
        confidence: 0.75,
        patterns: vec!["event_sourcing".to_string(), "publisher_subscriber".to_string()],
    });
    // Serverless Architecture
    self.approach_templates.insert("serverless".to_string(), ApproachTemplate {
        name: "Serverless Architecture".to_string(),
        description: "Function-based architecture with managed infrastructure".to_string(),
        complexity_suitability: vec![ProblemComplexity::Low, ProblemComplexity::Medium],
        required_patterns: vec!["function_as_service".to_string()],
        preferred_patterns: vec!["api_gateway".to_string(), "event_triggers".to_string()],
        technologies: vec!["aws_lambda".to_string(), "azure_functions".to_string(), "api_gateway".to_string()],
        confidence_base: 0.6,
        confidence: 0.6,
        patterns: vec!["function_as_service".to_string()],
    });
    // Clean Architecture
    self.approach_templates.insert("clean_architecture".to_string(), ApproachTemplate {
        name: "Clean Architecture".to_string(),
        description: "Dependency inversion with clear separation of concerns".to_string(),
        complexity_suitability: vec![ProblemComplexity::Medium, ProblemComplexity::High],
        required_patterns: vec!["dependency_inversion".to_string(), "use_cases".to_string()],
        preferred_patterns: vec!["repository_pattern".to_string(), "domain_entities".to_string()],
        technologies: vec!["dependency_injection".to_string(), "testing_framework".to_string()],
        confidence_base: 0.85,
        confidence: 0.85,
        patterns: vec!["dependency_inversion".to_string(), "use_cases".to_string()],
    });
    // CRUD Application
    self.approach_templates.insert("crud".to_string(), ApproachTemplate {
        name: "CRUD Application".to_string(),
        description: "Simple Create, Read, Update, Delete operations with standard patterns".to_string(),
        complexity_suitability: vec![ProblemComplexity::Low],
        required_patterns: vec!["mvc".to_string(), "repository".to_string()],
        preferred_patterns: vec!["validation".to_string(), "orm".to_string()],
        technologies: vec!["database".to_string(), "web_framework".to_string()],
        confidence_base: 0.9,
        confidence: 0.9,
        patterns: vec!["mvc".to_string(), "repository".to_string()],
    });
}
/// Install the relative weight of each context factor used in
/// decision making (weights sum to 1.0).
fn initialize_context_weights(&mut self) {
    let weights = [
        ("performance", 0.25),
        ("scalability", 0.2),
        ("maintainability", 0.2),
        ("team_experience", 0.15),
        ("timeline", 0.1),
        ("budget", 0.1),
    ];
    for (factor, weight) in weights {
        self.context_weights.insert(factor.to_string(), weight);
    }
}
/// Classify problem complexity from keyword hits and description
/// length: >=2 high-complexity hits (or >500 chars) => High; >=2 medium
/// hits, >=1 high hit, or >200 chars => Medium; otherwise Low.
fn analyze_problem_complexity(&self, problem_description: &str) -> ProblemComplexity {
    const HIGH_INDICATORS: [&str; 13] = [
        "distributed", "microservices", "real-time", "high-throughput", "scalable",
        "multiple systems", "complex business rules", "enterprise", "multi-tenant",
        "event-driven", "asynchronous", "concurrent", "parallel processing",
    ];
    const MEDIUM_INDICATORS: [&str; 10] = [
        "api", "database", "user management", "authentication", "integration",
        "business logic", "workflows", "reporting", "analytics", "modular",
    ];
    let text = problem_description.to_lowercase();
    let count_hits =
        |indicators: &[&str]| indicators.iter().filter(|i| text.contains(*i)).count();
    let high_score = count_hits(&HIGH_INDICATORS);
    let medium_score = count_hits(&MEDIUM_INDICATORS);
    if high_score >= 2 || text.len() > 500 {
        ProblemComplexity::High
    } else if medium_score >= 2 || high_score >= 1 || text.len() > 200 {
        ProblemComplexity::Medium
    } else {
        ProblemComplexity::Low
    }
}
/// Parse context data from JSON string
///
/// Recognized keys: `domain`, `scale`, `performance`, `maintainability`,
/// `team_size`, `timeline`, `existing_patterns`, `technologies`.
/// Missing keys — or a missing payload — fall back to defaults.
/// Errors only when a payload is present but not valid JSON.
fn parse_context_data(&self, context_data: Option<&str>) -> Result<ProblemContext, ParseError> {
    // Build the default context only when there is no payload, instead
    // of eagerly allocating all eight default fields on every call.
    let Some(data) = context_data else {
        return Ok(ProblemContext {
            domain: "general".to_string(),
            scale: "medium".to_string(),
            performance_requirements: "standard".to_string(),
            maintainability_requirements: "high".to_string(),
            team_size: "small".to_string(),
            timeline: "months".to_string(),
            existing_patterns: Vec::new(),
            technologies: Vec::new(),
        });
    };
    let parsed: Value = from_str(data)
        .map_err(|e| ParseError::from_reason(format!("Failed to parse context: {}", e)))?;
    // Small helpers to avoid repeating the extraction boilerplate.
    let str_field = |key: &str, default: &str| -> String {
        parsed.get(key).and_then(|v| v.as_str()).unwrap_or(default).to_string()
    };
    let vec_field = |key: &str| -> Vec<String> {
        parsed.get(key)
            .and_then(|v| v.as_array())
            .map(|arr| arr.iter().filter_map(|v| v.as_str().map(String::from)).collect())
            .unwrap_or_default()
    };
    Ok(ProblemContext {
        domain: str_field("domain", "general"),
        scale: str_field("scale", "medium"),
        performance_requirements: str_field("performance", "standard"),
        maintainability_requirements: str_field("maintainability", "high"),
        team_size: str_field("team_size", "small"),
        timeline: str_field("timeline", "months"),
        existing_patterns: vec_field("existing_patterns"),
        technologies: vec_field("technologies"),
    })
}
/// Combine explicit context patterns, patterns inferred from the tech
/// stack, and every learned pattern id into one sorted, deduplicated
/// list.
fn extract_available_patterns(&self, context: &ProblemContext) -> Vec<String> {
    let mut patterns = context.existing_patterns.clone();
    // Infer architectural patterns from known technology names.
    let inferred = context.technologies.iter().filter_map(|tech| {
        let implied = match tech.to_lowercase().as_str() {
            "react" | "vue" | "angular" => "component_based",
            "express" | "fastapi" | "spring" => "mvc",
            "docker" | "kubernetes" => "containerization",
            "redis" | "rabbitmq" | "kafka" => "event_driven",
            "graphql" => "api_gateway",
            _ => return None,
        };
        Some(implied.to_string())
    });
    patterns.extend(inferred);
    patterns.extend(self.learned_patterns.keys().cloned());
    patterns.sort();
    patterns.dedup();
    patterns
}
/// Generate approach candidates
///
/// Scores every template suited to the given complexity, keeps those
/// whose confidence clears 0.3, then appends candidates derived from
/// historical data.
fn generate_approach_candidates(
    &self,
    problem_description: &str,
    complexity: &ProblemComplexity,
    context: &ProblemContext,
    available_patterns: &[String],
) -> Vec<GeneratedApproach> {
    let mut candidates = Vec::new();
    for template in self.approach_templates.values() {
        // Only consider templates whose declared suitability covers the
        // detected complexity.
        if template.complexity_suitability.contains(complexity) {
            let confidence = self.calculate_template_confidence(template, context, available_patterns);
            // Drop weak candidates early.
            if confidence > 0.3 {
                candidates.push(GeneratedApproach {
                    description: format!("{}: {}", template.name, template.description),
                    confidence,
                    reasoning: self.generate_template_reasoning(template, context, available_patterns),
                });
            }
        }
    }
    // Add custom approaches based on historical data
    candidates.extend(self.generate_historical_candidates(problem_description, complexity, context));
    candidates
}
/// Calculate confidence for a template
///
/// Starts from `confidence_base`, scales by the fraction of required
/// patterns that are available, adds 0.1 per available preferred
/// pattern, applies the context multiplier, and caps the result at 1.0.
fn calculate_template_confidence(
    &self,
    template: &ApproachTemplate,
    context: &ProblemContext,
    available_patterns: &[String],
) -> f64 {
    let mut confidence = template.confidence_base;
    // Adjust for required patterns availability
    let required_available = template.required_patterns.iter()
        .filter(|&pattern| available_patterns.contains(pattern))
        .count() as f64;
    // No required patterns means the requirement is trivially satisfied.
    let required_ratio = if template.required_patterns.is_empty() {
        1.0
    } else {
        required_available / template.required_patterns.len() as f64
    };
    confidence *= required_ratio;
    // Boost for preferred patterns
    let preferred_available = template.preferred_patterns.iter()
        .filter(|&pattern| available_patterns.contains(pattern))
        .count() as f64;
    let preferred_boost = preferred_available * 0.1;
    confidence += preferred_boost;
    // Adjust for context factors
    confidence *= self.calculate_context_multiplier(template, context);
    confidence.min(1.0)
}
/// Calculate context multiplier
///
/// Multiplies a series of independent adjustments — scale, performance,
/// team size, domain, maintainability, and timeline — each keyed on the
/// template's display name. Unmatched combinations leave the multiplier
/// unchanged (1.0). The result is unbounded here; the caller caps the
/// final confidence at 1.0.
fn calculate_context_multiplier(&self, template: &ApproachTemplate, context: &ProblemContext) -> f64 {
    let mut multiplier = 1.0;
    // Scale considerations
    match (template.name.as_str(), context.scale.as_str()) {
        ("Microservices Architecture", "large") => multiplier *= 1.2,
        ("Microservices Architecture", "small") => multiplier *= 0.7,
        ("Modular Monolith", "small") | ("Modular Monolith", "medium") => multiplier *= 1.1,
        ("CRUD Application", "small") => multiplier *= 1.3,
        ("CRUD Application", "large") => multiplier *= 0.5,
        _ => {}
    }
    // Performance considerations
    if context.performance_requirements == "high" {
        match template.name.as_str() {
            "Event-Driven Architecture" => multiplier *= 1.1,
            "Serverless Architecture" => multiplier *= 0.8,
            _ => {}
        }
    }
    // Team size considerations
    if context.team_size == "large" && template.name == "Clean Architecture" {
        multiplier *= 1.2;
    }
    // Domain-specific adjustments
    match (template.name.as_str(), context.domain.as_str()) {
        ("Microservices Architecture", "enterprise") => multiplier *= 1.15,
        ("CRUD Application", "prototype") => multiplier *= 1.2,
        ("Event-Driven Architecture", "real_time") => multiplier *= 1.3,
        ("Serverless Architecture", "prototype") => multiplier *= 1.1,
        ("Clean Architecture", "long_term_project") => multiplier *= 1.2,
        _ => {}
    }
    // Maintainability requirements
    match context.maintainability_requirements.as_str() {
        "high" => {
            match template.name.as_str() {
                "Clean Architecture" => multiplier *= 1.25,
                "Modular Monolith" => multiplier *= 1.1,
                _ => {}
            }
        },
        "low" => {
            match template.name.as_str() {
                "CRUD Application" => multiplier *= 1.1,
                "Serverless Architecture" => multiplier *= 1.05,
                _ => {}
            }
        },
        _ => {}
    }
    // Timeline considerations
    match context.timeline.as_str() {
        "urgent" | "short" => {
            match template.name.as_str() {
                "CRUD Application" => multiplier *= 1.3,
                "Serverless Architecture" => multiplier *= 1.15,
                "Microservices Architecture" => multiplier *= 0.7, // Complex to implement quickly
                _ => {}
            }
        },
        "long_term" | "ongoing" => {
            match template.name.as_str() {
                "Clean Architecture" => multiplier *= 1.2,
                "Microservices Architecture" => multiplier *= 1.1,
                "CRUD Application" => multiplier *= 0.8, // Less suitable for long-term
                _ => {}
            }
        },
        _ => {}
    }
    multiplier
}
/// Pick the highest-confidence candidate, or a generic layered-
/// architecture fallback when no candidates were generated.
fn select_best_approach(&self, mut candidates: Vec<GeneratedApproach>, _context: &ProblemContext) -> GeneratedApproach {
    candidates.sort_by(|a, b| {
        b.confidence
            .partial_cmp(&a.confidence)
            .unwrap_or(std::cmp::Ordering::Equal)
    });
    match candidates.into_iter().next() {
        Some(best) => best,
        None => GeneratedApproach {
            description: "Standard layered architecture with clear separation of concerns".to_string(),
            confidence: 0.5,
            reasoning: "Default approach when no specific patterns are identified".to_string(),
        },
    }
}
/// Assemble a human-readable, "."-joined rationale for the chosen
/// approach: complexity, confidence, the candidate's own reasoning,
/// plus optional context notes.
fn generate_reasoning(&self, approach: &GeneratedApproach, complexity: &ProblemComplexity, context: &ProblemContext) -> String {
    let mut parts: Vec<String> = Vec::new();
    parts.push(format!("Problem complexity: {}", complexity));
    parts.push(format!("Approach confidence: {:.1}%", approach.confidence * 100.0));
    parts.push(approach.reasoning.clone());
    if context.performance_requirements == "high" {
        parts.push("High performance requirements favor this approach".to_string());
    }
    if context.scale == "large" {
        parts.push("Large scale requirements support this architectural choice".to_string());
    }
    parts.join(". ")
}
/// Rationale that ties the recommendation back to the codebase's
/// existing patterns and the detected context.
fn generate_contextual_reasoning(&self, approach: &GeneratedApproach, existing_patterns: &[String], context: &ProblemContext) -> String {
    let mut parts: Vec<String> = Vec::new();
    parts.push(approach.reasoning.clone());
    parts.push(format!("Existing codebase patterns: {}", existing_patterns.join(", ")));
    if !existing_patterns.is_empty() {
        parts.push("Recommendation aligns with existing architectural patterns".to_string());
    }
    parts.push(format!("Context scale: {}, team size: {}", context.scale, context.team_size));
    parts.join(". ")
}
/// Find the template whose name appears in the approach description and
/// return its required + preferred patterns, sorted and deduplicated.
fn extract_recommended_patterns(&self, approach: &GeneratedApproach) -> Vec<String> {
    let mut patterns = Vec::new();
    let matching_template = self
        .approach_templates
        .values()
        .find(|t| approach.description.contains(&t.name));
    if let Some(template) = matching_template {
        patterns.extend(template.required_patterns.iter().cloned());
        patterns.extend(template.preferred_patterns.iter().cloned());
    }
    patterns.sort();
    patterns.dedup();
    patterns
}
/// Convert one JSON record into a `HistoricalApproach`, tolerating
/// missing fields by substituting defaults (empty strings, success 0.5,
/// medium complexity).
fn parse_historical_approach(&self, value: &Value) -> Result<HistoricalApproach, ParseError> {
    let text = |key: &str| value.get(key).and_then(|v| v.as_str()).unwrap_or("").to_string();
    let complexity = match value.get("complexity").and_then(|v| v.as_str()).unwrap_or("medium") {
        "low" => ProblemComplexity::Low,
        "high" => ProblemComplexity::High,
        _ => ProblemComplexity::Medium,
    };
    Ok(HistoricalApproach {
        problem_description: text("problem"),
        approach_taken: text("approach"),
        patterns_used: Vec::new(),
        success_rating: value.get("success").and_then(|v| v.as_f64()).unwrap_or(0.5),
        complexity,
        context: HashMap::new(),
    })
}
/// Adjust template confidence and patterns from accumulated history.
///
/// For each historical approach, every template whose description is
/// sufficiently similar (total similarity > 0.6) gets its `confidence`
/// nudged by the approach's success rating plus a complexity-match
/// bonus, clamped to [0.1, 1.0]; clearly successful approaches
/// (rating > 0.7) also donate their patterns to the template.
fn update_templates_from_history(&mut self) {
    // Collect template descriptions first to avoid borrowing conflicts
    // (we iterate them while mutably borrowing `approach_templates`).
    let template_descriptions: Vec<(String, String)> = self.approach_templates
        .iter()
        .map(|(name, template)| (name.clone(), template.description.clone()))
        .collect();
    // Analyze historical approaches to update template confidence based on success ratings
    for historical in &self.historical_approaches {
        // Use problem_description for internal consistency check
        let problem_approach_alignment = self.calculate_approach_similarity(&historical.problem_description, &historical.approach_taken);
        // Find templates that match this historical approach's patterns
        for (template_name, template_desc) in &template_descriptions {
            let approach_similarity = self.calculate_approach_similarity(&historical.approach_taken, template_desc);
            let problem_match_bonus = self.calculate_approach_similarity(&historical.problem_description, template_desc) * 0.3;
            // Factor in how well the problem aligned with the chosen approach
            let alignment_bonus = problem_approach_alignment * 0.1;
            let total_similarity = approach_similarity + problem_match_bonus + alignment_bonus;
            if total_similarity > 0.6 { // Similar approaches
                if let Some(template) = self.approach_templates.get_mut(template_name) {
                    // Check if template complexity matches historical complexity
                    let complexity_match = template.complexity_suitability.contains(&historical.complexity);
                    let complexity_bonus = if complexity_match { 0.1 } else { -0.05 };
                    // Adjust confidence based on historical success and complexity matching
                    let base_adjustment = (historical.success_rating - 0.5) * 0.2;
                    let final_adjustment = base_adjustment + complexity_bonus;
                    template.confidence = (template.confidence + final_adjustment).clamp(0.1, 1.0);
                    // Add patterns from successful approaches
                    if historical.success_rating > 0.7 {
                        for pattern in &historical.patterns_used {
                            if !template.patterns.contains(pattern) {
                                template.patterns.push(pattern.clone());
                            }
                        }
                    }
                }
            }
        }
    }
}
/// Jaccard word-overlap similarity between two descriptions, in [0, 1].
fn calculate_approach_similarity(&self, approach1: &str, approach2: &str) -> f64 {
    let lower1 = approach1.to_lowercase();
    let lower2 = approach2.to_lowercase();
    let words1: HashSet<&str> = lower1.split_whitespace().collect();
    let words2: HashSet<&str> = lower2.split_whitespace().collect();
    let shared = words1.intersection(&words2).count();
    let total = words1.union(&words2).count();
    // Two empty inputs share no words; treat that as zero similarity.
    if total == 0 {
        0.0
    } else {
        shared as f64 / total as f64
    }
}
/// Recompute each template's base confidence from the patterns currently
/// known to the predictor.
/// NOTE(review): intentionally a no-op for now — the original author left
/// this as a hook; confirm before relying on it being called for effect.
fn recalculate_template_confidence(&mut self) {
// Recalculate base confidence based on available patterns
// Implementation depends on specific requirements
}
/// Derive a ProblemContext from the concepts of an existing codebase.
/// Technologies/patterns come from substring heuristics on file paths and
/// concept names; scale is bucketed by concept count; domain,
/// maintainability and timeline are overridden by successful historical
/// approaches (later entries win, so effectively the last successful
/// approach in insertion order).
fn analyze_codebase_context(&self, concepts: &[SemanticConcept]) -> ProblemContext {
let mut technologies = HashSet::new();
let mut patterns = HashSet::new();
// Extract technologies and patterns from file paths and names
// (substring checks, so e.g. "latest" also matches "test" — heuristic).
for concept in concepts {
if concept.file_path.contains("test") {
technologies.insert("testing".to_string());
}
if concept.file_path.contains("api") {
patterns.insert("api".to_string());
}
if concept.name.contains("Service") {
patterns.insert("service_layer".to_string());
}
if concept.name.contains("Repository") {
patterns.insert("repository".to_string());
}
}
// Bucket the codebase size by raw concept count.
let scale = if concepts.len() > 100 {
"large"
} else if concepts.len() > 50 {
"medium"
} else {
"small"
};
// Use historical approaches to inform context decisions
let mut maintainability = "high".to_string();
let mut domain = "existing_codebase".to_string();
let mut timeline = "ongoing".to_string();
// Learn from successful historical approaches with similar complexity
for historical in &self.historical_approaches {
if historical.success_rating > 0.7 { // Only learn from successful approaches
if let Some(hist_domain) = historical.context.get("domain") {
domain = hist_domain.clone();
}
if let Some(hist_maint) = historical.context.get("maintainability") {
maintainability = hist_maint.clone();
}
if let Some(hist_timeline) = historical.context.get("timeline") {
timeline = hist_timeline.clone();
}
}
}
// Performance and team size are fixed defaults; only the fields above
// are inferred from data.
ProblemContext {
domain,
scale: scale.to_string(),
performance_requirements: "standard".to_string(),
maintainability_requirements: maintainability,
team_size: "medium".to_string(),
timeline,
existing_patterns: patterns.into_iter().collect(),
technologies: technologies.into_iter().collect(),
}
}
fn identify_existing_patterns(&self, concepts: &[SemanticConcept]) -> Vec<String> {
let mut patterns = Vec::new();
// Identify patterns based on concept analysis
let has_controllers = concepts.iter().any(|c| c.name.contains("Controller"));
let has_services = concepts.iter().any(|c| c.name.contains("Service"));
let has_repositories = concepts.iter().any(|c| c.name.contains("Repository"));
if has_controllers && has_services {
patterns.push("mvc".to_string());
}
if has_repositories {
patterns.push("repository_pattern".to_string());
}
if has_services {
patterns.push("service_layer".to_string());
}
| rust | MIT | 9cbed0d63a52d61fcd66d0d651b8c4d7f787ba78 | 2026-01-04T20:19:30.317431Z | true |
pi22by7/In-Memoria | https://github.com/pi22by7/In-Memoria/blob/9cbed0d63a52d61fcd66d0d651b8c4d7f787ba78/rust-core/src/patterns/naming.rs | rust-core/src/patterns/naming.rs | //! Naming pattern analysis and recognition
#[cfg(feature = "napi-bindings")]
use napi_derive::napi;
use crate::patterns::types::{Pattern, PatternExample, NamingPattern, PatternExtractor};
use crate::types::{ParseError, LineRange, SemanticConcept};
use std::collections::HashMap;
use walkdir::WalkDir;
use std::fs;
use regex::Regex;
/// Analyzer for detecting and learning naming conventions
#[cfg_attr(feature = "napi-bindings", napi)]
pub struct NamingPatternAnalyzer {
// Learned patterns, keyed "<rule_type>_<context>" (e.g. "camelCase_function").
patterns: HashMap<String, NamingPattern>,
// Built-in regex rules per language key ("javascript", "rust", "python", ...).
naming_rules: HashMap<String, Vec<NamingRule>>,
}
/// One regex-backed naming rule for a language.
#[derive(Debug, Clone)]
struct NamingRule {
// Convention name, e.g. "camelCase" or "SCREAMING_SNAKE_CASE".
rule_type: String,
// Anchored regex a name must fully match.
pattern: String,
// Base confidence contributed when this rule matches.
confidence_weight: f64,
}
#[cfg_attr(feature = "napi-bindings", napi)]
impl NamingPatternAnalyzer {
#[cfg_attr(feature = "napi-bindings", napi(constructor))]
pub fn new() -> Self {
let mut analyzer = NamingPatternAnalyzer {
patterns: HashMap::new(),
naming_rules: HashMap::new(),
};
analyzer.initialize_rules();
analyzer
}
/// Initialize common naming pattern rules
fn initialize_rules(&mut self) {
// JavaScript/TypeScript naming rules
let js_rules = vec![
NamingRule {
rule_type: "camelCase".to_string(),
pattern: r"^[a-z][a-zA-Z0-9]*$".to_string(),
confidence_weight: 0.9,
},
NamingRule {
rule_type: "PascalCase".to_string(),
pattern: r"^[A-Z][a-zA-Z0-9]*$".to_string(),
confidence_weight: 0.9,
},
NamingRule {
rule_type: "CONSTANT_CASE".to_string(),
pattern: r"^[A-Z][A-Z0-9_]*$".to_string(),
confidence_weight: 0.8,
},
];
self.naming_rules.insert("javascript".to_string(), js_rules.clone());
self.naming_rules.insert("typescript".to_string(), js_rules);
// Rust naming rules
let rust_rules = vec![
NamingRule {
rule_type: "snake_case".to_string(),
pattern: r"^[a-z][a-z0-9_]*$".to_string(),
confidence_weight: 0.9,
},
NamingRule {
rule_type: "PascalCase".to_string(),
pattern: r"^[A-Z][a-zA-Z0-9]*$".to_string(),
confidence_weight: 0.9,
},
NamingRule {
rule_type: "SCREAMING_SNAKE_CASE".to_string(),
pattern: r"^[A-Z][A-Z0-9_]*$".to_string(),
confidence_weight: 0.8,
},
];
self.naming_rules.insert("rust".to_string(), rust_rules);
// Python naming rules
let python_rules = vec![
NamingRule {
rule_type: "snake_case".to_string(),
pattern: r"^[a-z][a-z0-9_]*$".to_string(),
confidence_weight: 0.9,
},
NamingRule {
rule_type: "PascalCase".to_string(),
pattern: r"^[A-Z][a-zA-Z0-9]*$".to_string(),
confidence_weight: 0.8,
},
NamingRule {
rule_type: "CONSTANT_CASE".to_string(),
pattern: r"^[A-Z][A-Z0-9_]*$".to_string(),
confidence_weight: 0.8,
},
];
self.naming_rules.insert("python".to_string(), python_rules);
}
/// Analyze naming patterns from semantic concepts
/// Classifies each concept name against the language's rules (first match
/// wins), accumulates per-"<rule_type>_<context>" counts and examples,
/// updates the analyzer's internal pattern store, and returns one Pattern
/// per detected key. Unknown languages fall back to a catch-all rule.
pub fn analyze_concepts(&mut self, concepts: &[SemanticConcept], language: &str) -> Result<Vec<Pattern>, ParseError> {
let mut detected_patterns: HashMap<String, (u32, Vec<PatternExample>)> = HashMap::new();
// Get naming rules for the language
let rules = self.naming_rules.get(language).cloned().unwrap_or_else(|| {
// Default to common patterns
vec![
NamingRule {
rule_type: "mixed".to_string(),
pattern: r".*".to_string(),
confidence_weight: 0.3,
},
]
});
// Analyze each concept's name
for concept in concepts {
let name = &concept.name;
for rule in &rules {
if let Ok(regex) = Regex::new(&rule.pattern) {
if regex.is_match(name) {
let pattern_key = format!("{}_{}", rule.rule_type, self.get_context_type(&concept.concept_type));
let example = PatternExample {
code: format!("{} {}", concept.concept_type, name),
file_path: concept.file_path.clone(),
line_range: concept.line_range.clone(),
};
// entry.0 is the running occurrence count for this key.
let entry = detected_patterns.entry(pattern_key.clone()).or_insert((0, Vec::new()));
entry.0 += 1;
entry.1.push(example);
// Update internal naming pattern storage
// (each insert overwrites the previous entry for this key,
// so frequency tracks the latest count).
let naming_pattern = NamingPattern {
pattern_type: rule.rule_type.clone(),
frequency: entry.0,
contexts: vec![self.get_context_type(&concept.concept_type)],
confidence: rule.confidence_weight,
};
self.patterns.insert(pattern_key, naming_pattern);
break;
}
}
}
}
// Convert to Pattern objects
let mut patterns = Vec::new();
for (pattern_key, (frequency, examples)) in detected_patterns {
if let Some(naming_pattern) = self.patterns.get(&pattern_key) {
let confidence = self.calculate_confidence(frequency, examples.len(), naming_pattern.confidence);
patterns.push(Pattern {
id: format!("naming_{}", pattern_key),
pattern_type: "naming".to_string(),
description: format!(
"{} naming pattern for {} (used {} times)",
naming_pattern.pattern_type,
naming_pattern.contexts.join(", "),
frequency
),
frequency,
confidence,
examples,
contexts: vec![language.to_string()],
});
}
}
Ok(patterns)
}
/// Report concepts whose names do not follow the dominant convention
/// learned for their context in the given language.
pub fn detect_violations(&self, concepts: &[SemanticConcept], language: &str) -> Vec<String> {
    // Highest-confidence pattern per naming context for this language.
    let dominant_patterns = self.get_dominant_patterns(language);
    concepts
        .iter()
        .filter_map(|concept| {
            let context = self.get_context_type(&concept.concept_type);
            // Contexts with no established pattern are never violations.
            let pattern = dominant_patterns.get(&context)?;
            if self.matches_pattern(&concept.name, &pattern.pattern_type) {
                return None;
            }
            Some(format!(
                "Naming violation in {}: '{}' should follow {} pattern (found in {}:{})",
                concept.file_path,
                concept.name,
                pattern.pattern_type,
                concept.file_path,
                concept.line_range.start
            ))
        })
        .collect()
}
/// Turn high-confidence (> 0.7) dominant patterns into actionable advice;
/// falls back to a generic suggestion when nothing qualifies.
pub fn generate_recommendations(&self, language: &str) -> Vec<String> {
    let recommendations: Vec<String> = self
        .get_dominant_patterns(language)
        .into_iter()
        .filter(|(_, pattern)| pattern.confidence > 0.7)
        .map(|(context, pattern)| {
            format!(
                "Use {} for {} names (confidence: {:.2})",
                pattern.pattern_type, context, pattern.confidence
            )
        })
        .collect();
    if recommendations.is_empty() {
        vec!["Consider establishing consistent naming conventions".to_string()]
    } else {
        recommendations
    }
}
/// Learn naming patterns from file changes
/// Compares extracted identifiers from the old and new code, classifies
/// each newly introduced name, bumps (or creates) its "<type>_unknown"
/// entry in the internal store, and returns one Pattern per new name.
pub fn learn_from_changes(&mut self, old_code: &str, new_code: &str, language: &str) -> Result<Vec<Pattern>, ParseError> {
// This is a simplified implementation - in practice you'd use AST diffing
let old_names = self.extract_names_from_code(old_code, language);
let new_names = self.extract_names_from_code(new_code, language);
// Find newly introduced names
for name in &new_names {
if !old_names.contains(name) {
if let Some(pattern_type) = self.classify_name(name, language) {
// Update frequency and create pattern
// (context is unknown here because we only see raw text, not concepts).
let pattern_key = format!("{}_{}", pattern_type, "unknown");
let entry = self.patterns.entry(pattern_key.clone()).or_insert(NamingPattern {
pattern_type: pattern_type.clone(),
frequency: 0,
contexts: vec!["unknown".to_string()],
confidence: 0.5,
});
entry.frequency += 1;
new_patterns.push(Pattern {
id: format!("naming_{}", pattern_key),
pattern_type: "naming".to_string(),
description: format!("Detected {} pattern", pattern_type),
frequency: entry.frequency,
confidence: entry.confidence,
examples: vec![],
contexts: vec![language.to_string()],
});
}
}
}
Ok(new_patterns)
}
/// For each naming context, pick the stored pattern with the highest
/// confidence among those associated with the given language.
fn get_dominant_patterns(&self, language: &str) -> HashMap<String, &NamingPattern> {
    let mut dominant: HashMap<String, &NamingPattern> = HashMap::new();
    for (key, pattern) in &self.patterns {
        // A pattern is relevant if the language shows up in its key or contexts.
        let relevant = key.contains(language) || pattern.contexts.contains(&language.to_string());
        if !relevant {
            continue;
        }
        // Keys look like "<rule_type>_<context>"; the context is the final segment.
        let parts: Vec<&str> = key.split('_').collect();
        if parts.len() < 2 {
            continue;
        }
        let context = parts[parts.len() - 1];
        // Keep whichever candidate has the strictly higher confidence.
        let is_better = dominant
            .get(context)
            .map_or(true, |existing| existing.confidence < pattern.confidence);
        if is_better {
            dominant.insert(context.to_string(), pattern);
        }
    }
    dominant
}
/// Check whether `name` conforms to the named convention.
/// Unknown convention names are treated as always matching.
fn matches_pattern(&self, name: &str, pattern_type: &str) -> bool {
    let pattern = match pattern_type {
        "camelCase" => r"^[a-z][a-zA-Z0-9]*$",
        "PascalCase" => r"^[A-Z][a-zA-Z0-9]*$",
        "snake_case" => r"^[a-z][a-z0-9_]*$",
        "CONSTANT_CASE" | "SCREAMING_SNAKE_CASE" => r"^[A-Z][A-Z0-9_]*$",
        _ => return true, // Unknown patterns are considered matches
    };
    // The patterns above are static and valid, so unwrap cannot fail.
    Regex::new(pattern).unwrap().is_match(name)
}
/// Map a semantic concept kind onto the naming context it belongs to.
fn get_context_type(&self, concept_type: &str) -> String {
    let context = match concept_type {
        "class" | "interface" | "struct" => "type",
        "function" | "method" => "function",
        "variable" | "field" => "variable",
        "constant" => "constant",
        _ => "unknown",
    };
    context.to_string()
}
/// Boost a rule's base confidence by how often (log-scaled, capped at
/// +0.3) and across how many examples (capped at +0.2) the pattern was
/// seen; the total is capped at 1.0.
fn calculate_confidence(&self, frequency: u32, examples_count: usize, base_confidence: f64) -> f64 {
    let frequency_boost = f64::from(frequency).log10().min(0.3);
    let examples_boost = (examples_count as f64 / 10.0).min(0.2);
    (base_confidence + frequency_boost + examples_boost).min(1.0)
}
/// Extract declared identifiers from raw source using lightweight,
/// language-specific regexes (a simplified stand-in for AST parsing).
/// Capture group 1 of each pattern is the identifier; languages without
/// an extraction table yield an empty list.
fn extract_names_from_code(&self, code: &str, language: &str) -> Vec<String> {
    // Declaration-site regexes per language.
    let patterns: &[&str] = match language {
        "javascript" | "typescript" => &[
            r"function\s+([a-zA-Z_][a-zA-Z0-9_]*)",
            r"const\s+([a-zA-Z_][a-zA-Z0-9_]*)",
            r"let\s+([a-zA-Z_][a-zA-Z0-9_]*)",
            r"var\s+([a-zA-Z_][a-zA-Z0-9_]*)",
            r"class\s+([a-zA-Z_][a-zA-Z0-9_]*)",
        ],
        "rust" => &[
            r"fn\s+([a-zA-Z_][a-zA-Z0-9_]*)",
            r"struct\s+([a-zA-Z_][a-zA-Z0-9_]*)",
            r"enum\s+([a-zA-Z_][a-zA-Z0-9_]*)",
            r"let\s+([a-zA-Z_][a-zA-Z0-9_]*)",
            r"const\s+([A-Z_][A-Z0-9_]*)",
        ],
        // Python is advertised by `naming_rules` and by the ".py" mapping in
        // `extract_patterns`, so it needs an extraction table too; without
        // this arm Python files silently produced no names.
        "python" => &[
            r"def\s+([a-zA-Z_][a-zA-Z0-9_]*)",
            r"class\s+([a-zA-Z_][a-zA-Z0-9_]*)",
        ],
        _ => &[], // Add more languages as needed
    };
    let mut names = Vec::new();
    for pattern_str in patterns {
        // Invalid patterns are skipped rather than aborting extraction.
        if let Ok(regex) = Regex::new(pattern_str) {
            for captures in regex.captures_iter(code) {
                if let Some(name) = captures.get(1) {
                    names.push(name.as_str().to_string());
                }
            }
        }
    }
    names
}
/// Classify `name` against the language's rules, returning the first
/// matching rule type, or `None` for unknown languages / no match.
fn classify_name(&self, name: &str, language: &str) -> Option<String> {
    let rules = self.naming_rules.get(language)?;
    rules
        .iter()
        // Rules whose regex fails to compile simply never match.
        .find(|rule| {
            Regex::new(&rule.pattern)
                .map(|regex| regex.is_match(name))
                .unwrap_or(false)
        })
        .map(|rule| rule.rule_type.clone())
}
}
impl PatternExtractor for NamingPatternAnalyzer {
    /// Walk `path`, classify every extracted identifier in supported
    /// source files, and emit one naming `Pattern` per identifier.
    /// Unreadable files and unsupported extensions are skipped silently.
    fn extract_patterns(&self, path: &str) -> Result<Vec<Pattern>, ParseError> {
        let mut all_patterns = Vec::new();
        for entry in WalkDir::new(path).into_iter().filter_map(|e| e.ok()) {
            if !entry.file_type().is_file() {
                continue;
            }
            let file_path = entry.path();
            // Map the extension onto a supported language key.
            let language = match file_path.extension().and_then(|s| s.to_str()) {
                Some(ext) => match ext.to_lowercase().as_str() {
                    "js" | "jsx" => "javascript",
                    "ts" | "tsx" => "typescript",
                    "rs" => "rust",
                    "py" => "python",
                    _ => continue,
                },
                None => continue,
            };
            let content = match fs::read_to_string(file_path) {
                Ok(content) => content,
                Err(_) => continue, // best-effort scan: skip unreadable files
            };
            for name in self.extract_names_from_code(&content, language) {
                if let Some(pattern_type) = self.classify_name(&name, language) {
                    all_patterns.push(Pattern {
                        id: format!("naming_{}_{}", pattern_type, name),
                        pattern_type: "naming".to_string(),
                        description: format!("{} naming pattern", pattern_type),
                        frequency: 1,
                        confidence: 0.7,
                        examples: vec![PatternExample {
                            code: name,
                            file_path: file_path.to_string_lossy().to_string(),
                            line_range: LineRange { start: 1, end: 1 },
                        }],
                        contexts: vec![language.to_string()],
                    });
                }
            }
        }
        Ok(all_patterns)
    }
}
impl Default for NamingPatternAnalyzer {
/// Same as `new()`: an analyzer with the built-in rules loaded.
fn default() -> Self {
Self::new()
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::types::SemanticConcept;
use std::collections::HashMap;
// Build a minimal concept fixture; only name/type/path vary per test.
fn create_test_concept(name: &str, concept_type: &str, file_path: &str) -> SemanticConcept {
SemanticConcept {
id: format!("test_{}", name),
name: name.to_string(),
concept_type: concept_type.to_string(),
confidence: 0.8,
file_path: file_path.to_string(),
line_range: LineRange { start: 1, end: 1 },
relationships: HashMap::new(),
metadata: HashMap::new(),
}
}
// Construction seeds the built-in rules.
#[test]
fn test_naming_pattern_analyzer_creation() {
let analyzer = NamingPatternAnalyzer::new();
assert!(!analyzer.patterns.is_empty() || !analyzer.naming_rules.is_empty());
}
// camelCase concepts in JS must surface a camelCase pattern.
#[test]
fn test_camel_case_detection() {
let mut analyzer = NamingPatternAnalyzer::new();
let concepts = vec![
create_test_concept("getUserName", "function", "test.js"),
create_test_concept("userName", "variable", "test.js"),
];
let patterns = analyzer.analyze_concepts(&concepts, "javascript").unwrap();
assert!(!patterns.is_empty());
let camel_case_patterns: Vec<_> = patterns.iter()
.filter(|p| p.description.contains("camelCase"))
.collect();
assert!(!camel_case_patterns.is_empty());
}
// snake_case concepts in Rust must surface a snake_case pattern.
#[test]
fn test_snake_case_detection() {
let mut analyzer = NamingPatternAnalyzer::new();
let concepts = vec![
create_test_concept("get_user_name", "function", "test.rs"),
create_test_concept("user_name", "variable", "test.rs"),
];
let patterns = analyzer.analyze_concepts(&concepts, "rust").unwrap();
assert!(!patterns.is_empty());
let snake_case_patterns: Vec<_> = patterns.iter()
.filter(|p| p.description.contains("snake_case"))
.collect();
assert!(!snake_case_patterns.is_empty());
}
// PascalCase classes in TS must surface a PascalCase pattern.
#[test]
fn test_pascal_case_detection() {
let mut analyzer = NamingPatternAnalyzer::new();
let concepts = vec![
create_test_concept("UserService", "class", "test.ts"),
create_test_concept("ApiClient", "class", "test.ts"),
];
let patterns = analyzer.analyze_concepts(&concepts, "typescript").unwrap();
assert!(!patterns.is_empty());
let pascal_case_patterns: Vec<_> = patterns.iter()
.filter(|p| p.description.contains("PascalCase"))
.collect();
assert!(!pascal_case_patterns.is_empty());
}
// Once a camelCase convention is learned, snake_case names are flagged.
#[test]
fn test_violation_detection() {
let mut analyzer = NamingPatternAnalyzer::new();
// First establish a pattern
let good_concepts = vec![
create_test_concept("getUserName", "function", "test.js"),
create_test_concept("setUserName", "function", "test.js"),
create_test_concept("userName", "variable", "test.js"),
];
let _ = analyzer.analyze_concepts(&good_concepts, "javascript").unwrap();
// Then check for violations
let bad_concepts = vec![
create_test_concept("get_user_age", "function", "test.js"), // snake_case in JS
];
let violations = analyzer.detect_violations(&bad_concepts, "javascript");
assert!(!violations.is_empty());
}
// Recommendations are never empty (generic fallback at minimum).
#[test]
fn test_recommendations_generation() {
let mut analyzer = NamingPatternAnalyzer::new();
let concepts = vec![
create_test_concept("getUserName", "function", "test.js"),
create_test_concept("setUserName", "function", "test.js"),
create_test_concept("userName", "variable", "test.js"),
];
let _ = analyzer.analyze_concepts(&concepts, "javascript").unwrap();
let recommendations = analyzer.generate_recommendations("javascript");
assert!(!recommendations.is_empty());
}
#[test]
fn test_context_type_mapping() {
let analyzer = NamingPatternAnalyzer::new();
assert_eq!(analyzer.get_context_type("class"), "type");
assert_eq!(analyzer.get_context_type("function"), "function");
assert_eq!(analyzer.get_context_type("variable"), "variable");
assert_eq!(analyzer.get_context_type("constant"), "constant");
assert_eq!(analyzer.get_context_type("unknown"), "unknown");
}
#[test]
fn test_pattern_matching() {
let analyzer = NamingPatternAnalyzer::new();
assert!(analyzer.matches_pattern("camelCase", "camelCase"));
assert!(analyzer.matches_pattern("PascalCase", "PascalCase"));
assert!(analyzer.matches_pattern("snake_case", "snake_case"));
assert!(analyzer.matches_pattern("CONSTANT_CASE", "CONSTANT_CASE"));
assert!(!analyzer.matches_pattern("PascalCase", "camelCase"));
assert!(!analyzer.matches_pattern("snake_case", "PascalCase"));
}
// Regex-based extraction finds declaration-site identifiers.
#[test]
fn test_name_extraction() {
let analyzer = NamingPatternAnalyzer::new();
let js_code = "function getUserName() { const userName = 'test'; }";
let names = analyzer.extract_names_from_code(js_code, "javascript");
assert!(names.contains(&"getUserName".to_string()));
assert!(names.contains(&"userName".to_string()));
let rust_code = "fn get_user_name() { let user_name = String::new(); }";
let names = analyzer.extract_names_from_code(rust_code, "rust");
assert!(names.contains(&"get_user_name".to_string()));
assert!(names.contains(&"user_name".to_string()));
}
// Classification picks the first matching rule for the language.
#[test]
fn test_name_classification() {
let analyzer = NamingPatternAnalyzer::new();
assert_eq!(analyzer.classify_name("camelCase", "javascript"), Some("camelCase".to_string()));
assert_eq!(analyzer.classify_name("PascalCase", "javascript"), Some("PascalCase".to_string()));
assert_eq!(analyzer.classify_name("snake_case", "rust"), Some("snake_case".to_string()));
assert_eq!(analyzer.classify_name("CONSTANT_CASE", "rust"), Some("SCREAMING_SNAKE_CASE".to_string()));
}
} | rust | MIT | 9cbed0d63a52d61fcd66d0d651b8c4d7f787ba78 | 2026-01-04T20:19:30.317431Z | false |
pi22by7/In-Memoria | https://github.com/pi22by7/In-Memoria/blob/9cbed0d63a52d61fcd66d0d651b8c4d7f787ba78/rust-core/src/patterns/types.rs | rust-core/src/patterns/types.rs | //! Core type definitions for pattern learning and analysis
#[cfg(feature = "napi-bindings")]
use napi_derive::napi;
use serde::{Deserialize, Serialize};
use crate::types::{LineRange, ParseError};
// Simple error type for when napi is not available (from original implementation)
/// Plain-string error used when the `napi-bindings` feature is disabled.
#[derive(Debug)]
pub struct SimpleError {
pub message: String,
}
impl SimpleError {
/// Build an error from any string-convertible reason (mirrors
/// `napi::Error::from_reason` so call sites compile under both features).
pub fn from_reason<S: Into<String>>(message: S) -> Self {
Self {
message: message.into(),
}
}
}
impl std::fmt::Display for SimpleError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.message)
}
}
impl std::error::Error for SimpleError {}
// Conditional type aliases - use proper napi::Result when available (from original implementation)
#[cfg(feature = "napi-bindings")]
pub type ApiResult<T> = napi::Result<T>;
#[cfg(not(feature = "napi-bindings"))]
pub type ApiResult<T> = Result<T, SimpleError>;
/// Core pattern representation
#[derive(Debug, Clone, Serialize, Deserialize)]
#[cfg_attr(feature = "napi-bindings", napi(object))]
pub struct Pattern {
// Stable identifier, e.g. "naming_camelCase_function".
pub id: String,
// Category of pattern: "naming", "structural", "implementation", ...
pub pattern_type: String,
pub description: String,
// How many times the pattern was observed.
pub frequency: u32,
// Confidence score; producers in this crate keep it within [0, 1].
pub confidence: f64,
pub examples: Vec<PatternExample>,
pub contexts: Vec<String>,
}
/// Example of a pattern occurrence
#[derive(Debug, Clone, Serialize, Deserialize)]
#[cfg_attr(feature = "napi-bindings", napi(object))]
pub struct PatternExample {
// The snippet (or identifier) that exhibited the pattern.
pub code: String,
pub file_path: String,
pub line_range: LineRange,
}
/// Result of pattern analysis
#[derive(Debug, Clone, Serialize, Deserialize)]
#[cfg_attr(feature = "napi-bindings", napi(object))]
pub struct PatternAnalysisResult {
pub detected: Vec<String>,
pub violations: Vec<String>,
pub recommendations: Vec<String>,
// Present only when the analysis also learned new patterns.
pub learned: Option<Vec<Pattern>>,
}
/// Prediction of coding approach based on patterns
#[derive(Debug, Clone, Serialize, Deserialize)]
#[cfg_attr(feature = "napi-bindings", napi(object))]
pub struct ApproachPrediction {
pub approach: String,
pub confidence: f64,
pub reasoning: String,
pub patterns: Vec<String>,
// Stringified ProblemComplexity: "low" | "medium" | "high".
pub complexity: String,
}
/// Naming pattern information
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct NamingPattern {
pub pattern_type: String, // camelCase, PascalCase, snake_case, etc.
pub frequency: u32,
pub contexts: Vec<String>, // function, class, variable, constant
pub confidence: f64,
}
/// Structural pattern information
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct StructuralPattern {
pub pattern_type: String, // MVC, layered, modular, etc.
pub frequency: u32,
pub characteristics: Vec<String>,
pub confidence: f64,
}
/// Implementation pattern information
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ImplementationPattern {
pub pattern_type: String, // singleton, factory, observer, etc.
pub frequency: u32,
pub code_signatures: Vec<String>,
pub confidence: f64,
}
/// Problem complexity levels for approach prediction
#[derive(Debug, Clone, PartialEq)]
pub enum ProblemComplexity {
Low,
Medium,
High,
}
// Renders as the lowercase strings used throughout serialization
// ("low" / "medium" / "high").
impl std::fmt::Display for ProblemComplexity {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
ProblemComplexity::Low => write!(f, "low"),
ProblemComplexity::Medium => write!(f, "medium"),
ProblemComplexity::High => write!(f, "high"),
}
}
}
/// Generated approach recommendation
#[derive(Debug, Clone)]
pub struct GeneratedApproach {
pub description: String,
pub confidence: f64,
pub reasoning: String,
}
/// Trait for components that can extract patterns
pub trait PatternExtractor {
fn extract_patterns(&self, path: &str) -> Result<Vec<Pattern>, ParseError>;
}
/// Trait for components that can analyze patterns
pub trait PatternAnalyzer {
fn analyze_patterns(&self, patterns: &[Pattern]) -> Result<PatternAnalysisResult, ParseError>;
}
/// Trait for components that can learn from data
pub trait PatternLearner {
fn learn_from_data(&mut self, data: &str) -> Result<Vec<Pattern>, ParseError>;
}
#[cfg(test)]
mod tests {
use super::*;
// Field round-trip for the core Pattern struct.
#[test]
fn test_pattern_creation() {
let pattern = Pattern {
id: "test_pattern".to_string(),
pattern_type: "naming".to_string(),
description: "Test pattern".to_string(),
frequency: 5,
confidence: 0.8,
examples: vec![],
contexts: vec!["test".to_string()],
};
assert_eq!(pattern.id, "test_pattern");
assert_eq!(pattern.pattern_type, "naming");
assert_eq!(pattern.frequency, 5);
// Exact float compare is fine here: 0.8 is stored and read back unchanged.
assert_eq!(pattern.confidence, 0.8);
}
#[test]
fn test_pattern_example() {
let example = PatternExample {
code: "function test() {}".to_string(),
file_path: "test.js".to_string(),
line_range: LineRange { start: 1, end: 1 },
};
assert_eq!(example.code, "function test() {}");
assert_eq!(example.file_path, "test.js");
assert_eq!(example.line_range.start, 1);
}
// Display must yield the lowercase serialization strings.
#[test]
fn test_problem_complexity_display() {
assert_eq!(format!("{}", ProblemComplexity::Low), "low");
assert_eq!(format!("{}", ProblemComplexity::Medium), "medium");
assert_eq!(format!("{}", ProblemComplexity::High), "high");
}
#[test]
fn test_naming_pattern() {
let naming = NamingPattern {
pattern_type: "camelCase".to_string(),
frequency: 10,
contexts: vec!["function".to_string()],
confidence: 0.9,
};
assert_eq!(naming.pattern_type, "camelCase");
assert_eq!(naming.frequency, 10);
assert_eq!(naming.confidence, 0.9);
}
#[test]
fn test_structural_pattern() {
let structural = StructuralPattern {
pattern_type: "MVC".to_string(),
frequency: 5,
characteristics: vec!["model".to_string(), "view".to_string(), "controller".to_string()],
confidence: 0.85,
};
assert_eq!(structural.pattern_type, "MVC");
assert_eq!(structural.frequency, 5);
assert_eq!(structural.characteristics.len(), 3);
}
#[test]
fn test_implementation_pattern() {
let implementation = ImplementationPattern {
pattern_type: "singleton".to_string(),
frequency: 3,
code_signatures: vec!["getInstance()".to_string()],
confidence: 0.95,
};
assert_eq!(implementation.pattern_type, "singleton");
assert_eq!(implementation.frequency, 3);
assert!(implementation.code_signatures.contains(&"getInstance()".to_string()));
}
} | rust | MIT | 9cbed0d63a52d61fcd66d0d651b8c4d7f787ba78 | 2026-01-04T20:19:30.317431Z | false |
pi22by7/In-Memoria | https://github.com/pi22by7/In-Memoria/blob/9cbed0d63a52d61fcd66d0d651b8c4d7f787ba78/rust-core/src/patterns/learning.rs | rust-core/src/patterns/learning.rs | //! Core learning algorithms for pattern discovery and analysis
#[cfg(feature = "napi-bindings")]
use napi_derive::napi;
use crate::patterns::implementation::ImplementationPatternAnalyzer;
use crate::patterns::naming::NamingPatternAnalyzer;
use crate::patterns::prediction::ApproachPredictor;
use crate::patterns::structural::StructuralPatternAnalyzer;
use crate::patterns::types::{
Pattern, PatternAnalysisResult, PatternLearner as PatternLearnerTrait,
};
use crate::types::{ParseError, SemanticConcept};
use serde_json::{from_str, Value};
use std::collections::{HashMap, HashSet};
use std::fs;
use walkdir::WalkDir;
/// Core learning engine that orchestrates pattern discovery across all domains
#[cfg_attr(feature = "napi-bindings", napi)]
pub struct PatternLearningEngine {
// Domain-specific analyzers, one per pattern family.
naming_analyzer: NamingPatternAnalyzer,
structural_analyzer: StructuralPatternAnalyzer,
implementation_analyzer: ImplementationPatternAnalyzer,
approach_predictor: ApproachPredictor,
// Validated patterns keyed by pattern id.
learned_patterns: HashMap<String, Pattern>,
learning_metrics: LearningMetrics,
// Minimum confidence a pattern needs; default 0.5 (see `new`).
confidence_threshold: f64,
}
/// Aggregate statistics about what the engine has learned so far.
#[derive(Debug, Clone)]
pub struct LearningMetrics {
pub total_patterns_learned: usize,
pub confidence_distribution: HashMap<String, usize>, // confidence ranges
pub pattern_type_counts: HashMap<String, usize>,
pub learning_accuracy: f64,
pub last_learning_timestamp: Option<String>,
}
// Bookkeeping for one learn_from_codebase run (internal only).
#[derive(Debug, Clone)]
struct LearningSession {
session_id: String,
patterns_discovered: Vec<Pattern>,
analysis_duration_ms: u64,
files_analyzed: usize,
concepts_analyzed: usize,
}
/// Time series of how a single pattern's stats changed across runs.
#[derive(Debug, Clone)]
pub struct PatternEvolution {
pub pattern_id: String,
pub confidence_history: Vec<(String, f64)>, // timestamp, confidence
pub frequency_history: Vec<(String, u32)>, // timestamp, frequency
pub evolution_trend: EvolutionTrend,
}
/// Coarse trend classification for a pattern's history.
#[derive(Debug, Clone)]
pub enum EvolutionTrend {
Improving, // Confidence/frequency increasing
Stable, // Confidence/frequency stable
Declining, // Confidence/frequency decreasing
Emerging, // New pattern
Deprecated, // Pattern no longer seen
}
#[cfg_attr(feature = "napi-bindings", napi)]
impl PatternLearningEngine {
/// Construct an engine with fresh analyzers, an empty pattern store,
/// zeroed learning metrics, and the default 0.5 confidence threshold.
#[cfg_attr(feature = "napi-bindings", napi(constructor))]
pub fn new() -> Self {
    let learning_metrics = LearningMetrics {
        total_patterns_learned: 0,
        confidence_distribution: HashMap::new(),
        pattern_type_counts: HashMap::new(),
        learning_accuracy: 0.0,
        last_learning_timestamp: None,
    };
    PatternLearningEngine {
        naming_analyzer: NamingPatternAnalyzer::new(),
        structural_analyzer: StructuralPatternAnalyzer::new(),
        implementation_analyzer: ImplementationPatternAnalyzer::new(),
        approach_predictor: ApproachPredictor::new(),
        learned_patterns: HashMap::new(),
        learning_metrics,
        confidence_threshold: 0.5,
    }
}
/// Learn patterns from an entire codebase
///
/// Runs the seven learning phases in order (concept extraction, naming,
/// structural, implementation, predictor update, validation, metrics)
/// and stores every validated pattern before returning it.
///
/// # Safety
/// This function is marked unsafe for NAPI compatibility. It performs file system operations
/// and pattern analysis that are inherently safe but marked unsafe for JavaScript interop.
#[cfg_attr(feature = "napi-bindings", napi)]
pub async unsafe fn learn_from_codebase(
&mut self,
path: String,
) -> Result<Vec<Pattern>, ParseError> {
let session_start = std::time::Instant::now();
let mut session = LearningSession {
// Second-resolution id; unwrap only fails if the system clock is
// before the Unix epoch.
session_id: format!(
"session_{}",
std::time::SystemTime::now()
.duration_since(std::time::UNIX_EPOCH)
.unwrap()
.as_secs()
),
patterns_discovered: Vec::new(),
analysis_duration_ms: 0,
files_analyzed: 0,
concepts_analyzed: 0,
};
// Phase 1: Collect semantic concepts from the codebase
let concepts = self.extract_semantic_concepts(&path).await?;
session.concepts_analyzed = concepts.len();
// Count unique files analyzed
let unique_files: std::collections::HashSet<_> =
concepts.iter().map(|c| &c.file_path).collect();
session.files_analyzed = unique_files.len();
// Phase 2: Learn naming patterns
let naming_patterns = self.learn_naming_patterns(&concepts, &path).await?;
session.patterns_discovered.extend(naming_patterns);
// Phase 3: Learn structural patterns
let structural_patterns = self.learn_structural_patterns(&concepts, &path).await?;
session.patterns_discovered.extend(structural_patterns);
// Phase 4: Learn implementation patterns
let implementation_patterns = self.learn_implementation_patterns(&concepts, &path).await?;
session.patterns_discovered.extend(implementation_patterns);
// Phase 5: Update approach predictor with new patterns
self.approach_predictor
.update_patterns(session.patterns_discovered.clone());
// Phase 6: Consolidate and validate patterns
let patterns_for_validation = session.patterns_discovered.clone();
let validated_patterns = self.validate_and_consolidate_patterns(patterns_for_validation)?;
// Phase 7: Update learning metrics
// (`as u64` truncation is harmless: u128 millis won't exceed u64 here).
session.analysis_duration_ms = session_start.elapsed().as_millis() as u64;
self.update_learning_metrics(&validated_patterns, &session);
// Store learned patterns
for pattern in &validated_patterns {
self.learned_patterns
.insert(pattern.id.clone(), pattern.clone());
}
Ok(validated_patterns)
}
/// Learn from file changes (incremental learning)
///
/// Compares `old_content` and `new_content`, learns naming (and, when a
/// structural difference is detected, structural) pattern changes, stores
/// the new patterns, and boosts confidence of related existing patterns.
///
/// # Safety
/// This function is marked unsafe for NAPI compatibility. It performs pattern analysis
/// operations that are inherently safe but marked unsafe for JavaScript interop.
#[cfg_attr(feature = "napi-bindings", napi)]
pub async unsafe fn learn_from_changes(
    &mut self,
    old_content: String,
    new_content: String,
    file_path: String,
    language: String,
) -> Result<Vec<Pattern>, ParseError> {
    let mut new_patterns = Vec::new();
    // Learn naming pattern changes
    let naming_changes =
        self.naming_analyzer
            .learn_from_changes(&old_content, &new_content, &language)?;
    new_patterns.extend(naming_changes);
    // Learn structural changes (simplified - would need AST diff in practice)
    if self.has_structural_changes(&old_content, &new_content) {
        let structural_changes = self
            .learn_structural_changes(&old_content, &new_content, &file_path)
            .await?;
        new_patterns.extend(structural_changes);
    }
    // Update internal state (duplicate pattern ids overwrite)
    for pattern in &new_patterns {
        self.learned_patterns
            .insert(pattern.id.clone(), pattern.clone());
    }
    // Use helper methods for additional learning: record the kind of edit
    // (addition/deletion/modification) and the file-type/directory context.
    let change_type = self.detect_change_type(&old_content, &new_content);
    self.learn_from_change_type(&change_type).await?;
    self.learn_from_file_context(&file_path).await?;
    // Boost confidence for related patterns when we find successful patterns
    if !new_patterns.is_empty() {
        let primary_concept = self.extract_primary_concept(&new_patterns);
        self.boost_related_pattern_confidence(&primary_concept, 0.05)
            .await?;
    }
    // Update learning metrics
    self.update_incremental_metrics(&new_patterns);
    Ok(new_patterns)
}
/// Learn from analysis data (JSON format)
///
/// Parses `analysis_data`, imports any pre-existing "patterns" entries,
/// then learns naming/implementation/structural patterns from the
/// "concepts" it contains. Returns `Ok(true)` only when at least one
/// newly learned pattern met the confidence threshold.
///
/// # Safety
/// This function is marked unsafe for NAPI compatibility. It performs data parsing and
/// learning operations that are inherently safe but marked unsafe for JavaScript interop.
#[cfg_attr(feature = "napi-bindings", napi)]
pub async unsafe fn learn_from_analysis(
    &mut self,
    analysis_data: String,
) -> Result<bool, ParseError> {
    let data: Value = from_str(&analysis_data).map_err(|e| {
        ParseError::from_reason(format!("Failed to parse analysis data: {}", e))
    })?;
    // Extract concepts from analysis data
    let concepts = self.parse_concepts_from_analysis(&data)?;
    // Also try to parse any existing patterns in the data; entries that
    // fail to parse are silently skipped (best-effort import).
    if let Some(patterns_array) = data.get("patterns").and_then(|p| p.as_array()) {
        for pattern_json in patterns_array {
            if let Ok(pattern) = self.parse_pattern_from_json(pattern_json) {
                self.learned_patterns.insert(pattern.id.clone(), pattern);
            }
        }
    }
    if !concepts.is_empty() {
        // Learn patterns from the concepts; analyzer failures degrade to
        // an empty pattern list rather than aborting the whole call.
        let naming_patterns = self
            .naming_analyzer
            .analyze_concepts(&concepts, "mixed")
            .unwrap_or_default();
        let mut implementation_patterns = self
            .implementation_analyzer
            .analyze_concepts(&concepts)
            .unwrap_or_default();
        let structural_patterns = self
            .structural_analyzer
            .analyze_concept_structures(&concepts)
            .unwrap_or_default();
        // Combine all patterns
        let mut all_patterns = naming_patterns;
        all_patterns.append(&mut implementation_patterns);
        all_patterns.extend(structural_patterns);
        // Store patterns that meet confidence threshold
        let mut learned_count = 0;
        for pattern in all_patterns {
            if pattern.confidence >= self.confidence_threshold {
                self.learned_patterns.insert(pattern.id.clone(), pattern);
                learned_count += 1;
            }
        }
        // Update predictor with historical approach data if available;
        // predictor errors are deliberately ignored here.
        if let Some(approaches) = data.get("approaches") {
            if let Ok(approach_data) = serde_json::to_string(approaches) {
                let _ = self
                    .approach_predictor
                    .learn_from_approaches(&approach_data);
            }
        }
        // Update metrics
        self.learning_metrics.total_patterns_learned += learned_count;
        Ok(learned_count > 0)
    } else {
        Ok(false)
    }
}
/// Get comprehensive analysis of learned patterns
///
/// Runs all three analyzers over `concepts`, collecting violations and
/// recommendations, and reports every currently learned pattern as a
/// formatted "detected" string.
#[cfg_attr(feature = "napi-bindings", napi)]
pub fn analyze_patterns(
    &self,
    concepts: Vec<SemanticConcept>,
) -> Result<PatternAnalysisResult, ParseError> {
    let mut detected = Vec::new();
    let mut violations = Vec::new();
    let mut recommendations = Vec::new();
    // Analyze with each specialized analyzer
    // Naming analysis
    let naming_violations = self.naming_analyzer.detect_violations(&concepts, "mixed");
    violations.extend(naming_violations);
    let naming_recommendations = self.naming_analyzer.generate_recommendations("mixed");
    recommendations.extend(naming_recommendations);
    // Structural analysis
    let structural_violations = self
        .structural_analyzer
        .detect_structural_violations(&concepts);
    violations.extend(structural_violations);
    let structural_recommendations = self
        .structural_analyzer
        .generate_structural_recommendations(&concepts);
    recommendations.extend(structural_recommendations);
    // Implementation analysis
    let implementation_violations = self.implementation_analyzer.detect_antipatterns(&concepts);
    violations.extend(implementation_violations);
    let implementation_recommendations = self
        .implementation_analyzer
        .generate_recommendations(&concepts);
    recommendations.extend(implementation_recommendations);
    // Detected patterns: one human-readable line per learned pattern.
    for pattern in self.learned_patterns.values() {
        detected.push(format!(
            "{}: {} (confidence: {:.2})",
            pattern.pattern_type, pattern.description, pattern.confidence
        ));
    }
    Ok(PatternAnalysisResult {
        detected,
        violations,
        recommendations,
        learned: Some(self.learned_patterns.values().cloned().collect()),
    })
}
/// Predict best approach for a problem
///
/// Thin delegation to the internal `ApproachPredictor`.
#[cfg_attr(feature = "napi-bindings", napi)]
pub fn predict_approach(
    &self,
    problem_description: String,
    context: Option<String>,
) -> Result<crate::patterns::types::ApproachPrediction, ParseError> {
    self.approach_predictor
        .predict_approach(problem_description, context)
}
/// Get learning metrics and statistics
///
/// Returns a borrow; clone if ownership is needed.
pub fn get_learning_metrics(&self) -> &LearningMetrics {
    &self.learning_metrics
}
/// Set confidence threshold for pattern acceptance
///
/// Out-of-range values are clamped into [0.0, 1.0].
pub fn set_confidence_threshold(&mut self, threshold: f64) {
    self.confidence_threshold = threshold.clamp(0.0, 1.0);
}
/// Get pattern evolution data for `pattern_id`, or `None` if the pattern
/// is unknown.
///
/// History tracking is not implemented yet, so a known pattern currently
/// reports empty histories with a `Stable` trend.
pub fn get_pattern_evolution(&self, pattern_id: &str) -> Option<PatternEvolution> {
    self.learned_patterns.get(pattern_id).map(|_| PatternEvolution {
        pattern_id: pattern_id.to_string(),
        confidence_history: Vec::new(),
        frequency_history: Vec::new(),
        evolution_trend: EvolutionTrend::Stable,
    })
}
/// Get all learned patterns (for legacy compatibility)
///
/// Returns owned clones; order is unspecified (HashMap iteration).
pub fn get_learned_patterns(&self) -> Vec<Pattern> {
    self.learned_patterns.values().cloned().collect()
}
/// Insert a pattern (for external use and testing)
///
/// An existing pattern with the same `id` is overwritten. Note the map key
/// is `id`, not `pattern.id` — callers are expected to pass matching values.
pub fn insert_pattern(&mut self, id: String, pattern: Pattern) {
    self.learned_patterns.insert(id, pattern);
}
/// Get a specific pattern by ID (borrowed; `None` if absent)
pub fn get_pattern(&self, id: &str) -> Option<&Pattern> {
    self.learned_patterns.get(id)
}
/// Check if a pattern exists
pub fn has_pattern(&self, id: &str) -> bool {
    self.learned_patterns.contains_key(id)
}
/// Updates patterns based on file changes (from original implementation)
///
/// Public NAPI wrapper around `update_from_change_internal`.
///
/// # Safety
/// This function uses unsafe because it needs to interact with the Node.js runtime
/// through N-API bindings. The caller must ensure the change data is valid JSON.
#[cfg_attr(feature = "napi-bindings", napi)]
pub async unsafe fn update_from_change(
    &mut self,
    change_data: String,
) -> Result<bool, ParseError> {
    self.update_from_change_internal(change_data).await
}
/// Internal implementation for updating patterns from file changes (from original implementation)
///
/// `change_data` is a JSON object with optional "type", "path", "content",
/// and "language" fields. Unparseable JSON is treated as "no update"
/// (returns `Ok(false)`) rather than an error — deliberate best-effort.
/// Returns whether any pattern was updated.
pub async fn update_from_change_internal(
    &mut self,
    change_data: String,
) -> Result<bool, ParseError> {
    // Parse the change data JSON
    let change: Value = match from_str(&change_data) {
        Ok(data) => data,
        Err(e) => {
            eprintln!("Failed to parse change data: {}", e);
            return Ok(false);
        }
    };
    let mut patterns_updated = false;
    // Extract change information
    let change_type = change
        .get("type")
        .and_then(|t| t.as_str())
        .unwrap_or("unknown");
    let file_path = change.get("path").and_then(|p| p.as_str());
    let content = change.get("content").and_then(|c| c.as_str());
    let language = change.get("language").and_then(|l| l.as_str());
    // Update patterns based on change type
    match change_type {
        "add" | "create" => {
            patterns_updated |= self
                .handle_file_addition(file_path, content, language)
                .await?;
        }
        "modify" | "change" => {
            patterns_updated |= self
                .handle_file_modification(file_path, content, language)
                .await?;
        }
        "delete" | "remove" => {
            patterns_updated |= self.handle_file_deletion(file_path).await?;
        }
        "rename" | "move" => {
            patterns_updated |= self.handle_file_rename(file_path, &change).await?;
        }
        _ => {
            // Handle unknown change types by treating as modification
            patterns_updated |= self
                .handle_file_modification(file_path, content, language)
                .await?;
        }
    }
    // Learn from the overall change pattern
    patterns_updated |= self
        .learn_from_change_pattern(change_type, file_path, language)
        .await?;
    // Update usage statistics for related patterns
    if let (Some(path), Some(lang)) = (file_path, language) {
        patterns_updated |= self.update_language_usage_patterns(path, lang).await?;
    }
    Ok(patterns_updated)
}
/// Helper method to update pattern frequency (from original implementation)
///
/// If a pattern keyed by `pattern_type` exists, bump its frequency and
/// nudge its confidence up (capped at 0.95); otherwise seed a new
/// low-confidence pattern. Always returns `Ok(true)`.
async fn update_pattern_frequency(
    &mut self,
    pattern_type: &str,
    increment: u32,
) -> Result<bool, ParseError> {
    if let Some(pattern) = self.learned_patterns.get_mut(pattern_type) {
        pattern.frequency += increment;
        // Adjust confidence based on increased usage
        pattern.confidence = (pattern.confidence + 0.05).min(0.95);
        Ok(true)
    } else {
        // Create a new pattern if it doesn't exist. Note: it is keyed by
        // its generated id, not `pattern_type`, so the next call with the
        // same type will create another entry rather than find this one.
        let new_pattern = Pattern {
            id: format!("learned_{}_{}", pattern_type, self.generate_pattern_id()),
            pattern_type: pattern_type.to_string(),
            description: format!("Pattern learned from analysis: {}", pattern_type),
            frequency: increment,
            confidence: 0.3, // Start with low confidence for new patterns
            examples: vec![],
            contexts: vec!["learned".to_string()],
        };
        self.learned_patterns
            .insert(new_pattern.id.clone(), new_pattern);
        Ok(true)
    }
}
/// Helper method to boost confidence of patterns related to a concept (from original implementation)
///
/// A pattern is "related" when its description, type, or any context
/// contains `concept` (case-insensitive). Confidence is capped at 0.95.
/// Returns whether any pattern was boosted.
async fn boost_related_pattern_confidence(
    &mut self,
    concept: &str,
    boost: f64,
) -> Result<bool, ParseError> {
    // Lowercase the needle once; the original recomputed it up to three
    // times per pattern inside the loop.
    let needle = concept.to_lowercase();
    let mut updated = false;
    for pattern in self.learned_patterns.values_mut() {
        let related = pattern.description.to_lowercase().contains(&needle)
            || pattern.pattern_type.to_lowercase().contains(&needle)
            || pattern
                .contexts
                .iter()
                .any(|c| c.to_lowercase().contains(&needle));
        if related {
            pattern.confidence = (pattern.confidence + boost).min(0.95);
            updated = true;
        }
    }
    Ok(updated)
}
/// Helper method to learn from change type (from original implementation)
///
/// Records one occurrence of a `change_<type>` pattern.
async fn learn_from_change_type(&mut self, change_type: &str) -> Result<bool, ParseError> {
    let pattern_type = format!("change_{}", change_type);
    self.update_pattern_frequency(&pattern_type, 1).await
}
/// Helper method to learn from file context (from original implementation)
///
/// Records `file_type_<ext>` and `directory_<name>` pattern occurrences
/// derived from `file_path`. Returns whether anything was recorded.
async fn learn_from_file_context(&mut self, file_path: &str) -> Result<bool, ParseError> {
    let mut updated = false;
    // Learn from file extension
    if let Some(extension) = std::path::Path::new(file_path)
        .extension()
        .and_then(|s| s.to_str())
    {
        let pattern_type = format!("file_type_{}", extension);
        updated |= self.update_pattern_frequency(&pattern_type, 1).await?;
    }
    // Learn from directory structure (immediate parent directory only)
    if let Some(parent) = std::path::Path::new(file_path).parent() {
        if let Some(dir_name) = parent.file_name().and_then(|s| s.to_str()) {
            let pattern_type = format!("directory_{}", dir_name);
            updated |= self.update_pattern_frequency(&pattern_type, 1).await?;
        }
    }
    Ok(updated)
}
/// Helper method to parse pattern from JSON (from original implementation)
///
/// Missing fields fall back to defaults; this never returns `Err` today —
/// the `Result` is kept for interface stability.
fn parse_pattern_from_json(&self, json: &Value) -> Result<Pattern, serde_json::Error> {
    // Use the provided id, otherwise synthesize one lazily. The previous
    // `unwrap_or(&format!(..))` allocated the fallback string even when an
    // id was present (clippy: or_fun_call).
    let id = json
        .get("id")
        .and_then(|v| v.as_str())
        .map(str::to_string)
        .unwrap_or_else(|| format!("parsed_{}", self.generate_pattern_id()));
    // Accept either "type" or "patternType" spellings.
    let pattern_type = json
        .get("type")
        .or_else(|| json.get("patternType"))
        .and_then(|v| v.as_str())
        .unwrap_or("unknown")
        .to_string();
    let description = json
        .get("description")
        .and_then(|v| v.as_str())
        .unwrap_or("Pattern learned from analysis")
        .to_string();
    let frequency = json.get("frequency").and_then(|v| v.as_u64()).unwrap_or(1) as u32;
    let confidence = json
        .get("confidence")
        .and_then(|v| v.as_f64())
        .unwrap_or(0.5);
    let contexts = json
        .get("contexts")
        .and_then(|v| v.as_array())
        .map(|arr| {
            arr.iter()
                .filter_map(|v| v.as_str().map(|s| s.to_string()))
                .collect()
        })
        .unwrap_or_else(|| vec!["analysis".to_string()]);
    // Parse examples if available; malformed entries are skipped.
    let examples = json
        .get("examples")
        .and_then(|v| v.as_array())
        .map(|arr| {
            arr.iter()
                .filter_map(|ex| self.parse_example_from_json(ex))
                .collect()
        })
        .unwrap_or_default();
    Ok(Pattern {
        id,
        pattern_type,
        description,
        frequency,
        confidence,
        examples,
        contexts,
    })
}
/// Helper method to parse example from JSON (from original implementation)
///
/// Returns `None` only when "code" is missing or not a string; all other
/// fields fall back to defaults. Accepts both camelCase and snake_case
/// key spellings.
fn parse_example_from_json(
    &self,
    json: &Value,
) -> Option<crate::patterns::types::PatternExample> {
    let code = json.get("code")?.as_str()?.to_string();
    let file_path = json
        .get("filePath")
        .or_else(|| json.get("file_path"))
        .and_then(|v| v.as_str())
        .unwrap_or("unknown")
        .to_string();
    let line_range =
        if let Some(range) = json.get("lineRange").or_else(|| json.get("line_range")) {
            crate::types::LineRange {
                start: range.get("start").and_then(|v| v.as_u64()).unwrap_or(1) as u32,
                end: range.get("end").and_then(|v| v.as_u64()).unwrap_or(1) as u32,
            }
        } else {
            crate::types::LineRange { start: 1, end: 1 }
        };
    Some(crate::patterns::types::PatternExample {
        code,
        file_path,
        line_range,
    })
}
/// Generate unique pattern ID (from original implementation)
///
/// Millisecond unix timestamp as a string; two ids generated within the
/// same millisecond will collide.
fn generate_pattern_id(&self) -> String {
    std::time::SystemTime::now()
        .duration_since(std::time::UNIX_EPOCH)
        .unwrap()
        .as_millis()
        .to_string()
}
/// Classifies an edit purely by byte-length comparison: shrinking content
/// is a "deletion", growing content an "addition", equal length a
/// "modification".
fn detect_change_type(&self, old_content: &str, new_content: &str) -> String {
    let label = match new_content.len().cmp(&old_content.len()) {
        std::cmp::Ordering::Less => "deletion",
        std::cmp::Ordering::Greater => "addition",
        std::cmp::Ordering::Equal => "modification",
    };
    label.to_string()
}
/// The pattern type of the first pattern, or "general" when the slice is
/// empty.
fn extract_primary_concept(&self, patterns: &[Pattern]) -> String {
    match patterns.first() {
        Some(pattern) => pattern.pattern_type.clone(),
        None => "general".to_string(),
    }
}
/// Private helper methods
///
/// Walks `path` (depth <= 5), reads up to 100 supported source files, and
/// extracts semantic concepts from each. Stops early after 60 seconds.
/// Unreadable files and walk errors are silently skipped.
async fn extract_semantic_concepts(
    &self,
    path: &str,
) -> Result<Vec<SemanticConcept>, ParseError> {
    let mut concepts = Vec::new();
    let mut file_count = 0;
    let start_time = std::time::Instant::now();
    let timeout = std::time::Duration::from_secs(60); // 60 second timeout
    for entry in WalkDir::new(path)
        .max_depth(5) // Limit directory traversal depth
        .into_iter()
        .filter_map(|e| e.ok())
    {
        // Check timeout
        if start_time.elapsed() > timeout {
            eprintln!(
                "Timeout reached during concept extraction after {} files",
                file_count
            );
            break;
        }
        if entry.file_type().is_file() && file_count < 100 {
            // Reduced limit for performance
            let file_path = entry.path();
            // Add proper file filtering (skips vendored/build/IDE dirs)
            if !self.should_analyze_file(file_path) {
                continue;
            }
            if let Some(extension) = file_path.extension().and_then(|s| s.to_str()) {
                if self.is_supported_extension(extension) {
                    if let Ok(content) = fs::read_to_string(file_path) {
                        let file_concepts = self.extract_concepts_from_file(
                            &content,
                            file_path.to_string_lossy().as_ref(),
                            extension,
                        )?;
                        concepts.extend(file_concepts);
                        file_count += 1;
                    }
                }
            }
        }
    }
    Ok(concepts)
}
/// Extracts concepts from a single file's contents, line by line.
///
/// Maps the file extension to a language tag, then applies the per-line
/// regex heuristics; at most one concept is produced per line.
fn extract_concepts_from_file(
    &self,
    content: &str,
    file_path: &str,
    extension: &str,
) -> Result<Vec<SemanticConcept>, ParseError> {
    // This would use the semantic analyzer from the main codebase
    // For now, return a simplified extraction
    let mut concepts = Vec::new();
    let language = match extension {
        "js" | "jsx" => "javascript",
        "ts" | "tsx" => "typescript",
        "rs" => "rust",
        "py" => "python",
        "java" => "java",
        _ => "unknown",
    };
    // Simple regex-based concept extraction (in practice, would use tree-sitter)
    let lines: Vec<&str> = content.lines().collect();
    for (line_num, line) in lines.iter().enumerate() {
        // Line numbers are 1-based in the emitted concepts.
        if let Some(concept) =
            self.extract_concept_from_line(line, file_path, line_num as u32 + 1, language)
        {
            concepts.push(concept);
        }
    }
    Ok(concepts)
}
/// Extracts at most one concept (function or class-like) from a single
/// source line using per-language regex heuristics. Function matches take
/// precedence over class matches.
///
/// NOTE(review): regexes are recompiled on every call; callers iterate
/// per-line, so caching compiled patterns would be a worthwhile follow-up.
fn extract_concept_from_line(
    &self,
    line: &str,
    file_path: &str,
    line_num: u32,
    language: &str,
) -> Option<SemanticConcept> {
    let trimmed = line.trim();
    // Function detection patterns (capture group 1 is the name)
    let function_patterns = match language {
        "javascript" | "typescript" => vec![
            r"function\s+(\w+)",
            r"const\s+(\w+)\s*=.*=>",
            r"(\w+)\s*:\s*\([^)]*\)\s*=>",
        ],
        "rust" => vec![r"fn\s+(\w+)", r"pub\s+fn\s+(\w+)"],
        "python" => vec![r"def\s+(\w+)"],
        "java" => vec![r"public\s+.*\s+(\w+)\s*\(", r"private\s+.*\s+(\w+)\s*\("],
        _ => vec![],
    };
    // Class detection patterns (capture group 1 is the name)
    let class_patterns = match language {
        "javascript" | "typescript" => vec![r"class\s+(\w+)", r"interface\s+(\w+)"],
        "rust" => vec![r"struct\s+(\w+)", r"enum\s+(\w+)", r"trait\s+(\w+)"],
        "python" => vec![r"class\s+(\w+)"],
        "java" => vec![r"class\s+(\w+)", r"interface\s+(\w+)"],
        _ => vec![],
    };
    // Functions first (confidence 0.8), then class-like items (0.9) —
    // matches the original precedence and confidence values.
    self.first_concept_match(&function_patterns, trimmed, "function", 0.8, file_path, line_num)
        .or_else(|| {
            self.first_concept_match(&class_patterns, trimmed, "class", 0.9, file_path, line_num)
        })
}

/// Returns a concept for the first regex in `patterns` whose capture
/// group 1 matches `text`; invalid patterns are skipped. Factored out of
/// `extract_concept_from_line`, which previously duplicated this loop for
/// functions and classes.
fn first_concept_match(
    &self,
    patterns: &[&str],
    text: &str,
    concept_type: &str,
    confidence: f64,
    file_path: &str,
    line_num: u32,
) -> Option<SemanticConcept> {
    for pattern in patterns {
        if let Ok(regex) = regex::Regex::new(pattern) {
            if let Some(name) = regex.captures(text).and_then(|c| c.get(1)) {
                return Some(SemanticConcept {
                    id: format!("{}_{}", file_path, name.as_str()),
                    name: name.as_str().to_string(),
                    concept_type: concept_type.to_string(),
                    confidence,
                    file_path: file_path.to_string(),
                    line_range: crate::types::LineRange {
                        start: line_num,
                        end: line_num,
                    },
                    relationships: HashMap::new(),
                    metadata: HashMap::new(),
                });
            }
        }
    }
    None
}
/// Whether `extension` (case-insensitive) is one of the source-file
/// extensions this engine knows how to analyze.
fn is_supported_extension(&self, extension: &str) -> bool {
    const SUPPORTED: [&str; 13] = [
        "js", "jsx", "ts", "tsx", "rs", "py", "java", "cpp", "c", "cs", "go", "rb", "php",
    ];
    let normalized = extension.to_lowercase();
    SUPPORTED.contains(&normalized.as_str())
}
/// Whether `file_path` should be analyzed: not inside a vendored/build/IDE
/// directory, and carries a supported source extension.
///
/// Directory screening is a substring match on the whole path (matching
/// the original behavior), so e.g. any path containing "build" is skipped.
fn should_analyze_file(&self, file_path: &std::path::Path) -> bool {
    // Same set as `is_ignored_directory`; kept as substring checks here.
    const IGNORED_SEGMENTS: [&str; 10] = [
        "node_modules",
        ".git",
        "target",
        "dist",
        "build",
        ".next",
        "__pycache__",
        "coverage",
        ".vscode",
        ".idea",
    ];
    let path_str = file_path.to_string_lossy();
    if IGNORED_SEGMENTS.iter().any(|seg| path_str.contains(seg)) {
        return false;
    }
    // Files without an extension (or a non-UTF-8 one) are not analyzed.
    file_path
        .extension()
        .and_then(|s| s.to_str())
        .map_or(false, |ext| self.is_supported_extension(ext))
}
/// Whether `dir_name` (exact match) is a directory that should never be
/// analyzed: dependency caches, VCS metadata, build output, IDE config.
/// NOTE(review): `should_analyze_file` duplicates this list as substring
/// checks — keep the two in sync when adding entries.
fn is_ignored_directory(&self, dir_name: &str) -> bool {
    matches!(
        dir_name,
        "node_modules"
            | ".git"
            | "target"
            | "dist"
            | "build"
            | ".next"
            | "__pycache__"
            | "coverage"
            | ".vscode"
            | ".idea"
    )
}
async fn learn_naming_patterns(
&mut self,
concepts: &[SemanticConcept],
_path: &str,
) -> Result<Vec<Pattern>, ParseError> {
// Group concepts by language for better analysis
//! Pattern learning and analysis modules
//!
//! This module provides comprehensive pattern recognition and learning capabilities
//! across multiple domains: naming conventions, structural patterns, implementation
//! patterns, and approach prediction.
// Core types and traits
pub mod types;
// Specialized pattern analyzers
pub mod naming;
pub mod structural;
pub mod implementation;
pub mod prediction;
pub mod learning;
// Re-export main types and analyzers
pub use types::*;
pub use naming::NamingPatternAnalyzer;
pub use structural::StructuralPatternAnalyzer;
pub use implementation::ImplementationPatternAnalyzer;
pub use prediction::ApproachPredictor;
pub use learning::PatternLearningEngine;
// Legacy compatibility - re-export the main pattern learning functionality
// through the new modular engine
#[cfg(feature = "napi-bindings")]
use napi_derive::napi;
/// Legacy PatternLearner for backwards compatibility
///
/// Thin facade over the modular `PatternLearningEngine`; all learning
/// state lives in the wrapped engine.
#[derive(Default)]
#[cfg_attr(feature = "napi-bindings", napi)]
pub struct PatternLearner {
    engine: PatternLearningEngine,
}
#[cfg_attr(feature = "napi-bindings", napi)]
impl PatternLearner {
/// Creates a learner wrapping a fresh `PatternLearningEngine`.
#[cfg_attr(feature = "napi-bindings", napi(constructor))]
pub fn new() -> Self {
    PatternLearner {
        engine: PatternLearningEngine::new(),
    }
}
/// Learn patterns from an entire codebase
///
/// Delegates to the wrapped engine.
///
/// # Safety
/// This function is marked unsafe for NAPI compatibility. It performs file system operations
/// and pattern analysis that are inherently safe but marked unsafe for JavaScript interop.
#[cfg_attr(feature = "napi-bindings", napi)]
pub async unsafe fn learn_from_codebase(&mut self, path: String) -> Result<Vec<Pattern>, crate::types::ParseError> {
    self.engine.learn_from_codebase(path).await
}
/// Extract patterns from a specific path
///
/// Uses throwaway analyzer instances (not the wrapped engine's), so
/// nothing learned here is retained on `self`.
///
/// # Safety
/// This function is marked unsafe for NAPI compatibility. It performs file system operations
/// and pattern analysis that are inherently safe but marked unsafe for JavaScript interop.
#[cfg_attr(feature = "napi-bindings", napi)]
pub async unsafe fn extract_patterns(&self, path: String) -> Result<Vec<Pattern>, crate::types::ParseError> {
    // Use the learning engine to extract patterns
    let naming_analyzer = NamingPatternAnalyzer::new();
    let structural_analyzer = StructuralPatternAnalyzer::new();
    let implementation_analyzer = ImplementationPatternAnalyzer::new();
    let mut all_patterns = Vec::new();
    // Extract patterns from each analyzer
    all_patterns.extend(naming_analyzer.extract_patterns(&path)?);
    all_patterns.extend(structural_analyzer.extract_patterns(&path)?);
    all_patterns.extend(implementation_analyzer.extract_patterns(&path)?);
    Ok(all_patterns)
}
/// Analyze file changes to identify patterns (original signature)
///
/// Public NAPI wrapper around `analyze_file_change_internal`.
///
/// # Safety
/// This function is marked unsafe due to NAPI bindings requirements.
/// It should only be called from properly initialized JavaScript contexts.
#[cfg_attr(feature = "napi-bindings", napi)]
pub async unsafe fn analyze_file_change(
    &self,
    change_data: String,
) -> Result<PatternAnalysisResult, crate::types::ParseError> {
    self.analyze_file_change_internal(change_data).await
}
/// Internal implementation for analyze_file_change (from original)
///
/// Currently composed of stubbed helpers, so the result is mostly
/// placeholder data; `learned` is always `None` here.
pub async fn analyze_file_change_internal(
    &self,
    change_data: String,
) -> Result<PatternAnalysisResult, crate::types::ParseError> {
    // Parse the change data (would be JSON in real implementation)
    let detected = self.detect_patterns_in_change(&change_data)?;
    let violations = self.detect_pattern_violations(&change_data)?;
    let recommendations = self.generate_recommendations(&detected, &violations)?;
    Ok(PatternAnalysisResult {
        detected,
        violations,
        recommendations,
        learned: None, // Would contain newly learned patterns
    })
}
/// Find patterns relevant to a given problem description (original signature)
///
/// Public NAPI wrapper around `find_relevant_patterns_internal`.
///
/// # Safety
/// This function is marked unsafe due to NAPI bindings requirements.
/// It should only be called from properly initialized JavaScript contexts.
#[cfg_attr(feature = "napi-bindings", napi)]
pub async unsafe fn find_relevant_patterns(
    &self,
    problem_description: String,
    current_file: Option<String>,
    selected_code: Option<String>,
) -> Result<Vec<Pattern>, crate::types::ParseError> {
    self.find_relevant_patterns_internal(problem_description, current_file, selected_code)
        .await
}
/// Internal implementation for find_relevant_patterns (from original)
///
/// Scores every learned pattern against keywords from the problem
/// description, keeps those scoring above 0.5, sorts by
/// confidence x frequency descending, and returns at most five.
pub async fn find_relevant_patterns_internal(
    &self,
    problem_description: String,
    current_file: Option<String>,
    selected_code: Option<String>,
) -> Result<Vec<Pattern>, crate::types::ParseError> {
    let mut relevant_patterns = Vec::new();
    // Analyze problem description for keywords
    let keywords = self.extract_keywords(&problem_description);
    // Find patterns matching the context using engine's learned patterns.
    // (Fixed mojibake: the argument read "¤t_file" — an HTML-entity
    // corruption of "&current_file" — which did not compile.)
    let learned_patterns = self.engine.get_learned_patterns();
    for pattern in learned_patterns {
        let relevance_score =
            self.calculate_pattern_relevance(&pattern, &keywords, &current_file, &selected_code);
        if relevance_score > 0.5 {
            relevant_patterns.push(pattern);
        }
    }
    // Sort by relevance and confidence (confidence x frequency, descending)
    relevant_patterns.sort_by(|a, b| {
        let score_a = a.confidence * a.frequency as f64;
        let score_b = b.confidence * b.frequency as f64;
        score_b
            .partial_cmp(&score_a)
            .unwrap_or(std::cmp::Ordering::Equal)
    });
    Ok(relevant_patterns.into_iter().take(5).collect())
}
/// Predict coding approach based on problem description and context (original signature)
///
/// Public NAPI wrapper around `predict_approach_internal`.
///
/// # Safety
/// This function is marked unsafe due to NAPI bindings requirements.
/// It should only be called from properly initialized JavaScript contexts.
#[cfg_attr(feature = "napi-bindings", napi)]
pub async unsafe fn predict_approach(
    &self,
    problem_description: String,
    context: std::collections::HashMap<String, String>,
) -> Result<ApproachPrediction, crate::types::ParseError> {
    self.predict_approach_internal(problem_description, context)
        .await
}
/// Internal implementation for predict_approach (from original)
///
/// Builds a prediction from keyword-matched patterns plus a word-count
/// complexity heuristic; confidence is the mean confidence of the
/// matching patterns (0.3 when none match).
pub async fn predict_approach_internal(
    &self,
    problem_description: String,
    context: std::collections::HashMap<String, String>,
) -> Result<ApproachPrediction, crate::types::ParseError> {
    let keywords = self.extract_keywords(&problem_description);
    let relevant_patterns = self.find_patterns_by_keywords(&keywords);
    // Analyze problem complexity
    let complexity = self.estimate_problem_complexity(&problem_description, &context);
    // Generate approach based on learned patterns
    let approach = self.generate_approach(&relevant_patterns, &complexity);
    let prediction = ApproachPrediction {
        approach: approach.description,
        confidence: approach.confidence,
        reasoning: approach.reasoning,
        patterns: relevant_patterns
            .into_iter()
            .map(|p| p.pattern_type)
            .collect(),
        complexity: complexity.to_string(),
    };
    Ok(prediction)
}
/// Learn from analysis data
///
/// Delegates to the wrapped engine.
///
/// # Safety
/// This function is marked unsafe for NAPI compatibility. It performs data parsing and
/// learning operations that are inherently safe but marked unsafe for JavaScript interop.
#[cfg_attr(feature = "napi-bindings", napi)]
pub async unsafe fn learn_from_analysis(&mut self, analysis_data: String) -> Result<bool, crate::types::ParseError> {
    self.engine.learn_from_analysis(analysis_data).await
}
/// Update pattern learner from change data (from original implementation)
///
/// Delegates to the wrapped engine.
///
/// # Safety
/// This function is marked unsafe for NAPI compatibility. It performs data parsing and
/// pattern update operations that are inherently safe but marked unsafe for JavaScript interop.
#[cfg_attr(feature = "napi-bindings", napi)]
pub async unsafe fn update_from_change(&mut self, change_data: String) -> Result<bool, crate::types::ParseError> {
    self.engine.update_from_change(change_data).await
}
// Helper methods from original implementation
/// Detect which patterns are present in the change.
/// Stub: currently returns a fixed placeholder regardless of input.
fn detect_patterns_in_change(&self, _change_data: &str) -> Result<Vec<String>, crate::types::ParseError> {
    // Detect which patterns are present in the change
    Ok(vec!["naming_camelCase_function".to_string()])
}
/// Detect violations of established patterns.
/// Stub: currently always reports no violations.
fn detect_pattern_violations(&self, _change_data: &str) -> Result<Vec<String>, crate::types::ParseError> {
    // Detect violations of established patterns
    Ok(vec![])
}
/// Generate recommendations based on detected patterns and violations.
/// Stub: currently returns a fixed generic recommendation.
fn generate_recommendations(
    &self,
    _detected: &[String],
    _violations: &[String],
) -> Result<Vec<String>, crate::types::ParseError> {
    // Generate recommendations based on detected patterns and violations
    Ok(vec![
        "Consider using consistent naming convention".to_string()
    ])
}
/// Splits `text` on whitespace and keeps words longer than three
/// characters, normalized to lowercase.
fn extract_keywords(&self, text: &str) -> Vec<String> {
    let mut keywords = Vec::new();
    for word in text.split_whitespace() {
        if word.len() > 3 {
            keywords.push(word.to_lowercase());
        }
    }
    keywords
}
/// Scores how relevant `pattern` is to the given keywords: +0.2 per
/// keyword found in the description, +0.3 per keyword found in the
/// pattern type, plus weighted confidence and frequency terms, capped
/// at 1.0. The file/code context parameters are accepted but unused.
fn calculate_pattern_relevance(
    &self,
    pattern: &Pattern,
    keywords: &[String],
    _current_file: &Option<String>,
    _selected_code: &Option<String>,
) -> f64 {
    // Lowercase once up front; the original recomputed both strings for
    // every keyword in the loop.
    let description = pattern.description.to_lowercase();
    let pattern_type = pattern.pattern_type.to_lowercase();
    let mut relevance = 0.0;
    // Check keyword matches
    for keyword in keywords {
        if description.contains(keyword) {
            relevance += 0.2;
        }
        if pattern_type.contains(keyword) {
            relevance += 0.3;
        }
    }
    // Factor in pattern confidence and frequency
    relevance += pattern.confidence * 0.3;
    relevance += (pattern.frequency as f64 / 100.0) * 0.2;
    relevance.min(1.0)
}
/// Returns every learned pattern whose description or type contains at
/// least one of the given keywords (case-insensitive on the pattern side).
fn find_patterns_by_keywords(&self, keywords: &[String]) -> Vec<Pattern> {
    self.engine
        .get_learned_patterns()
        .into_iter()
        .filter(|pattern| {
            // Lowercase each pattern's fields once; the original nested
            // loop recomputed them for every keyword.
            let description = pattern.description.to_lowercase();
            let pattern_type = pattern.pattern_type.to_lowercase();
            keywords
                .iter()
                .any(|keyword| description.contains(keyword) || pattern_type.contains(keyword))
        })
        .collect()
}
/// Crude complexity heuristic based solely on the description's word
/// count: <10 words is Low, 10-29 Medium, 30+ High. Context is accepted
/// for interface compatibility but unused.
fn estimate_problem_complexity(
    &self,
    problem_description: &str,
    _context: &std::collections::HashMap<String, String>,
) -> ProblemComplexity {
    match problem_description.split_whitespace().count() {
        0..=9 => ProblemComplexity::Low,
        10..=29 => ProblemComplexity::Medium,
        _ => ProblemComplexity::High,
    }
}
/// Builds an approach suggestion: confidence is the mean confidence of
/// `relevant_patterns` (0.3 when none), and the description is a fixed
/// template chosen by complexity tier.
fn generate_approach(
    &self,
    relevant_patterns: &[Pattern],
    complexity: &ProblemComplexity,
) -> GeneratedApproach {
    let confidence = if relevant_patterns.is_empty() {
        0.3
    } else {
        relevant_patterns.iter().map(|p| p.confidence).sum::<f64>()
            / relevant_patterns.len() as f64
    };
    let description = match complexity {
        ProblemComplexity::Low => {
            "Use simple, direct implementation following established patterns"
        }
        ProblemComplexity::Medium => {
            "Break down into smaller components, apply relevant design patterns"
        }
        ProblemComplexity::High => {
            "Design comprehensive solution with multiple layers and patterns"
        }
    };
    // Relies on ProblemComplexity implementing Display for the summary.
    let reasoning = format!(
        "Based on {} relevant patterns and {} complexity assessment",
        relevant_patterns.len(),
        complexity
    );
    GeneratedApproach {
        description: description.to_string(),
        confidence,
        reasoning,
    }
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::types::LineRange;
// A fresh learner starts with zero learned patterns.
#[test]
fn test_pattern_learner_creation() {
    let learner = PatternLearner::new();
    assert!(learner.engine.get_learning_metrics().total_patterns_learned == 0);
}
#[test]
fn test_pattern_creation() {
let pattern = Pattern {
id: "test_pattern".to_string(),
pattern_type: "naming".to_string(),
description: "Test pattern".to_string(),
frequency: 5,
confidence: 0.8,
examples: vec![],
contexts: vec!["test".to_string()],
};
assert_eq!(pattern.id, "test_pattern");
assert_eq!(pattern.pattern_type, "naming");
assert_eq!(pattern.frequency, 5);
assert_eq!(pattern.confidence, 0.8);
}
#[test]
fn test_pattern_analysis_result() {
let result = PatternAnalysisResult {
detected: vec!["pattern1".to_string()],
violations: vec!["violation1".to_string()],
recommendations: vec!["Use consistent naming".to_string()],
learned: None,
};
assert_eq!(result.detected.len(), 1);
assert_eq!(result.violations.len(), 1);
assert_eq!(result.recommendations.len(), 1);
assert!(result.learned.is_none());
}
#[tokio::test]
async fn test_extract_patterns_internal() {
let learner = PatternLearner::new();
let result = unsafe { learner.extract_patterns("/test/path".to_string()).await };
assert!(result.is_ok());
let patterns = result.unwrap();
// Should have some patterns from the analyzers
assert!(!patterns.is_empty());
}
#[tokio::test]
async fn test_analyze_file_change_internal() {
let learner = PatternLearner::new();
let change_data = r#"{
"type": "modify",
"file": "test.ts",
"oldPath": "test.ts",
"newPath": "test.ts"
}"#.to_string();
let result = learner.analyze_file_change_internal(change_data).await;
assert!(result.is_ok());
let analysis = result.unwrap();
assert!(!analysis.detected.is_empty());
assert!(!analysis.recommendations.is_empty());
}
#[tokio::test]
async fn test_find_relevant_patterns_internal() {
let mut learner = PatternLearner::new();
// Add a test pattern to the engine first
let pattern = Pattern {
id: "test_function".to_string(),
pattern_type: "function".to_string(),
description: "Function pattern for testing".to_string(),
frequency: 10,
confidence: 0.9,
examples: vec![PatternExample {
code: "function test() {}".to_string(),
file_path: "test.ts".to_string(),
line_range: LineRange { start: 1, end: 1 },
}],
contexts: vec!["typescript".to_string()],
};
learner.engine.insert_pattern("test_function".to_string(), pattern);
let result = learner.find_relevant_patterns_internal(
"I need to create a function".to_string(),
Some("test.ts".to_string()),
None,
).await;
assert!(result.is_ok());
let patterns = result.unwrap();
assert!(!patterns.is_empty());
assert_eq!(patterns[0].pattern_type, "function");
}
#[tokio::test]
async fn test_predict_approach_internal() {
let mut learner = PatternLearner::new();
// Add test patterns
let pattern = Pattern {
id: "api_pattern".to_string(),
pattern_type: "api".to_string(),
description: "REST API pattern".to_string(),
frequency: 15,
confidence: 0.85,
examples: vec![PatternExample {
code: "app.get('/api', handler)".to_string(),
file_path: "server.js".to_string(),
line_range: LineRange { start: 10, end: 10 },
}],
contexts: vec!["express".to_string()],
};
learner.engine.insert_pattern("api_pattern".to_string(), pattern);
let mut context = std::collections::HashMap::new();
context.insert("framework".to_string(), "express".to_string());
context.insert("language".to_string(), "javascript".to_string());
let result = learner.predict_approach_internal(
"Build a REST API endpoint".to_string(),
context,
).await;
assert!(result.is_ok());
let prediction = result.unwrap();
assert!(prediction.confidence > 0.0);
assert!(!prediction.patterns.is_empty());
assert!(!prediction.approach.is_empty());
}
#[tokio::test]
async fn test_learn_from_analysis() {
let mut learner = PatternLearner::new();
let analysis_data = r#"{
"patterns": {
"detected": ["service_pattern", "dependency_injection"],
"learned": []
},
"concepts": [
{
"name": "UserService",
"type": "class",
"patterns": ["service", "dependency_injection"]
}
]
}"#.to_string();
let result = unsafe { learner.learn_from_analysis(analysis_data).await };
assert!(result.is_ok());
let updated = result.unwrap();
assert!(updated); // Should return true since patterns were updated
}
#[tokio::test]
async fn test_update_from_change() {
let mut learner = PatternLearner::new();
let change_data = r#"{
"type": "modify",
"path": "test.ts",
"content": "function newName() {}",
"language": "typescript"
}"#.to_string();
let result = unsafe { learner.update_from_change(change_data).await };
assert!(result.is_ok());
assert!(result.unwrap());
}
#[test]
fn test_extract_keywords() {
let learner = PatternLearner::new();
let keywords = learner.extract_keywords("Build a REST API endpoint using Express");
assert!(keywords.contains(&"build".to_string()));
assert!(keywords.contains(&"rest".to_string()));
assert!(keywords.contains(&"endpoint".to_string()));
assert!(keywords.contains(&"using".to_string()));
assert!(keywords.contains(&"express".to_string()));
}
#[test]
fn test_problem_complexity_estimation() {
let learner = PatternLearner::new();
let context = std::collections::HashMap::new();
let low = learner.estimate_problem_complexity("Simple task", &context);
assert_eq!(low, ProblemComplexity::Low);
let medium = learner.estimate_problem_complexity("Build a REST API with authentication and user management", &context);
assert_eq!(medium, ProblemComplexity::Medium);
let high = learner.estimate_problem_complexity(
"Design and implement a comprehensive microservices architecture with distributed caching, message queuing, service discovery, and fault tolerance",
&context
);
assert_eq!(high, ProblemComplexity::High);
}
#[test]
fn test_pattern_relevance_calculation() {
let learner = PatternLearner::new();
let pattern = Pattern {
id: "test".to_string(),
pattern_type: "function".to_string(),
description: "Function pattern for JavaScript development".to_string(),
frequency: 10,
confidence: 0.8,
examples: vec![],
contexts: vec!["javascript".to_string()],
};
let keywords = vec!["function".to_string(), "javascript".to_string()];
let relevance = learner.calculate_pattern_relevance(&pattern, &keywords, &None, &None);
assert!(relevance > 0.5);
}
#[test]
fn test_module_exports() {
// Test that all main types are accessible
let _naming = NamingPatternAnalyzer::new();
let _structural = StructuralPatternAnalyzer::new();
let _implementation = ImplementationPatternAnalyzer::new();
let _predictor = ApproachPredictor::new();
let _engine = PatternLearningEngine::new();
// Test legacy compatibility
let _legacy = PatternLearner::new();
}
#[test]
fn test_approach_prediction_types() {
let prediction = ApproachPrediction {
approach: "Use modular architecture".to_string(),
confidence: 0.85,
reasoning: "Based on complexity analysis".to_string(),
patterns: vec!["modular".to_string()],
complexity: "medium".to_string(),
};
assert_eq!(prediction.approach, "Use modular architecture");
assert_eq!(prediction.confidence, 0.85);
assert!(!prediction.reasoning.is_empty());
assert!(!prediction.patterns.is_empty());
assert_eq!(prediction.complexity, "medium");
}
#[test]
fn test_pattern_example_creation() {
let example = PatternExample {
code: "function calculateTotal() { return 42; }".to_string(),
file_path: "utils.ts".to_string(),
line_range: LineRange { start: 15, end: 15 },
};
assert!(example.code.contains("function"));
assert!(example.code.contains("calculateTotal"));
assert_eq!(example.file_path, "utils.ts");
assert_eq!(example.line_range.start, 15);
assert_eq!(example.line_range.end, 15);
}
} | rust | MIT | 9cbed0d63a52d61fcd66d0d651b8c4d7f787ba78 | 2026-01-04T20:19:30.317431Z | false |
//! Structural pattern detection and analysis
#[cfg(feature = "napi-bindings")]
use napi_derive::napi;
use crate::patterns::types::{Pattern, PatternExample, StructuralPattern, PatternExtractor};
use crate::types::{ParseError, SemanticConcept, LineRange};
use std::collections::{HashMap, HashSet};
use walkdir::WalkDir;
use std::fs;
use std::path::Path;
/// Analyzer for detecting architectural and structural patterns
#[cfg_attr(feature = "napi-bindings", napi)]
pub struct StructuralPatternAnalyzer {
    /// Structural patterns detected so far, keyed by signature key (e.g. "MVC").
    patterns: HashMap<String, StructuralPattern>,
    /// Built-in architecture signatures to score against, keyed by short name.
    architecture_signatures: HashMap<String, ArchitectureSignature>,
}
// Describes how to recognize one architectural style from a codebase's layout.
#[derive(Debug, Clone)]
struct ArchitectureSignature {
    // Human-readable name, e.g. "Model-View-Controller".
    pattern_name: String,
    // Component categories expected among the classified files.
    required_components: Vec<String>,
    // Directory names expected somewhere in the tree (trailing slash included).
    directory_structure: Vec<String>,
    // Glob-like filename markers associated with the style.
    file_patterns: Vec<String>,
    // Minimum confidence score required to report a match.
    confidence_threshold: f64,
}
// Snapshot of one directory: its immediate children and a file-type histogram.
#[derive(Debug, Clone)]
struct DirectoryAnalysis {
    // Full path of the directory.
    path: String,
    // Names of immediate subdirectories.
    subdirectories: Vec<String>,
    // Count of immediate files per extension.
    file_types: HashMap<String, usize>,
    // Depth relative to the walk root.
    depth: usize,
}
#[cfg_attr(feature = "napi-bindings", napi)]
impl StructuralPatternAnalyzer {
#[cfg_attr(feature = "napi-bindings", napi(constructor))]
pub fn new() -> Self {
let mut analyzer = StructuralPatternAnalyzer {
patterns: HashMap::new(),
architecture_signatures: HashMap::new(),
};
analyzer.initialize_signatures();
analyzer
}
/// Initialize common architectural pattern signatures
fn initialize_signatures(&mut self) {
// MVC Pattern
self.architecture_signatures.insert("MVC".to_string(), ArchitectureSignature {
pattern_name: "Model-View-Controller".to_string(),
required_components: vec!["model".to_string(), "view".to_string(), "controller".to_string()],
directory_structure: vec!["models/".to_string(), "views/".to_string(), "controllers/".to_string()],
file_patterns: vec!["*Controller.*".to_string(), "*Model.*".to_string(), "*View.*".to_string()],
confidence_threshold: 0.7,
});
// Clean Architecture
self.architecture_signatures.insert("Clean".to_string(), ArchitectureSignature {
pattern_name: "Clean Architecture".to_string(),
required_components: vec!["domain".to_string(), "application".to_string(), "infrastructure".to_string(), "presentation".to_string()],
directory_structure: vec!["domain/".to_string(), "application/".to_string(), "infrastructure/".to_string(), "presentation/".to_string()],
file_patterns: vec!["*Service.*".to_string(), "*Repository.*".to_string(), "*UseCase.*".to_string()],
confidence_threshold: 0.8,
});
// Layered Architecture
self.architecture_signatures.insert("Layered".to_string(), ArchitectureSignature {
pattern_name: "Layered Architecture".to_string(),
required_components: vec!["api".to_string(), "service".to_string(), "data".to_string()],
directory_structure: vec!["api/".to_string(), "service/".to_string(), "data/".to_string()],
file_patterns: vec!["*Api.*".to_string(), "*Service.*".to_string(), "*Repository.*".to_string()],
confidence_threshold: 0.6,
});
// Microservices
self.architecture_signatures.insert("Microservices".to_string(), ArchitectureSignature {
pattern_name: "Microservices Architecture".to_string(),
required_components: vec!["service".to_string(), "gateway".to_string()],
directory_structure: vec!["services/".to_string(), "gateway/".to_string()],
file_patterns: vec!["*Service.*".to_string(), "docker*".to_string(), "*Gateway.*".to_string()],
confidence_threshold: 0.7,
});
// Modular Monolith
self.architecture_signatures.insert("Modular".to_string(), ArchitectureSignature {
pattern_name: "Modular Architecture".to_string(),
required_components: vec!["modules".to_string(), "shared".to_string()],
directory_structure: vec!["modules/".to_string(), "shared/".to_string()],
file_patterns: vec!["mod.*".to_string(), "index.*".to_string()],
confidence_threshold: 0.5,
});
// Event-Driven Architecture
self.architecture_signatures.insert("EventDriven".to_string(), ArchitectureSignature {
pattern_name: "Event-Driven Architecture".to_string(),
required_components: vec!["events".to_string(), "handlers".to_string(), "publishers".to_string()],
directory_structure: vec!["events/".to_string(), "handlers/".to_string()],
file_patterns: vec!["*Event.*".to_string(), "*Handler.*".to_string(), "*Publisher.*".to_string()],
confidence_threshold: 0.7,
});
}
    /// Analyze structural patterns from codebase organization
    ///
    /// Walks the directory tree and classifies file names under `path`, scores
    /// every known architecture signature, and returns a `Pattern` for each
    /// signature whose confidence clears its threshold. Matches are also
    /// cached in `self.patterns`.
    pub fn analyze_codebase_structure(&mut self, path: &str) -> Result<Vec<Pattern>, ParseError> {
        let directory_analysis = self.analyze_directory_structure(path)?;
        let file_analysis = self.analyze_file_patterns(path)?;
        let mut detected_patterns = Vec::new();
        // Check each architectural signature
        // (iterating `self.architecture_signatures` while inserting into the
        // disjoint field `self.patterns` below is accepted by the borrow checker)
        for (pattern_key, signature) in &self.architecture_signatures {
            let confidence = self.calculate_structure_confidence(
                &directory_analysis,
                &file_analysis,
                signature,
            );
            if confidence >= signature.confidence_threshold {
                // Example collection re-walks the tree, so only do it for matches.
                let examples = self.collect_structure_examples(path, signature)?;
                let example_count = examples.len() as u32;
                let pattern = Pattern {
                    id: format!("structural_{}", pattern_key),
                    pattern_type: "structural".to_string(),
                    description: format!(
                        "{} detected with {:.1}% confidence",
                        signature.pattern_name,
                        confidence * 100.0
                    ),
                    frequency: example_count,
                    confidence,
                    examples,
                    contexts: vec!["architecture".to_string()],
                };
                detected_patterns.push(pattern);
                // Store in internal patterns
                let structural_pattern = StructuralPattern {
                    pattern_type: signature.pattern_name.clone(),
                    frequency: example_count,
                    characteristics: signature.required_components.clone(),
                    confidence,
                };
                self.patterns.insert(pattern_key.clone(), structural_pattern);
            }
        }
        Ok(detected_patterns)
    }
/// Analyze concepts for structural relationships
pub fn analyze_concept_structures(&mut self, concepts: &[SemanticConcept]) -> Result<Vec<Pattern>, ParseError> {
let mut detected_patterns = Vec::new();
// Analyze file organization patterns
let file_organization = self.analyze_file_organization(concepts);
// Analyze dependency patterns
let dependency_patterns = self.analyze_dependency_patterns(concepts);
// Analyze naming structure patterns
let naming_structure = self.analyze_naming_structure_patterns(concepts);
detected_patterns.extend(file_organization);
detected_patterns.extend(dependency_patterns);
detected_patterns.extend(naming_structure);
Ok(detected_patterns)
}
/// Detect violations of structural patterns
pub fn detect_structural_violations(&self, concepts: &[SemanticConcept]) -> Vec<String> {
let mut violations = Vec::new();
// Check for common structural anti-patterns
violations.extend(self.detect_god_object_violations(concepts));
violations.extend(self.detect_circular_dependency_violations(concepts));
violations.extend(self.detect_layer_violations(concepts));
violations.extend(self.detect_coupling_violations(concepts));
violations
}
/// Generate structural recommendations
pub fn generate_structural_recommendations(&self, concepts: &[SemanticConcept]) -> Vec<String> {
let mut recommendations = Vec::new();
// File organization recommendations
let file_metrics = self.calculate_file_metrics(concepts);
if file_metrics.avg_concepts_per_file > 20.0 {
recommendations.push("Consider breaking down large files into smaller, more focused modules".to_string());
}
if file_metrics.max_concepts_per_file > 50 {
recommendations.push(format!(
"One file contains {} concepts - consider splitting this monolithic file",
file_metrics.max_concepts_per_file
));
}
if file_metrics.total_files < 5 && concepts.len() > 100 {
recommendations.push(format!(
"Only {} files for {} concepts - consider better separation of concerns",
file_metrics.total_files, concepts.len()
));
}
// Coupling recommendations
let coupling_metrics = self.calculate_coupling_metrics(concepts);
if coupling_metrics.high_coupling_count > 5 {
recommendations.push("Reduce tight coupling between components using dependency injection or interfaces".to_string());
}
// Layer separation recommendations
if self.has_layer_violations(concepts) {
recommendations.push("Establish clear layer boundaries and enforce dependency directions".to_string());
}
// Modularity recommendations
let modularity_score = self.calculate_modularity_score(concepts);
if modularity_score < 0.6 {
recommendations.push("Consider refactoring into more modular components with clear responsibilities".to_string());
}
if recommendations.is_empty() {
recommendations.push("Structural patterns look good! Consider documenting architectural decisions".to_string());
}
recommendations
}
/// Analyze directory structure recursively
fn analyze_directory_structure(&self, path: &str) -> Result<Vec<DirectoryAnalysis>, ParseError> {
let mut analyses = Vec::new();
for entry in WalkDir::new(path).max_depth(5).into_iter().filter_map(|e| e.ok()) {
if entry.file_type().is_dir() {
let dir_path = entry.path();
let mut analysis = DirectoryAnalysis {
path: dir_path.to_string_lossy().to_string(),
subdirectories: Vec::new(),
file_types: HashMap::new(),
depth: entry.depth(),
};
// Analyze immediate children
if let Ok(entries) = fs::read_dir(dir_path) {
for child_entry in entries.filter_map(|e| e.ok()) {
if child_entry.file_type().ok().is_some_and(|ft| ft.is_dir()) {
if let Some(name) = child_entry.file_name().to_str() {
analysis.subdirectories.push(name.to_string());
}
} else if let Some(extension) = child_entry.path().extension().and_then(|s| s.to_str()) {
*analysis.file_types.entry(extension.to_string()).or_insert(0) += 1;
}
}
}
analyses.push(analysis);
}
}
Ok(analyses)
}
/// Analyze file patterns in the codebase
fn analyze_file_patterns(&self, path: &str) -> Result<HashMap<String, Vec<String>>, ParseError> {
let mut file_patterns: HashMap<String, Vec<String>> = HashMap::new();
for entry in WalkDir::new(path).into_iter().filter_map(|e| e.ok()) {
if entry.file_type().is_file() {
let file_path = entry.path();
if let Some(file_name) = file_path.file_name().and_then(|n| n.to_str()) {
// Categorize files by patterns
if file_name.contains("Controller") {
file_patterns.entry("controller".to_string()).or_default().push(file_name.to_string());
}
if file_name.contains("Model") {
file_patterns.entry("model".to_string()).or_default().push(file_name.to_string());
}
if file_name.contains("View") {
file_patterns.entry("view".to_string()).or_default().push(file_name.to_string());
}
if file_name.contains("Service") {
file_patterns.entry("service".to_string()).or_default().push(file_name.to_string());
}
if file_name.contains("Repository") {
file_patterns.entry("repository".to_string()).or_default().push(file_name.to_string());
}
if file_name.contains("Handler") {
file_patterns.entry("handler".to_string()).or_default().push(file_name.to_string());
}
}
}
}
Ok(file_patterns)
}
    /// Calculate confidence score for a structural pattern
    ///
    /// Weighted blend of three signals: directory-structure matches (0.4),
    /// component-category presence (0.3), and filename-pattern matches (0.3).
    /// `max_score` always accumulates to 1.0, so the final division is a
    /// normalization safeguard rather than a rescale.
    fn calculate_structure_confidence(
        &self,
        directory_analysis: &[DirectoryAnalysis],
        file_patterns: &HashMap<String, Vec<String>>,
        signature: &ArchitectureSignature,
    ) -> f64 {
        let mut score = 0.0;
        let mut max_score = 0.0;
        // Check directory structure matches
        max_score += 0.4;
        let mut dir_matches = 0;
        let mut depth_penalty = 0.0;
        for required_dir in &signature.directory_structure {
            for analysis in directory_analysis {
                // Match either the full path or an immediate subdirectory name
                // (the trailing slash is stripped for the latter).
                if analysis.path.contains(required_dir) ||
                   analysis.subdirectories.iter().any(|d| d.contains(&required_dir.replace("/", ""))) {
                    dir_matches += 1;
                    // Apply depth-based scoring - deeper structures get slight penalty for complexity
                    if analysis.depth > 4 {
                        depth_penalty += 0.1;
                    }
                    break;
                }
            }
        }
        if !signature.directory_structure.is_empty() {
            let base_score = dir_matches as f64 / signature.directory_structure.len() as f64;
            // Clamp at zero so a heavy depth penalty can't go negative.
            let depth_adjusted_score = (base_score - (depth_penalty / signature.directory_structure.len() as f64)).max(0.0);
            score += 0.4 * depth_adjusted_score;
        }
        // Check component existence
        max_score += 0.3;
        let mut component_matches = 0;
        for component in &signature.required_components {
            if file_patterns.contains_key(component) {
                component_matches += 1;
            }
        }
        if !signature.required_components.is_empty() {
            score += 0.3 * (component_matches as f64 / signature.required_components.len() as f64);
        }
        // Check file patterns
        max_score += 0.3;
        let mut pattern_matches = 0;
        for pattern in &signature.file_patterns {
            // Reduce e.g. "*Service.*" to "service" before substring-matching
            // against the category keys produced by analyze_file_patterns.
            let pattern_key = pattern.replace("*", "").replace(".", "").to_lowercase();
            if file_patterns.keys().any(|k| k.contains(&pattern_key)) {
                pattern_matches += 1;
            }
        }
        if !signature.file_patterns.is_empty() {
            score += 0.3 * (pattern_matches as f64 / signature.file_patterns.len() as f64);
        }
        if max_score > 0.0 {
            score / max_score
        } else {
            0.0
        }
    }
/// Collect examples of structural patterns
fn collect_structure_examples(&self, path: &str, signature: &ArchitectureSignature) -> Result<Vec<PatternExample>, ParseError> {
let mut examples = Vec::new();
for entry in WalkDir::new(path).max_depth(3).into_iter().filter_map(|e| e.ok()) {
if entry.file_type().is_dir() {
let dir_name = entry.file_name().to_string_lossy().to_string();
// Check if directory name matches required components
for component in &signature.required_components {
if dir_name.to_lowercase().contains(&component.to_lowercase()) {
examples.push(PatternExample {
code: format!("Directory: {}", dir_name),
file_path: entry.path().to_string_lossy().to_string(),
line_range: LineRange { start: 1, end: 1 },
});
break;
}
}
}
}
// Limit examples to avoid overwhelming output
examples.truncate(10);
Ok(examples)
}
    /// Analyze file organization patterns from concepts
    ///
    /// Flags files containing more than 10 concepts as likely
    /// single-responsibility violations.
    fn analyze_file_organization(&self, concepts: &[SemanticConcept]) -> Vec<Pattern> {
        let mut patterns = Vec::new();
        let mut file_concept_map: HashMap<String, Vec<&SemanticConcept>> = HashMap::new();
        // Group concepts by file
        for concept in concepts {
            file_concept_map.entry(concept.file_path.clone()).or_default().push(concept);
        }
        // Check for single responsibility principle violations
        let large_files: Vec<_> = file_concept_map.iter()
            .filter(|(_, concepts)| concepts.len() > 10)
            .collect();
        if !large_files.is_empty() {
            patterns.push(Pattern {
                id: "structural_large_files".to_string(),
                pattern_type: "structural".to_string(),
                description: format!("Files with too many concepts detected ({} files)", large_files.len()),
                frequency: large_files.len() as u32,
                confidence: 0.8,
                // HashMap iteration order is unspecified, so which five files
                // appear as examples is nondeterministic.
                examples: large_files.into_iter().take(5).map(|(file_path, concepts)| {
                    PatternExample {
                        code: format!("File contains {} concepts", concepts.len()),
                        file_path: file_path.clone(),
                        line_range: LineRange { start: 1, end: 1 },
                    }
                }).collect(),
                contexts: vec!["organization".to_string()],
            });
        }
        patterns
    }
    /// Analyze dependency patterns from concepts
    ///
    /// Builds a file-level dependency graph from "import"/"depends"
    /// relationships and reports any cycles found as a single pattern.
    fn analyze_dependency_patterns(&self, concepts: &[SemanticConcept]) -> Vec<Pattern> {
        let mut patterns = Vec::new();
        let mut dependencies: HashMap<String, HashSet<String>> = HashMap::new();
        // Build dependency graph from relationships
        for concept in concepts {
            // Use the file stem (name without extension) as the graph node;
            // fall back to the full path if the stem cannot be extracted.
            let concept_file = Path::new(&concept.file_path)
                .file_stem()
                .and_then(|s| s.to_str())
                .unwrap_or(&concept.file_path);
            for (relationship_type, target) in &concept.relationships {
                if relationship_type.contains("import") || relationship_type.contains("depends") {
                    dependencies.entry(concept_file.to_string()).or_default().insert(target.clone());
                }
            }
        }
        // Detect circular dependencies
        let cycles = self.detect_cycles(&dependencies);
        if !cycles.is_empty() {
            patterns.push(Pattern {
                id: "structural_circular_dependencies".to_string(),
                pattern_type: "structural".to_string(),
                description: format!("Circular dependencies detected ({} cycles)", cycles.len()),
                frequency: cycles.len() as u32,
                confidence: 0.9,
                // Only the first three cycles are surfaced as examples.
                examples: cycles.into_iter().take(3).map(|cycle| {
                    PatternExample {
                        code: format!("Circular dependency: {}", cycle.join(" -> ")),
                        file_path: "multiple_files".to_string(),
                        line_range: LineRange { start: 1, end: 1 },
                    }
                }).collect(),
                contexts: vec!["dependency".to_string()],
            });
        }
        patterns
    }
    /// Analyze naming structure patterns
    ///
    /// Uses each file's parent directory name as a namespace proxy and reports
    /// the most frequently occurring one as an organization pattern.
    fn analyze_naming_structure_patterns(&self, concepts: &[SemanticConcept]) -> Vec<Pattern> {
        let mut patterns = Vec::new();
        let mut namespace_patterns: HashMap<String, u32> = HashMap::new();
        // Analyze namespace/module patterns
        for concept in concepts {
            if let Some(namespace) = self.extract_namespace_from_path(&concept.file_path) {
                *namespace_patterns.entry(namespace).or_insert(0) += 1;
            }
        }
        if !namespace_patterns.is_empty() {
            // NOTE: max_by_key over HashMap iteration breaks ties
            // nondeterministically.
            let most_common = namespace_patterns.iter()
                .max_by_key(|(_, count)| *count)
                .map(|(ns, count)| (ns.clone(), *count));
            if let Some((namespace, count)) = most_common {
                patterns.push(Pattern {
                    id: "structural_namespace_organization".to_string(),
                    pattern_type: "structural".to_string(),
                    description: format!("Consistent namespace organization detected ({})", namespace),
                    frequency: count,
                    confidence: 0.7,
                    examples: vec![PatternExample {
                        code: format!("Namespace pattern: {}", namespace),
                        file_path: "multiple_files".to_string(),
                        line_range: LineRange { start: 1, end: 1 },
                    }],
                    contexts: vec!["organization".to_string()],
                });
            }
        }
        patterns
    }
/// Detect God Object violations
fn detect_god_object_violations(&self, concepts: &[SemanticConcept]) -> Vec<String> {
let mut violations = Vec::new();
let mut class_method_counts: HashMap<String, u32> = HashMap::new();
for concept in concepts {
if concept.concept_type == "class" || concept.concept_type == "struct" {
// Count methods in this class
let method_count = concepts.iter()
.filter(|c| c.concept_type == "method" || c.concept_type == "function")
.filter(|c| c.file_path == concept.file_path)
.filter(|c| c.line_range.start >= concept.line_range.start && c.line_range.end <= concept.line_range.end)
.count() as u32;
class_method_counts.insert(concept.name.clone(), method_count);
if method_count > 20 {
violations.push(format!(
"Potential God Object: '{}' has {} methods ({}:{})",
concept.name,
method_count,
concept.file_path,
concept.line_range.start
));
}
}
}
violations
}
    /// Detect circular dependency violations
    ///
    /// Builds a concept-name-level dependency graph (unlike
    /// `analyze_dependency_patterns`, which works at file level) and renders
    /// each detected cycle as a violation message.
    fn detect_circular_dependency_violations(&self, concepts: &[SemanticConcept]) -> Vec<String> {
        let mut violations = Vec::new();
        let mut dependencies: HashMap<String, HashSet<String>> = HashMap::new();
        // Build dependency graph
        for concept in concepts {
            for (rel_type, target) in &concept.relationships {
                if rel_type.contains("depends") || rel_type.contains("import") {
                    dependencies.entry(concept.name.clone()).or_default().insert(target.clone());
                }
            }
        }
        // Simple cycle detection
        let cycles = self.detect_cycles(&dependencies);
        for cycle in cycles {
            violations.push(format!(
                "Circular dependency detected: {}",
                cycle.join(" -> ")
            ));
        }
        violations
    }
    /// Detect layer violations
    ///
    /// Assigns each concept a layer by matching its file path against known
    /// layer names, then flags dependencies whose direction crosses layers.
    ///
    /// NOTE(review): the check flags a *higher* layer depending on a *lower*
    /// one (e.g. presentation -> infrastructure), which strict layering
    /// normally permits; possibly the intended rule was the inverse — confirm.
    /// Also O(n^2): each relationship target is resolved via a linear scan.
    fn detect_layer_violations(&self, concepts: &[SemanticConcept]) -> Vec<String> {
        let mut violations = Vec::new();
        // Define layer hierarchy (lower numbers = higher layers)
        let layer_hierarchy = [
            ("presentation", 0),
            ("api", 0),
            ("application", 1),
            ("domain", 2),
            ("infrastructure", 3),
            ("data", 3),
        ].iter().cloned().collect::<HashMap<&str, u32>>();
        for concept in concepts {
            let concept_layer = self.determine_layer(&concept.file_path, &layer_hierarchy);
            for (rel_type, target) in &concept.relationships {
                if rel_type.contains("depends") || rel_type.contains("import") {
                    // Find target concept to determine its layer
                    if let Some(target_concept) = concepts.iter().find(|c| c.name == *target) {
                        let target_layer = self.determine_layer(&target_concept.file_path, &layer_hierarchy);
                        // Check for violations (higher layer depending on lower layer)
                        if let (Some(concept_level), Some(target_level)) = (concept_layer, target_layer) {
                            if concept_level < target_level {
                                violations.push(format!(
                                    "Layer violation: {} depends on {} (higher layer depending on lower layer)",
                                    concept.name,
                                    target
                                ));
                            }
                        }
                    }
                }
            }
        }
        violations
    }
/// Detect coupling violations
fn detect_coupling_violations(&self, concepts: &[SemanticConcept]) -> Vec<String> {
let mut violations = Vec::new();
let mut coupling_counts: HashMap<String, u32> = HashMap::new();
for concept in concepts {
let coupling_count = concept.relationships.len() as u32;
coupling_counts.insert(concept.name.clone(), coupling_count);
if coupling_count > 10 {
violations.push(format!(
"High coupling detected: '{}' has {} dependencies ({}:{})",
concept.name,
coupling_count,
concept.file_path,
concept.line_range.start
));
}
}
violations
}
/// Helper methods
fn extract_namespace_from_path(&self, path: &str) -> Option<String> {
let path_obj = Path::new(path);
path_obj.parent()?.file_name()?.to_str().map(String::from)
}
    /// Detect dependency cycles via depth-first search over the graph.
    ///
    /// NOTE(review): `visited` is shared across start nodes, so a cycle whose
    /// nodes were all finished during an earlier root's traversal may go
    /// unreported — confirm this best-effort behavior is acceptable.
    fn detect_cycles(&self, dependencies: &HashMap<String, HashSet<String>>) -> Vec<Vec<String>> {
        // Simple DFS-based cycle detection
        let mut cycles = Vec::new();
        let mut visited = HashSet::new();
        let mut path = Vec::new();
        for node in dependencies.keys() {
            if !visited.contains(node) {
                Self::dfs_cycle_detection(node, dependencies, &mut visited, &mut path, &mut cycles);
            }
        }
        cycles
    }
    /// DFS helper for `detect_cycles`.
    ///
    /// `path` holds the current DFS chain; encountering a node already on the
    /// chain means a cycle, which is recorded from its first occurrence.
    /// `visited` prevents re-exploring nodes whose subtree is finished.
    fn dfs_cycle_detection(
        node: &str,
        dependencies: &HashMap<String, HashSet<String>>,
        visited: &mut HashSet<String>,
        path: &mut Vec<String>,
        cycles: &mut Vec<Vec<String>>,
    ) {
        // Linear scan of `path` per visit; fine for small graphs.
        if path.contains(&node.to_string()) {
            // Found a cycle
            if let Some(cycle_start) = path.iter().position(|n| n == node) {
                let cycle = path[cycle_start..].to_vec();
                cycles.push(cycle);
            }
            return;
        }
        if visited.contains(node) {
            return;
        }
        visited.insert(node.to_string());
        path.push(node.to_string());
        if let Some(deps) = dependencies.get(node) {
            for dep in deps {
                Self::dfs_cycle_detection(dep, dependencies, visited, path, cycles);
            }
        }
        // Backtrack: node's subtree is fully explored.
        path.pop();
    }
fn determine_layer(&self, file_path: &str, layer_hierarchy: &HashMap<&str, u32>) -> Option<u32> {
for (layer_name, level) in layer_hierarchy {
if file_path.to_lowercase().contains(layer_name) {
return Some(*level);
}
}
None
}
fn calculate_file_metrics(&self, concepts: &[SemanticConcept]) -> FileMetrics {
let mut file_concept_counts: HashMap<String, u32> = HashMap::new();
for concept in concepts {
*file_concept_counts.entry(concept.file_path.clone()).or_insert(0) += 1;
}
let total_concepts = concepts.len() as f64;
let file_count = file_concept_counts.len() as f64;
let avg_concepts_per_file = if file_count > 0.0 { total_concepts / file_count } else { 0.0 };
FileMetrics {
avg_concepts_per_file,
max_concepts_per_file: file_concept_counts.values().max().copied().unwrap_or(0),
total_files: file_count as u32,
}
}
fn calculate_coupling_metrics(&self, concepts: &[SemanticConcept]) -> CouplingMetrics {
let mut high_coupling_count = 0;
let mut total_coupling = 0;
for concept in concepts {
let coupling = concept.relationships.len();
total_coupling += coupling;
if coupling > 8 {
high_coupling_count += 1;
}
}
CouplingMetrics {
high_coupling_count,
avg_coupling: if !concepts.is_empty() { total_coupling as f64 / concepts.len() as f64 } else { 0.0 },
}
}
fn has_layer_violations(&self, concepts: &[SemanticConcept]) -> bool {
!self.detect_layer_violations(concepts).is_empty()
}
    /// Score modularity in [0, 1] from file organization and coupling:
    /// up to 0.4 for reasonable file sizes, up to 0.2 distribution bonus,
    /// up to 0.4 for low coupling, minus a 0.2 monolith penalty.
    fn calculate_modularity_score(&self, concepts: &[SemanticConcept]) -> f64 {
        // Enhanced modularity score based on file organization, coupling, and distribution
        let file_metrics = self.calculate_file_metrics(concepts);
        let coupling_metrics = self.calculate_coupling_metrics(concepts);
        // Base file organization score
        let file_score: f64 = if file_metrics.avg_concepts_per_file <= 15.0 { 0.4 } else { 0.1 };
        // Penalize files that are too large (monolithic)
        let max_file_penalty: f64 = if file_metrics.max_concepts_per_file > 50 { 0.2 } else { 0.0 };
        // Reward reasonable file distribution
        let distribution_bonus: f64 = if file_metrics.total_files >= 3 &&
            (concepts.len() as f64 / file_metrics.total_files as f64) < 25.0 {
            0.2
        } else {
            0.0
        };
        let coupling_score: f64 = if coupling_metrics.avg_coupling <= 5.0 { 0.4 } else { 0.1 };
        // Clamp so the penalty cannot push the score below zero.
        (file_score + distribution_bonus + coupling_score - max_file_penalty).clamp(0.0_f64, 1.0_f64)
    }
}
// Summary of how concepts are distributed across files.
#[derive(Debug)]
struct FileMetrics {
    // Mean number of concepts per file (0.0 when there are no files).
    avg_concepts_per_file: f64,
    // Concept count of the most crowded file.
    max_concepts_per_file: u32,
    // Number of distinct files seen.
    total_files: u32,
}
// Summary of relationship (coupling) counts across concepts.
#[derive(Debug)]
struct CouplingMetrics {
    // Number of concepts exceeding the coupling threshold (> 8 relationships).
    high_coupling_count: u32,
    // Mean relationships per concept (0.0 for empty input).
    avg_coupling: f64,
}
impl PatternExtractor for StructuralPatternAnalyzer {
    /// Structural analysis mutates internal pattern state, but the trait only
    /// provides `&self`; analyze a throwaway clone instead.
    fn extract_patterns(&self, path: &str) -> Result<Vec<Pattern>, ParseError> {
        self.clone().analyze_codebase_structure(path)
    }
}
impl Clone for StructuralPatternAnalyzer {
    // NOTE(review): this field-by-field clone is equivalent to
    // #[derive(Clone)]; presumably written manually because of the napi
    // attribute on the struct definition — confirm before replacing with a
    // derive.
    fn clone(&self) -> Self {
        StructuralPatternAnalyzer {
            patterns: self.patterns.clone(),
            architecture_signatures: self.architecture_signatures.clone(),
        }
    }
}
impl Default for StructuralPatternAnalyzer {
    // Delegates to `new()`, which pre-populates the architecture signatures.
    fn default() -> Self {
        Self::new()
    }
}
| rust | MIT | 9cbed0d63a52d61fcd66d0d651b8c4d7f787ba78 | 2026-01-04T20:19:30.317431Z | true |
//! Complexity analysis and metrics calculation
#[cfg(feature = "napi-bindings")]
use napi_derive::napi;
use crate::types::{SemanticConcept, ComplexityMetrics};
use std::collections::HashMap;
/// Analyzer for calculating code complexity metrics.
///
/// Stateless unit struct: all computation lives in associated functions
/// that operate on previously extracted [`SemanticConcept`]s.
#[cfg_attr(feature = "napi-bindings", napi)]
pub struct ComplexityAnalyzer;
#[cfg_attr(feature = "napi-bindings", napi)]
impl ComplexityAnalyzer {
/// Construct the (stateless) analyzer; exposed as the napi constructor.
#[cfg_attr(feature = "napi-bindings", napi(constructor))]
pub fn new() -> Self {
    ComplexityAnalyzer
}
/// Calculate complexity metrics from a set of semantic concepts.
///
/// Tallies functions/classes, unique files, total spanned lines, and a
/// relationship-based depth proxy, then derives per-file / per-concept
/// averages plus cyclomatic and cognitive complexity estimates.
///
/// The `&Vec` parameter is kept (rather than the more idiomatic `&[_]`)
/// because this signature is exposed through the napi bindings.
pub fn calculate_complexity(concepts: &Vec<SemanticConcept>) -> ComplexityMetrics {
    let mut function_count = 0;
    let mut class_count = 0;
    // Unique file paths. A set states the intent directly — the previous
    // HashMap<_, bool> only ever stored `true` as its value.
    let mut files = std::collections::HashSet::new();
    let mut total_lines = 0;
    let mut max_depth = 0;
    for concept in concepts {
        match concept.concept_type.as_str() {
            "function" | "method" | "procedure" => function_count += 1,
            "class" | "interface" | "struct" | "enum" => class_count += 1,
            _ => {}
        }
        files.insert(&concept.file_path);
        // Inclusive line span of this concept.
        let concept_lines = concept.line_range.end - concept.line_range.start + 1;
        total_lines += concept_lines;
        // Depth proxy: number of outgoing relationships (simplified metric).
        let relationship_depth = concept.relationships.len() as u32;
        if relationship_depth > max_depth {
            max_depth = relationship_depth;
        }
    }
    let file_count = files.len() as u32;
    // Guard the divisions so empty input yields 0.0, not NaN.
    let avg_functions_per_file = if file_count > 0 {
        function_count as f64 / file_count as f64
    } else {
        0.0
    };
    let avg_lines_per_concept = if !concepts.is_empty() {
        total_lines as f64 / concepts.len() as f64
    } else {
        0.0
    };
    ComplexityMetrics {
        cyclomatic_complexity: Self::estimate_cyclomatic_complexity(concepts),
        cognitive_complexity: Self::estimate_cognitive_complexity(concepts),
        function_count,
        class_count,
        file_count,
        avg_functions_per_file,
        avg_lines_per_concept,
        max_nesting_depth: max_depth,
    }
}
/// Estimate the mean cyclomatic complexity over all functions/methods.
///
/// Each function starts at a base complexity of 1, gains one point per
/// detected decision point in its stored body, and is scaled up when
/// extraction confidence is low. Returns 1.0 when no functions exist.
fn estimate_cyclomatic_complexity(concepts: &Vec<SemanticConcept>) -> f64 {
    let mut function_count = 0u32;
    let mut total_complexity = 0.0f64;
    let is_callable =
        |c: &&SemanticConcept| c.concept_type == "function" || c.concept_type == "method";
    for concept in concepts.iter().filter(is_callable) {
        function_count += 1;
        // Decision points from the body snippet, when one was captured.
        let decision_points = concept
            .metadata
            .get("body")
            .map_or(0.0, |body| Self::count_decision_points(body));
        // (base 1 + branches) scaled by confidence: lower confidence may
        // indicate more complex code, so it multiplies the estimate.
        total_complexity += (1.0 + decision_points) * (2.0 - concept.confidence);
    }
    if function_count == 0 {
        1.0
    } else {
        total_complexity / function_count as f64
    }
}
/// Estimate cognitive complexity based on nesting and control flow
fn estimate_cognitive_complexity(concepts: &Vec<SemanticConcept>) -> f64 {
let mut total_cognitive = 0.0;
let mut function_count = 0;
for concept in concepts {
if concept.concept_type == "function" || concept.concept_type == "method" {
function_count += 1;
let mut cognitive = 0.0;
// Base cognitive load
cognitive += 1.0;
// Add load based on relationships (dependencies increase cognitive load)
cognitive += concept.relationships.len() as f64 * 0.5;
// Add load based on line span (longer functions are harder to understand)
let line_span = concept.line_range.end - concept.line_range.start;
if line_span > 20 {
cognitive += (line_span as f64 / 20.0) * 0.3;
}
total_cognitive += cognitive;
}
}
if function_count > 0 {
total_cognitive / function_count as f64
} else {
1.0
}
}
/// Count decision points in a code body (simplified substring heuristic).
///
/// Counts occurrences of common branching keywords/operators plus `?`
/// for ternaries. Matches are plain substring matches, so this is an
/// estimate rather than a precise token count.
fn count_decision_points(body: &str) -> f64 {
    const BRANCH_TOKENS: [&str; 8] =
        ["if", "while", "for", "switch", "case", "catch", "&&", "||"];
    let branch_hits: usize = BRANCH_TOKENS
        .iter()
        .map(|token| body.matches(token).count())
        .sum();
    // Each `?` is treated as one ternary decision point.
    (branch_hits + body.matches('?').count()) as f64
}
}
impl Default for ComplexityAnalyzer {
    /// Equivalent to [`ComplexityAnalyzer::new`].
    fn default() -> Self {
        Self::new()
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::types::LineRange;
    use std::collections::HashMap;

    /// Build a minimal `SemanticConcept` fixture with fixed confidence
    /// (0.8) and empty relationships/metadata.
    fn create_test_concept(name: &str, concept_type: &str, file_path: &str, start: u32, end: u32) -> SemanticConcept {
        SemanticConcept {
            id: format!("test_{}", name),
            name: name.to_string(),
            concept_type: concept_type.to_string(),
            confidence: 0.8,
            file_path: file_path.to_string(),
            line_range: LineRange { start, end },
            relationships: HashMap::new(),
            metadata: HashMap::new(),
        }
    }

    #[test]
    fn test_complexity_analyzer_creation() {
        let _analyzer = ComplexityAnalyzer::new();
        // Constructor should work
    }

    #[test]
    fn test_calculate_basic_complexity() {
        // One function and one class in the same file.
        let concepts = vec![
            create_test_concept("test_function", "function", "test.rs", 1, 10),
            create_test_concept("TestClass", "class", "test.rs", 15, 30),
        ];
        let metrics = ComplexityAnalyzer::calculate_complexity(&concepts);
        assert_eq!(metrics.function_count, 1);
        assert_eq!(metrics.class_count, 1);
        assert_eq!(metrics.file_count, 1);
        assert!(metrics.cyclomatic_complexity > 0.0);
        assert!(metrics.cognitive_complexity > 0.0);
    }

    #[test]
    fn test_empty_concepts() {
        // Empty input must produce zeroed counts and averages (not NaN).
        let concepts = vec![];
        let metrics = ComplexityAnalyzer::calculate_complexity(&concepts);
        assert_eq!(metrics.function_count, 0);
        assert_eq!(metrics.class_count, 0);
        assert_eq!(metrics.file_count, 0);
        assert_eq!(metrics.avg_functions_per_file, 0.0);
        assert_eq!(metrics.avg_lines_per_concept, 0.0);
    }

    #[test]
    fn test_multiple_files() {
        // Two functions spread over two distinct files.
        let concepts = vec![
            create_test_concept("func1", "function", "file1.rs", 1, 10),
            create_test_concept("func2", "function", "file2.rs", 1, 15),
            create_test_concept("Class1", "class", "file1.rs", 20, 40),
        ];
        let metrics = ComplexityAnalyzer::calculate_complexity(&concepts);
        assert_eq!(metrics.function_count, 2);
        assert_eq!(metrics.class_count, 1);
        assert_eq!(metrics.file_count, 2);
        assert_eq!(metrics.avg_functions_per_file, 1.0);
    }

    #[test]
    fn test_complex_function_with_metadata() {
        // A stored "body" snippet feeds the decision-point heuristic.
        let mut concept = create_test_concept("complex_func", "function", "test.rs", 1, 50);
        concept.metadata.insert("body".to_string(), "if (x > 0) { while (y < 10) { if (z) return; } }".to_string());
        let concepts = vec![concept];
        let metrics = ComplexityAnalyzer::calculate_complexity(&concepts);
        // Should have higher complexity due to control flow
        assert!(metrics.cyclomatic_complexity > 1.0);
        assert!(metrics.cognitive_complexity > 1.0);
    }

    #[test]
    fn test_count_decision_points() {
        let body1 = "if (x > 0) return x;";
        assert_eq!(ComplexityAnalyzer::count_decision_points(body1), 1.0);
        let body2 = "if (x > 0 && y < 5) { while (z) { for (i = 0; i < 10; i++) {} } }";
        assert!(ComplexityAnalyzer::count_decision_points(body2) >= 4.0);
        // Ternary operators count as decision points too.
        let body3 = "return x > 0 ? x : -x;";
        assert!(ComplexityAnalyzer::count_decision_points(body3) >= 1.0);
    }

    #[test]
    fn test_relationships_impact_complexity() {
        let mut concept = create_test_concept("connected_func", "function", "test.rs", 1, 20);
        concept.relationships.insert("calls".to_string(), "other_func".to_string());
        concept.relationships.insert("uses".to_string(), "SomeClass".to_string());
        let concepts = vec![concept];
        let metrics = ComplexityAnalyzer::calculate_complexity(&concepts);
        // Relationships should increase cognitive complexity
        assert!(metrics.cognitive_complexity > 1.0);
        // max_nesting_depth is derived from the relationship count.
        assert_eq!(metrics.max_nesting_depth, 2);
    }
}
pi22by7/In-Memoria | https://github.com/pi22by7/In-Memoria/blob/9cbed0d63a52d61fcd66d0d651b8c4d7f787ba78/rust-core/src/analysis/blueprint.rs | rust-core/src/analysis/blueprint.rs | //! Project blueprint analysis - entry points and feature mapping
#[cfg(feature = "napi-bindings")]
use napi_derive::napi;
use crate::types::ParseError;
use crate::analysis::FrameworkInfo;
use std::path::Path;
use std::fs;
/// Entry point information.
///
/// Describes one detected application entry file (e.g. `src/main.rs`,
/// `src/index.tsx`) and the framework that suggested it.
#[derive(Debug, Clone)]
#[cfg_attr(feature = "napi-bindings", napi(object))]
pub struct EntryPoint {
    /// Kind of entry point.
    pub entry_type: String, // 'web', 'api', 'cli', 'script'
    /// Path of the entry file, relative to the project root.
    pub file_path: String,
    /// Framework hint (e.g. "react", "express"), if one applies.
    pub framework: Option<String>,
    /// Detection confidence in [0.0, 1.0].
    pub confidence: f64,
}
/// Key directory information.
///
/// A well-known source directory (components, utils, services, ...)
/// discovered in the project, with a bounded-depth file count.
#[derive(Debug, Clone)]
#[cfg_attr(feature = "napi-bindings", napi(object))]
pub struct KeyDirectory {
    /// Directory path relative to the project root.
    pub path: String,
    /// Semantic role of the directory.
    pub dir_type: String, // 'components', 'utils', 'services', etc.
    /// Number of files found under the directory (depth-limited scan).
    pub file_count: u32,
}
/// Feature mapping information.
///
/// Groups source files under a named feature (authentication, api, ...)
/// based on the directories they live in.
#[derive(Debug, Clone)]
#[cfg_attr(feature = "napi-bindings", napi(object))]
pub struct FeatureMap {
    /// Unique identifier for this mapping (a generated UUID).
    pub id: String,
    /// Human-readable feature name.
    pub feature_name: String,
    /// Files considered central to the feature.
    pub primary_files: Vec<String>,
    /// Secondary files associated with the feature.
    pub related_files: Vec<String>,
    /// Feature dependencies — currently never populated by the analyzer.
    pub dependencies: Vec<String>,
}
/// Blueprint analyzer for detecting project structure.
///
/// Stateless unit struct; all analysis entry points are associated
/// functions that scan the file system.
#[cfg_attr(feature = "napi-bindings", napi)]
pub struct BlueprintAnalyzer;
impl Default for BlueprintAnalyzer {
    /// Equivalent to [`BlueprintAnalyzer::new`].
    fn default() -> Self {
        Self::new()
    }
}
#[cfg_attr(feature = "napi-bindings", napi)]
impl BlueprintAnalyzer {
/// Construct the (stateless) analyzer; exposed as the napi constructor.
#[cfg_attr(feature = "napi-bindings", napi(constructor))]
pub fn new() -> Self {
    BlueprintAnalyzer
}
/// Detect entry points via filename pattern matching, guided by the
/// frameworks already detected for the project.
#[cfg_attr(feature = "napi-bindings", napi)]
pub async fn detect_entry_points(
    path: String,
    frameworks: Vec<FrameworkInfo>,
) -> Result<Vec<EntryPoint>, ParseError> {
    // Only framework names are needed below; the infos are consumed.
    let framework_names: Vec<String> = frameworks.into_iter().map(|f| f.name).collect();
    let mut entry_points = Vec::new();
    Self::check_entry_point_patterns(&path, &framework_names, &mut entry_points)?;
    // TODO(v2): AST-based detection of programmatic entry points
    // (main functions, app.listen() calls, etc.).
    Ok(entry_points)
}
/// Check common entry point file patterns
fn check_entry_point_patterns(
project_path: &str,
frameworks: &[String],
entry_points: &mut Vec<EntryPoint>,
) -> Result<(), ParseError> {
let path = Path::new(project_path);
// React/Next.js entry points
if frameworks.iter().any(|f| {
let lower = f.to_lowercase();
lower.contains("react") || lower.contains("next")
}) {
let react_entries = vec![
"src/index.tsx", "src/index.jsx",
"src/App.tsx", "src/App.jsx",
"pages/_app.tsx", "pages/_app.js",
"app/page.tsx", "app/layout.tsx" // Next.js 13+
];
for entry in react_entries {
let full_path = path.join(entry);
if full_path.exists() {
entry_points.push(EntryPoint {
entry_type: "web".to_string(),
file_path: entry.to_string(),
framework: Some("react".to_string()),
confidence: 0.9,
});
}
}
}
// Express/Node API entry points
if frameworks.iter().any(|f| {
let lower = f.to_lowercase();
lower.contains("express") || lower.contains("node")
}) {
let api_entries = vec![
"server.js", "app.js", "index.js",
"src/server.ts", "src/app.ts", "src/index.ts",
"src/main.ts"
];
for entry in api_entries {
let full_path = path.join(entry);
if full_path.exists() {
entry_points.push(EntryPoint {
entry_type: "api".to_string(),
file_path: entry.to_string(),
framework: Some("express".to_string()),
confidence: 0.85,
});
}
}
}
// Python entry points
if frameworks.iter().any(|f| {
let lower = f.to_lowercase();
lower.contains("python") || lower.contains("fastapi") || lower.contains("flask") || lower.contains("django")
}) {
let python_entries = vec![
"main.py", "app.py", "server.py",
"api/main.py", "src/main.py",
"manage.py" // Django
];
for entry in python_entries {
let full_path = path.join(entry);
if full_path.exists() {
let framework_hint = if frameworks.iter().any(|f| f.to_lowercase().contains("fastapi")) {
Some("fastapi".to_string())
} else if frameworks.iter().any(|f| f.to_lowercase().contains("flask")) {
Some("flask".to_string())
} else if frameworks.iter().any(|f| f.to_lowercase().contains("django")) {
Some("django".to_string())
} else {
Some("python".to_string())
};
entry_points.push(EntryPoint {
entry_type: "api".to_string(),
file_path: entry.to_string(),
framework: framework_hint,
confidence: 0.85,
});
}
}
}
// Rust entry points
if frameworks.iter().any(|f| f.to_lowercase().contains("rust")) {
let rust_entries = vec!["src/main.rs", "src/lib.rs"];
for entry in rust_entries {
let full_path = path.join(entry);
if full_path.exists() {
let entry_type = if entry.contains("main") { "cli" } else { "library" };
entry_points.push(EntryPoint {
entry_type: entry_type.to_string(),
file_path: entry.to_string(),
framework: Some("rust".to_string()),
confidence: 0.95,
});
}
}
}
// Go entry points
if frameworks.iter().any(|f| f.to_lowercase().contains("go")) {
let go_entries = vec!["main.go", "cmd/main.go", "cmd/server/main.go"];
for entry in go_entries {
let full_path = path.join(entry);
if full_path.exists() {
entry_points.push(EntryPoint {
entry_type: "api".to_string(),
file_path: entry.to_string(),
framework: Some("go".to_string()),
confidence: 0.9,
});
}
}
}
// CLI entry points (language-agnostic)
let cli_entries = vec!["cli.js", "bin/cli.js", "src/cli.ts", "src/cli.js"];
for entry in cli_entries {
let full_path = path.join(entry);
if full_path.exists() {
entry_points.push(EntryPoint {
entry_type: "cli".to_string(),
file_path: entry.to_string(),
framework: None,
confidence: 0.8,
});
}
}
Ok(())
}
/// Map well-known source directories (components, utils, services, ...)
/// found in the project to typed [`KeyDirectory`] records, counting their
/// files with a depth-limited scan.
#[cfg_attr(feature = "napi-bindings", napi)]
pub async fn map_key_directories(path: String) -> Result<Vec<KeyDirectory>, ParseError> {
    // (relative path, semantic role) pairs checked in order.
    const COMMON_DIRS: [(&str, &str); 14] = [
        ("src/components", "components"),
        ("src/utils", "utils"),
        ("src/services", "services"),
        ("src/api", "api"),
        ("src/auth", "auth"),
        ("src/models", "models"),
        ("src/views", "views"),
        ("src/pages", "pages"),
        ("src/lib", "library"),
        ("lib", "library"),
        ("utils", "utils"),
        ("middleware", "middleware"),
        ("routes", "routes"),
        ("controllers", "controllers"),
    ];
    let project_path = Path::new(&path);
    let mut key_dirs = Vec::new();
    for (dir_pattern, dir_type) in COMMON_DIRS {
        let candidate = project_path.join(dir_pattern);
        // `is_dir()` is false for nonexistent paths, so no separate
        // existence check is needed.
        if candidate.is_dir() {
            key_dirs.push(KeyDirectory {
                path: dir_pattern.to_string(),
                dir_type: dir_type.to_string(),
                file_count: Self::count_files_in_directory(&candidate, 5, 0)?,
            });
        }
    }
    Ok(key_dirs)
}
/// Build feature map for the project.
///
/// For each named feature, collects source files from its associated
/// directories — checked under `src/`, the project root, and the
/// `rust-core/` module, in that priority order — then splits each batch
/// roughly in half into primary vs related files and deduplicates.
#[cfg_attr(feature = "napi-bindings", napi)]
pub async fn build_feature_map(path: String) -> Result<Vec<FeatureMap>, ParseError> {
    let mut feature_maps = Vec::new();
    let project_path = Path::new(&path);
    // (feature name, candidate directory names) pairs.
    let feature_patterns: Vec<(&str, Vec<&str>)> = vec![
        ("authentication", vec!["auth", "authentication"]),
        ("api", vec!["api", "routes", "endpoints", "controllers"]),
        ("database", vec!["db", "database", "models", "schemas", "migrations", "storage"]),
        ("ui-components", vec!["components", "ui"]),
        ("views", vec!["views", "pages", "screens"]),
        ("services", vec!["services", "api-clients"]),
        ("utilities", vec!["utils", "helpers", "lib"]),
        ("testing", vec!["tests", "__tests__", "test"]),
        ("configuration", vec!["config", ".config", "settings"]),
        ("middleware", vec!["middleware", "middlewares"]),
        // Language/compiler-specific features for In-Memoria
        ("language-support", vec!["parsing", "parser", "ast", "tree-sitter", "compiler"]),
        ("rust-core", vec!["rust-core", "native", "bindings"]),
        ("mcp-server", vec!["mcp-server", "server", "mcp"]),
        ("cli", vec!["cli", "bin", "commands"]),
    ];
    for (feature_name, directories) in feature_patterns {
        let mut primary_files = Vec::new();
        let mut related_files = Vec::new();
        for dir in &directories {
            // Standard paths
            let src_path = project_path.join("src").join(dir);
            let alt_path = project_path.join(dir);
            // Nested paths for mono-repo/multi-module projects
            let rust_core_src_path = project_path.join("rust-core").join("src").join(dir);
            let rust_core_path = project_path.join("rust-core").join(dir);
            for check_path in &[src_path, alt_path, rust_core_src_path, rust_core_path] {
                if check_path.exists() && check_path.is_dir() {
                    let files = Self::collect_files_in_directory(check_path, project_path, 5, 0)?;
                    if !files.is_empty() {
                        // First half of each batch is "primary", the rest "related".
                        let mid_point = files.len().div_ceil(2);
                        primary_files.extend_from_slice(&files[0..mid_point]);
                        if mid_point < files.len() {
                            related_files.extend_from_slice(&files[mid_point..]);
                        }
                    }
                }
            }
        }
        if !primary_files.is_empty() {
            // Deduplicate (sorting first so dedup removes all duplicates).
            primary_files.sort();
            primary_files.dedup();
            related_files.sort();
            related_files.dedup();
            feature_maps.push(FeatureMap {
                id: uuid::Uuid::new_v4().to_string(),
                feature_name: feature_name.to_string(),
                primary_files,
                related_files,
                dependencies: Vec::new(),
            });
        }
    }
    Ok(feature_maps)
}
/// Recursively count files under `dir_path`, descending at most
/// `max_depth` levels and skipping well-known build/vendor directories.
///
/// # Errors
/// Fails only when the top-level directory cannot be read; unreadable
/// child entries are silently skipped.
fn count_files_in_directory(dir_path: &Path, max_depth: u32, current_depth: u32) -> Result<u32, ParseError> {
    if current_depth >= max_depth {
        return Ok(0);
    }
    const SKIP_DIRS: [&str; 8] =
        ["node_modules", ".git", "dist", "build", ".next", "__pycache__", "venv", "target"];
    let entries = fs::read_dir(dir_path)
        .map_err(|e| ParseError::from_reason(format!("Failed to read directory: {}", e)))?;
    let mut total = 0u32;
    for entry in entries.flatten() {
        let entry_path = entry.path();
        let name = entry_path.file_name().and_then(|n| n.to_str()).unwrap_or("");
        if SKIP_DIRS.contains(&name) {
            continue;
        }
        if entry_path.is_dir() {
            total += Self::count_files_in_directory(&entry_path, max_depth, current_depth + 1)?;
        } else if entry_path.is_file() {
            total += 1;
        }
    }
    Ok(total)
}
/// Recursively collect source files under `dir_path` (paths relative to
/// `project_root`), descending at most `max_depth` levels, skipping
/// build/vendor directories, and keeping only recognized source
/// extensions.
fn collect_files_in_directory(
    dir_path: &Path,
    project_root: &Path,
    max_depth: u32,
    current_depth: u32,
) -> Result<Vec<String>, ParseError> {
    if current_depth >= max_depth {
        return Ok(Vec::new());
    }
    const SKIP_DIRS: [&str; 8] =
        ["node_modules", ".git", "dist", "build", ".next", "__pycache__", "venv", "target"];
    const SOURCE_EXTS: [&str; 11] =
        ["ts", "tsx", "js", "jsx", "py", "rs", "go", "java", "c", "cpp", "cs"];
    let entries = fs::read_dir(dir_path)
        .map_err(|e| ParseError::from_reason(format!("Failed to read directory: {}", e)))?;
    let mut files = Vec::new();
    for entry in entries.flatten() {
        let entry_path = entry.path();
        let name = entry_path.file_name().and_then(|n| n.to_str()).unwrap_or("");
        if SKIP_DIRS.contains(&name) {
            continue;
        }
        if entry_path.is_dir() {
            files.extend(Self::collect_files_in_directory(
                &entry_path,
                project_root,
                max_depth,
                current_depth + 1,
            )?);
        } else if entry_path.is_file() {
            // Only include recognized source code files.
            let is_source = entry_path
                .extension()
                .and_then(|e| e.to_str())
                .map_or(false, |ext| SOURCE_EXTS.contains(&ext));
            if is_source {
                if let Ok(relative) = entry_path.strip_prefix(project_root) {
                    files.push(relative.to_string_lossy().to_string());
                }
            }
        }
    }
    Ok(files)
}
}
| rust | MIT | 9cbed0d63a52d61fcd66d0d651b8c4d7f787ba78 | 2026-01-04T20:19:30.317431Z | false |
pi22by7/In-Memoria | https://github.com/pi22by7/In-Memoria/blob/9cbed0d63a52d61fcd66d0d651b8c4d7f787ba78/rust-core/src/analysis/semantic.rs | rust-core/src/analysis/semantic.rs | //! Main semantic analysis orchestration
#[cfg(feature = "napi-bindings")]
use napi_derive::napi;
use crate::types::{SemanticConcept, CodebaseAnalysisResult, ParseError, AnalysisConfig};
use crate::parsing::{ParserManager, FallbackExtractor, TreeWalker};
use crate::extractors::*;
use crate::analysis::{ComplexityAnalyzer, RelationshipLearner, FrameworkDetector};
use std::collections::HashMap;
use walkdir::WalkDir;
use std::fs;
/// Main semantic analyzer that orchestrates concept extraction across languages.
#[cfg_attr(feature = "napi-bindings", napi)]
pub struct SemanticAnalyzer {
    // Tree-sitter parsers, keyed by language.
    parser_manager: ParserManager,
    // File filters, limits, and language detection settings.
    config: AnalysisConfig,
    // Concepts learned so far, keyed by concept id.
    concepts: HashMap<String, SemanticConcept>,
    // Learned concept-id -> related-concept-ids mapping.
    relationships: HashMap<String, Vec<String>>,
}
#[cfg_attr(feature = "napi-bindings", napi)]
impl SemanticAnalyzer {
/// Create an analyzer with empty knowledge and default config.
///
/// # Errors
/// Propagates any failure from `ParserManager::new` (parser setup).
#[cfg_attr(feature = "napi-bindings", napi(constructor))]
pub fn new() -> Result<Self, ParseError> {
    Ok(SemanticAnalyzer {
        parser_manager: ParserManager::new()?,
        config: AnalysisConfig::default(),
        concepts: HashMap::new(),
        relationships: HashMap::new(),
    })
}
/// Analyzes an entire codebase: detects languages and frameworks,
/// extracts semantic concepts, and computes complexity metrics.
///
/// # Safety
/// Marked unsafe only for NAPI interop; the file-system and parsing work
/// performed here is safe Rust.
#[cfg_attr(feature = "napi-bindings", napi)]
pub async unsafe fn analyze_codebase(
    &mut self,
    path: String,
) -> Result<CodebaseAnalysisResult, ParseError> {
    let languages = self.detect_languages(&path).await?;
    // Only framework names are reported; drop the richer info structs.
    let frameworks: Vec<String> = FrameworkDetector::detect_frameworks(path.clone())
        .await?
        .into_iter()
        .map(|f| f.name)
        .collect();
    let concepts = self.extract_concepts(&path).await?;
    Ok(CodebaseAnalysisResult {
        languages,
        frameworks,
        complexity: ComplexityAnalyzer::calculate_complexity(&concepts),
        concepts,
    })
}
/// Analyzes the content of a specific file for semantic concepts,
/// falling back to pattern-based extraction when tree-sitter parsing
/// fails (e.g. unsupported language).
///
/// # Safety
/// Marked unsafe only for NAPI interop; the parsing performed here is
/// safe Rust.
#[cfg_attr(feature = "napi-bindings", napi)]
pub async unsafe fn analyze_file_content(
    &mut self,
    file_path: String,
    content: String,
) -> Result<Vec<SemanticConcept>, ParseError> {
    let language = self.config.detect_language_from_path(&file_path);
    let concepts = match self.parse_file_content(&file_path, &content, &language).await {
        Ok(parsed) => parsed,
        // Fallback to pattern-based extraction for unsupported languages.
        Err(_) => FallbackExtractor::new().extract_concepts(&file_path, &content),
    };
    // Cache concepts so later relationship queries can see them.
    self.concepts
        .extend(concepts.iter().map(|c| (c.id.clone(), c.clone())));
    Ok(concepts)
}
/// Learns semantic concepts from analyzing an entire codebase, then
/// learns relationships between the extracted concepts and caches both.
///
/// # Safety
/// Marked unsafe only for NAPI interop; the file-system and parsing work
/// performed here is safe Rust.
#[cfg_attr(feature = "napi-bindings", napi)]
pub async unsafe fn learn_from_codebase(
    &mut self,
    path: String,
) -> Result<Vec<SemanticConcept>, ParseError> {
    // Bound the entire learning pass to 5 minutes.
    let extraction = tokio::time::timeout(
        tokio::time::Duration::from_secs(300),
        self.extract_concepts(&path),
    )
    .await;
    let learned = match extraction {
        Ok(result) => result?,
        Err(_elapsed) => {
            eprintln!("Learning process timed out after 5 minutes");
            return Err(ParseError::from_reason(
                "Learning process timed out. This can happen with very large codebases or complex file structures."
            ));
        }
    };
    // Learn relationships between concepts.
    RelationshipLearner::learn_concept_relationships(&learned, &mut self.relationships);
    // Update internal knowledge.
    self.concepts
        .extend(learned.iter().map(|c| (c.id.clone(), c.clone())));
    Ok(learned)
}
/// Updates the analyzer's internal state from analysis data (from original implementation).
///
/// Currently a stub: the payload is ignored and the call always reports
/// success. Intended to be invoked when file changes are detected.
///
/// # Safety
/// This function uses unsafe because it needs to interact with the Node.js runtime
/// through N-API bindings. The caller must ensure the analysis data is valid JSON.
#[cfg_attr(feature = "napi-bindings", napi)]
pub async unsafe fn update_from_analysis(
    &mut self,
    _analysis_data: String,
) -> Result<bool, ParseError> {
    // Parse analysis data and update internal state — not yet implemented;
    // the unused parameter and constant Ok(true) make this a no-op.
    Ok(true)
}
/// Get the known relationship ids for a specific concept id; returns an
/// empty list for unknown concepts.
#[cfg_attr(feature = "napi-bindings", napi)]
pub fn get_concept_relationships(&self, concept_id: String) -> Result<Vec<String>, ParseError> {
    let related = match self.relationships.get(&concept_id) {
        Some(ids) => ids.clone(),
        None => Vec::new(),
    };
    Ok(related)
}
/// Parse file content with tree-sitter and extract concepts, falling
/// back to pattern-based extraction if parsing exceeds the per-file
/// time budget.
pub async fn parse_file_content(
    &mut self,
    file_path: &str,
    content: &str,
    language: &str,
) -> Result<Vec<SemanticConcept>, ParseError> {
    // 30-second budget per file guards against pathological inputs.
    match tokio::time::timeout(
        tokio::time::Duration::from_secs(30),
        self.parse_file_with_language(file_path, content, language),
    )
    .await
    {
        Ok(parsed) => parsed,
        Err(_elapsed) => {
            eprintln!("Timeout parsing {}, using fallback", file_path);
            Ok(FallbackExtractor::new().extract_concepts(file_path, content))
        }
    }
}
/// Internal parsing with a specific language: builds the tree-sitter
/// tree, then walks it with the matching language-specific extractor.
async fn parse_file_with_language(
    &mut self,
    file_path: &str,
    content: &str,
    language: &str,
) -> Result<Vec<SemanticConcept>, ParseError> {
    let tree = self.parser_manager.parse(content, language)?;
    let root = tree.root_node();
    let mut concepts = Vec::new();
    // Dispatch to the language-specific extractor; unknown languages go
    // through the generic extractor.
    match language {
        "typescript" | "javascript" => {
            self.walk_and_extract(root, file_path, content, &TypeScriptExtractor::new(), &mut concepts)?
        }
        "rust" => {
            self.walk_and_extract(root, file_path, content, &RustExtractor::new(), &mut concepts)?
        }
        "python" => {
            self.walk_and_extract(root, file_path, content, &PythonExtractor::new(), &mut concepts)?
        }
        "php" => {
            self.walk_and_extract(root, file_path, content, &PhpExtractor::new(), &mut concepts)?
        }
        "sql" => {
            self.walk_and_extract(root, file_path, content, &SqlExtractor::new(), &mut concepts)?
        }
        "go" => {
            self.walk_and_extract(root, file_path, content, &GoExtractor::new(), &mut concepts)?
        }
        "java" => {
            self.walk_and_extract(root, file_path, content, &JavaExtractor::new(), &mut concepts)?
        }
        "cpp" | "c" => {
            self.walk_and_extract(root, file_path, content, &CppExtractor::new(), &mut concepts)?
        }
        "csharp" => {
            self.walk_and_extract(root, file_path, content, &CSharpExtractor::new(), &mut concepts)?
        }
        "svelte" => {
            self.walk_and_extract(root, file_path, content, &SvelteExtractor::new(), &mut concepts)?
        }
        _ => {
            self.walk_and_extract(root, file_path, content, &GenericExtractor::new(), &mut concepts)?
        }
    }
    Ok(concepts)
}
/// Walk the syntax tree, invoking `extractor` at every node and
/// appending its findings to `concepts`.
fn walk_and_extract<T>(
    &self,
    node: tree_sitter::Node<'_>,
    file_path: &str,
    content: &str,
    extractor: &T,
    concepts: &mut Vec<SemanticConcept>,
) -> Result<(), ParseError>
where
    T: HasExtractConcepts,
{
    // The walker reports errors as strings; wrap them back into ParseError.
    TreeWalker::default()
        .walk(node, &mut |n| {
            extractor
                .extract_concepts(n, file_path, content, concepts)
                .map_err(|e| format!("Extraction error: {}", e))
        })
        .map_err(ParseError::from_reason)
}
/// Extract concepts from an entire codebase.
///
/// Walks the tree under `path`, parses every file the config accepts
/// (up to `config.max_files`), and falls back to regex extraction when
/// tree-sitter parsing fails. Unreadable files are skipped silently;
/// verbose tracing is gated on the `IN_MEMORIA_DEBUG` env var.
async fn extract_concepts(&mut self, path: &str) -> Result<Vec<SemanticConcept>, ParseError> {
    let mut all_concepts = Vec::new();
    let mut processed_count = 0;
    let debug_enabled = std::env::var("IN_MEMORIA_DEBUG").is_ok();
    for entry in WalkDir::new(path).into_iter().filter_map(|e| e.ok()) {
        if entry.file_type().is_file() {
            let file_path = entry.path();
            if debug_enabled {
                eprintln!("[DEBUG] entry {}", file_path.display());
            }
            if self.config.should_analyze_file(file_path) {
                if debug_enabled {
                    eprintln!("[DEBUG] processing file {}", file_path.display());
                }
                processed_count += 1;
                // Prevent processing too many files (hard cap from config).
                if processed_count > self.config.max_files {
                    eprintln!("Warning: Reached maximum file limit ({}), stopping analysis", self.config.max_files);
                    break;
                }
                match fs::read_to_string(file_path) {
                    Ok(content) => {
                        let language = self.config.detect_language_from_path(
                            file_path.to_str().unwrap_or(""));
                        match self.parse_file_content(
                            file_path.to_str().unwrap_or(""),
                            &content,
                            &language,
                        ).await {
                            Ok(mut concepts) => {
                                all_concepts.append(&mut concepts);
                            }
                            Err(_) => {
                                // Fallback to regex-based extraction if tree-sitter fails
                                eprintln!("Tree-sitter parsing failed for {}, using fallback", file_path.display());
                                let fallback_concepts = FallbackExtractor::new()
                                    .extract_concepts(
                                        file_path.to_str().unwrap_or(""),
                                        &content,
                                    );
                                all_concepts.extend(fallback_concepts);
                            }
                        };
                    }
                    Err(_) => {
                        // Skip files that can't be read (binary, permissions, ...).
                        continue;
                    }
                }
            } else if debug_enabled {
                eprintln!("[DEBUG] skipped file {}", file_path.display());
            }
        }
    }
    eprintln!("Processed {} source files and found {} concepts", processed_count, all_concepts.len());
    Ok(all_concepts)
}
/// Detect the programming languages present in a codebase by mapping
/// file extensions to language names (deduplicated via a set).
async fn detect_languages(&self, path: &str) -> Result<Vec<String>, ParseError> {
    let mut languages = std::collections::HashSet::new();
    let files = WalkDir::new(path)
        .into_iter()
        .filter_map(|e| e.ok())
        .filter(|e| e.file_type().is_file());
    for entry in files {
        let ext = match entry.path().extension().and_then(|s| s.to_str()) {
            Some(e) => e.to_lowercase(),
            None => continue,
        };
        let language = match ext.as_str() {
            "ts" | "tsx" => "typescript",
            "js" | "jsx" => "javascript",
            "rs" => "rust",
            "py" => "python",
            "php" | "phtml" | "inc" => "php",
            "sql" => "sql",
            "go" => "go",
            "java" => "java",
            "c" => "c",
            "cpp" | "cc" | "cxx" => "cpp",
            "cs" => "csharp",
            "svelte" => "svelte",
            "vue" => "javascript", // Fallback to JS for Vue
            _ => continue,
        };
        languages.insert(language.to_string());
    }
    Ok(languages.into_iter().collect())
}
}
/// Trait for extractors that can extract concepts from tree-sitter nodes.
///
/// Unifies the per-language extractors so `walk_and_extract` can be
/// generic over them; each impl simply delegates to the extractor's
/// inherent method of the same name.
pub trait HasExtractConcepts {
    /// Inspect `node` (with `content` as the source text of `file_path`)
    /// and append any recognized concepts to `concepts`.
    fn extract_concepts(
        &self,
        node: tree_sitter::Node<'_>,
        file_path: &str,
        content: &str,
        concepts: &mut Vec<SemanticConcept>,
    ) -> Result<(), ParseError>;
}
// Implement the trait for all extractors
impl HasExtractConcepts for TypeScriptExtractor {
    // Delegates to the inherent method of the same name.
    fn extract_concepts(&self, node: tree_sitter::Node<'_>, file_path: &str, content: &str, concepts: &mut Vec<SemanticConcept>) -> Result<(), ParseError> {
        TypeScriptExtractor::extract_concepts(self, node, file_path, content, concepts)
    }
}
impl HasExtractConcepts for RustExtractor {
    // Delegates to the inherent method of the same name.
    fn extract_concepts(&self, node: tree_sitter::Node<'_>, file_path: &str, content: &str, concepts: &mut Vec<SemanticConcept>) -> Result<(), ParseError> {
        RustExtractor::extract_concepts(self, node, file_path, content, concepts)
    }
}
impl HasExtractConcepts for PythonExtractor {
    // Delegates to the inherent method of the same name.
    fn extract_concepts(&self, node: tree_sitter::Node<'_>, file_path: &str, content: &str, concepts: &mut Vec<SemanticConcept>) -> Result<(), ParseError> {
        PythonExtractor::extract_concepts(self, node, file_path, content, concepts)
    }
}
impl HasExtractConcepts for PhpExtractor {
    // Delegates to the inherent method of the same name.
    fn extract_concepts(&self, node: tree_sitter::Node<'_>, file_path: &str, content: &str, concepts: &mut Vec<SemanticConcept>) -> Result<(), ParseError> {
        PhpExtractor::extract_concepts(self, node, file_path, content, concepts)
    }
}
impl HasExtractConcepts for SqlExtractor {
    // Delegates to the inherent method of the same name.
    fn extract_concepts(&self, node: tree_sitter::Node<'_>, file_path: &str, content: &str, concepts: &mut Vec<SemanticConcept>) -> Result<(), ParseError> {
        SqlExtractor::extract_concepts(self, node, file_path, content, concepts)
    }
}
impl HasExtractConcepts for GoExtractor {
    // Delegates to the inherent method of the same name.
    fn extract_concepts(&self, node: tree_sitter::Node<'_>, file_path: &str, content: &str, concepts: &mut Vec<SemanticConcept>) -> Result<(), ParseError> {
        GoExtractor::extract_concepts(self, node, file_path, content, concepts)
    }
}
impl HasExtractConcepts for JavaExtractor {
    // Delegates to the inherent method of the same name.
    fn extract_concepts(&self, node: tree_sitter::Node<'_>, file_path: &str, content: &str, concepts: &mut Vec<SemanticConcept>) -> Result<(), ParseError> {
        JavaExtractor::extract_concepts(self, node, file_path, content, concepts)
    }
}
impl HasExtractConcepts for CppExtractor {
    // Delegates to the inherent method of the same name (handles C and C++).
    fn extract_concepts(&self, node: tree_sitter::Node<'_>, file_path: &str, content: &str, concepts: &mut Vec<SemanticConcept>) -> Result<(), ParseError> {
        CppExtractor::extract_concepts(self, node, file_path, content, concepts)
    }
}
impl HasExtractConcepts for CSharpExtractor {
    // Delegates to the inherent method of the same name.
    fn extract_concepts(&self, node: tree_sitter::Node<'_>, file_path: &str, content: &str, concepts: &mut Vec<SemanticConcept>) -> Result<(), ParseError> {
        CSharpExtractor::extract_concepts(self, node, file_path, content, concepts)
    }
}
impl HasExtractConcepts for SvelteExtractor {
    // Delegates to the inherent method of the same name.
    fn extract_concepts(&self, node: tree_sitter::Node<'_>, file_path: &str, content: &str, concepts: &mut Vec<SemanticConcept>) -> Result<(), ParseError> {
        SvelteExtractor::extract_concepts(self, node, file_path, content, concepts)
    }
}
impl HasExtractConcepts for GenericExtractor {
    // Delegates to the inherent method of the same name (catch-all languages).
    fn extract_concepts(&self, node: tree_sitter::Node<'_>, file_path: &str, content: &str, concepts: &mut Vec<SemanticConcept>) -> Result<(), ParseError> {
        GenericExtractor::extract_concepts(self, node, file_path, content, concepts)
    }
}
#[cfg(test)]
mod tests {
    //! Tests for the semantic analyzer: construction, per-language parsing,
    //! codebase-level learning/analysis, and the regex fallback path.
    //!
    //! NOTE(review): several analyzer methods are invoked inside `unsafe`
    //! blocks — presumably because they are declared `unsafe fn` by the napi
    //! binding layer; confirm against the analyzer's definition.
    use super::*;

    #[tokio::test]
    async fn test_semantic_analyzer_creation() {
        let analyzer = SemanticAnalyzer::new();
        assert!(analyzer.is_ok());
        let analyzer = analyzer.unwrap();
        // The core language set must be registered out of the box.
        assert!(analyzer.parser_manager.supports_language("typescript"));
        assert!(analyzer.parser_manager.supports_language("javascript"));
        assert!(analyzer.parser_manager.supports_language("rust"));
        assert!(analyzer.parser_manager.supports_language("python"));
    }

    #[tokio::test]
    async fn test_typescript_class_parsing() {
        let mut analyzer = SemanticAnalyzer::new().unwrap();
        let content = "export class UserService { getName() { return 'test'; } }";
        println!("🔍 Testing TypeScript class parsing...");
        println!("Content: {}", content);
        let result = unsafe {
            analyzer.analyze_file_content("test.ts".to_string(), content.to_string()).await
        };
        match result {
            Ok(concepts) => {
                println!("✅ Parsing succeeded! Found {} concepts:", concepts.len());
                for concept in &concepts {
                    println!(" - {} ({})", concept.name, concept.concept_type);
                }
                assert!(!concepts.is_empty(), "Should find at least one concept");
            }
            Err(e) => {
                println!("❌ Parsing failed: {}", e);
                panic!("TypeScript parsing should succeed, but failed with: {}", e);
            }
        }
    }

    #[tokio::test]
    async fn test_javascript_function_parsing() {
        let mut analyzer = SemanticAnalyzer::new().unwrap();
        let content = "function hello() { return 'world'; }";
        println!("🔍 Testing JavaScript function parsing...");
        println!("Content: {}", content);
        let result = unsafe {
            analyzer.analyze_file_content("test.js".to_string(), content.to_string()).await
        };
        match result {
            Ok(concepts) => {
                println!("✅ Parsing succeeded! Found {} concepts:", concepts.len());
                for concept in &concepts {
                    println!(" - {} ({})", concept.name, concept.concept_type);
                }
                assert!(!concepts.is_empty(), "Should find at least one concept");
            }
            Err(e) => {
                println!("❌ Parsing failed: {}", e);
                panic!("JavaScript parsing should succeed, but failed with: {}", e);
            }
        }
    }

    #[tokio::test]
    async fn test_python_class_parsing() {
        let mut analyzer = SemanticAnalyzer::new().unwrap();
        let content = "class User:\n def __init__(self):\n pass";
        println!("🔍 Testing Python class parsing...");
        println!("Content: {}", content);
        let result = unsafe {
            analyzer.analyze_file_content("test.py".to_string(), content.to_string()).await
        };
        match result {
            Ok(concepts) => {
                println!("✅ Parsing succeeded! Found {} concepts:", concepts.len());
                for concept in &concepts {
                    println!(" - {} ({})", concept.name, concept.concept_type);
                }
                assert!(!concepts.is_empty(), "Should find at least one concept");
            }
            Err(e) => {
                println!("❌ Parsing failed: {}", e);
                panic!("Python parsing should succeed, but failed with: {}", e);
            }
        }
    }

    #[tokio::test]
    async fn test_rust_struct_parsing() {
        let mut analyzer = SemanticAnalyzer::new().unwrap();
        let content = "pub struct User { name: String }";
        println!("🔍 Testing Rust struct parsing...");
        println!("Content: {}", content);
        let result = unsafe {
            analyzer.analyze_file_content("test.rs".to_string(), content.to_string()).await
        };
        match result {
            Ok(concepts) => {
                println!("✅ Parsing succeeded! Found {} concepts:", concepts.len());
                for concept in &concepts {
                    println!(" - {} ({})", concept.name, concept.concept_type);
                }
                assert!(!concepts.is_empty(), "Should find at least one concept");
            }
            Err(e) => {
                println!("❌ Parsing failed: {}", e);
                panic!("Rust parsing should succeed, but failed with: {}", e);
            }
        }
    }

    // Walks the current working directory; assumes tests run from the crate
    // root so there is Rust source to learn from.
    #[tokio::test]
    async fn test_learn_from_codebase() {
        let mut analyzer = SemanticAnalyzer::new().unwrap();
        let result = unsafe {
            analyzer.learn_from_codebase(".".to_string()).await
        };
        assert!(result.is_ok());
        let concepts = result.unwrap();
        println!("Learned {} concepts from codebase", concepts.len());
        // Should find some concepts in the current Rust codebase
        assert!(!concepts.is_empty());
    }

    #[tokio::test]
    async fn test_update_from_analysis() {
        let mut analyzer = SemanticAnalyzer::new().unwrap();
        // Minimal well-formed payload; empty pattern/concept lists.
        let analysis_data = r#"{"patterns": [], "concepts": []}"#.to_string();
        let result = unsafe {
            analyzer.update_from_analysis(analysis_data).await
        };
        assert!(result.is_ok());
        assert!(result.unwrap());
    }

    #[test]
    fn test_get_concept_relationships() {
        let analyzer = SemanticAnalyzer::new().unwrap();
        // Unknown concept ids yield an empty (not error) result.
        let result = analyzer.get_concept_relationships("nonexistent".to_string());
        assert!(result.is_ok());
        assert_eq!(result.unwrap().len(), 0);
    }

    #[tokio::test]
    async fn test_detect_languages() {
        let analyzer = SemanticAnalyzer::new().unwrap();
        let result = analyzer.detect_languages(".").await;
        assert!(result.is_ok());
        let languages = result.unwrap();
        println!("Detected languages: {:?}", languages);
        // Should detect Rust in the current codebase
        assert!(languages.contains(&"rust".to_string()));
    }

    #[tokio::test]
    async fn test_analyze_codebase_structure() {
        let mut analyzer = SemanticAnalyzer::new().unwrap();
        let result = unsafe {
            analyzer.analyze_codebase(".".to_string()).await
        };
        assert!(result.is_ok());
        let analysis = result.unwrap();
        println!("Analysis result:");
        println!("- Languages: {:?}", analysis.languages);
        println!("- Frameworks: {:?}", analysis.frameworks);
        println!("- Concepts: {}", analysis.concepts.len());
        println!("- Complexity: {:?}", analysis.complexity);
        assert!(!analysis.languages.is_empty());
        assert!(!analysis.concepts.is_empty());
        assert!(analysis.complexity.file_count > 0);
    }

    #[tokio::test]
    async fn test_analyze_simple_typescript() {
        let mut analyzer = SemanticAnalyzer::new().unwrap();
        let code = "function test() { return 42; }";
        let result = unsafe {
            analyzer.analyze_file_content("test.ts".to_string(), code.to_string()).await
        };
        assert!(result.is_ok());
        let concepts = result.unwrap();
        assert!(!concepts.is_empty());
        // Verify concept properties
        let concept = &concepts[0];
        assert!(!concept.name.is_empty());
        assert!(!concept.concept_type.is_empty());
        // Confidence is expected in (0, 1]; line numbers are 1-based.
        assert!(concept.confidence > 0.0);
        assert!(concept.confidence <= 1.0);
        assert_eq!(concept.file_path, "test.ts");
        assert!(concept.line_range.start > 0);
        assert!(concept.line_range.end >= concept.line_range.start);
    }

    // Smoke test over every secondary language grammar; each snippet is the
    // smallest program that should yield at least a clean parse.
    #[tokio::test]
    async fn test_new_language_support() {
        let mut analyzer = SemanticAnalyzer::new().unwrap();
        // Test SQL
        let sql_content = "CREATE TABLE users (id INTEGER PRIMARY KEY, name VARCHAR(255));";
        println!("🔍 Testing SQL parsing...");
        let sql_result = unsafe {
            analyzer.analyze_file_content("test.sql".to_string(), sql_content.to_string()).await
        };
        match &sql_result {
            Ok(concepts) => println!("✅ SQL: Found {} concepts", concepts.len()),
            Err(e) => println!("❌ SQL failed: {}", e),
        }
        assert!(sql_result.is_ok(), "SQL parsing should succeed: {:?}", sql_result.err());
        // Test Go
        let go_content = "package main\nfunc main() {\n println(\"Hello World\")\n}";
        println!("🔍 Testing Go parsing...");
        let go_result = unsafe {
            analyzer.analyze_file_content("test.go".to_string(), go_content.to_string()).await
        };
        match &go_result {
            Ok(concepts) => println!("✅ Go: Found {} concepts", concepts.len()),
            Err(e) => println!("❌ Go failed: {}", e),
        }
        assert!(go_result.is_ok(), "Go parsing should succeed: {:?}", go_result.err());
        // Test Java
        let java_content = "public class HelloWorld {\n public static void main(String[] args) {\n System.out.println(\"Hello\");\n }\n}";
        println!("🔍 Testing Java parsing...");
        let java_result = unsafe {
            analyzer.analyze_file_content("test.java".to_string(), java_content.to_string()).await
        };
        match &java_result {
            Ok(concepts) => println!("✅ Java: Found {} concepts", concepts.len()),
            Err(e) => println!("❌ Java failed: {}", e),
        }
        assert!(java_result.is_ok(), "Java parsing should succeed: {:?}", java_result.err());
        // Test C
        let c_content = "#include <stdio.h>\nint main() {\n printf(\"Hello World\");\n return 0;\n}";
        println!("🔍 Testing C parsing...");
        let c_result = unsafe {
            analyzer.analyze_file_content("test.c".to_string(), c_content.to_string()).await
        };
        assert!(c_result.is_ok(), "C parsing should succeed");
        // Test C++
        let cpp_content = "#include <iostream>\nclass HelloWorld {\npublic:\n void sayHello() {\n std::cout << \"Hello\";\n }\n};";
        println!("🔍 Testing C++ parsing...");
        let cpp_result = unsafe {
            analyzer.analyze_file_content("test.cpp".to_string(), cpp_content.to_string()).await
        };
        assert!(cpp_result.is_ok(), "C++ parsing should succeed");
        // Test C#
        let csharp_content = "using System;\npublic class Program {\n public static void Main() {\n Console.WriteLine(\"Hello World\");\n }\n}";
        println!("🔍 Testing C# parsing...");
        let csharp_result = unsafe {
            analyzer.analyze_file_content("test.cs".to_string(), csharp_content.to_string()).await
        };
        assert!(csharp_result.is_ok(), "C# parsing should succeed");
        // Test Svelte
        let svelte_content = "<script>\n let name = \"world\";\n function greet() {\n alert(`Hello ${name}!`);\n }\n</script>";
        println!("🔍 Testing Svelte parsing...");
        let svelte_result = unsafe {
            analyzer.analyze_file_content("test.svelte".to_string(), svelte_content.to_string()).await
        };
        assert!(svelte_result.is_ok(), "Svelte parsing should succeed");
        // Test PHP
        let php_content = "<?php\nclass Greeter {\n public function greet(): string {\n return 'Hello';\n }\n}\n";
        println!("🔍 Testing PHP parsing...");
        let php_result = unsafe {
            analyzer.analyze_file_content("Greeter.php".to_string(), php_content.to_string()).await
        };
        assert!(php_result.is_ok(), "PHP parsing should succeed");
        println!("✅ All language parsing tests passed!");
    }

    #[tokio::test]
    async fn test_timeout_handling() {
        let mut analyzer = SemanticAnalyzer::new().unwrap();
        // Test that timeout doesn't cause crashes - use normal content
        let content = "function test() { return 42; }";
        let result = unsafe {
            analyzer.analyze_file_content("test.js".to_string(), content.to_string()).await
        };
        assert!(result.is_ok());
    }

    // Pure-data test: verifies the SemanticConcept struct can be built and
    // read back field-by-field.
    #[test]
    fn test_semantic_concept_creation() {
        use crate::types::LineRange;
        use std::collections::HashMap;
        let concept = SemanticConcept {
            id: "test_concept".to_string(),
            name: "TestClass".to_string(),
            concept_type: "class".to_string(),
            confidence: 0.9,
            file_path: "test.ts".to_string(),
            line_range: LineRange { start: 1, end: 10 },
            relationships: HashMap::new(),
            metadata: HashMap::new(),
        };
        assert_eq!(concept.name, "TestClass");
        assert_eq!(concept.concept_type, "class");
        assert_eq!(concept.confidence, 0.9);
        assert_eq!(concept.file_path, "test.ts");
        assert_eq!(concept.line_range.start, 1);
        assert_eq!(concept.line_range.end, 10);
    }

    #[test]
    fn test_complexity_metrics() {
        use crate::types::ComplexityMetrics;
        let metrics = ComplexityMetrics {
            cyclomatic_complexity: 5.0,
            cognitive_complexity: 8.0,
            function_count: 10,
            class_count: 5,
            file_count: 100,
            avg_functions_per_file: 2.0,
            avg_lines_per_concept: 15.0,
            max_nesting_depth: 3,
        };
        assert!(metrics.cyclomatic_complexity > 0.0);
        assert!(metrics.cognitive_complexity >= metrics.cyclomatic_complexity);
        assert!(metrics.file_count > 0);
        assert!(metrics.function_count > 0);
        assert!(metrics.class_count > 0);
    }

    #[tokio::test]
    async fn test_fallback_extraction() {
        let mut analyzer = SemanticAnalyzer::new().unwrap();
        // Test with a language that might not have full tree-sitter support
        // The system should fall back to regex-based extraction
        let content = "function calculate() { return 42; }";
        let result = unsafe {
            analyzer.analyze_file_content("test.unknown".to_string(), content.to_string()).await
        };
        assert!(result.is_ok());
        let concepts = result.unwrap();
        assert!(!concepts.is_empty());
        // Check that fallback extraction worked
        let concept = &concepts[0];
        assert!(!concept.name.is_empty());
        assert!(concept.confidence > 0.0);
    }
}
| rust | MIT | 9cbed0d63a52d61fcd66d0d651b8c4d7f787ba78 | 2026-01-04T20:19:30.317431Z | false |
pi22by7/In-Memoria | https://github.com/pi22by7/In-Memoria/blob/9cbed0d63a52d61fcd66d0d651b8c4d7f787ba78/rust-core/src/analysis/relationships.rs | rust-core/src/analysis/relationships.rs | //! Relationship analysis and learning between semantic concepts
#[cfg(feature = "napi-bindings")]
use napi_derive::napi;
use crate::types::SemanticConcept;
use std::collections::HashMap;
/// Analyzer for learning and discovering relationships between code concepts
///
/// Unit struct: it carries no state — all functionality lives in associated
/// functions. Exposed through napi bindings when the `napi-bindings` feature
/// is enabled.
#[cfg_attr(feature = "napi-bindings", napi)]
pub struct RelationshipLearner;
#[cfg_attr(feature = "napi-bindings", napi)]
impl RelationshipLearner {
#[cfg_attr(feature = "napi-bindings", napi(constructor))]
pub fn new() -> Self {
    // Stateless unit struct: construction is free.
    RelationshipLearner
}
/// Learn relationships between concepts and update the relationships map
///
/// Runs four independent heuristic passes (spatial proximity, naming,
/// concept type, file organization). Each discovered edge is stored as a
/// `"relationship_type:target_id"` string under the source concept's id;
/// `add_relationship` deduplicates repeated edges, so the passes may overlap.
pub fn learn_concept_relationships(
    concepts: &Vec<SemanticConcept>,
    relationships: &mut HashMap<String, Vec<String>>,
) {
    Self::analyze_spatial_relationships(concepts, relationships);
    Self::analyze_naming_relationships(concepts, relationships);
    Self::analyze_type_relationships(concepts, relationships);
    Self::analyze_file_relationships(concepts, relationships);
}
/// Link concepts that appear close together in the same source file.
///
/// Two concepts in the same file whose line ranges are within 10 lines of
/// each other get a symmetric `spatial_proximity` edge.
fn analyze_spatial_relationships(
    concepts: &Vec<SemanticConcept>,
    relationships: &mut HashMap<String, Vec<String>>,
) {
    // Bucket concepts by source file; proximity is only meaningful per file.
    let mut per_file: HashMap<&str, Vec<&SemanticConcept>> = HashMap::new();
    for concept in concepts {
        per_file
            .entry(concept.file_path.as_str())
            .or_default()
            .push(concept);
    }
    // Examine every unordered pair within each file exactly once.
    for bucket in per_file.values() {
        for (i, first) in bucket.iter().enumerate() {
            for second in &bucket[i + 1..] {
                if Self::calculate_line_distance(first, second) <= 10 {
                    Self::add_relationship(
                        relationships,
                        &first.id,
                        &second.id,
                        "spatial_proximity",
                    );
                    Self::add_relationship(
                        relationships,
                        &second.id,
                        &first.id,
                        "spatial_proximity",
                    );
                }
            }
        }
    }
}
/// Link concepts whose names are similar or follow a recognized pattern.
///
/// For every ordered pair of distinct concepts: a `naming_similarity` edge
/// is added when the LCS-based similarity exceeds 0.6, and a
/// `naming_pattern` edge when a getter/setter or test/impl pattern matches.
fn analyze_naming_relationships(
    concepts: &Vec<SemanticConcept>,
    relationships: &mut HashMap<String, Vec<String>>,
) {
    for left in concepts {
        // Both directions are covered because the outer loop visits each
        // concept as `left` in turn.
        for right in concepts.iter().filter(|c| c.id != left.id) {
            if Self::calculate_name_similarity(&left.name, &right.name) > 0.6 {
                Self::add_relationship(
                    relationships,
                    &left.id,
                    &right.id,
                    "naming_similarity",
                );
            }
            if Self::has_naming_relationship(&left.name, &right.name) {
                Self::add_relationship(
                    relationships,
                    &left.id,
                    &right.id,
                    "naming_pattern",
                );
            }
        }
    }
}
/// Link concepts that share a concept type, then run cross-type analysis.
///
/// Every unordered pair within a type group receives symmetric `same_type`
/// edges; cross-type containment (function-in-class, variable-in-scope) is
/// delegated to `analyze_cross_type_relationships`.
fn analyze_type_relationships(
    concepts: &Vec<SemanticConcept>,
    relationships: &mut HashMap<String, Vec<String>>,
) {
    // Bucket concepts by their declared type.
    let mut by_type: HashMap<&str, Vec<&SemanticConcept>> = HashMap::new();
    for concept in concepts {
        by_type
            .entry(concept.concept_type.as_str())
            .or_default()
            .push(concept);
    }
    // Symmetric same_type edges within each bucket.
    for group in by_type.values() {
        for (i, a) in group.iter().enumerate() {
            for b in &group[i + 1..] {
                Self::add_relationship(relationships, &a.id, &b.id, "same_type");
                Self::add_relationship(relationships, &b.id, &a.id, "same_type");
            }
        }
    }
    // Analyze cross-type relationships (e.g., functions in classes)
    Self::analyze_cross_type_relationships(concepts, relationships);
}
/// Link concepts of different types by containment.
///
/// Functions/methods inside a class or interface get `member_of`/`contains`
/// edges; variables/fields inside a function or class get a `scoped_in` edge.
fn analyze_cross_type_relationships(
    concepts: &Vec<SemanticConcept>,
    relationships: &mut HashMap<String, Vec<String>>,
) {
    for concept in concepts {
        let kind = concept.concept_type.as_str();
        if kind == "function" || kind == "method" {
            // Find classes/interfaces whose line range encloses this function.
            for container in concepts {
                let container_kind = container.concept_type.as_str();
                if (container_kind == "class" || container_kind == "interface")
                    && Self::is_function_in_class(concept, container)
                {
                    Self::add_relationship(
                        relationships,
                        &concept.id,
                        &container.id,
                        "member_of",
                    );
                    Self::add_relationship(
                        relationships,
                        &container.id,
                        &concept.id,
                        "contains",
                    );
                }
            }
        } else if kind == "variable" || kind == "field" {
            // Find functions/classes whose line range encloses this variable.
            for scope in concepts {
                let scope_kind = scope.concept_type.as_str();
                if (scope_kind == "function" || scope_kind == "class")
                    && Self::is_variable_in_scope(concept, scope)
                {
                    Self::add_relationship(
                        relationships,
                        &concept.id,
                        &scope.id,
                        "scoped_in",
                    );
                }
            }
        }
    }
}
/// Link concepts that live in the same file, then analyze cross-file imports.
///
/// Each unordered pair within a file gets a single-direction `same_file`
/// edge; cross-file edges are delegated to `analyze_import_relationships`.
fn analyze_file_relationships(
    concepts: &Vec<SemanticConcept>,
    relationships: &mut HashMap<String, Vec<String>>,
) {
    // Bucket concepts by file path (owned keys: the map is passed on below).
    let mut file_groups: HashMap<String, Vec<&SemanticConcept>> = HashMap::new();
    for concept in concepts {
        file_groups
            .entry(concept.file_path.clone())
            .or_default()
            .push(concept);
    }
    // One same_file edge per unordered pair within a file.
    for members in file_groups.values() {
        for (i, a) in members.iter().enumerate() {
            for b in &members[i + 1..] {
                Self::add_relationship(relationships, &a.id, &b.id, "same_file");
            }
        }
    }
    // Analyze cross-file relationships based on import/export patterns
    Self::analyze_import_relationships(&file_groups, relationships);
}
/// Analyze import/export relationships between files
///
/// Simplified heuristic: for each class/interface, any concept in another
/// file whose `"imports"` metadata entry mentions the class name gets an
/// `imports` edge (and the class gets the reverse `imported_by` edge).
/// A real implementation would parse import statements instead.
fn analyze_import_relationships(
    file_groups: &HashMap<String, Vec<&SemanticConcept>>,
    relationships: &mut HashMap<String, Vec<String>>,
) {
    for (file1, concepts1) in file_groups {
        for (file2, concepts2) in file_groups {
            if file1 == file2 {
                continue;
            }
            for concept1 in concepts1 {
                // Hoisted out of the inner loop: only classes/interfaces can
                // be import targets, and the check doesn't depend on concept2.
                if concept1.concept_type != "class" && concept1.concept_type != "interface" {
                    continue;
                }
                for concept2 in concepts2 {
                    // Single map lookup (was contains_key followed by get).
                    if let Some(imports) = concept2.metadata.get("imports") {
                        if imports.contains(&concept1.name) {
                            Self::add_relationship(
                                relationships,
                                &concept2.id,
                                &concept1.id,
                                "imports",
                            );
                            Self::add_relationship(
                                relationships,
                                &concept1.id,
                                &concept2.id,
                                "imported_by",
                            );
                        }
                    }
                }
            }
        }
    }
}
/// Distance in lines between two concepts' ranges.
///
/// Returns `u32::MAX` for concepts in different files, `0` for overlapping
/// ranges, otherwise the gap between the nearer edges of the two ranges.
fn calculate_line_distance(concept1: &SemanticConcept, concept2: &SemanticConcept) -> u32 {
    if concept1.file_path != concept2.file_path {
        // Different files are treated as infinitely far apart.
        return u32::MAX;
    }
    let (s1, e1) = (concept1.line_range.start, concept1.line_range.end);
    let (s2, e2) = (concept2.line_range.start, concept2.line_range.end);
    if e1 < s2 {
        s2 - e1
    } else if e2 < s1 {
        s1.saturating_sub(e2)
    } else {
        // Ranges overlap.
        0
    }
}
/// Similarity of two names in `[0.0, 1.0]`.
///
/// Defined as LCS length divided by the longer name's byte length; identical
/// names score 1.0.
fn calculate_name_similarity(name1: &str, name2: &str) -> f64 {
    if name1 == name2 {
        return 1.0;
    }
    let max_len = name1.len().max(name2.len());
    if max_len == 0 {
        // Both empty — already covered by the equality check, kept for safety.
        return 1.0;
    }
    Self::longest_common_subsequence(name1, name2) as f64 / max_len as f64
}
/// Length of the longest common subsequence of two strings (by `char`).
///
/// Rolling single-row dynamic program: `prev[j+1]` holds the LCS length of
/// `s1[..i]` and `s2[..j+1]`; only the previous row is retained.
fn longest_common_subsequence(s1: &str, s2: &str) -> usize {
    let a: Vec<char> = s1.chars().collect();
    let b: Vec<char> = s2.chars().collect();
    let mut prev = vec![0usize; b.len() + 1];
    for &ca in &a {
        // `diag` carries dp[i-1][j-1] as we sweep the row left to right.
        let mut diag = 0;
        for (j, &cb) in b.iter().enumerate() {
            let up = prev[j + 1];
            prev[j + 1] = if ca == cb { diag + 1 } else { up.max(prev[j]) };
            diag = up;
        }
    }
    prev[b.len()]
}
/// Check if two names have a naming relationship (e.g., getter/setter)
///
/// Recognized patterns (case-insensitive):
/// * getter/setter pairs — `getFoo` / `setFoo` with identical suffixes;
/// * test/implementation pairs — names that become equal and non-empty once
///   every occurrence of `"test"` is removed, e.g. `testParser` / `parser`.
fn has_naming_relationship(name1: &str, name2: &str) -> bool {
    let name1_lower = name1.to_lowercase();
    let name2_lower = name2.to_lowercase();
    // Check for getter/setter patterns
    if name1_lower.starts_with("get") && name2_lower.starts_with("set") {
        // Slicing at byte 3 is safe: starts_with guarantees an ASCII prefix.
        return name1_lower[3..] == name2_lower[3..];
    }
    // Check for test/implementation patterns. Stripping "test" must leave
    // identical, NON-empty names: the previous `is_empty()` escape made a
    // concept literally named "test" related to every other concept.
    if name1_lower.contains("test") || name2_lower.contains("test") {
        let clean1 = name1_lower.replace("test", "");
        let clean2 = name2_lower.replace("test", "");
        return !clean1.is_empty() && clean1 == clean2;
    }
    false
}
/// Heuristic: does this function's line range sit inside the class's range?
///
/// Requires the same file; the class end is extended by 5 lines of tolerance
/// to absorb imprecise extractor ranges.
fn is_function_in_class(function: &SemanticConcept, class: &SemanticConcept) -> bool {
    function.file_path == class.file_path
        && function.line_range.start >= class.line_range.start
        && function.line_range.end <= class.line_range.end + 5
}
/// Heuristic: does this variable's line range sit strictly inside the
/// scope concept's range (same file, no tolerance)?
fn is_variable_in_scope(variable: &SemanticConcept, scope: &SemanticConcept) -> bool {
    variable.file_path == scope.file_path
        && variable.line_range.start >= scope.line_range.start
        && variable.line_range.end <= scope.line_range.end
}
/// Record a directed, typed edge as `"relationship_type:to_id"` under
/// `from_id`; duplicate edges are silently ignored.
fn add_relationship(
    relationships: &mut HashMap<String, Vec<String>>,
    from_id: &str,
    to_id: &str,
    relationship_type: &str,
) {
    let tagged = format!("{}:{}", relationship_type, to_id);
    let bucket = relationships.entry(from_id.to_string()).or_default();
    // Linear dedupe — edge lists per concept are expected to stay small.
    if !bucket.iter().any(|existing| existing == &tagged) {
        bucket.push(tagged);
    }
}
}
impl Default for RelationshipLearner {
    /// Equivalent to [`RelationshipLearner::new`]; the type is stateless.
    fn default() -> Self {
        Self::new()
    }
}
#[cfg(test)]
mod tests {
    //! Unit tests exercising each relationship heuristic in isolation.
    use super::*;
    use crate::types::LineRange;

    /// Build a minimal concept for tests; confidence is fixed at 0.8.
    fn create_test_concept(
        id: &str,
        name: &str,
        concept_type: &str,
        file_path: &str,
        start: u32,
        end: u32,
    ) -> SemanticConcept {
        SemanticConcept {
            id: id.to_string(),
            name: name.to_string(),
            concept_type: concept_type.to_string(),
            confidence: 0.8,
            file_path: file_path.to_string(),
            line_range: LineRange { start, end },
            relationships: HashMap::new(),
            metadata: HashMap::new(),
        }
    }

    #[test]
    fn test_relationship_learner_creation() {
        let _learner = RelationshipLearner::new();
        // Constructor should work
    }

    #[test]
    fn test_spatial_relationships() {
        let concepts = vec![
            create_test_concept("1", "func1", "function", "test.rs", 1, 5),
            create_test_concept("2", "func2", "function", "test.rs", 8, 12),
            create_test_concept("3", "func3", "function", "test.rs", 50, 60),
        ];
        let mut relationships = HashMap::new();
        RelationshipLearner::learn_concept_relationships(&concepts, &mut relationships);
        // func1 and func2 should be spatially related (within 10 lines)
        assert!(relationships.contains_key("1"));
        assert!(relationships.contains_key("2"));
        let func1_rels = relationships.get("1").unwrap();
        assert!(func1_rels.iter().any(|r| r.contains("spatial_proximity:2")));
    }

    #[test]
    fn test_naming_relationships() {
        let concepts = vec![
            create_test_concept("1", "getUserName", "function", "test.rs", 1, 5),
            create_test_concept("2", "setUserName", "function", "test.rs", 10, 15),
            create_test_concept("3", "getData", "function", "test.rs", 20, 25),
        ];
        let mut relationships = HashMap::new();
        RelationshipLearner::learn_concept_relationships(&concepts, &mut relationships);
        // getUserName and setUserName should have naming relationship
        let get_rels = relationships.get("1").unwrap();
        assert!(get_rels.iter().any(|r| r.contains("naming_pattern:2")));
    }

    #[test]
    fn test_type_relationships() {
        let concepts = vec![
            create_test_concept("1", "func1", "function", "test.rs", 1, 5),
            create_test_concept("2", "func2", "function", "test.rs", 10, 15),
            create_test_concept("3", "Class1", "class", "test.rs", 20, 30),
        ];
        let mut relationships = HashMap::new();
        RelationshipLearner::learn_concept_relationships(&concepts, &mut relationships);
        // Functions should be related by same type
        let func1_rels = relationships.get("1").unwrap();
        assert!(func1_rels.iter().any(|r| r.contains("same_type:2")));
    }

    #[test]
    fn test_cross_type_relationships() {
        let concepts = vec![
            create_test_concept("1", "TestClass", "class", "test.rs", 1, 30),
            create_test_concept("2", "method1", "function", "test.rs", 5, 10),
            create_test_concept("3", "field1", "variable", "test.rs", 15, 15),
        ];
        let mut relationships = HashMap::new();
        RelationshipLearner::learn_concept_relationships(&concepts, &mut relationships);
        // Method should be member of class
        let method_rels = relationships.get("2").unwrap();
        assert!(method_rels.iter().any(|r| r.contains("member_of:1")));
        // Class should contain method
        let class_rels = relationships.get("1").unwrap();
        assert!(class_rels.iter().any(|r| r.contains("contains:2")));
    }

    #[test]
    fn test_calculate_line_distance() {
        let concept1 = create_test_concept("1", "func1", "function", "test.rs", 1, 5);
        let concept2 = create_test_concept("2", "func2", "function", "test.rs", 10, 15);
        let concept3 = create_test_concept("3", "func3", "function", "other.rs", 1, 5);
        // Gap between end of concept1 (5) and start of concept2 (10).
        assert_eq!(RelationshipLearner::calculate_line_distance(&concept1, &concept2), 5);
        // Different files are infinitely far apart.
        assert_eq!(RelationshipLearner::calculate_line_distance(&concept1, &concept3), u32::MAX);
    }

    #[test]
    fn test_name_similarity() {
        assert_eq!(RelationshipLearner::calculate_name_similarity("test", "test"), 1.0);
        assert!(RelationshipLearner::calculate_name_similarity("test", "testing") > 0.7);
        assert!(RelationshipLearner::calculate_name_similarity("abc", "xyz") < 0.3);
    }

    #[test]
    fn test_naming_patterns() {
        assert!(RelationshipLearner::has_naming_relationship("getName", "setName"));
        assert!(RelationshipLearner::has_naming_relationship("testFunction", "function"));
        assert!(!RelationshipLearner::has_naming_relationship("foo", "bar"));
    }

    #[test]
    fn test_function_in_class_detection() {
        let class = create_test_concept("1", "TestClass", "class", "test.rs", 1, 30);
        let method_inside = create_test_concept("2", "method1", "function", "test.rs", 5, 10);
        // Starts past the class end even with the 5-line tolerance.
        let method_outside = create_test_concept("3", "method2", "function", "test.rs", 35, 40);
        assert!(RelationshipLearner::is_function_in_class(&method_inside, &class));
        assert!(!RelationshipLearner::is_function_in_class(&method_outside, &class));
    }
} | rust | MIT | 9cbed0d63a52d61fcd66d0d651b8c4d7f787ba78 | 2026-01-04T20:19:30.317431Z | false |
pi22by7/In-Memoria | https://github.com/pi22by7/In-Memoria/blob/9cbed0d63a52d61fcd66d0d651b8c4d7f787ba78/rust-core/src/analysis/frameworks.rs | rust-core/src/analysis/frameworks.rs | //! Framework detection and analysis
#[cfg(feature = "napi-bindings")]
use napi_derive::napi;
use crate::types::ParseError;
use std::collections::{HashMap, HashSet};
use std::path::Path;
use walkdir::WalkDir;
use std::fs;
/// Framework detection results
#[derive(Debug, Clone)]
#[cfg_attr(feature = "napi-bindings", napi(object))]
pub struct FrameworkInfo {
    /// Human-readable framework name, e.g. "React" or "Spring Boot".
    pub name: String,
    /// Version string when one could be extracted from a manifest; else None.
    pub version: Option<String>,
    /// Detection confidence; `detect_frameworks` drops results at or below 0.3.
    pub confidence: f64,
    /// The individual evidence strings (manifest entries etc.) that matched.
    pub evidence: Vec<String>,
}
/// Analyzer for detecting frameworks and libraries used in a codebase
///
/// Stateless unit struct; detection logic lives in associated functions.
#[cfg_attr(feature = "napi-bindings", napi)]
pub struct FrameworkDetector;
#[cfg_attr(feature = "napi-bindings", napi)]
impl FrameworkDetector {
#[cfg_attr(feature = "napi-bindings", napi(constructor))]
pub fn new() -> Self {
    // Stateless unit struct: construction is free.
    FrameworkDetector
}
/// Detect frameworks used in a codebase
///
/// Gathers evidence from three sources — package manifests, project
/// structure, and config files — then converts the accumulated evidence
/// into scored `FrameworkInfo` entries, keeping only those with
/// confidence above 0.3, sorted by descending confidence.
///
/// # Errors
/// Propagates `ParseError` from any of the evidence-gathering passes.
#[cfg_attr(feature = "napi-bindings", napi)]
pub async fn detect_frameworks(path: String) -> Result<Vec<FrameworkInfo>, ParseError> {
    let path = path.as_str();
    let mut frameworks = Vec::new();
    // framework name -> (evidence strings, optional extracted version)
    let mut evidence_map: HashMap<String, (HashSet<String>, Option<String>)> = HashMap::new();
    // Check package files for dependencies
    Self::check_package_files(path, &mut evidence_map)?;
    // Infer frameworks from file extensions and project structure
    Self::infer_from_project_structure(path, &mut evidence_map)?;
    // Check configuration files
    Self::check_config_files(path, &mut evidence_map)?;
    // Convert evidence to framework info
    for (framework_name, (evidence_set, version)) in evidence_map {
        let confidence = Self::calculate_confidence(&framework_name, &evidence_set);
        if confidence > 0.3 { // Only include frameworks with reasonable confidence
            frameworks.push(FrameworkInfo {
                name: framework_name,
                version,
                confidence,
                evidence: evidence_set.into_iter().collect(),
            });
        }
    }
    // Sort by confidence (descending); NaN comparisons fall back to Equal.
    frameworks.sort_by(|a, b| b.confidence.partial_cmp(&a.confidence).unwrap_or(std::cmp::Ordering::Equal));
    Ok(frameworks)
}
/// Walk the project (up to 3 levels deep) and analyze every recognized
/// package manifest found along the way.
fn check_package_files(
    path: &str,
    evidence_map: &mut HashMap<String, (HashSet<String>, Option<String>)>,
) -> Result<(), ParseError> {
    // Manifest file names we know how to (at least partially) analyze.
    const PACKAGE_FILES: [&str; 10] = [
        "package.json",
        "Cargo.toml",
        "requirements.txt",
        "Pipfile",
        "pom.xml",
        "build.gradle",
        "go.mod",
        "composer.json",
        "Gemfile",
        "mix.exs",
    ];
    let walker = WalkDir::new(path).max_depth(3).into_iter().filter_map(|e| e.ok());
    for entry in walker {
        let file_path = entry.path();
        let is_manifest = file_path
            .file_name()
            .and_then(|n| n.to_str())
            .map(|name| PACKAGE_FILES.contains(&name))
            .unwrap_or(false);
        if is_manifest {
            Self::analyze_package_file(file_path, evidence_map)?;
        }
    }
    Ok(())
}
/// Analyze a specific package file
///
/// Dispatches to the per-format parser based on the file name. Detection is
/// best-effort: a single unreadable manifest (permissions, broken symlink)
/// now skips that file instead of aborting the whole scan.
fn analyze_package_file(
    file_path: &Path,
    evidence_map: &mut HashMap<String, (HashSet<String>, Option<String>)>,
) -> Result<(), ParseError> {
    let content = match fs::read_to_string(file_path) {
        Ok(content) => content,
        // Unreadable manifest: contribute no evidence rather than failing
        // the entire framework-detection pass.
        Err(_) => return Ok(()),
    };
    let file_name = file_path.file_name().unwrap_or_default().to_str().unwrap_or("");
    match file_name {
        "package.json" => Self::parse_package_json(&content, evidence_map),
        "Cargo.toml" => Self::parse_cargo_toml(&content, evidence_map),
        "requirements.txt" => Self::parse_requirements_txt(&content, evidence_map),
        "pom.xml" => Self::parse_maven_pom(&content, evidence_map),
        "go.mod" => Self::parse_go_mod(&content, evidence_map),
        // Pipfile, build.gradle, composer.json, Gemfile, mix.exs are walked
        // by check_package_files but have no dedicated parser yet.
        _ => {}
    }
    Ok(())
}
/// Parse package.json for JavaScript/TypeScript dependencies
///
/// Substring scan (not real JSON parsing) for tell-tale dependency keys;
/// each match is recorded as evidence and a version extraction is attempted.
fn parse_package_json(content: &str, evidence_map: &mut HashMap<String, (HashSet<String>, Option<String>)>) {
    let framework_patterns = [
        ("React", vec!["\"react\":", "\"@types/react\":"]),
        ("Vue.js", vec!["\"vue\":", "\"@vue/"]),
        ("Angular", vec!["\"@angular/"]),
        ("Express", vec!["\"express\":", "\"@types/express\":"]),
        ("Next.js", vec!["\"next\":", "\"@next/"]),
        ("Svelte", vec!["\"svelte\":", "\"@svelte/"]),
        ("Webpack", vec!["\"webpack\":"]),
        ("Vite", vec!["\"vite\":", "\"@vitejs/"]),
        ("Jest", vec!["\"jest\":", "\"@jest/"]),
        ("TypeScript", vec!["\"typescript\":"]),
        ("Tailwind CSS", vec!["\"tailwindcss\":", "\"@tailwindcss/"]),
        ("Material-UI", vec!["\"@mui/", "\"@material-ui/"]),
        ("Lodash", vec!["\"lodash\":", "\"@types/lodash\":"]),
    ];
    for (framework, patterns) in &framework_patterns {
        for pattern in patterns.iter().filter(|needle| content.contains(**needle)) {
            let entry = evidence_map
                .entry(framework.to_string())
                .or_insert_with(|| (HashSet::new(), None));
            entry.0.insert(format!("package.json dependency: {}", pattern));
            // Best-effort version capture; the last matching pattern wins.
            if let Some(version) = Self::extract_version_from_json(content, pattern) {
                entry.1 = Some(version);
            }
        }
    }
}
/// Parse Cargo.toml for Rust dependencies
///
/// Substring scan (not real TOML parsing) for well-known crate entries;
/// each match is recorded as evidence and a version extraction is attempted.
fn parse_cargo_toml(content: &str, evidence_map: &mut HashMap<String, (HashSet<String>, Option<String>)>) {
    let framework_patterns = [
        ("Tokio", vec!["tokio =", "tokio."]),
        ("Serde", vec!["serde =", "serde_"]),
        ("Actix Web", vec!["actix-web =", "actix_"]),
        ("Rocket", vec!["rocket =", "rocket_"]),
        ("Diesel", vec!["diesel =", "diesel_"]),
        ("SQLx", vec!["sqlx =", "sqlx-"]),
        ("Clap", vec!["clap =", "structopt ="]),
        ("Reqwest", vec!["reqwest ="]),
        ("Tree-sitter", vec!["tree-sitter", "tree_sitter"]),
    ];
    for (framework, patterns) in &framework_patterns {
        for pattern in patterns.iter().filter(|needle| content.contains(**needle)) {
            let entry = evidence_map
                .entry(framework.to_string())
                .or_insert_with(|| (HashSet::new(), None));
            entry.0.insert(format!("Cargo.toml dependency: {}", pattern));
            // Best-effort version capture; the last matching pattern wins.
            if let Some(version) = Self::extract_version_from_toml(content, pattern) {
                entry.1 = Some(version);
            }
        }
    }
}
/// Parse requirements.txt for Python dependencies
///
/// Case-insensitive substring scan for well-known package names; each match
/// is recorded as evidence and a version extraction is attempted.
fn parse_requirements_txt(content: &str, evidence_map: &mut HashMap<String, (HashSet<String>, Option<String>)>) {
    let framework_patterns = [
        ("Django", "django"),
        ("Flask", "flask"),
        ("FastAPI", "fastapi"),
        ("NumPy", "numpy"),
        ("Pandas", "pandas"),
        ("Matplotlib", "matplotlib"),
        ("SQLAlchemy", "sqlalchemy"),
        ("Requests", "requests"),
        ("PyTorch", "torch"),
        ("TensorFlow", "tensorflow"),
    ];
    // Hoisted out of the loop: the file was previously lowercased once per
    // pattern. The patterns themselves are already lowercase literals, so no
    // per-pattern lowercasing is needed either.
    let content_lower = content.to_lowercase();
    for (framework, pattern) in &framework_patterns {
        if content_lower.contains(*pattern) {
            let entry = evidence_map.entry(framework.to_string()).or_insert_with(|| (HashSet::new(), None));
            entry.0.insert(format!("requirements.txt dependency: {}", pattern));
            // Try to extract version
            if let Some(version) = Self::extract_version_from_requirements(content, pattern) {
                entry.1 = Some(version);
            }
        }
    }
}
/// Parse Maven pom.xml for Java dependencies
fn parse_maven_pom(content: &str, evidence_map: &mut HashMap<String, (HashSet<String>, Option<String>)>) {
let framework_patterns = [
("Spring Framework", vec!["<groupId>org.springframework", "<artifactId>spring-"]),
("Spring Boot", vec!["spring-boot-starter", "spring-boot-parent"]),
("Hibernate", vec!["<artifactId>hibernate"]),
("JUnit", vec!["<artifactId>junit", "<groupId>org.junit"]),
("Apache Commons", vec!["<groupId>org.apache.commons"]),
("Jackson", vec!["<groupId>com.fasterxml.jackson"]),
];
for (framework, patterns) in &framework_patterns {
for pattern in patterns {
if content.contains(pattern) {
let entry = evidence_map.entry(framework.to_string()).or_insert_with(|| (HashSet::new(), None));
entry.0.insert(format!("pom.xml dependency: {}", pattern));
}
}
}
}
/// Parse go.mod for Go dependencies
fn parse_go_mod(content: &str, evidence_map: &mut HashMap<String, (HashSet<String>, Option<String>)>) {
let framework_patterns = [
("Gin", "github.com/gin-gonic/gin"),
("Echo", "github.com/labstack/echo"),
("Fiber", "github.com/gofiber/fiber"),
("GORM", "gorm.io/gorm"),
("Cobra", "github.com/spf13/cobra"),
("Viper", "github.com/spf13/viper"),
];
for (framework, pattern) in &framework_patterns {
if content.contains(pattern) {
let entry = evidence_map.entry(framework.to_string()).or_insert_with(|| (HashSet::new(), None));
entry.0.insert(format!("go.mod dependency: {}", pattern));
}
}
}
/// Check source code for framework usage patterns
fn infer_from_project_structure(
path: &str,
evidence_map: &mut HashMap<String, (HashSet<String>, Option<String>)>,
) -> Result<(), ParseError> {
let mut extension_counts = std::collections::HashMap::new();
for entry in WalkDir::new(path).into_iter().filter_map(|e| e.ok()) {
if entry.file_type().is_file() {
let file_path = entry.path();
// Skip files in dot folders and common build/dependency directories
if file_path.components().any(|component| {
let comp_str = component.as_os_str().to_str().unwrap_or("");
(comp_str.starts_with('.') && comp_str != ".")
|| comp_str == "node_modules"
|| comp_str == "target"
|| comp_str == "dist"
|| comp_str == "build"
}) {
continue;
}
if let Some(extension) = file_path.extension().and_then(|s| s.to_str()) {
*extension_counts.entry(extension.to_lowercase()).or_insert(0) += 1;
}
}
}
// Infer languages/frameworks based on significant file presence
for (ext, count) in extension_counts {
if count >= 5 { // Only consider if there are at least 5 files of this type
match ext.as_str() {
"rs" => {
let entry = evidence_map.entry("Rust".to_string()).or_insert_with(|| (HashSet::new(), None));
entry.0.insert(format!("Project structure: {} Rust files", count));
}
"ts" | "tsx" => {
let entry = evidence_map.entry("TypeScript".to_string()).or_insert_with(|| (HashSet::new(), None));
entry.0.insert(format!("Project structure: {} TypeScript files", count));
}
"js" | "jsx" => {
let entry = evidence_map.entry("JavaScript".to_string()).or_insert_with(|| (HashSet::new(), None));
entry.0.insert(format!("Project structure: {} JavaScript files", count));
}
"py" => {
let entry = evidence_map.entry("Python".to_string()).or_insert_with(|| (HashSet::new(), None));
entry.0.insert(format!("Project structure: {} Python files", count));
}
"java" => {
let entry = evidence_map.entry("Java".to_string()).or_insert_with(|| (HashSet::new(), None));
entry.0.insert(format!("Project structure: {} Java files", count));
}
"go" => {
let entry = evidence_map.entry("Go".to_string()).or_insert_with(|| (HashSet::new(), None));
entry.0.insert(format!("Project structure: {} Go files", count));
}
_ => {}
}
}
}
Ok(())
}
/// Check configuration files for framework indicators
fn check_config_files(
path: &str,
evidence_map: &mut HashMap<String, (HashSet<String>, Option<String>)>,
) -> Result<(), ParseError> {
let config_files = [
("webpack.config.js", "Webpack"),
("vite.config.js", "Vite"),
("next.config.js", "Next.js"),
("nuxt.config.js", "Nuxt.js"),
("vue.config.js", "Vue.js"),
("angular.json", "Angular"),
("tsconfig.json", "TypeScript"),
("tailwind.config.js", "Tailwind CSS"),
("jest.config.js", "Jest"),
("vitest.config.js", "Vitest"),
("svelte.config.js", "Svelte"),
];
for entry in WalkDir::new(path).max_depth(3).into_iter().filter_map(|e| e.ok()) {
let file_path = entry.path();
if let Some(file_name) = file_path.file_name().and_then(|n| n.to_str()) {
for (config_file, framework) in &config_files {
if file_name == *config_file {
let entry = evidence_map.entry(framework.to_string()).or_insert_with(|| (HashSet::new(), None));
entry.0.insert(format!("Configuration file: {}", config_file));
}
}
}
}
Ok(())
}
/// Calculate confidence score based on evidence
fn calculate_confidence(_framework: &str, evidence: &HashSet<String>) -> f64 {
let evidence_count = evidence.len() as f64;
let base_confidence = (evidence_count * 0.2).min(1.0);
// Boost confidence for certain types of evidence
let mut boosted_confidence = base_confidence;
for evidence_item in evidence {
if evidence_item.contains("package.json") || evidence_item.contains("Cargo.toml") {
boosted_confidence += 0.3;
} else if evidence_item.contains("Configuration file") {
boosted_confidence += 0.2;
} else if evidence_item.contains("Project structure") {
boosted_confidence += 0.2; // Boost confidence for project structure evidence
} else if evidence_item.contains("Source code usage") {
boosted_confidence += 0.1;
}
}
boosted_confidence.min(1.0)
}
/// Extract version from JSON dependency
fn extract_version_from_json(content: &str, pattern: &str) -> Option<String> {
// Simple regex-like extraction - in practice you'd use a proper JSON parser
if let Some(start) = content.find(pattern) {
if let Some(version_start) = content[start..].find(": \"") {
let version_content = &content[start + version_start + 3..];
if let Some(version_end) = version_content.find('"') {
let version = &version_content[..version_end];
if !version.is_empty() && version != "latest" {
return Some(version.to_string());
}
}
}
}
None
}
/// Extract version from TOML dependency
fn extract_version_from_toml(content: &str, pattern: &str) -> Option<String> {
if let Some(start) = content.find(pattern) {
if let Some(version_start) = content[start..].find(" = \"") {
let version_content = &content[start + version_start + 4..];
if let Some(version_end) = version_content.find('"') {
let version = &version_content[..version_end];
if !version.is_empty() {
return Some(version.to_string());
}
}
}
}
None
}
/// Extract version from requirements.txt
fn extract_version_from_requirements(content: &str, pattern: &str) -> Option<String> {
for line in content.lines() {
if line.to_lowercase().contains(&pattern.to_lowercase()) {
if let Some(version_start) = line.find("==") {
let version = &line[version_start + 2..].trim();
if !version.is_empty() {
return Some(version.to_string());
}
} else if let Some(version_start) = line.find(">=") {
let version = &line[version_start + 2..].trim();
if !version.is_empty() {
return Some(format!(">={}", version));
}
}
}
}
None
}
}
impl Default for FrameworkDetector {
fn default() -> Self {
Self::new()
}
}
#[cfg(test)]
mod tests {
use super::*;
use std::fs;
use std::io::Write;
use tempfile::TempDir;
fn create_test_file(dir: &TempDir, path: &str, content: &str) -> std::io::Result<()> {
let full_path = dir.path().join(path);
if let Some(parent) = full_path.parent() {
fs::create_dir_all(parent)?;
}
let mut file = fs::File::create(full_path)?;
file.write_all(content.as_bytes())?;
Ok(())
}
#[test]
fn test_framework_detector_creation() {
let _detector = FrameworkDetector::new();
// Constructor should work
}
#[tokio::test]
async fn test_detect_react_from_package_json() {
let temp_dir = TempDir::new().unwrap();
let package_json = r#"{
"dependencies": {
"react": "^18.0.0",
"@types/react": "^18.0.0"
}
}"#;
create_test_file(&temp_dir, "package.json", package_json).unwrap();
let frameworks = FrameworkDetector::detect_frameworks(temp_dir.path().to_str().unwrap().to_string()).await.unwrap();
assert!(frameworks.iter().any(|f| f.name == "React"));
assert!(frameworks.iter().any(|f| f.name == "TypeScript"));
}
#[tokio::test]
async fn test_detect_rust_frameworks() {
let temp_dir = TempDir::new().unwrap();
let cargo_toml = r#"[dependencies]
tokio = "1.0"
serde = { version = "1.0", features = ["derive"] }
"#;
create_test_file(&temp_dir, "Cargo.toml", cargo_toml).unwrap();
let frameworks = FrameworkDetector::detect_frameworks(temp_dir.path().to_str().unwrap().to_string()).await.unwrap();
assert!(frameworks.iter().any(|f| f.name == "Tokio"));
assert!(frameworks.iter().any(|f| f.name == "Serde"));
}
#[tokio::test]
async fn test_detect_python_frameworks() {
let temp_dir = TempDir::new().unwrap();
let requirements = "django==4.2.0\nflask>=2.0.0\nnumpy\n";
create_test_file(&temp_dir, "requirements.txt", requirements).unwrap();
let frameworks = FrameworkDetector::detect_frameworks(temp_dir.path().to_str().unwrap().to_string()).await.unwrap();
assert!(frameworks.iter().any(|f| f.name == "Django"));
assert!(frameworks.iter().any(|f| f.name == "Flask"));
assert!(frameworks.iter().any(|f| f.name == "NumPy"));
}
#[tokio::test]
async fn test_detect_from_source_code() {
let temp_dir = TempDir::new().unwrap();
let react_code = r#"import React, { useState } from 'react';
function App() {
const [count, setCount] = useState(0);
return <div>{count}</div>;
}
"#;
create_test_file(&temp_dir, "src/App.js", react_code).unwrap();
let frameworks = FrameworkDetector::detect_frameworks(temp_dir.path().to_str().unwrap().to_string()).await.unwrap();
assert!(frameworks.iter().any(|f| f.name == "React"));
}
#[tokio::test]
async fn test_detect_from_config_files() {
let temp_dir = TempDir::new().unwrap();
create_test_file(&temp_dir, "webpack.config.js", "module.exports = {};").unwrap();
create_test_file(&temp_dir, "tsconfig.json", "{}").unwrap();
let frameworks = FrameworkDetector::detect_frameworks(temp_dir.path().to_str().unwrap().to_string()).await.unwrap();
assert!(frameworks.iter().any(|f| f.name == "Webpack"));
assert!(frameworks.iter().any(|f| f.name == "TypeScript"));
}
#[test]
fn test_calculate_confidence() {
let mut evidence = HashSet::new();
evidence.insert("package.json dependency: react".to_string());
evidence.insert("Source code usage: useState(".to_string());
let confidence = FrameworkDetector::calculate_confidence("React", &evidence);
assert!(confidence > 0.5);
assert!(confidence <= 1.0);
}
#[test]
fn test_version_extraction() {
let json_content = r#"{"dependencies": {"react": "^18.2.0"}}"#;
let version = FrameworkDetector::extract_version_from_json(json_content, "\"react\":");
assert_eq!(version, Some("^18.2.0".to_string()));
let toml_content = r#"tokio = "1.28.0""#;
let version = FrameworkDetector::extract_version_from_toml(toml_content, "tokio =");
assert_eq!(version, Some("1.28.0".to_string()));
let req_content = "django==4.2.0\nflask>=2.0.0";
let version = FrameworkDetector::extract_version_from_requirements(req_content, "django");
assert_eq!(version, Some("4.2.0".to_string()));
}
} | rust | MIT | 9cbed0d63a52d61fcd66d0d651b8c4d7f787ba78 | 2026-01-04T20:19:30.317431Z | false |
pi22by7/In-Memoria | https://github.com/pi22by7/In-Memoria/blob/9cbed0d63a52d61fcd66d0d651b8c4d7f787ba78/rust-core/src/analysis/mod.rs | rust-core/src/analysis/mod.rs | pub mod semantic;
pub mod complexity;
pub mod relationships;
pub mod frameworks;
pub mod blueprint;
pub use semantic::*;
pub use complexity::*;
pub use relationships::*;
pub use frameworks::*;
pub use blueprint::*; | rust | MIT | 9cbed0d63a52d61fcd66d0d651b8c4d7f787ba78 | 2026-01-04T20:19:30.317431Z | false |
pi22by7/In-Memoria | https://github.com/pi22by7/In-Memoria/blob/9cbed0d63a52d61fcd66d0d651b8c4d7f787ba78/rust-core/src/types/config.rs | rust-core/src/types/config.rs | //! Configuration and file filtering logic for semantic analysis
use std::path::Path;
/// Configuration for file analysis
pub struct AnalysisConfig {
/// Maximum file size to analyze (in bytes)
pub max_file_size: u64,
/// Maximum files to process
pub max_files: usize,
/// Supported file extensions
pub supported_extensions: Vec<&'static str>,
}
impl Default for AnalysisConfig {
fn default() -> Self {
Self {
max_file_size: 1_048_576, // 1MB
max_files: 1000,
supported_extensions: vec![
"ts", "tsx", "js", "jsx", "rs", "py", "go", "java",
"cpp", "c", "cs", "svelte", "sql", "php", "phtml", "inc"
],
}
}
}
impl AnalysisConfig {
/// Check if a file should be analyzed based on configuration rules
pub fn should_analyze_file(&self, file_path: &Path) -> bool {
// Skip common non-source directories and build artifacts
let path_str = file_path.to_string_lossy();
if self.is_ignored_directory(&path_str) {
return false;
}
// Skip common generated/minified file patterns
let file_name = file_path.file_name()
.and_then(|n| n.to_str())
.unwrap_or("");
if self.is_ignored_file(file_name) {
return false;
}
// Check file size - skip very large files to prevent hanging
if let Ok(metadata) = file_path.metadata() {
if metadata.len() > self.max_file_size {
return false;
}
}
// Check if file extension is supported
if let Some(extension) = file_path.extension().and_then(|s| s.to_str()) {
let ext = extension.to_lowercase();
let supported = self.supported_extensions.iter().any(|s| *s == ext);
if std::env::var("IN_MEMORIA_DEBUG_PHP").is_ok()
&& matches!(ext.as_str(), "php" | "phtml" | "inc")
{
eprintln!(
"[PHP DEBUG] should_analyze_file: path={} supported={}",
file_path.display(),
supported
);
}
supported
} else {
false
}
}
/// Check if a directory should be ignored
fn is_ignored_directory(&self, path_str: &str) -> bool {
path_str.contains("node_modules")
|| path_str.contains(".git")
|| path_str.contains("target")
|| path_str.contains("dist")
|| path_str.contains("build")
|| path_str.contains("out")
|| path_str.contains("output")
|| path_str.contains(".next")
|| path_str.contains(".nuxt")
|| path_str.contains(".svelte-kit")
|| path_str.contains(".vitepress")
|| path_str.contains("_site")
|| path_str.contains("public")
|| path_str.contains("static")
|| path_str.contains("assets")
|| path_str.contains("__pycache__")
|| path_str.contains(".pytest_cache")
|| path_str.contains("coverage")
|| path_str.contains(".coverage")
|| path_str.contains("htmlcov")
|| path_str.contains("vendor")
|| path_str.contains("bin")
|| path_str.contains("obj")
|| path_str.contains("Debug")
|| path_str.contains("Release")
|| path_str.contains(".venv")
|| path_str.contains("venv")
|| path_str.contains("env")
|| path_str.contains(".env")
|| path_str.contains("tmp")
|| path_str.contains("temp")
|| path_str.contains(".tmp")
|| path_str.contains("cache")
|| path_str.contains(".cache")
|| path_str.contains("logs")
|| path_str.contains(".logs")
|| path_str.contains("lib-cov")
|| path_str.contains("nyc_output")
|| path_str.contains(".nyc_output")
|| path_str.contains("bower_components")
|| path_str.contains("jspm_packages")
}
/// Check if a file should be ignored based on its name
fn is_ignored_file(&self, file_name: &str) -> bool {
file_name.ends_with(".min.js")
|| file_name.ends_with(".min.css")
|| file_name.ends_with(".bundle.js")
|| file_name.ends_with(".chunk.js")
|| file_name.ends_with(".map")
|| file_name.starts_with(".")
|| file_name == "package-lock.json"
|| file_name == "yarn.lock"
|| file_name == "Cargo.lock"
|| file_name == "Gemfile.lock"
|| file_name == "Pipfile.lock"
|| file_name == "poetry.lock"
}
/// Detect programming language from file path
pub fn detect_language_from_path(&self, file_path: &str) -> String {
if let Some(extension) = Path::new(file_path)
.extension()
.and_then(|s| s.to_str())
{
match extension.to_lowercase().as_str() {
"ts" | "tsx" => "typescript".to_string(),
"js" | "jsx" => "javascript".to_string(),
"rs" => "rust".to_string(),
"py" => "python".to_string(),
"php" | "phtml" | "inc" => "php".to_string(),
"sql" => "sql".to_string(),
"go" => "go".to_string(),
"java" => "java".to_string(),
"c" => "c".to_string(),
"cpp" | "cc" | "cxx" => "cpp".to_string(),
"cs" => "csharp".to_string(),
"svelte" => "svelte".to_string(),
_ => "generic".to_string(),
}
} else {
"generic".to_string()
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use std::path::Path;
#[test]
fn test_default_config() {
let config = AnalysisConfig::default();
assert_eq!(config.max_file_size, 1_048_576);
assert_eq!(config.max_files, 1000);
assert!(config.supported_extensions.contains(&"ts"));
assert!(config.supported_extensions.contains(&"rs"));
assert!(config.supported_extensions.contains(&"sql"));
}
#[test]
fn test_supported_file_extensions() {
let config = AnalysisConfig::default();
// Test supported extensions
assert!(config.should_analyze_file(Path::new("test.ts")));
assert!(config.should_analyze_file(Path::new("test.js")));
assert!(config.should_analyze_file(Path::new("test.py")));
assert!(config.should_analyze_file(Path::new("test.rs")));
assert!(config.should_analyze_file(Path::new("test.go")));
assert!(config.should_analyze_file(Path::new("test.java")));
assert!(config.should_analyze_file(Path::new("test.c")));
assert!(config.should_analyze_file(Path::new("test.cpp")));
assert!(config.should_analyze_file(Path::new("test.cs")));
assert!(config.should_analyze_file(Path::new("test.svelte")));
assert!(config.should_analyze_file(Path::new("test.sql")));
assert!(config.should_analyze_file(Path::new("test.php")));
assert!(config.should_analyze_file(Path::new("test.phtml")));
assert!(config.should_analyze_file(Path::new("test.inc")));
}
#[test]
fn test_unsupported_file_extensions() {
let config = AnalysisConfig::default();
// Test unsupported extensions
assert!(!config.should_analyze_file(Path::new("test.md")));
assert!(!config.should_analyze_file(Path::new("test.json")));
assert!(!config.should_analyze_file(Path::new("test.css")));
assert!(!config.should_analyze_file(Path::new("test.html")));
}
#[test]
fn test_ignored_directories() {
let config = AnalysisConfig::default();
// Test build directories
assert!(!config.should_analyze_file(Path::new("dist/test.js")));
assert!(!config.should_analyze_file(Path::new("build/test.js")));
assert!(!config.should_analyze_file(Path::new("node_modules/test.js")));
assert!(!config.should_analyze_file(Path::new(".next/test.js")));
assert!(!config.should_analyze_file(Path::new("target/test.rs")));
assert!(!config.should_analyze_file(Path::new("__pycache__/test.py")));
}
#[test]
fn test_ignored_files() {
let config = AnalysisConfig::default();
// Test minified and generated files
assert!(!config.should_analyze_file(Path::new("app.min.js")));
assert!(!config.should_analyze_file(Path::new("bundle.min.css")));
assert!(!config.should_analyze_file(Path::new("package-lock.json")));
assert!(!config.should_analyze_file(Path::new("yarn.lock")));
assert!(!config.should_analyze_file(Path::new("Cargo.lock")));
assert!(!config.should_analyze_file(Path::new("app.bundle.js")));
assert!(!config.should_analyze_file(Path::new("source.map")));
}
#[test]
fn test_sql_server_database_project_structure() {
let config = AnalysisConfig::default();
// Test SQL Server Database Project structure (dbo/Tables/, etc.)
assert!(config.should_analyze_file(Path::new("dbo/Tables/Users.sql")));
assert!(config.should_analyze_file(Path::new("dbo/Views/UserView.sql")));
assert!(config.should_analyze_file(Path::new("dbo/StoredProcedures/GetUser.sql")));
assert!(config.should_analyze_file(Path::new("Security/Roles/db_owner.sql")));
}
#[test]
fn test_language_detection() {
let config = AnalysisConfig::default();
assert_eq!(config.detect_language_from_path("test.ts"), "typescript");
assert_eq!(config.detect_language_from_path("test.tsx"), "typescript");
assert_eq!(config.detect_language_from_path("test.js"), "javascript");
assert_eq!(config.detect_language_from_path("test.jsx"), "javascript");
assert_eq!(config.detect_language_from_path("test.rs"), "rust");
assert_eq!(config.detect_language_from_path("test.py"), "python");
assert_eq!(config.detect_language_from_path("test.sql"), "sql");
assert_eq!(config.detect_language_from_path("test.go"), "go");
assert_eq!(config.detect_language_from_path("test.java"), "java");
assert_eq!(config.detect_language_from_path("test.c"), "c");
assert_eq!(config.detect_language_from_path("test.cpp"), "cpp");
assert_eq!(config.detect_language_from_path("test.cc"), "cpp");
assert_eq!(config.detect_language_from_path("test.cxx"), "cpp");
assert_eq!(config.detect_language_from_path("test.cs"), "csharp");
assert_eq!(config.detect_language_from_path("test.svelte"), "svelte");
assert_eq!(config.detect_language_from_path("test.php"), "php");
assert_eq!(config.detect_language_from_path("test.phtml"), "php");
assert_eq!(config.detect_language_from_path("test.inc"), "php");
assert_eq!(config.detect_language_from_path("test.unknown"), "generic");
assert_eq!(config.detect_language_from_path("noextension"), "generic");
}
#[test]
fn test_custom_config() {
let config = AnalysisConfig {
max_file_size: 500_000, // 500KB
max_files: 500,
supported_extensions: vec!["ts", "js", "rs"],
};
assert_eq!(config.max_file_size, 500_000);
assert_eq!(config.max_files, 500);
assert_eq!(config.supported_extensions.len(), 3);
// Should support only the specified extensions
assert!(config.should_analyze_file(Path::new("test.ts")));
assert!(config.should_analyze_file(Path::new("test.js")));
assert!(config.should_analyze_file(Path::new("test.rs")));
assert!(!config.should_analyze_file(Path::new("test.py"))); // No longer supported
}
#[test]
fn test_is_ignored_directory_directly() {
let config = AnalysisConfig::default();
assert!(config.is_ignored_directory("node_modules/package"));
assert!(config.is_ignored_directory("dist/build"));
assert!(config.is_ignored_directory("target/debug"));
assert!(config.is_ignored_directory("__pycache__/test"));
assert!(!config.is_ignored_directory("src/components"));
assert!(!config.is_ignored_directory("lib/utils"));
}
#[test]
fn test_is_ignored_file_directly() {
let config = AnalysisConfig::default();
assert!(config.is_ignored_file("app.min.js"));
assert!(config.is_ignored_file("package-lock.json"));
assert!(config.is_ignored_file(".gitignore"));
assert!(config.is_ignored_file("bundle.chunk.js"));
assert!(!config.is_ignored_file("app.js"));
assert!(!config.is_ignored_file("package.json"));
assert!(!config.is_ignored_file("test.ts"));
}
}
| rust | MIT | 9cbed0d63a52d61fcd66d0d651b8c4d7f787ba78 | 2026-01-04T20:19:30.317431Z | false |
pi22by7/In-Memoria | https://github.com/pi22by7/In-Memoria/blob/9cbed0d63a52d61fcd66d0d651b8c4d7f787ba78/rust-core/src/types/errors.rs | rust-core/src/types/errors.rs | //! Error handling for the semantic analysis system
/// Simple error type for when napi is not available
#[derive(Debug)]
pub struct SimpleError {
message: String,
}
impl SimpleError {
pub fn from_reason<S: Into<String>>(message: S) -> Self {
Self {
message: message.into(),
}
}
}
impl std::fmt::Display for SimpleError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.message)
}
}
impl std::error::Error for SimpleError {}
/// Conditional type alias - use proper napi::Error when available
#[cfg(feature = "napi-bindings")]
pub type ParseError = napi::Error;
#[cfg(not(feature = "napi-bindings"))]
pub type ParseError = SimpleError;
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_simple_error_creation() {
let error = SimpleError::from_reason("test error");
assert_eq!(error.message, "test error");
assert_eq!(format!("{}", error), "test error");
}
#[test]
fn test_simple_error_display() {
let error = SimpleError::from_reason("display test");
let display_str = format!("{}", error);
assert_eq!(display_str, "display test");
}
#[test]
fn test_simple_error_debug() {
let error = SimpleError::from_reason("debug test");
let debug_str = format!("{:?}", error);
assert!(debug_str.contains("debug test"));
}
#[test]
fn test_error_trait_implementation() {
let error = SimpleError::from_reason("trait test");
let error_trait: &dyn std::error::Error = &error;
assert_eq!(format!("{}", error_trait), "trait test");
}
} | rust | MIT | 9cbed0d63a52d61fcd66d0d651b8c4d7f787ba78 | 2026-01-04T20:19:30.317431Z | false |
pi22by7/In-Memoria | https://github.com/pi22by7/In-Memoria/blob/9cbed0d63a52d61fcd66d0d651b8c4d7f787ba78/rust-core/src/types/core_types.rs | rust-core/src/types/core_types.rs | //! Core type definitions for semantic analysis
#[cfg(feature = "napi-bindings")]
use napi_derive::napi;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
#[derive(Debug, Clone, Serialize, Deserialize)]
#[cfg_attr(feature = "napi-bindings", napi(object))]
pub struct SemanticConcept {
pub id: String,
pub name: String,
pub concept_type: String,
pub confidence: f64,
pub file_path: String,
pub line_range: LineRange,
pub relationships: HashMap<String, String>,
pub metadata: HashMap<String, String>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
#[cfg_attr(feature = "napi-bindings", napi(object))]
pub struct LineRange {
pub start: u32,
pub end: u32,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
#[cfg_attr(feature = "napi-bindings", napi(object))]
pub struct CodebaseAnalysisResult {
pub languages: Vec<String>,
pub frameworks: Vec<String>,
pub complexity: ComplexityMetrics,
pub concepts: Vec<SemanticConcept>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
#[cfg_attr(feature = "napi-bindings", napi(object))]
pub struct ComplexityMetrics {
pub cyclomatic_complexity: f64,
pub cognitive_complexity: f64,
pub function_count: u32,
pub class_count: u32,
pub file_count: u32,
pub avg_functions_per_file: f64,
pub avg_lines_per_concept: f64,
pub max_nesting_depth: u32,
}
// AST-related types from ast_parser.rs
#[derive(Debug, Clone, Serialize, Deserialize)]
#[cfg_attr(feature = "napi-bindings", napi(object))]
pub struct AstNode {
pub node_type: String,
pub text: String,
pub start_line: u32,
pub end_line: u32,
pub start_column: u32,
pub end_column: u32,
pub children: Vec<AstNode>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
#[cfg_attr(feature = "napi-bindings", napi(object))]
pub struct ParseResult {
pub language: String,
pub tree: AstNode,
pub errors: Vec<String>,
pub symbols: Vec<Symbol>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
#[cfg_attr(feature = "napi-bindings", napi(object))]
pub struct Symbol {
pub name: String,
pub symbol_type: String,
pub line: u32,
pub column: u32,
pub scope: String,
}
#[cfg(test)]
mod tests {
use super::*;
fn create_test_concept(name: &str, concept_type: &str) -> SemanticConcept {
SemanticConcept {
id: format!("test_{}", name),
name: name.to_string(),
concept_type: concept_type.to_string(),
confidence: 0.8,
file_path: "test.ts".to_string(),
line_range: LineRange { start: 1, end: 1 },
relationships: HashMap::new(),
metadata: HashMap::new(),
}
}
#[test]
fn test_semantic_concept_creation() {
let concept = create_test_concept("TestClass", "class");
assert_eq!(concept.name, "TestClass");
assert_eq!(concept.concept_type, "class");
assert_eq!(concept.confidence, 0.8);
assert_eq!(concept.file_path, "test.ts");
assert_eq!(concept.line_range.start, 1);
assert_eq!(concept.line_range.end, 1);
}
#[test]
fn test_line_range() {
let range = LineRange { start: 10, end: 20 };
assert_eq!(range.start, 10);
assert_eq!(range.end, 20);
}
#[test]
fn test_concept_relationships() {
let mut concept = create_test_concept("UserService", "class");
concept.relationships.insert("implements".to_string(), "IUserService".to_string());
concept.relationships.insert("extends".to_string(), "BaseService".to_string());
assert_eq!(
concept.relationships.get("implements"),
Some(&"IUserService".to_string())
);
assert_eq!(
concept.relationships.get("extends"),
Some(&"BaseService".to_string())
);
assert_eq!(concept.relationships.len(), 2);
}
#[test]
fn test_concept_metadata() {
let mut concept = create_test_concept("calculateTotal", "function");
concept.metadata.insert("visibility".to_string(), "public".to_string());
concept.metadata.insert("async".to_string(), "false".to_string());
concept.metadata.insert("parameters".to_string(), "2".to_string());
assert_eq!(
concept.metadata.get("visibility"),
Some(&"public".to_string())
);
assert_eq!(concept.metadata.get("async"), Some(&"false".to_string()));
assert_eq!(concept.metadata.get("parameters"), Some(&"2".to_string()));
}
#[test]
fn test_codebase_analysis_result() {
let analysis = CodebaseAnalysisResult {
languages: vec!["typescript".to_string(), "javascript".to_string()],
frameworks: vec!["react".to_string(), "express".to_string()],
complexity: ComplexityMetrics {
cyclomatic_complexity: 15.0,
cognitive_complexity: 22.0,
function_count: 10,
class_count: 5,
file_count: 3,
avg_functions_per_file: 3.33,
avg_lines_per_concept: 50.0,
max_nesting_depth: 4,
},
concepts: vec![
create_test_concept("UserService", "class"),
create_test_concept("getUser", "function"),
],
};
assert_eq!(analysis.languages.len(), 2);
assert_eq!(analysis.frameworks.len(), 2);
assert_eq!(analysis.concepts.len(), 2);
assert!(analysis.languages.contains(&"typescript".to_string()));
assert!(analysis.frameworks.contains(&"react".to_string()));
}
#[test]
fn test_complexity_metrics() {
let metrics = ComplexityMetrics {
cyclomatic_complexity: 10.5,
cognitive_complexity: 15.2,
function_count: 8,
class_count: 3,
file_count: 2,
avg_functions_per_file: 4.0,
avg_lines_per_concept: 37.5,
max_nesting_depth: 3,
};
assert_eq!(metrics.cyclomatic_complexity, 10.5);
assert_eq!(metrics.cognitive_complexity, 15.2);
assert_eq!(metrics.function_count, 8);
assert_eq!(metrics.class_count, 3);
}
#[test]
fn test_concept_confidence_bounds() {
let mut concept = create_test_concept("test", "function");
// Test valid confidence values
concept.confidence = 0.0;
assert!(concept.confidence >= 0.0 && concept.confidence <= 1.0);
concept.confidence = 1.0;
assert!(concept.confidence >= 0.0 && concept.confidence <= 1.0);
concept.confidence = 0.75;
assert!(concept.confidence >= 0.0 && concept.confidence <= 1.0);
}
#[test]
fn test_multiple_concept_types() {
let concepts = [
create_test_concept("UserService", "class"),
create_test_concept("IUserService", "interface"),
create_test_concept("getUser", "function"),
create_test_concept("userId", "variable"),
create_test_concept("UserType", "type"),
];
let types: Vec<&str> = concepts.iter().map(|c| c.concept_type.as_str()).collect();
assert!(types.contains(&"class"));
assert!(types.contains(&"interface"));
assert!(types.contains(&"function"));
assert!(types.contains(&"variable"));
assert!(types.contains(&"type"));
}
#[test]
fn test_concept_hierarchy() {
let mut parent_concept = create_test_concept("UserService", "class");
let mut method_concept = create_test_concept("getUser", "function");
// Simulate parent-child relationship
method_concept
.relationships
.insert("parent".to_string(), parent_concept.id.clone());
parent_concept
.relationships
.insert("methods".to_string(), method_concept.id.clone());
assert_eq!(
method_concept.relationships.get("parent"),
Some(&parent_concept.id)
);
assert_eq!(
parent_concept.relationships.get("methods"),
Some(&method_concept.id)
);
}
#[test]
fn test_concept_serialization() {
let concept = create_test_concept("TestFunction", "function");
// Test that the concept can be serialized and deserialized
let serialized = serde_json::to_string(&concept).expect("Should serialize");
let deserialized: SemanticConcept = serde_json::from_str(&serialized).expect("Should deserialize");
assert_eq!(concept.name, deserialized.name);
assert_eq!(concept.concept_type, deserialized.concept_type);
assert_eq!(concept.confidence, deserialized.confidence);
}
} | rust | MIT | 9cbed0d63a52d61fcd66d0d651b8c4d7f787ba78 | 2026-01-04T20:19:30.317431Z | false |
pi22by7/In-Memoria | https://github.com/pi22by7/In-Memoria/blob/9cbed0d63a52d61fcd66d0d651b8c4d7f787ba78/rust-core/src/types/mod.rs | rust-core/src/types/mod.rs | pub mod core_types;
pub mod errors;
pub mod config;
pub use core_types::*;
pub use errors::*;
pub use config::*; | rust | MIT | 9cbed0d63a52d61fcd66d0d651b8c4d7f787ba78 | 2026-01-04T20:19:30.317431Z | false |
pi22by7/In-Memoria | https://github.com/pi22by7/In-Memoria/blob/9cbed0d63a52d61fcd66d0d651b8c4d7f787ba78/rust-core/src/extractors/rust.rs | rust-core/src/extractors/rust.rs | //! Rust concept extraction
use crate::parsing::NameExtractor;
use crate::types::{LineRange, ParseError, SemanticConcept};
use std::collections::HashMap;
use tree_sitter::Node;
/// Extractor for Rust concepts.
///
/// Recognises struct/enum/trait/impl items, function items and `let`
/// declarations in a tree-sitter Rust AST. Stateless unit struct;
/// construct with `new()` or `Default::default()`.
pub struct RustExtractor;
impl RustExtractor {
    pub fn new() -> Self {
        Self
    }

    /// Extract concepts from a single Rust AST node.
    ///
    /// Struct-like items (structs, enums, traits, impls) are all mapped to
    /// the `"struct"` concept type, function items to `"function"` and
    /// `let` declarations to `"variable"`. Callers are expected to walk
    /// the tree and invoke this once per node; unrecognised kinds are
    /// silently ignored.
    pub fn extract_concepts(
        &self,
        node: Node<'_>,
        file_path: &str,
        content: &str,
        concepts: &mut Vec<SemanticConcept>,
    ) -> Result<(), ParseError> {
        match node.kind() {
            "struct_item" | "enum_item" | "trait_item" | "impl_item" => {
                if let Some(concept) =
                    self.extract_concept_from_node(node, file_path, content, "struct")?
                {
                    concepts.push(concept);
                }
            }
            "function_item" => {
                if let Some(concept) =
                    self.extract_concept_from_node(node, file_path, content, "function")?
                {
                    concepts.push(concept);
                }
            }
            "let_declaration" => {
                if let Some(concept) =
                    self.extract_concept_from_node(node, file_path, content, "variable")?
                {
                    concepts.push(concept);
                }
            }
            _ => {}
        }
        Ok(())
    }

    /// Build a [`SemanticConcept`] for `node`, or `Ok(None)` when no name
    /// can be extracted from it.
    ///
    /// The concept id hashes (file path, name, start line). The previous
    /// implementation used the wall-clock time in milliseconds, so every
    /// concept extracted within the same millisecond received the same id;
    /// hashing the declaration site instead makes ids deterministic across
    /// runs and unique per location.
    fn extract_concept_from_node(
        &self,
        node: Node<'_>,
        file_path: &str,
        content: &str,
        concept_type: &str,
    ) -> Result<Option<SemanticConcept>, ParseError> {
        let name = NameExtractor::extract_name_from_node(node, content)
            .map_err(ParseError::from_reason)?;
        if name.is_empty() {
            return Ok(None);
        }

        // tree-sitter rows are 0-based; concepts use 1-based lines.
        let start_line = node.start_position().row as u32 + 1;
        let end_line = node.end_position().row as u32 + 1;

        // Deterministic, collision-resistant id: hash (path, name, line).
        let id = {
            use std::collections::hash_map::DefaultHasher;
            use std::hash::{Hash, Hasher};
            let mut hasher = DefaultHasher::new();
            file_path.hash(&mut hasher);
            name.hash(&mut hasher);
            start_line.hash(&mut hasher);
            format!("concept_{}", hasher.finish())
        };

        let concept = SemanticConcept {
            id,
            name,
            concept_type: concept_type.to_string(),
            confidence: 0.8,
            file_path: file_path.to_string(),
            line_range: LineRange {
                start: start_line,
                end: end_line,
            },
            relationships: HashMap::new(),
            metadata: HashMap::new(),
        };
        Ok(Some(concept))
    }
}
impl Default for RustExtractor {
fn default() -> Self {
Self::new()
}
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::parsing::ParserManager;
    /// Parse `code` as Rust and return the tree-sitter tree.
    /// `unwrap` is acceptable here: a parser-construction or parse failure
    /// should fail the test loudly.
    fn create_rust_tree(code: &str) -> tree_sitter::Tree {
        let mut manager = ParserManager::new().unwrap();
        manager.parse(code, "rust").unwrap()
    }
    /// Depth-first walk over the whole tree, running the extractor on
    /// every node and collecting all extracted concepts.
    fn extract_all_concepts(
        extractor: &RustExtractor,
        tree: &tree_sitter::Tree,
        file_path: &str,
        content: &str,
    ) -> Vec<SemanticConcept> {
        let mut concepts = Vec::new();
        // Recursive helper: extract from this node, then recurse into children.
        // Extraction errors are deliberately ignored (`let _`) so a single
        // bad node does not abort the walk.
        fn walk_and_extract(
            extractor: &RustExtractor,
            node: tree_sitter::Node<'_>,
            file_path: &str,
            content: &str,
            concepts: &mut Vec<SemanticConcept>,
        ) {
            let _ = extractor.extract_concepts(node, file_path, content, concepts);
            let mut cursor = node.walk();
            for child in node.children(&mut cursor) {
                walk_and_extract(extractor, child, file_path, content, concepts);
            }
        }
        walk_and_extract(
            extractor,
            tree.root_node(),
            file_path,
            content,
            &mut concepts,
        );
        concepts
    }
    // A plain struct is extracted with type "struct" and confidence 0.8.
    #[test]
    fn test_extract_rust_struct() {
        let extractor = RustExtractor::new();
        let code = "pub struct User { name: String, age: u32 }";
        let tree = create_rust_tree(code);
        let concepts = extract_all_concepts(&extractor, &tree, "user.rs", code);
        let struct_concepts: Vec<_> = concepts
            .iter()
            .filter(|c| c.concept_type == "struct")
            .collect();
        assert!(!struct_concepts.is_empty());
        let struct_concept = &struct_concepts[0];
        assert_eq!(struct_concept.name, "User");
        assert_eq!(struct_concept.concept_type, "struct");
        assert_eq!(struct_concept.confidence, 0.8);
    }
    // Enums are deliberately mapped to the "struct" concept type by the
    // extractor's match arm for struct-like items.
    #[test]
    fn test_extract_rust_enum() {
        let extractor = RustExtractor::new();
        let code = "enum Color { Red, Green, Blue }";
        let tree = create_rust_tree(code);
        let concepts = extract_all_concepts(&extractor, &tree, "color.rs", code);
        let enum_concepts: Vec<_> = concepts
            .iter()
            .filter(|c| c.concept_type == "struct") // Enums mapped as structs
            .collect();
        assert!(!enum_concepts.is_empty());
        let enum_concept = &enum_concepts[0];
        assert_eq!(enum_concept.name, "Color");
    }
    // A free function is extracted with its exact name and type "function".
    #[test]
    fn test_extract_rust_function() {
        let extractor = RustExtractor::new();
        let code = "pub fn calculate_total(price: f64, tax: f64) -> f64 { price + tax }";
        let tree = create_rust_tree(code);
        let concepts = extract_all_concepts(&extractor, &tree, "calc.rs", code);
        let function_concepts: Vec<_> = concepts
            .iter()
            .filter(|c| c.concept_type == "function")
            .collect();
        assert!(!function_concepts.is_empty());
        let function_concept = &function_concepts[0];
        assert_eq!(function_concept.name, "calculate_total");
        assert_eq!(function_concept.concept_type, "function");
    }
    // Traits, like enums, are mapped to the "struct" concept type.
    #[test]
    fn test_extract_rust_trait() {
        let extractor = RustExtractor::new();
        let code = "trait Display { fn fmt(&self) -> String; }";
        let tree = create_rust_tree(code);
        let concepts = extract_all_concepts(&extractor, &tree, "display.rs", code);
        let trait_concepts: Vec<_> = concepts
            .iter()
            .filter(|c| c.concept_type == "struct") // Traits mapped as structs
            .collect();
        assert!(!trait_concepts.is_empty());
        let trait_concept = &trait_concepts[0];
        assert_eq!(trait_concept.name, "Display");
    }
    // struct + impl both count as "struct" concepts; each method inside
    // the impl counts as a "function" concept.
    #[test]
    fn test_extract_rust_impl() {
        let extractor = RustExtractor::new();
        let code = r#"
            struct User { name: String }
            impl User {
                fn new(name: String) -> Self {
                    User { name }
                }
                fn get_name(&self) -> &str {
                    &self.name
                }
            }
        "#;
        let tree = create_rust_tree(code);
        let concepts = extract_all_concepts(&extractor, &tree, "user_impl.rs", code);
        // Should find struct, impl, and functions
        let struct_concepts: Vec<_> = concepts
            .iter()
            .filter(|c| c.concept_type == "struct")
            .collect();
        let function_concepts: Vec<_> = concepts
            .iter()
            .filter(|c| c.concept_type == "function")
            .collect();
        assert!(struct_concepts.len() >= 2); // struct + impl
        assert!(function_concepts.len() >= 2); // new + get_name
    }
    // Smoke test only: whether a bare top-level `let` yields a "variable"
    // concept depends on the grammar's tree shape, so nothing is asserted.
    #[test]
    fn test_extract_rust_let_binding() {
        let extractor = RustExtractor::new();
        let code = "let user_name = String::from(\"John\");";
        let tree = create_rust_tree(code);
        let concepts = extract_all_concepts(&extractor, &tree, "var.rs", code);
        let _variable_concepts: Vec<_> = concepts
            .iter()
            .filter(|c| c.concept_type == "variable")
            .collect();
        // May or may not find variables depending on tree structure
        // Length is always >= 0 for Vec
    }
    // End-to-end: a realistic file with structs, a generic trait and two
    // impl blocks yields both struct-like and function concepts, and the
    // expected names are all present.
    #[test]
    fn test_complex_rust_example() {
        let extractor = RustExtractor::new();
        let code = r#"
            use std::collections::HashMap;
            #[derive(Debug, Clone)]
            pub struct User {
                id: u32,
                name: String,
                email: String,
            }
            pub trait Repository<T> {
                fn find_by_id(&self, id: u32) -> Option<T>;
                fn save(&mut self, entity: T) -> Result<(), String>;
            }
            pub struct UserRepository {
                users: HashMap<u32, User>,
            }
            impl Repository<User> for UserRepository {
                fn find_by_id(&self, id: u32) -> Option<User> {
                    self.users.get(&id).cloned()
                }
                fn save(&mut self, user: User) -> Result<(), String> {
                    self.users.insert(user.id, user);
                    Ok(())
                }
            }
            impl UserRepository {
                pub fn new() -> Self {
                    UserRepository {
                        users: HashMap::new(),
                    }
                }
            }
        "#;
        let tree = create_rust_tree(code);
        let concepts = extract_all_concepts(&extractor, &tree, "complex.rs", code);
        // Should find multiple concepts
        assert!(!concepts.is_empty());
        let struct_concepts: Vec<_> = concepts
            .iter()
            .filter(|c| c.concept_type == "struct")
            .collect();
        let function_concepts: Vec<_> = concepts
            .iter()
            .filter(|c| c.concept_type == "function")
            .collect();
        assert!(!struct_concepts.is_empty());
        assert!(!function_concepts.is_empty());
        // Check for specific names
        let concept_names: Vec<&String> = concepts.iter().map(|c| &c.name).collect();
        assert!(concept_names.contains(&&"User".to_string()));
        assert!(concept_names.contains(&&"UserRepository".to_string()));
        assert!(concept_names.contains(&&"Repository".to_string()));
    }
    // Empty input must produce no concepts (and must not panic).
    #[test]
    fn test_empty_rust_code() {
        let extractor = RustExtractor::new();
        let code = "";
        let tree = create_rust_tree(code);
        let concepts = extract_all_concepts(&extractor, &tree, "empty.rs", code);
        assert_eq!(concepts.len(), 0);
    }
    // tree-sitter produces an error-recovery tree for invalid input;
    // extraction over it must not panic.
    #[test]
    fn test_invalid_rust_syntax() {
        let extractor = RustExtractor::new();
        let code = "struct {{{ invalid syntax";
        let tree = create_rust_tree(code);
        let _concepts = extract_all_concepts(&extractor, &tree, "invalid.rs", code);
        // Should not crash on invalid syntax
        // Length is always >= 0 for Vec
    }
    // The unit-struct literal works interchangeably with `new()`.
    #[test]
    fn test_extractor_default() {
        let extractor = RustExtractor;
        let code = "struct Test;";
        let tree = create_rust_tree(code);
        let mut concepts = Vec::new();
        let result = extractor.extract_concepts(tree.root_node(), "test.rs", code, &mut concepts);
        assert!(result.is_ok());
    }
}
| rust | MIT | 9cbed0d63a52d61fcd66d0d651b8c4d7f787ba78 | 2026-01-04T20:19:30.317431Z | false |
pi22by7/In-Memoria | https://github.com/pi22by7/In-Memoria/blob/9cbed0d63a52d61fcd66d0d651b8c4d7f787ba78/rust-core/src/extractors/go.rs | rust-core/src/extractors/go.rs | //! Go concept extraction
use crate::types::{SemanticConcept, LineRange, ParseError};
use crate::parsing::NameExtractor;
use std::collections::HashMap;
use tree_sitter::Node;
/// Extractor for Go concepts.
///
/// Recognises type declarations (structs, interfaces), function/method
/// declarations and var/const declarations in a tree-sitter Go AST.
/// Stateless unit struct.
pub struct GoExtractor;
impl GoExtractor {
    pub fn new() -> Self {
        Self
    }

    /// Extract concepts from a single Go AST node.
    ///
    /// Type declarations (including struct and interface types) map to the
    /// `"struct"` concept type, function and method declarations to
    /// `"function"`, and var/const declarations to `"variable"`. Callers
    /// walk the tree and invoke this once per node.
    pub fn extract_concepts(
        &self,
        node: Node<'_>,
        file_path: &str,
        content: &str,
        concepts: &mut Vec<SemanticConcept>,
    ) -> Result<(), ParseError> {
        match node.kind() {
            "type_declaration" | "struct_type" | "interface_type" => {
                if let Some(concept) =
                    self.extract_concept_from_node(node, file_path, content, "struct")?
                {
                    concepts.push(concept);
                }
            }
            "function_declaration" | "method_declaration" => {
                if let Some(concept) =
                    self.extract_concept_from_node(node, file_path, content, "function")?
                {
                    concepts.push(concept);
                }
            }
            "var_declaration" | "const_declaration" => {
                if let Some(concept) =
                    self.extract_concept_from_node(node, file_path, content, "variable")?
                {
                    concepts.push(concept);
                }
            }
            _ => {}
        }
        Ok(())
    }

    /// Build a [`SemanticConcept`] for `node`, or `Ok(None)` when the node
    /// has no extractable name.
    ///
    /// The id hashes (file path, name, start line) so it is deterministic
    /// and unique per declaration site. The previous wall-clock id gave
    /// every concept extracted within the same millisecond an identical id
    /// (and `concept_0` whenever the clock error path was taken).
    fn extract_concept_from_node(
        &self,
        node: Node<'_>,
        file_path: &str,
        content: &str,
        concept_type: &str,
    ) -> Result<Option<SemanticConcept>, ParseError> {
        let name = NameExtractor::extract_name_from_node(node, content)
            .map_err(ParseError::from_reason)?;
        if name.is_empty() {
            return Ok(None);
        }

        // tree-sitter rows are 0-based; concepts use 1-based lines.
        let start_line = node.start_position().row as u32 + 1;
        let id = {
            use std::collections::hash_map::DefaultHasher;
            use std::hash::{Hash, Hasher};
            let mut hasher = DefaultHasher::new();
            file_path.hash(&mut hasher);
            name.hash(&mut hasher);
            start_line.hash(&mut hasher);
            format!("concept_{}", hasher.finish())
        };

        Ok(Some(SemanticConcept {
            id,
            name,
            concept_type: concept_type.to_string(),
            confidence: 0.8,
            file_path: file_path.to_string(),
            line_range: LineRange {
                start: start_line,
                end: node.end_position().row as u32 + 1,
            },
            relationships: HashMap::new(),
            metadata: HashMap::new(),
        }))
    }
}
impl Default for GoExtractor { fn default() -> Self { Self::new() } }
#[cfg(test)]
mod tests {
    use super::*;
    use crate::parsing::ParserManager;

    /// Smoke test: extraction over a minimal Go source must not panic.
    /// No concept count is asserted — it depends on the grammar's tree shape.
    #[test]
    fn test_go_function() {
        let source = "func main() { println(\"Hello\") }";
        let mut parser = ParserManager::new().unwrap();
        let tree = parser.parse(source, "go").unwrap();

        let mut found = Vec::new();
        let _ =
            GoExtractor::new().extract_concepts(tree.root_node(), "main.go", source, &mut found);
    }
}
pi22by7/In-Memoria | https://github.com/pi22by7/In-Memoria/blob/9cbed0d63a52d61fcd66d0d651b8c4d7f787ba78/rust-core/src/extractors/svelte.rs | rust-core/src/extractors/svelte.rs | //! Svelte concept extraction
use crate::types::{SemanticConcept, LineRange, ParseError};
use crate::parsing::NameExtractor;
use std::collections::HashMap;
use tree_sitter::Node;
/// Extractor for Svelte concepts.
///
/// Recurses into `script_element` nodes and records `element` nodes as
/// `"component"` concepts. Stateless unit struct.
pub struct SvelteExtractor;
impl SvelteExtractor {
    pub fn new() -> Self {
        Self
    }

    /// Extract concepts from a single Svelte AST node.
    ///
    /// `script_element` nodes are recursed into so declarations inside the
    /// component's script block are visited; `element` nodes are recorded
    /// as `"component"` concepts. All other kinds are ignored.
    pub fn extract_concepts(
        &self,
        node: Node<'_>,
        file_path: &str,
        content: &str,
        concepts: &mut Vec<SemanticConcept>,
    ) -> Result<(), ParseError> {
        match node.kind() {
            "script_element" => {
                // Parse as JavaScript/TypeScript content within Svelte
                let mut cursor = node.walk();
                for child in node.children(&mut cursor) {
                    self.extract_concepts(child, file_path, content, concepts)?;
                }
            }
            "element" => {
                if let Some(concept) =
                    self.extract_concept_from_node(node, file_path, content, "component")?
                {
                    concepts.push(concept);
                }
            }
            _ => {}
        }
        Ok(())
    }

    /// Build a [`SemanticConcept`] for `node`, or `Ok(None)` when the node
    /// has no extractable name.
    ///
    /// The id hashes (file path, name, start line): deterministic and
    /// unique per declaration site, unlike the previous wall-clock id
    /// which collided for all concepts created in the same millisecond.
    fn extract_concept_from_node(
        &self,
        node: Node<'_>,
        file_path: &str,
        content: &str,
        concept_type: &str,
    ) -> Result<Option<SemanticConcept>, ParseError> {
        let name = NameExtractor::extract_name_from_node(node, content)
            .map_err(ParseError::from_reason)?;
        if name.is_empty() {
            return Ok(None);
        }

        // tree-sitter rows are 0-based; concepts use 1-based lines.
        let start_line = node.start_position().row as u32 + 1;
        let id = {
            use std::collections::hash_map::DefaultHasher;
            use std::hash::{Hash, Hasher};
            let mut hasher = DefaultHasher::new();
            file_path.hash(&mut hasher);
            name.hash(&mut hasher);
            start_line.hash(&mut hasher);
            format!("concept_{}", hasher.finish())
        };

        Ok(Some(SemanticConcept {
            id,
            name,
            concept_type: concept_type.to_string(),
            confidence: 0.8,
            file_path: file_path.to_string(),
            line_range: LineRange {
                start: start_line,
                end: node.end_position().row as u32 + 1,
            },
            relationships: HashMap::new(),
            metadata: HashMap::new(),
        }))
    }
}
impl Default for SvelteExtractor { fn default() -> Self { Self::new() } }
#[cfg(test)]
mod tests {
    use super::*;
    use crate::parsing::ParserManager;

    /// Smoke test: extraction over a minimal Svelte component must not panic.
    /// No concept count is asserted — it depends on the grammar's tree shape.
    #[test]
    fn test_svelte_component() {
        let source = "<script>\nlet name = 'world';\n</script>\n<h1>Hello {name}!</h1>";
        let mut parser = ParserManager::new().unwrap();
        let tree = parser.parse(source, "svelte").unwrap();

        let mut found = Vec::new();
        let _ = SvelteExtractor::new().extract_concepts(
            tree.root_node(),
            "App.svelte",
            source,
            &mut found,
        );
    }
}
pi22by7/In-Memoria | https://github.com/pi22by7/In-Memoria/blob/9cbed0d63a52d61fcd66d0d651b8c4d7f787ba78/rust-core/src/extractors/typescript.rs | rust-core/src/extractors/typescript.rs | //! Comprehensive TypeScript and JavaScript concept extraction using full grammar support
//!
//! This module provides detailed extraction of TypeScript/JavaScript constructs including:
//! - Classes (regular, abstract, with decorators)
//! - Interfaces with method signatures
//! - Functions (regular, async, generator, arrow functions)
//! - Type definitions (type aliases, enums, generics)
//! - Modules and namespaces
//! - Import/export statements
//! - Variables with type annotations
//! - JSX elements (for TSX)
use crate::types::{LineRange, ParseError, SemanticConcept};
use std::collections::HashMap;
use tree_sitter::Node;
/// Advanced TypeScript/JavaScript concept extractor using full grammar support.
///
/// Stateless unit struct; each extraction entry point receives the AST
/// node, the file path and the source text explicitly.
pub struct TypeScriptExtractor;
impl TypeScriptExtractor {
    /// Create a new (stateless) extractor.
    pub fn new() -> Self {
        Self
    }
    /// Extract concepts from TypeScript/JavaScript AST nodes.
    ///
    /// Inspects a single `node`; callers walk the tree and invoke this per
    /// node. Note the type mapping: class declarations, interface
    /// declarations AND type-alias declarations are all recorded with the
    /// `"class"` concept type here (exports of those kinds keep their own
    /// type — see `extract_export`).
    pub fn extract_concepts(
        &self,
        node: Node<'_>,
        file_path: &str,
        content: &str,
        concepts: &mut Vec<SemanticConcept>,
    ) -> Result<(), ParseError> {
        match node.kind() {
            "class_declaration" | "interface_declaration" | "type_alias_declaration" => {
                if let Some(concept) =
                    self.extract_concept_from_node(node, file_path, content, "class")?
                {
                    concepts.push(concept);
                }
            }
            "function_declaration"
            | "method_definition"
            | "arrow_function"
            | "function"
            | "function_expression" => {
                if let Some(concept) =
                    self.extract_concept_from_node(node, file_path, content, "function")?
                {
                    concepts.push(concept);
                }
            }
            "variable_declaration" | "lexical_declaration" => {
                self.extract_variables(node, file_path, content, concepts)?;
            }
            // Module constructs
            "module" | "internal_module" => {
                if let Some(concept) = self.extract_module(node, file_path, content)? {
                    concepts.push(concept);
                }
            }
            // Import/Export
            "import_statement" => {
                if let Some(concept) = self.extract_import(node, file_path, content)? {
                    concepts.push(concept);
                }
            }
            "export_statement" => {
                if let Some(concept) = self.extract_export(node, file_path, content)? {
                    concepts.push(concept);
                }
            }
            _ => {}
        }
        Ok(())
    }
    /// Extract a concept from a node with proper name extraction.
    ///
    /// Returns `Ok(None)` when no name can be extracted. Per-type metadata
    /// (async/static flags, decorators, extends clauses, ...) is attached
    /// based on `concept_type`.
    ///
    /// NOTE(review): the only visible callers (in `extract_concepts`) pass
    /// `"class"` or `"function"`, so the `"interface"`, `"type"`, `"enum"`
    /// and `"variable"` arms below appear unreachable from this file —
    /// confirm before removing them.
    fn extract_concept_from_node(
        &self,
        node: Node<'_>,
        file_path: &str,
        content: &str,
        concept_type: &str,
    ) -> Result<Option<SemanticConcept>, ParseError> {
        let name = self.extract_name_from_node(node, content)?;
        if name.is_empty() {
            return Ok(None);
        }
        let mut metadata = HashMap::new();
        // Add enhanced metadata based on type
        match concept_type {
            "function" => {
                if self.has_async_modifier(node) {
                    metadata.insert("async".to_string(), "true".to_string());
                }
                if self.has_static_modifier(node) {
                    metadata.insert("static".to_string(), "true".to_string());
                }
                if let Some(params) = self.extract_function_parameters(node, content) {
                    metadata.insert("parameters".to_string(), params);
                }
            }
            "class" => {
                let decorators = self.extract_decorators(node, content);
                if !decorators.is_empty() {
                    metadata.insert("decorators".to_string(), decorators.join(", "));
                }
                if let Some(access) = self.extract_accessibility_modifier(node, content) {
                    metadata.insert("accessibility".to_string(), access);
                }
                // Check for extends/implements
                if let Some(extends) = self.find_child_by_kind(node, "class_heritage") {
                    if let Some(extends_text) = self.extract_text_from_node(extends, content) {
                        metadata.insert("extends".to_string(), extends_text);
                    }
                }
            }
            "interface" => {
                if let Some(extends) = self.find_child_by_kind(node, "extends_type_clause") {
                    let extended_types = self.extract_extended_types(extends, content);
                    if !extended_types.is_empty() {
                        metadata.insert("extends".to_string(), extended_types.join(", "));
                    }
                }
            }
            "type" => {
                // For a type alias, record the right-hand-side definition text.
                if let Some(type_node) = self.find_child_by_field(node, "value") {
                    if let Some(type_def) = self.extract_text_from_node(type_node, content) {
                        metadata.insert("definition".to_string(), type_def);
                    }
                }
            }
            "enum" => {
                // Check if const enum
                let mut cursor = node.walk();
                for child in node.children(&mut cursor) {
                    if child.kind() == "const" {
                        metadata.insert("const".to_string(), "true".to_string());
                        break;
                    }
                }
            }
            "variable" => {
                // Check for type annotation
                if let Some(type_node) = self.find_child_by_kind(node, "type_annotation") {
                    if let Some(type_text) = self.extract_text_from_node(type_node, content) {
                        metadata.insert("type".to_string(), type_text);
                    }
                }
            }
            _ => {}
        }
        Ok(Some(self.create_concept(
            name,
            concept_type.to_string(),
            node,
            file_path,
            0.8,
            metadata,
        )))
    }
    /// Extract variables from variable/lexical declarations.
    ///
    /// One `"variable"` concept (confidence 0.7) is emitted per
    /// `variable_declarator` child that has a non-empty `name` field.
    fn extract_variables(
        &self,
        node: Node<'_>,
        file_path: &str,
        content: &str,
        concepts: &mut Vec<SemanticConcept>,
    ) -> Result<(), ParseError> {
        let mut cursor = node.walk();
        for child in node.children(&mut cursor) {
            if child.kind() == "variable_declarator" {
                if let Some(name_node) = self.find_child_by_field(child, "name") {
                    if let Some(name) = self.extract_text_from_node(name_node, content) {
                        if !name.is_empty() {
                            let mut metadata = HashMap::new();
                            // Check if has type annotation
                            if let Some(type_node) =
                                self.find_child_by_kind(child, "type_annotation")
                            {
                                if let Some(type_text) =
                                    self.extract_text_from_node(type_node, content)
                                {
                                    metadata.insert("type".to_string(), type_text);
                                }
                            }
                            concepts.push(self.create_concept(
                                name,
                                "variable".to_string(),
                                child,
                                file_path,
                                0.7,
                                metadata,
                            ));
                        }
                    }
                }
            }
        }
        Ok(())
    }
    /// Extract module/namespace declaration.
    ///
    /// Returns `Ok(None)` when the module has no extractable name. The
    /// `metadata["type"]` entry distinguishes `internal_module` nodes
    /// ("namespace") from plain "module" nodes.
    fn extract_module(
        &self,
        node: Node<'_>,
        file_path: &str,
        content: &str,
    ) -> Result<Option<SemanticConcept>, ParseError> {
        let name = self.extract_name_from_node(node, content)?;
        if name.is_empty() {
            return Ok(None);
        }
        let mut metadata = HashMap::new();
        // Distinguish between module and namespace
        if node.kind() == "internal_module" {
            metadata.insert("type".to_string(), "namespace".to_string());
        } else {
            metadata.insert("type".to_string(), "module".to_string());
        }
        Ok(Some(self.create_concept(
            name,
            "module".to_string(),
            node,
            file_path,
            0.8,
            metadata,
        )))
    }
    /// Extract import statement.
    ///
    /// The concept name is the last path segment of the (quote-stripped)
    /// module specifier, e.g. `"./utils/math"` -> `math`; the full source
    /// text is kept in `metadata["source"]`. Confidence is 0.6.
    fn extract_import(
        &self,
        node: Node<'_>,
        file_path: &str,
        content: &str,
    ) -> Result<Option<SemanticConcept>, ParseError> {
        // Extract source path
        if let Some(source_node) = self.find_child_by_field(node, "source") {
            if let Some(source_text) = self.extract_text_from_node(source_node, content) {
                let mut metadata = HashMap::new();
                metadata.insert("source".to_string(), source_text.clone());
                // Try to get a meaningful name from the import
                let name = source_text
                    .trim_matches('"')
                    .trim_matches('\'')
                    .split('/')
                    .next_back()
                    .unwrap_or("import")
                    .to_string();
                return Ok(Some(self.create_concept(
                    name,
                    "import".to_string(),
                    node,
                    file_path,
                    0.6,
                    metadata,
                )));
            }
        }
        Ok(None)
    }
    /// Extract export statement.
    ///
    /// Declaration exports (`export class Foo ...`) are recorded under the
    /// declaration's own concept type with `metadata["exported"] = "true"`;
    /// bare export clauses (`export { a, b }`) become a single low-confidence
    /// `"export"` concept named `export_clause`.
    fn extract_export(
        &self,
        node: Node<'_>,
        file_path: &str,
        content: &str,
    ) -> Result<Option<SemanticConcept>, ParseError> {
        // Check if it's a declaration export
        if let Some(decl_node) = self.find_child_by_field(node, "declaration") {
            let name = self.extract_name_from_node(decl_node, content)?;
            if !name.is_empty() {
                let mut metadata = HashMap::new();
                metadata.insert("exported".to_string(), "true".to_string());
                let concept_type = match decl_node.kind() {
                    "class_declaration" => "class",
                    "function_declaration" => "function",
                    "interface_declaration" => "interface",
                    "type_alias_declaration" => "type",
                    "enum_declaration" => "enum",
                    _ => "export",
                };
                return Ok(Some(self.create_concept(
                    name,
                    concept_type.to_string(),
                    decl_node,
                    file_path,
                    0.8,
                    metadata,
                )));
            }
        }
        // Handle export clauses
        if let Some(_export_clause) = self.find_child_by_kind(node, "export_clause") {
            return Ok(Some(self.create_concept(
                "export_clause".to_string(),
                "export".to_string(),
                node,
                file_path,
                0.5,
                HashMap::new(),
            )));
        }
        Ok(None)
    }
}
impl Default for TypeScriptExtractor {
fn default() -> Self {
Self::new()
}
}
// Helper methods implementation
impl TypeScriptExtractor {
    /// Extract a name from `node`: prefer the grammar's `name` field, then
    /// fall back to the first identifier-like child. Returns an empty
    /// string when nothing matches.
    fn extract_name_from_node(&self, node: Node<'_>, content: &str) -> Result<String, ParseError> {
        // Try to find name field first
        if let Some(name_node) = self.find_child_by_field(node, "name") {
            if let Some(name) = self.extract_text_from_node(name_node, content) {
                return Ok(name);
            }
        }
        // Try common name patterns
        let mut cursor = node.walk();
        for child in node.children(&mut cursor) {
            match child.kind() {
                "identifier" | "type_identifier" | "property_identifier" => {
                    if let Some(name) = self.extract_text_from_node(child, content) {
                        return Ok(name);
                    }
                }
                _ => continue,
            }
        }
        Ok(String::new())
    }

    /// Create a semantic concept with standard metadata.
    ///
    /// The concept id hashes (file path, name, start line), which is
    /// deterministic and unique per declaration site. The previous
    /// wall-clock-millisecond id was identical for every concept created
    /// within the same millisecond.
    fn create_concept(
        &self,
        name: String,
        concept_type: String,
        node: Node<'_>,
        file_path: &str,
        confidence: f64,
        metadata: HashMap<String, String>,
    ) -> SemanticConcept {
        // tree-sitter rows are 0-based; concepts use 1-based lines.
        let start_line = node.start_position().row as u32 + 1;
        let id = {
            use std::collections::hash_map::DefaultHasher;
            use std::hash::{Hash, Hasher};
            let mut hasher = DefaultHasher::new();
            file_path.hash(&mut hasher);
            name.hash(&mut hasher);
            start_line.hash(&mut hasher);
            format!("concept_{}", hasher.finish())
        };
        SemanticConcept {
            id,
            name,
            concept_type,
            confidence,
            file_path: file_path.to_string(),
            line_range: LineRange {
                start: start_line,
                end: node.end_position().row as u32 + 1,
            },
            relationships: HashMap::new(),
            metadata,
        }
    }

    /// Find child node by field name.
    fn find_child_by_field<'a>(&self, node: Node<'a>, field_name: &str) -> Option<Node<'a>> {
        node.child_by_field_name(field_name)
    }

    /// Find the first child node of the given kind.
    ///
    /// Iterates lazily; the previous version collected every child into a
    /// `Vec` before searching, which allocated for no benefit.
    fn find_child_by_kind<'a>(&self, node: Node<'a>, kind: &str) -> Option<Node<'a>> {
        let mut cursor = node.walk();
        node.children(&mut cursor).find(|child| child.kind() == kind)
    }

    /// Extract the source text covered by `node`.
    ///
    /// Uses checked slicing (`str::get`) so an out-of-range byte span or a
    /// span that does not fall on UTF-8 character boundaries yields `None`
    /// instead of panicking (direct `content[a..b]` indexing panics on a
    /// non-boundary offset).
    fn extract_text_from_node(&self, node: Node<'_>, content: &str) -> Option<String> {
        content
            .get(node.start_byte()..node.end_byte())
            .map(str::to_string)
    }

    /// Check if node has an `async` modifier among its children.
    fn has_async_modifier(&self, node: Node<'_>) -> bool {
        let mut cursor = node.walk();
        node.children(&mut cursor).any(|child| child.kind() == "async")
    }

    /// Check if node has a `static` modifier among its children.
    fn has_static_modifier(&self, node: Node<'_>) -> bool {
        let mut cursor = node.walk();
        node.children(&mut cursor).any(|child| child.kind() == "static")
    }

    /// Extract the accessibility modifier (public/private/protected), if any.
    fn extract_accessibility_modifier(&self, node: Node<'_>, content: &str) -> Option<String> {
        let mut cursor = node.walk();
        node.children(&mut cursor)
            .find(|child| child.kind() == "accessibility_modifier")
            .and_then(|child| self.extract_text_from_node(child, content))
    }

    /// Extract the source text of every decorator child of `node`.
    fn extract_decorators(&self, node: Node<'_>, content: &str) -> Vec<String> {
        let mut cursor = node.walk();
        node.children(&mut cursor)
            .filter(|child| child.kind() == "decorator")
            .filter_map(|child| self.extract_text_from_node(child, content))
            .collect()
    }

    /// Extract the function's parameter-list text (the `parameters` field).
    fn extract_function_parameters(&self, node: Node<'_>, content: &str) -> Option<String> {
        self.find_child_by_field(node, "parameters")
            .and_then(|params| self.extract_text_from_node(params, content))
    }

    /// Extract the type names referenced by an extends clause.
    fn extract_extended_types(&self, node: Node<'_>, content: &str) -> Vec<String> {
        let mut cursor = node.walk();
        node.children(&mut cursor)
            .filter(|child| child.kind() == "type" || child.kind() == "identifier")
            .filter_map(|child| self.extract_text_from_node(child, content))
            .collect()
    }
}
#[cfg(test)]
mod tests {
use super::*;
use crate::parsing::ParserManager;
fn create_ts_tree(code: &str) -> tree_sitter::Tree {
let mut manager = ParserManager::new().unwrap();
manager.parse(code, "typescript").unwrap()
}
fn create_js_tree(code: &str) -> tree_sitter::Tree {
let mut manager = ParserManager::new().unwrap();
manager.parse(code, "javascript").unwrap()
}
fn extract_all_concepts(
extractor: &TypeScriptExtractor,
tree: &tree_sitter::Tree,
file_path: &str,
content: &str,
) -> Vec<SemanticConcept> {
let mut concepts = Vec::new();
fn walk_and_extract(
extractor: &TypeScriptExtractor,
node: tree_sitter::Node<'_>,
file_path: &str,
content: &str,
concepts: &mut Vec<SemanticConcept>,
) {
let _ = extractor.extract_concepts(node, file_path, content, concepts);
let mut cursor = node.walk();
for child in node.children(&mut cursor) {
walk_and_extract(extractor, child, file_path, content, concepts);
}
}
walk_and_extract(
extractor,
tree.root_node(),
file_path,
content,
&mut concepts,
);
concepts
}
#[test]
fn test_extract_typescript_class() {
let extractor = TypeScriptExtractor::new();
let code = "export class UserService { getName() { return 'test'; } }";
let tree = create_ts_tree(code);
let concepts = extract_all_concepts(&extractor, &tree, "test.ts", code);
// Should find class and method
let class_concepts: Vec<_> = concepts
.iter()
.filter(|c| c.concept_type == "class")
.collect();
assert!(!class_concepts.is_empty());
let class_concept = &class_concepts[0];
assert_eq!(class_concept.name, "UserService");
assert_eq!(class_concept.concept_type, "class");
assert_eq!(class_concept.confidence, 0.8);
}
#[test]
fn test_extract_typescript_interface() {
let extractor = TypeScriptExtractor::new();
let code = "interface IUserService { getName(): string; getAge(): number; }";
let tree = create_ts_tree(code);
let concepts = extract_all_concepts(&extractor, &tree, "interface.ts", code);
let interface_concepts: Vec<_> = concepts
.iter()
.filter(|c| c.concept_type == "class") // Interfaces mapped as classes
.collect();
assert!(!interface_concepts.is_empty());
let interface_concept = &interface_concepts[0];
assert_eq!(interface_concept.name, "IUserService");
}
#[test]
fn test_extract_typescript_function() {
let extractor = TypeScriptExtractor::new();
let code =
"function calculateTotal(price: number, tax: number): number { return price + tax; }";
let tree = create_ts_tree(code);
let concepts = extract_all_concepts(&extractor, &tree, "calc.ts", code);
let function_concepts: Vec<_> = concepts
.iter()
.filter(|c| c.concept_type == "function")
.collect();
assert!(!function_concepts.is_empty());
let function_concept = &function_concepts[0];
assert_eq!(function_concept.name, "calculateTotal");
assert_eq!(function_concept.concept_type, "function");
}
#[test]
fn test_extract_arrow_function() {
let extractor = TypeScriptExtractor::new();
let code = "const handleClick = (event: Event) => { console.log('clicked'); };";
let tree = create_ts_tree(code);
let concepts = extract_all_concepts(&extractor, &tree, "handler.ts", code);
// Should find variable and potentially arrow function
let __variable_concepts: Vec<_> = concepts
.iter()
.filter(|c| c.concept_type == "variable")
.collect();
assert!(!__variable_concepts.is_empty());
}
#[test]
fn test_extract_javascript_class() {
let extractor = TypeScriptExtractor::new();
let code = "class Calculator { add(a, b) { return a + b; } }";
let tree = create_js_tree(code);
let concepts = extract_all_concepts(&extractor, &tree, "calc.js", code);
let class_concepts: Vec<_> = concepts
.iter()
.filter(|c| c.concept_type == "class")
.collect();
assert!(!class_concepts.is_empty());
let class_concept = &class_concepts[0];
assert_eq!(class_concept.name, "Calculator");
}
#[test]
fn test_extract_javascript_function() {
let extractor = TypeScriptExtractor::new();
let code = "function hello() { return 'world'; }";
let tree = create_js_tree(code);
let concepts = extract_all_concepts(&extractor, &tree, "hello.js", code);
let function_concepts: Vec<_> = concepts
.iter()
.filter(|c| c.concept_type == "function")
.collect();
assert!(!function_concepts.is_empty());
let function_concept = &function_concepts[0];
assert_eq!(function_concept.name, "hello");
assert_eq!(function_concept.concept_type, "function");
}
#[test]
fn test_extract_variable_declaration() {
let extractor = TypeScriptExtractor::new();
let code = "const userName = 'john'; let userAge = 25; var isActive = true;";
let tree = create_js_tree(code);
let concepts = extract_all_concepts(&extractor, &tree, "vars.js", code);
let __variable_concepts: Vec<_> = concepts
.iter()
.filter(|c| c.concept_type == "variable")
.collect();
assert!(!__variable_concepts.is_empty());
// Should find at least one variable
let variable_names: Vec<&String> = __variable_concepts.iter().map(|c| &c.name).collect();
// Note: The exact extraction depends on tree structure
assert!(!variable_names.is_empty());
}
#[test]
fn test_extract_method_definition() {
let extractor = TypeScriptExtractor::new();
let code = r#"
class UserManager {
constructor(name) {
this.name = name;
}
getName() {
return this.name;
}
setName(newName) {
this.name = newName;
}
}
"#;
let tree = create_js_tree(code);
let concepts = extract_all_concepts(&extractor, &tree, "manager.js", code);
// Should find class and methods
let class_concepts: Vec<_> = concepts
.iter()
.filter(|c| c.concept_type == "class")
.collect();
assert!(!class_concepts.is_empty());
let function_concepts: Vec<_> = concepts
.iter()
.filter(|c| c.concept_type == "function")
.collect();
assert!(!function_concepts.is_empty());
// Should find the class name
let class_concept = &class_concepts[0];
assert_eq!(class_concept.name, "UserManager");
}
#[test]
fn test_extract_type_alias() {
    // The extractor maps TypeScript type aliases onto the "class" concept
    // type; verify the alias is found under that mapping.
    let extractor = TypeScriptExtractor::new();
    let code = "type UserType = { name: string; age: number; };";
    let tree = create_ts_tree(code);
    let concepts = extract_all_concepts(&extractor, &tree, "types.ts", code);
    let aliases: Vec<_> = concepts
        .iter()
        .filter(|c| c.concept_type == "class")
        .collect();
    assert!(!aliases.is_empty());
    assert_eq!(aliases[0].name, "UserType");
}
#[test]
fn test_empty_code() {
    // An empty source file must produce no concepts at all.
    let extractor = TypeScriptExtractor::new();
    let source = "";
    let tree = create_js_tree(source);
    let concepts = extract_all_concepts(&extractor, &tree, "empty.js", source);
    assert!(concepts.is_empty());
}
#[test]
fn test_invalid_syntax() {
    // Tree-sitter is error-tolerant: extraction over malformed input must
    // simply not panic; whether any concepts are found is unspecified.
    let extractor = TypeScriptExtractor::new();
    let source = "function {{{ invalid syntax";
    let tree = create_js_tree(source);
    let _ = extract_all_concepts(&extractor, &tree, "invalid.js", source);
}
#[test]
fn test_concept_line_numbers() {
    let extractor = TypeScriptExtractor::new();
    let code = r#"
function first() {}
class Second {}
const third = () => {};
"#;
    let tree = create_js_tree(code);
    let concepts = extract_all_concepts(&extractor, &tree, "lines.js", code);
    // Every reported range must be 1-based, well-ordered, and inside the
    // 6-line snippet.
    for concept in &concepts {
        let range = &concept.line_range;
        assert!(range.start > 0);
        assert!(range.end >= range.start);
        assert!(range.start <= 6);
    }
}
#[test]
fn test_concept_metadata() {
    let extractor = TypeScriptExtractor::new();
    let code = "function testFunction() { return 42; }";
    let tree = create_js_tree(code);
    let concepts = extract_all_concepts(&extractor, &tree, "test.js", code);
    assert!(!concepts.is_empty());
    let first = &concepts[0];
    // Identity fields must all be populated.
    assert!(!first.id.is_empty());
    assert!(!first.name.is_empty());
    assert!(!first.concept_type.is_empty());
    // Confidence is a probability-like score in (0, 1].
    assert!(first.confidence > 0.0 && first.confidence <= 1.0);
    assert_eq!(first.file_path, "test.js");
    assert!(first.line_range.start > 0);
}
#[test]
fn test_extractor_default() {
    // Exercise the `Default` implementation rather than the unit-struct
    // literal, so this test actually covers `TypeScriptExtractor::default()`
    // as its name promises.
    let extractor = TypeScriptExtractor::default();
    let code = "const test = 42;";
    let tree = create_js_tree(code);
    let mut concepts = Vec::new();
    let result = extractor.extract_concepts(tree.root_node(), "test.js", code, &mut concepts);
    assert!(result.is_ok());
}
#[test]
fn test_complex_typescript_example() {
    // End-to-end sanity check over a snippet mixing interfaces, a class,
    // a type alias, and an arrow function.
    let extractor = TypeScriptExtractor::new();
    let code = r#"
interface DatabaseConnection {
connect(): Promise<void>;
disconnect(): Promise<void>;
}
class UserRepository implements DatabaseConnection {
private connection: any;
constructor(connectionString: string) {
this.connection = connectionString;
}
async connect(): Promise<void> {
// Connection logic
}
async disconnect(): Promise<void> {
// Disconnection logic
}
async findUser(id: number): Promise<User | null> {
// Find user logic
return null;
}
}
type User = {
id: number;
name: string;
email: string;
};
const createRepository = (connectionString: string): UserRepository => {
return new UserRepository(connectionString);
};
"#;
    let tree = create_ts_tree(code);
    let concepts = extract_all_concepts(&extractor, &tree, "complex.ts", code);
    assert!(!concepts.is_empty());
    // Interfaces, classes and type aliases all map to "class"; functions
    // and methods map to "function".
    let classes: Vec<_> = concepts
        .iter()
        .filter(|c| c.concept_type == "class")
        .collect();
    let functions: Vec<_> = concepts
        .iter()
        .filter(|c| c.concept_type == "function")
        .collect();
    assert!(!classes.is_empty());
    assert!(!functions.is_empty());
    // Spot-check a few well-known names from the snippet.
    let concept_names: Vec<&String> = concepts.iter().map(|c| &c.name).collect();
    assert!(concept_names.contains(&&"DatabaseConnection".to_string()));
    assert!(concept_names.contains(&&"UserRepository".to_string()));
    assert!(concept_names.contains(&&"User".to_string()));
}
}
| rust | MIT | 9cbed0d63a52d61fcd66d0d651b8c4d7f787ba78 | 2026-01-04T20:19:30.317431Z | false |
pi22by7/In-Memoria | https://github.com/pi22by7/In-Memoria/blob/9cbed0d63a52d61fcd66d0d651b8c4d7f787ba78/rust-core/src/extractors/python.rs | rust-core/src/extractors/python.rs | //! Comprehensive Python concept extraction using full grammar support
//!
//! This module provides detailed extraction of Python constructs including:
//! - Classes (with inheritance, decorators, type parameters)
//! - Functions (async, generators, decorators, type hints)
//! - Variables (with type annotations)
//! - Modern Python (type aliases, pattern matching, async/await)
//! - Import system (from/import, relative imports)
//! - Advanced features (context managers, comprehensions)
use crate::types::{SemanticConcept, LineRange, ParseError};
use crate::parsing::NameExtractor;
use std::collections::HashMap;
use tree_sitter::Node;
/// Aggregated facts gathered while scanning a Python class body.
///
/// Populated by `analyze_class_body` and folded into the class concept's
/// metadata map by `extract_class`.
#[derive(Debug, Default)]
struct ClassInfo {
    has_abstract_methods: bool, // any method decorated with `abstractmethod`
    method_count: usize,        // number of `def`s (plain or decorated) in the body
    property_count: usize,      // methods decorated with `property`
    decorators: Vec<String>,    // decorators seen on decorated members
    has_init: bool,             // body defines `__init__`
    has_classmethods: bool,     // any `classmethod`-decorated method
    has_staticmethods: bool,    // any `staticmethod`-decorated method
}
/// Advanced Python concept extractor using full grammar support.
///
/// Stateless unit struct: all extraction context (AST node, file path,
/// source text) is passed per call.
pub struct PythonExtractor;
impl PythonExtractor {
/// Create a new (stateless) extractor.
pub fn new() -> Self {
    Self
}
/// Dispatch a Python AST node to the matching specialised extractor and
/// append any resulting concepts.
///
/// Unrecognised node kinds are ignored; recursion over the tree is the
/// caller's responsibility.
pub fn extract_concepts(
    &self,
    node: Node<'_>,
    file_path: &str,
    content: &str,
    concepts: &mut Vec<SemanticConcept>,
) -> Result<(), ParseError> {
    // Node kinds that may yield several concepts push directly.
    match node.kind() {
        "decorated_definition" => {
            return self.extract_decorated_definition(node, file_path, content, concepts);
        }
        "assignment" => {
            return self.extract_assignment(node, file_path, content, concepts);
        }
        _ => {}
    }
    // Every remaining kind yields at most one concept, so funnel all of
    // them through a single Option.
    let extracted = match node.kind() {
        "class_definition" => self.extract_class(node, file_path, content)?,
        "function_definition" => self.extract_function(node, file_path, content)?,
        "augmented_assignment" => self.extract_augmented_assignment(node, file_path, content)?,
        "type_alias_statement" => self.extract_type_alias(node, file_path, content)?,
        "match_statement" => self.extract_match_statement(node, file_path, content)?,
        "import_statement" | "import_from_statement" => {
            self.extract_import(node, file_path, content)?
        }
        "with_statement" => self.extract_with_statement(node, file_path, content)?,
        _ => None,
    };
    if let Some(concept) = extracted {
        concepts.push(concept);
    }
    Ok(())
}
/// Fallback concept builder: derives the name via the shared
/// `NameExtractor` and wraps the node's span with default metadata.
///
/// NOTE(review): the id is a wall-clock millisecond timestamp, so two
/// concepts created in the same millisecond collide — confirm whether ids
/// must be unique downstream.
fn extract_concept_from_node(
    &self,
    node: Node<'_>,
    file_path: &str,
    content: &str,
    concept_type: &str,
) -> Result<Option<SemanticConcept>, ParseError> {
    let name = NameExtractor::extract_name_from_node(node, content)
        .map_err(ParseError::from_reason)?;
    // Unnamed nodes yield no concept.
    if name.is_empty() {
        return Ok(None);
    }
    let concept = SemanticConcept {
        id: format!("concept_{}", std::time::SystemTime::now()
            .duration_since(std::time::UNIX_EPOCH)
            .map(|d| d.as_millis())
            .unwrap_or(0)),
        name,
        concept_type: concept_type.to_string(),
        confidence: 0.8,
        file_path: file_path.to_string(),
        // tree-sitter rows are 0-based; stored line numbers are 1-based.
        line_range: LineRange {
            start: node.start_position().row as u32 + 1,
            end: node.end_position().row as u32 + 1,
        },
        relationships: HashMap::new(),
        metadata: HashMap::new(),
    };
    Ok(Some(concept))
}
/// Extract a class definition with inheritance, generics, and body-derived
/// metadata (method counts, decorator flags, constructor presence).
///
/// Returns `Ok(None)` when no class name can be recovered.
fn extract_class(
    &self,
    node: Node<'_>,
    file_path: &str,
    content: &str,
) -> Result<Option<SemanticConcept>, ParseError> {
    let name = self.extract_name_from_node(node, content)?;
    if name.is_empty() {
        return Ok(None);
    }
    let mut metadata = HashMap::new();
    // Extract superclasses (inheritance)
    if let Some(superclasses_node) = self.find_child_by_field(node, "superclasses") {
        let superclasses = self.extract_argument_list(superclasses_node, content);
        if !superclasses.is_empty() {
            metadata.insert("superclasses".to_string(), superclasses.join(", "));
            metadata.insert("has_inheritance".to_string(), "true".to_string());
        }
    }
    // Check for type parameters (Generic classes)
    if let Some(type_params) = self.find_child_by_field(node, "type_parameters") {
        if let Some(type_params_text) = self.extract_text_from_node(type_params, content) {
            metadata.insert("type_parameters".to_string(), type_params_text);
            metadata.insert("generic".to_string(), "true".to_string());
        }
    }
    // Analyze class body for additional metadata. Flags are only recorded
    // when set, so an absent key means "false"/"none".
    if let Some(body_node) = self.find_child_by_field(node, "body") {
        let class_info = self.analyze_class_body(body_node, content);
        if class_info.has_abstract_methods {
            metadata.insert("abstract".to_string(), "true".to_string());
        }
        if class_info.method_count > 0 {
            metadata.insert("method_count".to_string(), class_info.method_count.to_string());
        }
        if class_info.property_count > 0 {
            metadata.insert("property_count".to_string(), class_info.property_count.to_string());
        }
        if !class_info.decorators.is_empty() {
            metadata.insert("decorators".to_string(), class_info.decorators.join(", "));
        }
        if class_info.has_init {
            metadata.insert("has_constructor".to_string(), "true".to_string());
        }
        if class_info.has_classmethods {
            metadata.insert("has_classmethods".to_string(), "true".to_string());
        }
        if class_info.has_staticmethods {
            metadata.insert("has_staticmethods".to_string(), "true".to_string());
        }
    }
    // Classes get a slightly higher confidence (0.9) than functions (0.8).
    Ok(Some(self.create_concept(
        name,
        "class".to_string(),
        node,
        file_path,
        0.9,
        metadata,
    )))
}
/// Extract a function definition with async/generator flags and its raw
/// (type-hinted) parameter list.
///
/// Returns `Ok(None)` when no function name can be recovered.
fn extract_function(
    &self,
    node: Node<'_>,
    file_path: &str,
    content: &str,
) -> Result<Option<SemanticConcept>, ParseError> {
    let name = self.extract_name_from_node(node, content)?;
    if name.is_empty() {
        return Ok(None);
    }
    let mut metadata = HashMap::new();
    // `async def` carries an `async` child token.
    if self.has_async_modifier(node) {
        metadata.insert("async".to_string(), "true".to_string());
    }
    // Record the raw parameter list (including any type hints) verbatim.
    if let Some(params_node) = self.find_child_by_field(node, "parameters") {
        let params = self.extract_parameters_with_types(params_node, content);
        if !params.is_empty() {
            metadata.insert("parameters".to_string(), params);
        }
    }
    // Any `yield` in the body makes the function a generator.
    if self.contains_yield(node, content) {
        metadata.insert("generator".to_string(), "true".to_string());
    }
    // The former "legacy validation" pass re-ran NameExtractor only to merge
    // an always-empty metadata map (extract_concept_from_node never sets
    // metadata); it added no information and introduced a spurious error
    // path, so it has been removed.
    Ok(Some(self.create_concept(
        name,
        "function".to_string(),
        node,
        file_path,
        0.8,
        metadata,
    )))
}
/// Extract a decorated class or function, attaching its decorator list to
/// the resulting concept's metadata.
fn extract_decorated_definition(
    &self,
    node: Node<'_>,
    file_path: &str,
    content: &str,
    concepts: &mut Vec<SemanticConcept>,
) -> Result<(), ParseError> {
    let decorators = self.extract_decorators(node, content);
    // The wrapped definition lives under the `definition` field.
    let definition_node = match self.find_child_by_field(node, "definition") {
        Some(n) => n,
        None => return Ok(()),
    };
    let extracted = match definition_node.kind() {
        "class_definition" => self.extract_class(definition_node, file_path, content)?,
        "function_definition" => self.extract_function(definition_node, file_path, content)?,
        _ => None,
    };
    if let Some(mut concept) = extracted {
        if !decorators.is_empty() {
            concept.metadata.insert("decorators".to_string(), decorators.join(", "));
        }
        concepts.push(concept);
    }
    Ok(())
}
/// Extract variable concepts from an assignment, one per bound name.
///
/// Records an explicit annotation under `"type"` when present, otherwise a
/// best-effort `"inferred_type"` derived from the right-hand literal.
fn extract_assignment(
    &self,
    node: Node<'_>,
    file_path: &str,
    content: &str,
    concepts: &mut Vec<SemanticConcept>,
) -> Result<(), ParseError> {
    // Get left-hand side (variable names); tuple targets yield several.
    if let Some(left_node) = self.find_child_by_field(node, "left") {
        let var_names = self.extract_variable_names(left_node, content);
        for var_name in var_names {
            if !var_name.is_empty() {
                let mut metadata = HashMap::new();
                // Check for type annotation
                if let Some(type_node) = self.find_child_by_field(node, "type") {
                    if let Some(type_text) = self.extract_text_from_node(type_node, content) {
                        metadata.insert("type".to_string(), type_text);
                    }
                } else {
                    // No annotation: try to infer type from right-hand side
                    if let Some(right_node) = self.find_child_by_field(node, "right") {
                        let inferred_type = self.infer_value_type(right_node);
                        if !inferred_type.is_empty() {
                            metadata.insert("inferred_type".to_string(), inferred_type);
                        }
                    }
                }
                concepts.push(self.create_concept(
                    var_name,
                    "variable".to_string(),
                    node,
                    file_path,
                    0.7,
                    metadata,
                ));
            }
        }
    }
    Ok(())
}
/// Extract an augmented assignment (`+=`, `-=`, ...) as a low-confidence
/// variable concept flagged with `augmented_assignment`.
fn extract_augmented_assignment(
    &self,
    node: Node<'_>,
    file_path: &str,
    content: &str,
) -> Result<Option<SemanticConcept>, ParseError> {
    let target = self
        .find_child_by_field(node, "left")
        .and_then(|left| self.extract_text_from_node(left, content))
        .unwrap_or_default();
    if target.is_empty() {
        return Ok(None);
    }
    let mut metadata = HashMap::new();
    metadata.insert("augmented_assignment".to_string(), "true".to_string());
    Ok(Some(self.create_concept(
        target,
        "variable".to_string(),
        node,
        file_path,
        0.6,
        metadata,
    )))
}
/// Extract a `type_alias_statement` node, recording the aliased type
/// expression under the `definition` metadata key.
fn extract_type_alias(
    &self,
    node: Node<'_>,
    file_path: &str,
    content: &str,
) -> Result<Option<SemanticConcept>, ParseError> {
    let left = match self.find_child_by_field(node, "left") {
        Some(n) => n,
        None => return Ok(None),
    };
    let name = self.extract_text_from_node(left, content).unwrap_or_default();
    let mut metadata = HashMap::new();
    // Record the right-hand type expression verbatim when present.
    if let Some(definition) = self
        .find_child_by_field(node, "right")
        .and_then(|right| self.extract_text_from_node(right, content))
    {
        metadata.insert("definition".to_string(), definition);
    }
    Ok(Some(self.create_concept(
        name,
        "type_alias".to_string(),
        node,
        file_path,
        0.8,
        metadata,
    )))
}
/// Extract a structural pattern-matching (`match`) statement, keyed by its
/// subject expression(s).
fn extract_match_statement(
    &self,
    node: Node<'_>,
    file_path: &str,
    content: &str,
) -> Result<Option<SemanticConcept>, ParseError> {
    let subjects = self.extract_match_subjects(node, content);
    if subjects.is_empty() {
        return Ok(None);
    }
    let mut metadata = HashMap::new();
    metadata.insert("subjects".to_string(), subjects.join(", "));
    // The number of `case` arms is a rough complexity signal.
    metadata.insert(
        "case_count".to_string(),
        self.count_case_clauses(node).to_string(),
    );
    Ok(Some(self.create_concept(
        format!("match_{}", subjects.join("_")),
        "match_statement".to_string(),
        node,
        file_path,
        0.7,
        metadata,
    )))
}
/// Extract an import concept from `import ...` or `from ... import ...`.
///
/// The concept name is synthesised from the module path(s); details go into
/// metadata (`modules`, `module`, `items`, `import_type`).
fn extract_import(
    &self,
    node: Node<'_>,
    file_path: &str,
    content: &str,
) -> Result<Option<SemanticConcept>, ParseError> {
    let mut metadata = HashMap::new();
    match node.kind() {
        "import_statement" => {
            let modules = self.extract_import_modules(node, content);
            if modules.is_empty() {
                return Ok(None);
            }
            metadata.insert("modules".to_string(), modules.join(", "));
            Ok(Some(self.create_concept(
                format!("import_{}", modules.join("_")),
                "import".to_string(),
                node,
                file_path,
                0.6,
                metadata,
            )))
        }
        "import_from_statement" => {
            if let Some(module_node) = self.find_child_by_field(node, "module_name") {
                let module_name = self.extract_text_from_node(module_node, content).unwrap_or_default();
                let imported_items = self.extract_import_items(node, content);
                metadata.insert("module".to_string(), module_name.clone());
                metadata.insert("import_type".to_string(), "from_import".to_string());
                if !imported_items.is_empty() {
                    metadata.insert("items".to_string(), imported_items.join(", "));
                }
                Ok(Some(self.create_concept(
                    format!("from_{}_import", module_name),
                    "import".to_string(),
                    node,
                    file_path,
                    0.6,
                    metadata,
                )))
            } else {
                // A from-import without a resolvable module name is skipped.
                Ok(None)
            }
        }
        _ => Ok(None)
    }
}
/// Extract a `with` / `async with` statement as a context-manager concept.
fn extract_with_statement(
    &self,
    node: Node<'_>,
    file_path: &str,
    content: &str,
) -> Result<Option<SemanticConcept>, ParseError> {
    let mut metadata = HashMap::new();
    // `async with` carries an `async` child token.
    if self.has_async_modifier(node) {
        metadata.insert("async".to_string(), "true".to_string());
    }
    let managers = self.extract_with_items(node, content);
    if managers.is_empty() {
        return Ok(None);
    }
    metadata.insert("context_managers".to_string(), managers.join(", "));
    Ok(Some(self.create_concept(
        format!("with_{}", managers.join("_")),
        "context_manager".to_string(),
        node,
        file_path,
        0.7,
        metadata,
    )))
}
// Helper methods for comprehensive Python extraction
/// Walk the direct children of a class body and accumulate structural facts
/// (method/property counts, constructor presence, decorator-derived flags).
///
/// Only top-level members are inspected; nested definitions are not
/// descended into.
fn analyze_class_body(&self, body_node: Node<'_>, content: &str) -> ClassInfo {
    let mut info = ClassInfo::default();
    let mut cursor = body_node.walk();
    for child in body_node.children(&mut cursor) {
        match child.kind() {
            "function_definition" => {
                info.method_count += 1;
                // Check method name
                if let Ok(method_name) = self.extract_name_from_node(child, content) {
                    if method_name == "__init__" {
                        info.has_init = true;
                    }
                }
                // Check for abstract methods
                if self.has_decorator_named(child, content, "abstractmethod") {
                    info.has_abstract_methods = true;
                }
                // Check for class/static methods
                if self.has_decorator_named(child, content, "classmethod") {
                    info.has_classmethods = true;
                }
                if self.has_decorator_named(child, content, "staticmethod") {
                    info.has_staticmethods = true;
                }
                if self.has_decorator_named(child, content, "property") {
                    info.property_count += 1;
                }
            }
            "decorated_definition" => {
                // Decorated methods: record the decorators, then count the
                // wrapped definition like a plain method.
                let decorators = self.extract_decorators(child, content);
                info.decorators.extend(decorators);
                if let Some(def_node) = self.find_child_by_field(child, "definition") {
                    if def_node.kind() == "function_definition" {
                        info.method_count += 1;
                        if let Ok(method_name) = self.extract_name_from_node(def_node, content) {
                            if method_name == "__init__" {
                                info.has_init = true;
                            }
                        }
                    }
                }
            }
            _ => {}
        }
    }
    info
}
/// True when any decorator on `node` contains `decorator_name` as a
/// substring (so `abc.abstractmethod` matches `abstractmethod`).
fn has_decorator_named(&self, node: Node<'_>, content: &str, decorator_name: &str) -> bool {
    self.extract_decorators(node, content)
        .into_iter()
        .any(|decorator| decorator.contains(decorator_name))
}
/// Resolve a node's name: prefer the `name` field, otherwise the first
/// direct `identifier` child; returns an empty string when neither exists.
fn extract_name_from_node(&self, node: Node<'_>, content: &str) -> Result<String, ParseError> {
    if let Some(name) = self
        .find_child_by_field(node, "name")
        .and_then(|name_node| self.extract_text_from_node(name_node, content))
    {
        return Ok(name);
    }
    let mut cursor = node.walk();
    let fallback = node
        .children(&mut cursor)
        .filter(|child| child.kind() == "identifier")
        .find_map(|child| self.extract_text_from_node(child, content));
    Ok(fallback.unwrap_or_default())
}
/// Thin wrapper over tree-sitter's field lookup, kept for readability at
/// call sites.
fn find_child_by_field<'a>(&self, node: Node<'a>, field_name: &str) -> Option<Node<'a>> {
    node.child_by_field_name(field_name)
}
/// Return the source text covered by `node`, or `None` when the node's byte
/// range does not denote a valid slice of `content`.
fn extract_text_from_node(&self, node: Node<'_>, content: &str) -> Option<String> {
    // `str::get` rejects out-of-range indices AND indices falling inside a
    // multi-byte UTF-8 character; the previous manual bounds check allowed
    // direct slicing to panic on the latter.
    content
        .get(node.start_byte()..node.end_byte())
        .map(|text| text.to_string())
}
/// Assemble a `SemanticConcept` with the standard id and line-range
/// bookkeeping.
///
/// The id is a millisecond timestamp; if the system clock reports a time
/// before the Unix epoch, a hash of `file_path` + `name` is used instead.
/// NOTE(review): timestamp ids are not unique within a millisecond —
/// confirm whether downstream consumers require uniqueness.
fn create_concept(
    &self,
    name: String,
    concept_type: String,
    node: Node<'_>,
    file_path: &str,
    confidence: f64,
    metadata: HashMap<String, String>,
) -> SemanticConcept {
    SemanticConcept {
        id: format!(
            "concept_{}",
            std::time::SystemTime::now()
                .duration_since(std::time::UNIX_EPOCH)
                .map(|d| d.as_millis())
                .unwrap_or_else(|_| {
                    // Pre-epoch clock: fall back to a deterministic hash.
                    use std::collections::hash_map::DefaultHasher;
                    use std::hash::{Hash, Hasher};
                    let mut hasher = DefaultHasher::new();
                    format!("{}{}", file_path, name).hash(&mut hasher);
                    hasher.finish() as u128
                })
        ),
        name,
        concept_type,
        confidence,
        file_path: file_path.to_string(),
        // tree-sitter rows are 0-based; stored line numbers are 1-based.
        line_range: LineRange {
            start: node.start_position().row as u32 + 1,
            end: node.end_position().row as u32 + 1,
        },
        relationships: HashMap::new(),
        metadata,
    }
}
/// True when the node has a direct `async` child token (`async def`,
/// `async with`, ...).
fn has_async_modifier(&self, node: Node<'_>) -> bool {
    let mut cursor = node.walk();
    node.children(&mut cursor).any(|child| child.kind() == "async")
}
/// Collect decorator texts from a decorated definition, with the leading
/// `@` stripped.
fn extract_decorators(&self, node: Node<'_>, content: &str) -> Vec<String> {
    let mut cursor = node.walk();
    node.children(&mut cursor)
        .filter(|child| child.kind() == "decorator")
        .filter_map(|child| self.extract_text_from_node(child, content))
        .map(|text| text.trim_start_matches('@').to_string())
        .collect()
}
/// Collect textual arguments from a parenthesised group (used for
/// superclass lists and similar constructs).
fn extract_argument_list(&self, node: Node<'_>, content: &str) -> Vec<String> {
    let mut cursor = node.walk();
    node.children(&mut cursor)
        .filter(|child| matches!(child.kind(), "expression" | "identifier"))
        .filter_map(|child| self.extract_text_from_node(child, content))
        .collect()
}
/// Return the raw text of a parameter list, type hints included.
/// (No per-parameter parsing is attempted; the verbatim text is stored.)
fn extract_parameters_with_types(&self, node: Node<'_>, content: &str) -> String {
    self.extract_text_from_node(node, content).unwrap_or_default()
}
/// Depth-first search for a `yield` node anywhere under `node`, which marks
/// the enclosing function as a generator.
fn contains_yield(&self, node: Node<'_>, _content: &str) -> bool {
    let mut cursor = node.walk();
    fn check_yield_recursive<'a>(node: Node<'a>, cursor: &mut tree_sitter::TreeCursor<'a>) -> bool {
        if node.kind() == "yield" {
            return true;
        }
        // Children are collected first so the shared cursor can be reused by
        // the recursive calls without invalidating the iteration.
        let children: Vec<_> = node.children(cursor).collect();
        for child in children {
            if check_yield_recursive(child, cursor) {
                return true;
            }
        }
        false
    }
    check_yield_recursive(node, &mut cursor)
}
/// Collect the identifier(s) bound by an assignment target.
///
/// Handles a bare identifier, a tuple-style `pattern_list` (recursively),
/// and otherwise falls back to the node's direct identifier children.
fn extract_variable_names(&self, node: Node<'_>, content: &str) -> Vec<String> {
    let mut names = Vec::new();
    match node.kind() {
        "identifier" => {
            if let Some(name) = self.extract_text_from_node(node, content) {
                names.push(name);
            }
        }
        "pattern_list" => {
            let mut cursor = node.walk();
            for child in node.children(&mut cursor) {
                names.extend(self.extract_variable_names(child, content));
            }
        }
        _ => {
            // Fall back to every direct identifier child (not just the first).
            let mut cursor = node.walk();
            for child in node.children(&mut cursor) {
                if child.kind() == "identifier" {
                    if let Some(name) = self.extract_text_from_node(child, content) {
                        names.push(name);
                    }
                }
            }
        }
    }
    names
}
/// Best-effort type inference from a literal right-hand side; returns an
/// empty string when the node kind carries no obvious type.
fn infer_value_type(&self, node: Node<'_>) -> String {
    let inferred = match node.kind() {
        "string" | "concatenated_string" => "str",
        "integer" => "int",
        "float" => "float",
        "true" | "false" => "bool",
        "none" => "None",
        "list" => "list",
        "dictionary" => "dict",
        "set" => "set",
        "tuple" => "tuple",
        "call" => "function_call",
        _ => "",
    };
    inferred.to_string()
}
/// Collect the subject expression(s) of a `match` statement.
///
/// First looks for a `subject` field on named children; if none is found,
/// falls back to the statement's direct `expression` children.
fn extract_match_subjects(&self, node: Node<'_>, content: &str) -> Vec<String> {
    let mut subjects = Vec::new();
    // Look for subject field directly
    let mut cursor = node.walk();
    for child in node.named_children(&mut cursor) {
        if let Some(subject_node) = self.find_child_by_field(child, "subject") {
            if let Some(subject_text) = self.extract_text_from_node(subject_node, content) {
                subjects.push(subject_text);
            }
        }
    }
    // If no subjects found, try to extract from expressions
    if subjects.is_empty() {
        let mut cursor = node.walk();
        for child in node.children(&mut cursor) {
            if child.kind() == "expression" {
                if let Some(subject_text) = self.extract_text_from_node(child, content) {
                    subjects.push(subject_text);
                }
            }
        }
    }
    subjects
}
/// Count the `case` arms of a `match` statement.
fn count_case_clauses(&self, node: Node<'_>) -> usize {
    let mut cursor = node.walk();
    node.children(&mut cursor)
        .filter(|child| child.kind() == "case_clause")
        .count()
}
/// Collect the (possibly dotted) module names of a plain `import` statement.
fn extract_import_modules(&self, node: Node<'_>, content: &str) -> Vec<String> {
    let mut cursor = node.walk();
    node.children(&mut cursor)
        .filter(|child| matches!(child.kind(), "dotted_name" | "identifier"))
        .filter_map(|child| self.extract_text_from_node(child, content))
        .collect()
}
/// Collect the imported names of a `from ... import ...` statement,
/// including `x as y` aliases.
fn extract_import_items(&self, node: Node<'_>, content: &str) -> Vec<String> {
    let mut cursor = node.walk();
    node.children(&mut cursor)
        .filter(|child| {
            matches!(child.kind(), "dotted_name" | "identifier" | "aliased_import")
        })
        .filter_map(|child| self.extract_text_from_node(child, content))
        .collect()
}
/// Collect the textual `with` items (context-manager expressions).
fn extract_with_items(&self, node: Node<'_>, content: &str) -> Vec<String> {
    let mut cursor = node.walk();
    node.children(&mut cursor)
        .filter(|child| child.kind() == "with_item")
        .filter_map(|child| self.extract_text_from_node(child, content))
        .collect()
}
}
impl Default for PythonExtractor {
    /// Equivalent to [`PythonExtractor::new`].
    fn default() -> Self {
        Self::new()
    }
}
#[cfg(test)]
mod tests {
use super::*;
use crate::parsing::ParserManager;
#[test]
fn test_python_class_extraction() {
    // A minimal class with a constructor must yield at least one concept.
    let extractor = PythonExtractor::new();
    let mut manager = ParserManager::new().unwrap();
    let code = "class User:\n def __init__(self):\n pass";
    let tree = manager.parse(code, "python").unwrap();
    let mut found = Vec::new();
    let _ = extractor.extract_concepts(tree.root_node(), "test.py", code, &mut found);
    assert!(!found.is_empty());
}
#[test]
fn test_python_function_extraction() {
    // A top-level function definition must be extracted.
    let extractor = PythonExtractor::new();
    let mut manager = ParserManager::new().unwrap();
    let code = "def calculate_total(price, tax):\n return price + tax";
    let tree = manager.parse(code, "python").unwrap();
    let mut found = Vec::new();
    let _ = extractor.extract_concepts(tree.root_node(), "calc.py", code, &mut found);
    assert!(!found.is_empty());
}
} | rust | MIT | 9cbed0d63a52d61fcd66d0d651b8c4d7f787ba78 | 2026-01-04T20:19:30.317431Z | false |
pi22by7/In-Memoria | https://github.com/pi22by7/In-Memoria/blob/9cbed0d63a52d61fcd66d0d651b8c4d7f787ba78/rust-core/src/extractors/csharp.rs | rust-core/src/extractors/csharp.rs | //! C# concept extraction
use crate::types::{SemanticConcept, LineRange, ParseError};
use crate::parsing::NameExtractor;
use std::collections::HashMap;
use tree_sitter::Node;
/// Tree-sitter-based extractor of C# semantic concepts (types, methods,
/// fields). Stateless unit struct.
pub struct CSharpExtractor;
impl CSharpExtractor {
/// Create a new (stateless) extractor.
pub fn new() -> Self { Self }
/// Map a C# AST node kind onto a concept category and, when it matches,
/// append the extracted concept. Unrecognised kinds are ignored.
pub fn extract_concepts(&self, node: Node<'_>, file_path: &str, content: &str, concepts: &mut Vec<SemanticConcept>) -> Result<(), ParseError> {
    // Translate the node kind into the generic concept taxonomy.
    let concept_type = match node.kind() {
        "class_declaration" | "interface_declaration" | "struct_declaration" | "enum_declaration" => "class",
        "method_declaration" | "constructor_declaration" => "function",
        "field_declaration" | "variable_declaration" => "variable",
        _ => return Ok(()),
    };
    if let Some(concept) = self.extract_concept_from_node(node, file_path, content, concept_type)? {
        concepts.push(concept);
    }
    Ok(())
}
/// Build a `SemanticConcept` for `node` using the shared `NameExtractor`;
/// returns `Ok(None)` for unnamed nodes.
///
/// NOTE(review): the id is a millisecond timestamp, so concepts created in
/// the same millisecond share an id — confirm uniqueness requirements.
fn extract_concept_from_node(&self, node: Node<'_>, file_path: &str, content: &str, concept_type: &str) -> Result<Option<SemanticConcept>, ParseError> {
    let name = NameExtractor::extract_name_from_node(node, content)
        .map_err(ParseError::from_reason)?;
    if name.is_empty() { return Ok(None); }
    Ok(Some(SemanticConcept {
        id: format!("concept_{}", std::time::SystemTime::now().duration_since(std::time::UNIX_EPOCH).map(|d| d.as_millis()).unwrap_or(0)),
        name, concept_type: concept_type.to_string(), confidence: 0.8, file_path: file_path.to_string(),
        // tree-sitter rows are 0-based; stored line numbers are 1-based.
        line_range: LineRange { start: node.start_position().row as u32 + 1, end: node.end_position().row as u32 + 1 },
        relationships: HashMap::new(), metadata: HashMap::new(),
    }))
}
}
/// Equivalent to `CSharpExtractor::new()`.
impl Default for CSharpExtractor { fn default() -> Self { Self::new() } }
#[cfg(test)]
mod tests {
use super::*;
use crate::parsing::ParserManager;
#[test]
fn test_csharp_class() {
    // Extraction over a trivial program must not error or panic; whether
    // any concepts are found is deliberately not asserted here.
    let extractor = CSharpExtractor::new();
    let mut manager = ParserManager::new().unwrap();
    let source = "public class Program { public static void Main() {} }";
    let tree = manager.parse(source, "csharp").unwrap();
    let mut found = Vec::new();
    let _ = extractor.extract_concepts(tree.root_node(), "Program.cs", source, &mut found);
}
} | rust | MIT | 9cbed0d63a52d61fcd66d0d651b8c4d7f787ba78 | 2026-01-04T20:19:30.317431Z | false |
pi22by7/In-Memoria | https://github.com/pi22by7/In-Memoria/blob/9cbed0d63a52d61fcd66d0d651b8c4d7f787ba78/rust-core/src/extractors/sql.rs | rust-core/src/extractors/sql.rs | //! Comprehensive SQL concept extraction using full grammar support
//!
//! This module provides detailed extraction of SQL constructs including:
//! - Tables (CREATE TABLE, columns, constraints)
//! - Views (CREATE VIEW)
//! - Functions and Procedures (CREATE FUNCTION)
//! - Indexes (CREATE INDEX)
//! - Queries (SELECT, INSERT, UPDATE, DELETE)
//! - Database objects (schemas, triggers, etc.)
use crate::types::{SemanticConcept, LineRange, ParseError};
// Remove unused import
use std::collections::HashMap;
use tree_sitter::Node;
/// Advanced SQL concept extractor using full grammar support.
///
/// Stateless unit struct: each call receives the AST node, file path, and
/// source text.
pub struct SqlExtractor;
impl SqlExtractor {
/// Create a new (stateless) extractor.
pub fn new() -> Self {
    Self
}
/// Dispatch a SQL AST node to the matching specialised extractor.
///
/// DDL nodes (tables, views, functions, indexes, triggers) and DML nodes
/// (select/insert/update/delete) each map to one concept; CREATE TABLE
/// additionally emits one concept per column.
pub fn extract_concepts(
    &self,
    node: Node<'_>,
    file_path: &str,
    content: &str,
    concepts: &mut Vec<SemanticConcept>,
) -> Result<(), ParseError> {
    match node.kind() {
        // DDL Statements
        "create_table" => {
            if let Some(concept) = self.extract_table(node, file_path, content)? {
                concepts.push(concept);
                // Also extract columns (only when the table itself was named)
                self.extract_table_columns(node, file_path, content, concepts)?;
            }
        }
        "create_view" => {
            if let Some(concept) = self.extract_view(node, file_path, content)? {
                concepts.push(concept);
            }
        }
        "create_function" => {
            if let Some(concept) = self.extract_function(node, file_path, content)? {
                concepts.push(concept);
            }
        }
        "create_index" => {
            if let Some(concept) = self.extract_index(node, file_path, content)? {
                concepts.push(concept);
            }
        }
        "create_trigger" => {
            if let Some(concept) = self.extract_trigger(node, file_path, content)? {
                concepts.push(concept);
            }
        }
        // DML Statements — both underscore-prefixed and plain node kinds are
        // matched; presumably these differ between grammar versions (TODO
        // confirm against the tree-sitter SQL grammar in use).
        "_select_statement" | "select" => {
            if let Some(concept) = self.extract_select_query(node, file_path, content)? {
                concepts.push(concept);
            }
        }
        "_insert_statement" | "insert" => {
            if let Some(concept) = self.extract_insert_query(node, file_path, content)? {
                concepts.push(concept);
            }
        }
        "_update_statement" | "update" => {
            if let Some(concept) = self.extract_update_query(node, file_path, content)? {
                concepts.push(concept);
            }
        }
        "_delete_statement" | "delete" => {
            if let Some(concept) = self.extract_delete_query(node, file_path, content)? {
                concepts.push(concept);
            }
        }
        // Standalone column definitions (outside a visited CREATE TABLE)
        "column_definition" => {
            if let Some(concept) = self.extract_column(node, file_path, content)? {
                concepts.push(concept);
            }
        }
        _ => {}
    }
    Ok(())
}
/// Extract a CREATE TABLE statement with table-level metadata
/// (temporary/unlogged/external flags, IF NOT EXISTS, column count).
///
/// Returns `Ok(None)` when the table name cannot be resolved.
fn extract_table(&self, node: Node<'_>, file_path: &str, content: &str) -> Result<Option<SemanticConcept>, ParseError> {
    let name = self.extract_object_name(node, content)?;
    if name.is_empty() {
        return Ok(None);
    }
    let mut metadata = HashMap::new();
    let mut relationships = HashMap::new();
    // Extract table type (temporary, unlogged, external, etc.)
    let mut table_type = "table".to_string();
    let mut cursor = node.walk();
    for child in node.children(&mut cursor) {
        match child.kind() {
            "keyword_temporary" => {
                table_type = "temporary_table".to_string();
                metadata.insert("temporary".to_string(), "true".to_string());
            }
            "keyword_unlogged" => {
                metadata.insert("unlogged".to_string(), "true".to_string());
            }
            "keyword_external" => {
                table_type = "external_table".to_string();
                metadata.insert("external".to_string(), "true".to_string());
            }
            "_if_not_exists" => {
                metadata.insert("if_not_exists".to_string(), "true".to_string());
            }
            "column_definitions" => {
                let column_count = self.count_columns(child);
                metadata.insert("column_count".to_string(), column_count.to_string());
            }
            _ => {}
        }
    }
    relationships.insert("sql_object_type".to_string(), table_type.clone());
    Ok(Some(SemanticConcept {
        // Deterministic id derived from file path + object name.
        id: format!("sql_table_{}_{}", file_path, name),
        name,
        concept_type: table_type,
        confidence: 0.95,
        file_path: file_path.to_string(),
        line_range: self.get_line_range(node),
        relationships,
        metadata,
    }))
}
/// Walk a `create_table` subtree and emit one concept per column
/// definition, each linked back to its owning table.
fn extract_table_columns(
    &self,
    table_node: Node<'_>,
    file_path: &str,
    content: &str,
    concepts: &mut Vec<SemanticConcept>,
) -> Result<(), ParseError> {
    let owner = self.extract_object_name(table_node, content)?;
    Self::walk_node_recursively(table_node, &mut |candidate| {
        if candidate.kind() != "column_definition" {
            return;
        }
        if let Ok(Some(mut column)) = self.extract_column(candidate, file_path, content) {
            // Record which table owns this column.
            column.relationships.insert("parent_table".to_string(), owner.clone());
            concepts.push(column);
        }
    });
    Ok(())
}
/// Extract a single column definition: its name, declared data type,
/// and any inline constraints (NOT NULL, PRIMARY KEY, ...).
fn extract_column(&self, node: Node<'_>, file_path: &str, content: &str) -> Result<Option<SemanticConcept>, ParseError> {
    let mut column_name = String::new();
    let mut metadata = HashMap::new();
    let mut relationships = HashMap::new();

    let mut walker = node.walk();
    for part in node.children(&mut walker) {
        match part.kind() {
            "_column" => {
                column_name = self.extract_identifier(part, content);
            }
            "_type" => {
                // Store the raw type text (e.g. `VARCHAR(100)`).
                metadata.insert("data_type".to_string(), self.extract_type_info(part, content));
            }
            "_column_constraint" => {
                metadata.extend(self.extract_constraint_info(part, content));
            }
            _ => {}
        }
    }

    if column_name.is_empty() {
        return Ok(None);
    }
    relationships.insert("sql_object_type".to_string(), "column".to_string());
    Ok(Some(SemanticConcept {
        id: format!("sql_column_{}_{}", file_path, column_name),
        name: column_name,
        concept_type: "column".to_string(),
        confidence: 0.9,
        file_path: file_path.to_string(),
        line_range: self.get_line_range(node),
        relationships,
        metadata,
    }))
}
/// Extract a view definition, capturing OR REPLACE / TEMPORARY /
/// RECURSIVE / IF NOT EXISTS modifiers and whether a body query exists.
fn extract_view(&self, node: Node<'_>, file_path: &str, content: &str) -> Result<Option<SemanticConcept>, ParseError> {
    let view_name = self.extract_object_name(node, content)?;
    if view_name.is_empty() {
        return Ok(None);
    }
    let mut metadata = HashMap::new();
    let mut relationships = HashMap::new();

    let mut walker = node.walk();
    for part in node.children(&mut walker) {
        // Every modifier maps onto a boolean metadata flag.
        let flag = match part.kind() {
            "_or_replace" => Some("or_replace"),
            "keyword_temporary" => Some("temporary"),
            "keyword_recursive" => Some("recursive"),
            "_if_not_exists" => Some("if_not_exists"),
            // Referenced tables of the body query could be extracted here.
            "create_query" | "_select_statement" => Some("has_query"),
            _ => None,
        };
        if let Some(key) = flag {
            metadata.insert(key.to_string(), "true".to_string());
        }
    }

    relationships.insert("sql_object_type".to_string(), "view".to_string());
    Ok(Some(SemanticConcept {
        id: format!("sql_view_{}_{}", file_path, view_name),
        name: view_name,
        concept_type: "view".to_string(),
        confidence: 0.95,
        file_path: file_path.to_string(),
        line_range: self.get_line_range(node),
        relationships,
        metadata,
    }))
}
/// Extract a function definition: OR REPLACE flag, argument count,
/// declared return type, and the SETOF marker.
fn extract_function(&self, node: Node<'_>, file_path: &str, content: &str) -> Result<Option<SemanticConcept>, ParseError> {
    let fn_name = self.extract_object_name(node, content)?;
    if fn_name.is_empty() {
        return Ok(None);
    }
    let mut metadata = HashMap::new();
    let mut relationships = HashMap::new();

    let mut walker = node.walk();
    for part in node.children(&mut walker) {
        match part.kind() {
            "_or_replace" => {
                metadata.insert("or_replace".to_string(), "true".to_string());
            }
            "function_arguments" => {
                metadata.insert(
                    "argument_count".to_string(),
                    self.count_function_arguments(part).to_string(),
                );
            }
            "_type" => {
                // A `_type` child of the function node is its return type.
                metadata.insert("return_type".to_string(), self.extract_type_info(part, content));
            }
            "keyword_setof" => {
                metadata.insert("returns_set".to_string(), "true".to_string());
            }
            _ => {}
        }
    }

    relationships.insert("sql_object_type".to_string(), "function".to_string());
    Ok(Some(SemanticConcept {
        id: format!("sql_function_{}_{}", file_path, fn_name),
        name: fn_name,
        concept_type: "function".to_string(),
        confidence: 0.9,
        file_path: file_path.to_string(),
        line_range: self.get_line_range(node),
        relationships,
        metadata,
    }))
}
/// Extract an index definition: uniqueness/concurrency flags, the first
/// `_column` child as the index name, and the indexed table reference.
fn extract_index(&self, node: Node<'_>, file_path: &str, content: &str) -> Result<Option<SemanticConcept>, ParseError> {
    let mut index_name = String::new();
    let mut metadata = HashMap::new();
    let mut relationships = HashMap::new();

    let mut walker = node.walk();
    for part in node.children(&mut walker) {
        match part.kind() {
            "keyword_unique" => {
                metadata.insert("unique".to_string(), "true".to_string());
            }
            "keyword_concurrently" => {
                metadata.insert("concurrent".to_string(), "true".to_string());
            }
            "_if_not_exists" => {
                metadata.insert("if_not_exists".to_string(), "true".to_string());
            }
            // Only the first `_column` child supplies the index's own name.
            "_column" if index_name.is_empty() => {
                index_name = self.extract_identifier(part, content);
            }
            "object_reference" => {
                let table = self.extract_identifier(part, content);
                relationships.insert("indexed_table".to_string(), table);
            }
            _ => {}
        }
    }

    if index_name.is_empty() {
        index_name = "unnamed_index".to_string();
    }
    relationships.insert("sql_object_type".to_string(), "index".to_string());
    Ok(Some(SemanticConcept {
        id: format!("sql_index_{}_{}", file_path, index_name),
        name: index_name,
        concept_type: "index".to_string(),
        confidence: 0.85,
        file_path: file_path.to_string(),
        line_range: self.get_line_range(node),
        relationships,
        metadata,
    }))
}
/// Extract a trigger definition, recording its timing (BEFORE / AFTER /
/// INSTEAD OF) and the events (INSERT / UPDATE / DELETE) it fires on.
///
/// Returns `Ok(None)` when no trigger name can be resolved.
fn extract_trigger(&self, node: Node<'_>, file_path: &str, content: &str) -> Result<Option<SemanticConcept>, ParseError> {
    let name = self.extract_object_name(node, content)?;
    if name.is_empty() {
        return Ok(None);
    }
    let mut metadata = HashMap::new();
    let mut relationships = HashMap::new();
    // Scan direct children for timing and event keywords. (The previous
    // version used a value-producing match whose `Option` results from
    // `insert` were discarded via `_ => None;` — plain unit arms are the
    // idiomatic form and behave identically.)
    let mut cursor = node.walk();
    for child in node.children(&mut cursor) {
        match child.kind() {
            "keyword_before" => {
                metadata.insert("timing".to_string(), "before".to_string());
            }
            "keyword_after" => {
                metadata.insert("timing".to_string(), "after".to_string());
            }
            "keyword_instead" => {
                metadata.insert("timing".to_string(), "instead_of".to_string());
            }
            "keyword_insert" => {
                metadata.insert("event_insert".to_string(), "true".to_string());
            }
            "keyword_update" => {
                metadata.insert("event_update".to_string(), "true".to_string());
            }
            "keyword_delete" => {
                metadata.insert("event_delete".to_string(), "true".to_string());
            }
            _ => {}
        }
    }
    relationships.insert("sql_object_type".to_string(), "trigger".to_string());
    Ok(Some(SemanticConcept {
        id: format!("sql_trigger_{}_{}", file_path, name),
        name,
        concept_type: "trigger".to_string(),
        confidence: 0.85,
        file_path: file_path.to_string(),
        line_range: self.get_line_range(node),
        relationships,
        metadata,
    }))
}
/// Build a `query` concept for a SELECT statement, recording every
/// `object_reference` in the subtree (FROM, JOINs, subqueries) as a
/// referenced table.
fn extract_select_query(&self, node: Node<'_>, file_path: &str, content: &str) -> Result<Option<SemanticConcept>, ParseError> {
    let mut metadata = HashMap::new();
    let mut relationships = HashMap::new();
    let mut referenced_tables = Vec::new();
    // Extract table references from FROM clauses and nested constructs.
    Self::walk_node_recursively(node, &mut |child| {
        if child.kind() == "object_reference" {
            let table_name = self.extract_identifier(child, content);
            if !table_name.is_empty() {
                referenced_tables.push(table_name);
            }
        }
    });
    if !referenced_tables.is_empty() {
        metadata.insert("referenced_tables".to_string(), referenced_tables.join(","));
    }
    // Fix: the previous version copied the full query text into a `_query_text`
    // String and discarded it — a dead allocation, now removed.
    let query_id = format!("sql_select_{}_{}", file_path, self.get_line_range(node).start);
    relationships.insert("sql_object_type".to_string(), "query".to_string());
    relationships.insert("query_type".to_string(), "select".to_string());
    Ok(Some(SemanticConcept {
        id: query_id,
        name: "SELECT Query".to_string(),
        concept_type: "query".to_string(),
        confidence: 0.75,
        file_path: file_path.to_string(),
        line_range: self.get_line_range(node),
        relationships,
        metadata,
    }))
}
/// Build a `query` concept for an INSERT statement, noting its target table.
fn extract_insert_query(&self, node: Node<'_>, file_path: &str, content: &str) -> Result<Option<SemanticConcept>, ParseError> {
    let mut metadata = HashMap::new();
    let mut relationships = HashMap::new();
    relationships.insert("sql_object_type".to_string(), "query".to_string());
    relationships.insert("query_type".to_string(), "insert".to_string());
    // The first object_reference in the subtree is the insert target.
    if let Some(table) = self.find_insert_target_table(node, content) {
        metadata.insert("target_table".to_string(), table);
    }
    let start_line = self.get_line_range(node).start;
    Ok(Some(SemanticConcept {
        id: format!("sql_insert_{}_{}", file_path, start_line),
        name: "INSERT Query".to_string(),
        concept_type: "query".to_string(),
        confidence: 0.75,
        file_path: file_path.to_string(),
        line_range: self.get_line_range(node),
        relationships,
        metadata,
    }))
}
/// Build a `query` concept for an UPDATE statement, noting its target table.
fn extract_update_query(&self, node: Node<'_>, file_path: &str, content: &str) -> Result<Option<SemanticConcept>, ParseError> {
    let mut metadata = HashMap::new();
    let mut relationships = HashMap::new();
    relationships.insert("sql_object_type".to_string(), "query".to_string());
    relationships.insert("query_type".to_string(), "update".to_string());
    // The first `relation` node names the table being updated.
    if let Some(table) = self.find_update_target_table(node, content) {
        metadata.insert("target_table".to_string(), table);
    }
    let start_line = self.get_line_range(node).start;
    Ok(Some(SemanticConcept {
        id: format!("sql_update_{}_{}", file_path, start_line),
        name: "UPDATE Query".to_string(),
        concept_type: "query".to_string(),
        confidence: 0.75,
        file_path: file_path.to_string(),
        line_range: self.get_line_range(node),
        relationships,
        metadata,
    }))
}
/// Build a `query` concept for a DELETE statement, noting its target table.
fn extract_delete_query(&self, node: Node<'_>, file_path: &str, content: &str) -> Result<Option<SemanticConcept>, ParseError> {
    let mut metadata = HashMap::new();
    let mut relationships = HashMap::new();
    relationships.insert("sql_object_type".to_string(), "query".to_string());
    relationships.insert("query_type".to_string(), "delete".to_string());
    // The relation under `_delete_from` names the table rows are removed from.
    if let Some(table) = self.find_delete_target_table(node, content) {
        metadata.insert("target_table".to_string(), table);
    }
    let start_line = self.get_line_range(node).start;
    Ok(Some(SemanticConcept {
        id: format!("sql_delete_{}_{}", file_path, start_line),
        name: "DELETE Query".to_string(),
        concept_type: "query".to_string(),
        confidence: 0.75,
        file_path: file_path.to_string(),
        line_range: self.get_line_range(node),
        relationships,
        metadata,
    }))
}
// Helper methods
/// Return the identifier of the first `object_reference` child, or an
/// empty string when the node has none. (Result is kept for signature
/// compatibility; this never errors.)
fn extract_object_name(&self, node: Node<'_>, content: &str) -> Result<String, ParseError> {
    let mut walker = node.walk();
    let name = node
        .children(&mut walker)
        .find(|child| child.kind() == "object_reference")
        .map(|child| self.extract_identifier(child, content))
        .unwrap_or_default();
    Ok(name)
}
/// Extract the final dotted segment of an identifier (e.g. `schema.users`
/// -> `users`), stripping surrounding `"` and `` ` `` quote characters.
fn extract_identifier(&self, node: Node<'_>, content: &str) -> String {
    let raw = self.get_node_text(node, content);
    let last_segment = raw.rsplit('.').next().unwrap_or("");
    last_segment.trim_matches('"').trim_matches('`').to_string()
}
/// Slice the node's byte span out of `content`; yields an empty string
/// if the span is out of bounds or not on a char boundary.
fn get_node_text(&self, node: Node<'_>, content: &str) -> String {
    let span = node.start_byte()..node.end_byte();
    content.get(span).unwrap_or_default().to_string()
}
/// Convert the node's zero-based row positions into a 1-based LineRange.
fn get_line_range(&self, node: Node<'_>) -> LineRange {
    let (first_row, last_row) = (node.start_position().row, node.end_position().row);
    LineRange {
        start: first_row as u32 + 1,
        end: last_row as u32 + 1,
    }
}
/// Depth-first, pre-order traversal: `callback` is invoked on `node`
/// itself first, then on every descendant in document order. Several
/// extractors rely on this visit order (e.g. "first match wins" lookups).
fn walk_node_recursively<F>(node: Node<'_>, callback: &mut F)
where
    F: FnMut(Node<'_>),
{
    callback(node);
    let mut cursor = node.walk();
    for child in node.children(&mut cursor) {
        Self::walk_node_recursively(child, callback);
    }
}
/// Count `column_definition` nodes anywhere beneath `node`.
fn count_columns(&self, node: Node<'_>) -> usize {
    let mut total = 0usize;
    Self::walk_node_recursively(node, &mut |candidate| {
        total += usize::from(candidate.kind() == "column_definition");
    });
    total
}
/// Count `function_argument` nodes anywhere beneath `node`.
fn count_function_arguments(&self, node: Node<'_>) -> usize {
    let mut total = 0usize;
    Self::walk_node_recursively(node, &mut |candidate| {
        total += usize::from(candidate.kind() == "function_argument");
    });
    total
}
/// Extract type information as the node's raw source text (e.g.
/// `VARCHAR(255)`); no normalization or parsing is performed.
fn extract_type_info(&self, node: Node<'_>, content: &str) -> String {
    self.get_node_text(node, content)
}
/// Map constraint keywords found beneath a column constraint node onto
/// string metadata flags (nullable, primary_key, unique, ...).
fn extract_constraint_info(&self, node: Node<'_>, _content: &str) -> HashMap<String, String> {
    let mut constraints = HashMap::new();
    Self::walk_node_recursively(node, &mut |child| {
        // Translate the node kind into a (key, value) metadata pair.
        let entry = match child.kind() {
            "keyword_null" => Some(("nullable", "true")),
            "_not_null" => Some(("nullable", "false")),
            "keyword_primary" => Some(("primary_key", "true")),
            "keyword_unique" => Some(("unique", "true")),
            "keyword_references" => Some(("foreign_key", "true")),
            "keyword_default" => Some(("has_default", "true")),
            _ => None,
        };
        if let Some((key, value)) = entry {
            constraints.insert(key.to_string(), value.to_string());
        }
    });
    constraints
}
/// Return the first `object_reference` beneath an INSERT node, which
/// names the table being inserted into.
fn find_insert_target_table(&self, node: Node<'_>, content: &str) -> Option<String> {
    let mut target: Option<String> = None;
    Self::walk_node_recursively(node, &mut |child| {
        if target.is_some() {
            return; // first match wins
        }
        if child.kind() == "object_reference" {
            target = Some(self.extract_identifier(child, content));
        }
    });
    target
}
/// Return the first `relation` beneath an UPDATE node, which names the
/// table being updated.
fn find_update_target_table(&self, node: Node<'_>, content: &str) -> Option<String> {
    let mut target: Option<String> = None;
    Self::walk_node_recursively(node, &mut |child| {
        if target.is_some() {
            return; // first match wins
        }
        if child.kind() == "relation" {
            target = Some(self.extract_identifier(child, content));
        }
    });
    target
}
/// Return the table named by the `relation` child of the first
/// `_delete_from` node beneath a DELETE statement.
fn find_delete_target_table(&self, node: Node<'_>, content: &str) -> Option<String> {
    let mut target: Option<String> = None;
    Self::walk_node_recursively(node, &mut |child| {
        if target.is_some() || child.kind() != "_delete_from" {
            return;
        }
        // The relation directly under `_delete_from` is the target table.
        let mut cursor = child.walk();
        target = child
            .children(&mut cursor)
            .find(|grandchild| grandchild.kind() == "relation")
            .map(|grandchild| self.extract_identifier(grandchild, content));
    });
    target
}
}
impl Default for SqlExtractor {
    /// Equivalent to [`SqlExtractor::new`].
    fn default() -> Self {
        Self::new()
    }
}
#[cfg(test)]
mod tests {
use super::*;
use crate::parsing::ParserManager;
/// Parse `sql` with the shared `ParserManager`; panics on parser failure
/// (acceptable in tests).
fn create_sql_tree(sql: &str) -> tree_sitter::Tree {
    let mut parser_manager = ParserManager::new().unwrap();
    parser_manager.parse(sql, "sql").unwrap()
}
/// Run the extractor over every node of the tree (depth-first) and
/// collect all concepts produced, ignoring per-node extraction errors.
fn extract_all_concepts(extractor: &SqlExtractor, tree: &tree_sitter::Tree, file_path: &str, content: &str) -> Vec<SemanticConcept> {
    let mut concepts = Vec::new();
    let _cursor = tree.root_node().walk();
    // Recursive helper: extract from this node, then recurse into children.
    fn walk_recursive(node: tree_sitter::Node<'_>, extractor: &SqlExtractor, file_path: &str, content: &str, concepts: &mut Vec<SemanticConcept>) {
        let _ = extractor.extract_concepts(node, file_path, content, concepts);
        let mut cursor = node.walk();
        for child in node.children(&mut cursor) {
            walk_recursive(child, extractor, file_path, content, concepts);
        }
    }
    walk_recursive(tree.root_node(), extractor, file_path, content, &mut concepts);
    concepts
}
#[test]
fn test_comprehensive_table_extraction() {
    // A CREATE TABLE exercising several constraint kinds; should produce
    // a table concept plus one concept per column.
    let extractor = SqlExtractor::new();
    let sql = r#"
CREATE TABLE users (
id INTEGER PRIMARY KEY,
name VARCHAR(100) NOT NULL,
email VARCHAR(255) UNIQUE,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);
"#;
    let tree = create_sql_tree(sql);
    let concepts = extract_all_concepts(&extractor, &tree, "test.sql", sql);
    println!("Found {} concepts", concepts.len());
    for concept in &concepts {
        println!("- {}: {} ({})", concept.name, concept.concept_type, concept.confidence);
    }
    // Should find table + columns
    assert!(concepts.len() >= 2); // At least table + some columns
    let table = concepts.iter().find(|c| c.concept_type == "table").unwrap();
    assert_eq!(table.name, "users");
    assert!(table.metadata.contains_key("column_count"));
    let columns: Vec<_> = concepts.iter().filter(|c| c.concept_type == "column").collect();
    assert!(columns.len() >= 2); // Should find multiple columns
    // Check column details
    let id_column = columns.iter().find(|c| c.name == "id");
    assert!(id_column.is_some());
    if let Some(col) = id_column {
        assert_eq!(col.metadata.get("data_type").unwrap(), "INTEGER");
        assert_eq!(col.metadata.get("primary_key").unwrap(), "true");
    }
}
#[test]
fn test_view_extraction() {
    // CREATE OR REPLACE VIEW should yield a "view" concept with the
    // or_replace and has_query metadata flags set.
    let extractor = SqlExtractor::new();
    let sql = r#"
CREATE OR REPLACE VIEW user_summary AS
SELECT id, name, email FROM users WHERE active = true;
"#;
    let tree = create_sql_tree(sql);
    let concepts = extract_all_concepts(&extractor, &tree, "test.sql", sql);
    let view = concepts.iter().find(|c| c.concept_type == "view");
    assert!(view.is_some());
    if let Some(v) = view {
        assert_eq!(v.name, "user_summary");
        assert_eq!(v.metadata.get("or_replace").unwrap(), "true");
        assert_eq!(v.metadata.get("has_query").unwrap(), "true");
    }
}
#[test]
fn test_function_extraction() {
    // A plpgsql function should yield a "function" concept whose
    // return_type metadata is populated.
    let extractor = SqlExtractor::new();
    let sql = r#"
CREATE FUNCTION get_user_count() RETURNS INTEGER AS $$
BEGIN
RETURN (SELECT COUNT(*) FROM users);
END;
$$ LANGUAGE plpgsql;
"#;
    let tree = create_sql_tree(sql);
    let concepts = extract_all_concepts(&extractor, &tree, "test.sql", sql);
    let function = concepts.iter().find(|c| c.concept_type == "function");
    assert!(function.is_some());
    if let Some(f) = function {
        assert_eq!(f.name, "get_user_count");
        assert!(f.metadata.contains_key("return_type"));
    }
}
#[test]
fn test_index_extraction() {
    // CREATE UNIQUE INDEX should set the unique flag and record the
    // indexed table in relationships.
    let extractor = SqlExtractor::new();
    let sql = r#"
CREATE UNIQUE INDEX idx_users_email ON users (email);
"#;
    let tree = create_sql_tree(sql);
    let concepts = extract_all_concepts(&extractor, &tree, "test.sql", sql);
    let index = concepts.iter().find(|c| c.concept_type == "index");
    assert!(index.is_some());
    if let Some(idx) = index {
        assert_eq!(idx.metadata.get("unique").unwrap(), "true");
        assert!(idx.relationships.contains_key("indexed_table"));
    }
}
#[test]
fn test_query_extraction() {
    // One statement of each DML kind; each should become a "query"
    // concept tagged with its query_type relationship.
    let extractor = SqlExtractor::new();
    let sql = r#"
SELECT u.name, u.email, p.title
FROM users u
JOIN profiles p ON u.id = p.user_id
WHERE u.active = true;
INSERT INTO users (name, email) VALUES ('John', 'john@example.com');
UPDATE users SET last_login = NOW() WHERE id = 1;
DELETE FROM users WHERE active = false;
"#;
    let tree = create_sql_tree(sql);
    let concepts = extract_all_concepts(&extractor, &tree, "test.sql", sql);
    let queries: Vec<_> = concepts.iter().filter(|c| c.concept_type == "query").collect();
    assert!(queries.len() >= 4); // SELECT, INSERT, UPDATE, DELETE
    let select_query = queries.iter().find(|q|
        q.relationships.get("query_type") == Some(&"select".to_string())
    );
    assert!(select_query.is_some());
    let insert_query = queries.iter().find(|q|
        q.relationships.get("query_type") == Some(&"insert".to_string())
    );
    assert!(insert_query.is_some());
}
#[test]
fn test_complex_sql_schema() {
let extractor = SqlExtractor::new();
let sql = r#"
CREATE TABLE orders (
id SERIAL PRIMARY KEY,
user_id INTEGER REFERENCES users(id),
total DECIMAL(10,2) NOT NULL,
status VARCHAR(20) DEFAULT 'pending'
);
CREATE INDEX idx_orders_user_id ON orders (user_id);
CREATE INDEX idx_orders_status ON orders (status);
CREATE VIEW active_orders AS
SELECT o.*, u.name as user_name
FROM orders o
JOIN users u ON o.user_id = u.id
WHERE o.status IN ('pending', 'processing');
"#;
let tree = create_sql_tree(sql);
let concepts = extract_all_concepts(&extractor, &tree, "schema.sql", sql);
println!("Complex schema found {} concepts:", concepts.len());
for concept in &concepts {
println!("- {}: {} (confidence: {:.2})",
concept.name, concept.concept_type, concept.confidence);
}
// Should find table, columns, indexes, and view
let tables: Vec<_> = concepts.iter().filter(|c| c.concept_type == "table").collect();
let columns: Vec<_> = concepts.iter().filter(|c| c.concept_type == "column").collect();
let indexes: Vec<_> = concepts.iter().filter(|c| c.concept_type == "index").collect();
let views: Vec<_> = concepts.iter().filter(|c| c.concept_type == "view").collect();
assert_eq!(tables.len(), 1);
assert!(columns.len() >= 3); // Multiple columns
assert!(indexes.len() >= 2); // Two indexes
assert_eq!(views.len(), 1);
// Verify table metadata
let orders_table = tables[0];
assert_eq!(orders_table.name, "orders");
// Verify foreign key constraint
let user_id_column = columns.iter().find(|c| c.name == "user_id");
assert!(user_id_column.is_some());
if let Some(col) = user_id_column {
| rust | MIT | 9cbed0d63a52d61fcd66d0d651b8c4d7f787ba78 | 2026-01-04T20:19:30.317431Z | true |
pi22by7/In-Memoria | https://github.com/pi22by7/In-Memoria/blob/9cbed0d63a52d61fcd66d0d651b8c4d7f787ba78/rust-core/src/extractors/mod.rs | rust-core/src/extractors/mod.rs | pub mod typescript;
pub mod rust;
pub mod python;
pub mod sql;
pub mod go;
pub mod java;
pub mod php;
pub mod cpp;
pub mod csharp;
pub mod svelte;
pub mod generic;
pub use typescript::*;
pub use rust::*;
pub use python::*;
pub use sql::*;
pub use go::*;
pub use java::*;
pub use php::*;
pub use cpp::*;
pub use csharp::*;
pub use svelte::*;
pub use generic::*;
| rust | MIT | 9cbed0d63a52d61fcd66d0d651b8c4d7f787ba78 | 2026-01-04T20:19:30.317431Z | false |
pi22by7/In-Memoria | https://github.com/pi22by7/In-Memoria/blob/9cbed0d63a52d61fcd66d0d651b8c4d7f787ba78/rust-core/src/extractors/java.rs | rust-core/src/extractors/java.rs | //! Java concept extraction
use crate::types::{SemanticConcept, LineRange, ParseError};
use crate::parsing::NameExtractor;
use std::collections::HashMap;
use tree_sitter::Node;
/// Extracts classes, methods, and fields from Java syntax trees.
pub struct JavaExtractor;

impl JavaExtractor {
    pub fn new() -> Self {
        Self
    }

    /// Classify the node's kind first, then build at most one concept.
    pub fn extract_concepts(&self, node: Node<'_>, file_path: &str, content: &str, concepts: &mut Vec<SemanticConcept>) -> Result<(), ParseError> {
        let concept_type = match node.kind() {
            "class_declaration" | "interface_declaration" | "enum_declaration" => "class",
            "method_declaration" | "constructor_declaration" => "function",
            "field_declaration" | "variable_declarator" => "variable",
            _ => return Ok(()),
        };
        if let Some(concept) = self.extract_concept_from_node(node, file_path, content, concept_type)? {
            concepts.push(concept);
        }
        Ok(())
    }

    /// Build a concept from a named node; returns `None` for anonymous nodes.
    fn extract_concept_from_node(&self, node: Node<'_>, file_path: &str, content: &str, concept_type: &str) -> Result<Option<SemanticConcept>, ParseError> {
        let name = NameExtractor::extract_name_from_node(node, content)
            .map_err(ParseError::from_reason)?;
        if name.is_empty() {
            return Ok(None);
        }
        // Millisecond timestamp gives a loosely-unique concept id.
        let stamp = std::time::SystemTime::now()
            .duration_since(std::time::UNIX_EPOCH)
            .map(|d| d.as_millis())
            .unwrap_or(0);
        Ok(Some(SemanticConcept {
            id: format!("concept_{}", stamp),
            name,
            concept_type: concept_type.to_string(),
            confidence: 0.8,
            file_path: file_path.to_string(),
            line_range: LineRange {
                start: node.start_position().row as u32 + 1,
                end: node.end_position().row as u32 + 1,
            },
            relationships: HashMap::new(),
            metadata: HashMap::new(),
        }))
    }
}
// Default is a thin alias for `new()`.
impl Default for JavaExtractor { fn default() -> Self { Self::new() } }
#[cfg(test)]
mod tests {
    use super::*;
    use crate::parsing::ParserManager;
    #[test]
    fn test_java_class() {
        // Smoke test: extracting from a minimal Java class must not panic.
        let extractor = JavaExtractor::new();
        let mut manager = ParserManager::new().unwrap();
        let code = "public class HelloWorld { public static void main(String[] args) {} }";
        let tree = manager.parse(code, "java").unwrap();
        let mut concepts = Vec::new();
        let _ = extractor.extract_concepts(tree.root_node(), "Hello.java", code, &mut concepts);
        // Length is always >= 0 for Vec
    }
}
pi22by7/In-Memoria | https://github.com/pi22by7/In-Memoria/blob/9cbed0d63a52d61fcd66d0d651b8c4d7f787ba78/rust-core/src/extractors/cpp.rs | rust-core/src/extractors/cpp.rs | //! C/C++ concept extraction
use crate::types::{SemanticConcept, LineRange, ParseError};
use crate::parsing::NameExtractor;
use std::collections::HashMap;
use tree_sitter::Node;
/// Extracts type, function, and variable declarations from C/C++ trees.
pub struct CppExtractor;

impl CppExtractor {
    pub fn new() -> Self {
        Self
    }

    /// Classify the node's kind first, then build at most one concept.
    pub fn extract_concepts(&self, node: Node<'_>, file_path: &str, content: &str, concepts: &mut Vec<SemanticConcept>) -> Result<(), ParseError> {
        let concept_type = match node.kind() {
            "struct_specifier" | "class_specifier" | "union_specifier" | "enum_specifier" => "class",
            "function_definition" | "function_declarator" => "function",
            "declaration" => "variable",
            _ => return Ok(()),
        };
        if let Some(concept) = self.extract_concept_from_node(node, file_path, content, concept_type)? {
            concepts.push(concept);
        }
        Ok(())
    }

    /// Build a concept from a named node; returns `None` for anonymous nodes.
    fn extract_concept_from_node(&self, node: Node<'_>, file_path: &str, content: &str, concept_type: &str) -> Result<Option<SemanticConcept>, ParseError> {
        let name = NameExtractor::extract_name_from_node(node, content)
            .map_err(ParseError::from_reason)?;
        if name.is_empty() {
            return Ok(None);
        }
        // Millisecond timestamp gives a loosely-unique concept id.
        let stamp = std::time::SystemTime::now()
            .duration_since(std::time::UNIX_EPOCH)
            .map(|d| d.as_millis())
            .unwrap_or(0);
        Ok(Some(SemanticConcept {
            id: format!("concept_{}", stamp),
            name,
            concept_type: concept_type.to_string(),
            confidence: 0.8,
            file_path: file_path.to_string(),
            line_range: LineRange {
                start: node.start_position().row as u32 + 1,
                end: node.end_position().row as u32 + 1,
            },
            relationships: HashMap::new(),
            metadata: HashMap::new(),
        }))
    }
}
// Default is a thin alias for `new()`.
impl Default for CppExtractor { fn default() -> Self { Self::new() } }
#[cfg(test)]
mod tests {
    use super::*;
    use crate::parsing::ParserManager;
    #[test]
    fn test_cpp_class() {
        // Smoke test: extracting from a minimal C++ class must not panic.
        let extractor = CppExtractor::new();
        let mut manager = ParserManager::new().unwrap();
        let code = "class HelloWorld { public: void sayHello(); };";
        let tree = manager.parse(code, "cpp").unwrap();
        let mut concepts = Vec::new();
        let _ = extractor.extract_concepts(tree.root_node(), "hello.cpp", code, &mut concepts);
        // Length is always >= 0 for Vec
    }
}
pi22by7/In-Memoria | https://github.com/pi22by7/In-Memoria/blob/9cbed0d63a52d61fcd66d0d651b8c4d7f787ba78/rust-core/src/extractors/generic.rs | rust-core/src/extractors/generic.rs | //! Generic concept extraction for unknown languages
use crate::types::{SemanticConcept, LineRange, ParseError};
use crate::parsing::NameExtractor;
use std::collections::HashMap;
use tree_sitter::Node;
/// Fallback extractor for languages without a dedicated implementation.
pub struct GenericExtractor;

impl GenericExtractor {
    pub fn new() -> Self {
        Self
    }

    /// Heuristically classify a node by substring-matching its kind name.
    pub fn extract_concepts(&self, node: Node<'_>, file_path: &str, content: &str, concepts: &mut Vec<SemanticConcept>) -> Result<(), ParseError> {
        let kind = node.kind();
        // "class" is checked before "function", mirroring the match order
        // of the language-specific extractors.
        let concept_type = if kind.contains("class") {
            "class"
        } else if kind.contains("function") {
            "function"
        } else {
            return Ok(());
        };
        if let Some(concept) = self.extract_concept_from_node(node, file_path, content, concept_type)? {
            concepts.push(concept);
        }
        Ok(())
    }

    /// Build a concept from a named node; returns `None` for anonymous nodes.
    fn extract_concept_from_node(&self, node: Node<'_>, file_path: &str, content: &str, concept_type: &str) -> Result<Option<SemanticConcept>, ParseError> {
        let name = NameExtractor::extract_name_from_node(node, content)
            .map_err(ParseError::from_reason)?;
        if name.is_empty() {
            return Ok(None);
        }
        // Millisecond timestamp gives a loosely-unique concept id.
        let stamp = std::time::SystemTime::now()
            .duration_since(std::time::UNIX_EPOCH)
            .map(|d| d.as_millis())
            .unwrap_or(0);
        Ok(Some(SemanticConcept {
            id: format!("concept_{}", stamp),
            name,
            concept_type: concept_type.to_string(),
            // Heuristic matches are lower-confidence than language-specific ones.
            confidence: 0.6,
            file_path: file_path.to_string(),
            line_range: LineRange {
                start: node.start_position().row as u32 + 1,
                end: node.end_position().row as u32 + 1,
            },
            relationships: HashMap::new(),
            metadata: HashMap::new(),
        }))
    }
}
// Default is a thin alias for `new()`.
impl Default for GenericExtractor { fn default() -> Self { Self::new() } }
#[cfg(test)]
mod tests {
    use super::*;
    use crate::parsing::ParserManager;
    #[test]
    fn test_generic_extraction() {
        // Smoke test: the generic extractor must not panic on a JS tree.
        let extractor = GenericExtractor::new();
        let mut manager = ParserManager::new().unwrap();
        let code = "function test() {}";
        let tree = manager.parse(code, "javascript").unwrap();
        let mut concepts = Vec::new();
        let _ = extractor.extract_concepts(tree.root_node(), "test.js", code, &mut concepts);
        // Length is always >= 0 for Vec
    }
}
pi22by7/In-Memoria | https://github.com/pi22by7/In-Memoria/blob/9cbed0d63a52d61fcd66d0d651b8c4d7f787ba78/rust-core/src/extractors/php.rs | rust-core/src/extractors/php.rs | //! PHP concept extraction with docblock awareness
use crate::parsing::NameExtractor;
use crate::types::{LineRange, ParseError, SemanticConcept};
use regex::Regex;
use std::collections::HashMap;
use tree_sitter::Node;
/// Extracts semantic concepts from PHP sources, including docblock metadata.
pub struct PhpExtractor;
impl Default for PhpExtractor {
    /// Equivalent to [`PhpExtractor::new`].
    fn default() -> Self {
        Self::new()
    }
}
impl PhpExtractor {
/// Construct a stateless PHP extractor.
pub fn new() -> Self {
    Self
}
/// Dispatch a PHP AST node to the appropriate extraction routine and
/// append any resulting concept(s) to `concepts`.
pub fn extract_concepts(
    &self,
    node: Node<'_>,
    file_path: &str,
    content: &str,
    concepts: &mut Vec<SemanticConcept>,
) -> Result<(), ParseError> {
    match node.kind() {
        // Type-like declarations reuse their own node kind as concept type.
        "class_declaration"
        | "interface_declaration"
        | "trait_declaration"
        | "enum_declaration" => {
            concepts.extend(self.build_named_construct(node, file_path, content, node.kind())?);
        }
        "anonymous_class" => {
            concepts.extend(self.extract_anonymous_class(node, file_path, content)?);
        }
        "function_definition" => {
            concepts.extend(self.build_named_construct(node, file_path, content, "function")?);
        }
        "method_declaration" => {
            concepts.extend(self.build_named_construct(node, file_path, content, "method")?);
        }
        "arrow_function_expression" => {
            concepts.extend(self.extract_arrow_function(node, file_path, content)?);
        }
        "property_declaration" => {
            // Each property_element in the declaration is its own property.
            let mut walker = node.walk();
            for element in node.children(&mut walker) {
                if element.kind() == "property_element" {
                    concepts.extend(
                        self.build_named_construct(element, file_path, content, "property")?,
                    );
                }
            }
        }
        "property_promotion_parameter" => {
            // PHP 8 constructor promotion declares a property inline.
            concepts.extend(self.build_named_construct(node, file_path, content, "property")?);
        }
        "const_declaration" => {
            // Each constant_declarator in the declaration is its own constant.
            let mut walker = node.walk();
            for declarator in node.children(&mut walker) {
                if declarator.kind() == "constant_declarator" {
                    concepts.extend(
                        self.build_named_construct(declarator, file_path, content, "constant")?,
                    );
                }
            }
        }
        "namespace_definition" => {
            concepts.extend(self.build_named_construct(node, file_path, content, "namespace")?);
        }
        "attribute_list" => {
            concepts.extend(self.extract_attribute(node, file_path, content)?);
        }
        _ => {}
    }
    Ok(())
}
fn build_named_construct(
&self,
node: Node<'_>,
file_path: &str,
content: &str,
concept_type: &str,
) -> Result<Option<SemanticConcept>, ParseError> {
let name = Self::extract_name(node, content)?;
if name.is_empty() {
return Ok(None);
}
let (start_line, start_col, end_line, end_col) = NameExtractor::get_position_info(node);
let mut metadata = HashMap::new();
metadata.insert("language".to_string(), "php".to_string());
let normalized_type = Self::normalize_concept_type(concept_type);
metadata.insert("kind".to_string(), normalized_type.to_string());
metadata.insert("start_column".to_string(), start_col.to_string());
metadata.insert("end_column".to_string(), end_col.to_string());
if let Some(visibility) = Self::extract_visibility(node, content) {
metadata.insert("visibility".to_string(), visibility);
}
if Self::has_modifier(node, "static") {
metadata.insert("static".to_string(), "true".to_string());
}
if Self::has_modifier(node, "abstract") {
metadata.insert("abstract".to_string(), "true".to_string());
}
if Self::has_modifier(node, "final") {
metadata.insert("final".to_string(), "true".to_string());
}
if let Some(return_type) = Self::extract_return_type(node, content) {
metadata.insert("return_type".to_string(), return_type);
}
if let Some(annotation) = Self::extract_type_annotation(node, content) {
metadata.insert("type".to_string(), annotation);
}
// Docblock parsing
if let Some(docblock) = Self::extract_docblock(node, content) {
if !docblock.description.is_empty() {
metadata.insert("docblock.description".to_string(), docblock.description);
}
if !docblock.params.is_empty() {
metadata.insert("docblock.params".to_string(), docblock.params.join("|"));
}
if let Some(ret) = docblock.returns {
metadata.insert("docblock.return".to_string(), ret);
}
if !docblock.throws.is_empty() {
metadata.insert("docblock.throws".to_string(), docblock.throws.join("|"));
}
}
// Traits used within classes
if normalized_type == "class" {
let traits = Self::collect_traits(node, content);
if !traits.is_empty() {
metadata.insert("traits".to_string(), traits.join(","));
}
}
Ok(Some(SemanticConcept {
id: format!("php::{}::{}::{}", concept_type, file_path, name),
name,
concept_type: normalized_type.to_string(),
confidence: 0.85,
file_path: file_path.to_string(),
line_range: LineRange {
start: start_line,
end: end_line,
},
relationships: HashMap::new(),
metadata,
}))
}
fn normalize_concept_type(concept_type: &str) -> &str {
match concept_type {
"class_declaration" => "class",
"interface_declaration" => "interface",
"trait_declaration" => "trait",
"enum_declaration" => "enum",
other => other,
}
}
fn extract_name(node: Node<'_>, content: &str) -> Result<String, ParseError> {
if let Some(named) = node.child_by_field_name("name") {
if let Some(text) = NameExtractor::extract_node_text(named, content) {
return Ok(text.to_string());
}
}
let fallback =
NameExtractor::extract_name_from_node(node, content).map_err(ParseError::from_reason)?;
if !fallback.is_empty() {
return Ok(fallback);
}
if let Some(var_node) = NameExtractor::find_child_by_kind(node, "variable_name") {
if let Some(text) = NameExtractor::extract_node_text(var_node, content) {
return Ok(text.trim_start_matches('$').to_string());
}
}
Ok(String::new())
}
fn extract_visibility(node: Node<'_>, content: &str) -> Option<String> {
let mut cursor = node.walk();
for child in node.children(&mut cursor) {
if matches!(child.kind(), "public" | "protected" | "private") {
return NameExtractor::extract_node_text(child, content).map(|s| s.to_string());
}
}
None
}
fn has_modifier(node: Node<'_>, modifier: &str) -> bool {
let mut cursor = node.walk();
for child in node.children(&mut cursor) {
if child.kind() == modifier {
return true;
}
}
false
}
fn extract_return_type(node: Node<'_>, content: &str) -> Option<String> {
if let Some(return_type) = node.child_by_field_name("return_type") {
if let Some(text) = NameExtractor::extract_node_text(return_type, content) {
return Some(text.to_string());
}
}
None
}
fn extract_type_annotation(node: Node<'_>, content: &str) -> Option<String> {
if let Some(annotation) = node.child_by_field_name("type") {
if let Some(text) = NameExtractor::extract_node_text(annotation, content) {
return Some(text.to_string());
}
}
None
}
fn extract_docblock(node: Node<'_>, content: &str) -> Option<DocBlock> {
let docblock_text = Self::find_docblock_above(node, content)?;
DocBlock::parse(&docblock_text)
}
fn find_docblock_above(node: Node<'_>, content: &str) -> Option<String> {
let start_byte = node.start_byte();
let mut index = start_byte;
while index > 0 {
let slice = &content[..index];
if let Some(pos) = slice.rfind("/**") {
// Only capture content from /** to the current search position (before the node)
let comment = &slice[pos..index];
if comment.contains("*/") {
let lines: Vec<&str> = comment.lines().collect();
if let Some(last_line) = lines.last() {
if last_line.trim().ends_with("*/") {
return Some(comment.to_string());
}
}
}
index = pos.saturating_sub(1);
} else {
break;
}
}
None
}
fn collect_traits(node: Node<'_>, content: &str) -> Vec<String> {
let mut traits = Vec::new();
let mut cursor = node.walk();
// Look for trait_use_clause nodes in the class body
for child in node.children(&mut cursor) {
if child.kind() == "trait_use_clause" {
let mut clause_cursor = child.walk();
for clause_child in child.children(&mut clause_cursor) {
if matches!(clause_child.kind(), "qualified_name" | "name") {
if let Some(name) = NameExtractor::extract_node_text(clause_child, content) {
traits.push(name.to_string());
}
}
}
}
}
traits
}
fn extract_anonymous_class(
&self,
node: Node<'_>,
file_path: &str,
_content: &str,
) -> Result<Option<SemanticConcept>, ParseError> {
let (start_line, start_col, end_line, end_col) = NameExtractor::get_position_info(node);
let mut metadata = HashMap::new();
metadata.insert("language".to_string(), "php".to_string());
metadata.insert("kind".to_string(), "class".to_string());
metadata.insert("anonymous".to_string(), "true".to_string());
metadata.insert("start_column".to_string(), start_col.to_string());
metadata.insert("end_column".to_string(), end_col.to_string());
Ok(Some(SemanticConcept {
id: format!("php::anonymous_class::{}::L{}", file_path, start_line),
name: "anonymous_class".to_string(),
concept_type: "class".to_string(),
confidence: 0.75,
file_path: file_path.to_string(),
line_range: LineRange {
start: start_line,
end: end_line,
},
relationships: HashMap::new(),
metadata,
}))
}
fn extract_arrow_function(
&self,
node: Node<'_>,
file_path: &str,
content: &str,
) -> Result<Option<SemanticConcept>, ParseError> {
let (start_line, start_col, end_line, end_col) = NameExtractor::get_position_info(node);
let mut metadata = HashMap::new();
metadata.insert("language".to_string(), "php".to_string());
metadata.insert("kind".to_string(), "function".to_string());
metadata.insert("arrow_function".to_string(), "true".to_string());
metadata.insert("start_column".to_string(), start_col.to_string());
metadata.insert("end_column".to_string(), end_col.to_string());
if let Some(return_type) = Self::extract_return_type(node, content) {
metadata.insert("return_type".to_string(), return_type);
}
Ok(Some(SemanticConcept {
id: format!("php::arrow_function::{}::L{}", file_path, start_line),
name: "arrow_function".to_string(),
concept_type: "function".to_string(),
confidence: 0.75,
file_path: file_path.to_string(),
line_range: LineRange {
start: start_line,
end: end_line,
},
relationships: HashMap::new(),
metadata,
}))
}
fn extract_attribute(
&self,
node: Node<'_>,
file_path: &str,
content: &str,
) -> Result<Option<SemanticConcept>, ParseError> {
let mut cursor = node.walk();
for child in node.children(&mut cursor) {
if child.kind() == "attribute" {
if let Some(name_node) = child.child_by_field_name("name") {
if let Some(attr_name) = NameExtractor::extract_node_text(name_node, content) {
let (start_line, start_col, end_line, end_col) = NameExtractor::get_position_info(child);
let mut metadata = HashMap::new();
metadata.insert("language".to_string(), "php".to_string());
metadata.insert("kind".to_string(), "attribute".to_string());
metadata.insert("start_column".to_string(), start_col.to_string());
metadata.insert("end_column".to_string(), end_col.to_string());
return Ok(Some(SemanticConcept {
id: format!("php::attribute::{}::{}", file_path, attr_name),
name: attr_name.to_string(),
concept_type: "attribute".to_string(),
confidence: 0.8,
file_path: file_path.to_string(),
line_range: LineRange {
start: start_line,
end: end_line,
},
relationships: HashMap::new(),
metadata,
}));
}
}
}
}
Ok(None)
}
}
struct DocBlock {
description: String,
params: Vec<String>,
returns: Option<String>,
throws: Vec<String>,
}
impl DocBlock {
fn parse(content: &str) -> Option<Self> {
let mut description = Vec::new();
let mut params = Vec::new();
let mut returns = None;
let mut throws = Vec::new();
let lines: Vec<&str> = content
.split('\n')
.map(|line| line.trim_start_matches(['*', '/', ' '].as_ref()))
.collect();
for line in lines {
if line.starts_with("@param") {
params.push(line.to_string());
} else if line.starts_with("@return") {
returns = Some(line.to_string());
} else if line.starts_with("@throws") {
throws.push(line.to_string());
} else if !line.is_empty() && !line.starts_with('@') {
description.push(line.to_string());
}
}
Some(Self {
description: description.join(" "),
params,
returns,
throws,
})
}
}
pub fn extract_php_concepts(
root_node: Node<'_>,
file_path: &str,
content: &str,
) -> Result<Vec<SemanticConcept>, ParseError> {
let extractor = PhpExtractor::new();
let mut concepts = Vec::new();
let mut cursor = root_node.walk();
for child in root_node.children(&mut cursor) {
extractor.extract_concepts(child, file_path, content, &mut concepts)?;
}
Ok(concepts)
}
pub fn extract_php_docblocks(content: &str) -> Vec<String> {
let docblock_pattern = Regex::new(r"/\*\*(?s).*?\*/").unwrap();
docblock_pattern
.find_iter(content)
.map(|mat| mat.as_str().to_string())
.collect()
}
| rust | MIT | 9cbed0d63a52d61fcd66d0d651b8c4d7f787ba78 | 2026-01-04T20:19:30.317431Z | false |
Gelbpunkt/tokio-websockets | https://github.com/Gelbpunkt/tokio-websockets/blob/618599d15e9ee2a9a7191d081a56d90eae4a811a/src/tls.rs | src/tls.rs | //! Wrapper types for TLS functionality, abstracting over [`rustls`] and
//! [`native-tls`] connector and stream types.
//!
//! [`native-tls`]: tokio_native_tls::native_tls
//! [`rustls`]: tokio_rustls::rustls
#[cfg(any(
feature = "rustls-webpki-roots",
feature = "rustls-native-roots",
feature = "rustls-platform-verifier"
))]
use std::sync::Arc;
use std::{
fmt::{Debug, Formatter, Result as FmtResult},
io,
pin::Pin,
task::{Context, Poll},
};
#[cfg(any(
feature = "rustls-webpki-roots",
feature = "rustls-native-roots",
feature = "rustls-platform-verifier",
feature = "rustls-bring-your-own-connector"
))]
use rustls_pki_types::ServerName;
#[cfg(feature = "rustls-platform-verifier")]
use rustls_platform_verifier::BuilderVerifierExt;
use tokio::io::{AsyncRead, AsyncWrite, ReadBuf};
#[cfg(any(
feature = "rustls-native-roots",
feature = "rustls-webpki-roots",
feature = "rustls-platform-verifier"
))]
use tokio_rustls::rustls::ClientConfig;
use crate::Error;
/// A reusable TLS connector for wrapping streams.
pub enum Connector {
/// Plain (non-TLS) connector.
Plain,
/// [`native-tls`] TLS connector.
///
/// [`native-tls`]: tokio_native_tls::native_tls
#[cfg(feature = "native-tls")]
NativeTls(tokio_native_tls::TlsConnector),
/// [`rustls`] TLS connector.
///
/// [`rustls`]: tokio_rustls::rustls
#[cfg(any(
feature = "rustls-native-roots",
feature = "rustls-webpki-roots",
feature = "rustls-platform-verifier",
feature = "rustls-bring-your-own-connector"
))]
Rustls(tokio_rustls::TlsConnector),
}
impl Debug for Connector {
fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
match self {
Self::Plain => f.write_str("Connector::Plain"),
#[cfg(feature = "native-tls")]
Self::NativeTls(connector) => connector.fmt(f),
#[cfg(any(
feature = "rustls-native-roots",
feature = "rustls-webpki-roots",
feature = "rustls-platform-verifier",
feature = "rustls-bring-your-own-connector"
))]
Self::Rustls(_) => f.write_str("Connector::Rustls"),
}
}
}
/// A stream that might be protected with TLS.
#[allow(clippy::large_enum_variant)] // Only one or two of these will be used
#[derive(Debug)]
pub enum MaybeTlsStream<S> {
/// Unencrypted socket stream.
Plain(S),
/// Encrypted socket stream using [`native-tls`].
///
/// [`native-tls`]: tokio_native_tls::native_tls
#[cfg(feature = "native-tls")]
NativeTls(tokio_native_tls::TlsStream<S>),
/// Encrypted socket stream using [`rustls`].
///
/// [`rustls`]: tokio_rustls::rustls
#[cfg(any(
feature = "rustls-native-roots",
feature = "rustls-webpki-roots",
feature = "rustls-platform-verifier",
feature = "rustls-bring-your-own-connector"
))]
Rustls(tokio_rustls::client::TlsStream<S>),
}
impl<S: AsyncRead + AsyncWrite + Unpin> AsyncRead for MaybeTlsStream<S> {
fn poll_read(
self: Pin<&mut Self>,
cx: &mut Context<'_>,
buf: &mut ReadBuf<'_>,
) -> Poll<io::Result<()>> {
match self.get_mut() {
Self::Plain(s) => Pin::new(s).poll_read(cx, buf),
#[cfg(feature = "native-tls")]
Self::NativeTls(s) => Pin::new(s).poll_read(cx, buf),
#[cfg(any(
feature = "rustls-native-roots",
feature = "rustls-webpki-roots",
feature = "rustls-platform-verifier",
feature = "rustls-bring-your-own-connector"
))]
Self::Rustls(s) => Pin::new(s).poll_read(cx, buf),
}
}
}
impl<S: AsyncRead + AsyncWrite + Unpin> AsyncWrite for MaybeTlsStream<S> {
fn poll_write(
self: Pin<&mut Self>,
cx: &mut Context<'_>,
buf: &[u8],
) -> Poll<Result<usize, io::Error>> {
match self.get_mut() {
Self::Plain(s) => Pin::new(s).poll_write(cx, buf),
#[cfg(feature = "native-tls")]
Self::NativeTls(s) => Pin::new(s).poll_write(cx, buf),
#[cfg(any(
feature = "rustls-native-roots",
feature = "rustls-webpki-roots",
feature = "rustls-platform-verifier",
feature = "rustls-bring-your-own-connector"
))]
Self::Rustls(s) => Pin::new(s).poll_write(cx, buf),
}
}
fn poll_flush(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Result<(), io::Error>> {
match self.get_mut() {
Self::Plain(s) => Pin::new(s).poll_flush(cx),
#[cfg(feature = "native-tls")]
Self::NativeTls(s) => Pin::new(s).poll_flush(cx),
#[cfg(any(
feature = "rustls-native-roots",
feature = "rustls-webpki-roots",
feature = "rustls-platform-verifier",
feature = "rustls-bring-your-own-connector"
))]
Self::Rustls(s) => Pin::new(s).poll_flush(cx),
}
}
fn poll_shutdown(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Result<(), io::Error>> {
match self.get_mut() {
Self::Plain(s) => Pin::new(s).poll_shutdown(cx),
#[cfg(feature = "native-tls")]
Self::NativeTls(s) => Pin::new(s).poll_shutdown(cx),
#[cfg(any(
feature = "rustls-native-roots",
feature = "rustls-webpki-roots",
feature = "rustls-platform-verifier",
feature = "rustls-bring-your-own-connector"
))]
Self::Rustls(s) => Pin::new(s).poll_shutdown(cx),
}
}
fn poll_write_vectored(
self: Pin<&mut Self>,
cx: &mut Context<'_>,
bufs: &[io::IoSlice<'_>],
) -> Poll<Result<usize, io::Error>> {
match self.get_mut() {
Self::Plain(s) => Pin::new(s).poll_write_vectored(cx, bufs),
#[cfg(feature = "native-tls")]
Self::NativeTls(s) => Pin::new(s).poll_write_vectored(cx, bufs),
#[cfg(any(
feature = "rustls-native-roots",
feature = "rustls-webpki-roots",
feature = "rustls-platform-verifier",
feature = "rustls-bring-your-own-connector"
))]
Self::Rustls(s) => Pin::new(s).poll_write_vectored(cx, bufs),
}
}
fn is_write_vectored(&self) -> bool {
match self {
Self::Plain(s) => s.is_write_vectored(),
#[cfg(feature = "native-tls")]
Self::NativeTls(s) => s.is_write_vectored(),
#[cfg(any(
feature = "rustls-native-roots",
feature = "rustls-webpki-roots",
feature = "rustls-platform-verifier",
feature = "rustls-bring-your-own-connector"
))]
Self::Rustls(s) => s.is_write_vectored(),
}
}
}
impl Connector {
/// Creates a new `Connector` with the underlying TLS library specified in
/// the feature flags.
///
/// # Errors
///
/// This method returns an [`Error`] when creating the underlying TLS
/// connector fails.
///
/// # Panics
///
/// Panics if no rustls crypto provider is installed and rustls is unable
/// to determine a default crypto provider from its feature flags.
pub fn new() -> Result<Self, Error> {
#[cfg(not(any(
feature = "native-tls",
feature = "rustls-webpki-roots",
feature = "rustls-native-roots",
feature = "rustls-platform-verifier"
)))]
{
Ok(Self::Plain)
}
#[cfg(all(
feature = "native-tls",
not(any(
feature = "rustls-webpki-roots",
feature = "rustls-native-roots",
feature = "rustls-platform-verifier"
))
))]
{
Ok(Self::NativeTls(
tokio_native_tls::native_tls::TlsConnector::new()?.into(),
))
}
#[cfg(any(
feature = "rustls-webpki-roots",
feature = "rustls-native-roots",
feature = "rustls-platform-verifier"
))]
{
let config_builder = ClientConfig::builder();
#[cfg(feature = "rustls-platform-verifier")]
let config_builder = config_builder.with_platform_verifier()?;
#[cfg(not(feature = "rustls-platform-verifier"))]
let config_builder = {
let mut roots = tokio_rustls::rustls::RootCertStore::empty();
#[cfg(feature = "rustls-native-roots")]
{
#[cfg_attr(feature = "rustls-webpki-roots", allow(unused))]
let rustls_native_certs::CertificateResult { certs, errors, .. } =
rustls_native_certs::load_native_certs();
// Not finding any native roots is not fatal if webpki roots are enabled
#[cfg(not(feature = "rustls-webpki-roots"))]
if certs.is_empty() {
return Err(Error::NoNativeRootCertificatesFound(errors));
}
for cert in certs {
roots.add(cert)?;
}
}
#[cfg(feature = "rustls-webpki-roots")]
{
roots.extend(webpki_roots::TLS_SERVER_ROOTS.iter().cloned());
};
config_builder.with_root_certificates(roots)
};
Ok(Self::Rustls(tokio_rustls::TlsConnector::from(Arc::new(
config_builder.with_no_client_auth(),
))))
}
}
/// Wraps a given stream with a layer of TLS.
///
/// # Errors
///
/// This method returns an [`Error`] if the TLS handshake fails.
#[cfg_attr(
not(any(
feature = "native-tls",
feature = "rustls-webpki-roots",
feature = "rustls-native-roots",
feature = "rustls-platform-verifier",
feature = "rustls-bring-your-own-connector"
)),
allow(unused_variables, clippy::unused_async)
)]
pub async fn wrap<S: AsyncRead + AsyncWrite + Unpin>(
&self,
domain: &str,
stream: S,
) -> Result<MaybeTlsStream<S>, Error> {
match self {
Self::Plain => Ok(MaybeTlsStream::Plain(stream)),
#[cfg(feature = "native-tls")]
Self::NativeTls(connector) => Ok(MaybeTlsStream::NativeTls(
connector.connect(domain, stream).await?,
)),
#[cfg(any(
feature = "rustls-webpki-roots",
feature = "rustls-native-roots",
feature = "rustls-platform-verifier",
feature = "rustls-bring-your-own-connector"
))]
Self::Rustls(connector) => Ok(MaybeTlsStream::Rustls(
connector
.connect(ServerName::try_from(domain)?.to_owned(), stream)
.await?,
)),
}
}
}
| rust | MIT | 618599d15e9ee2a9a7191d081a56d90eae4a811a | 2026-01-04T20:19:32.035255Z | false |
Gelbpunkt/tokio-websockets | https://github.com/Gelbpunkt/tokio-websockets/blob/618599d15e9ee2a9a7191d081a56d90eae4a811a/src/lib.rs | src/lib.rs | #![deny(
clippy::pedantic,
clippy::missing_docs_in_private_items,
clippy::missing_errors_doc,
rustdoc::broken_intra_doc_links,
warnings
)]
#![cfg_attr(docsrs, feature(doc_cfg))]
// Required for NEON on 32-bit ARM until stable
#![cfg_attr(
all(feature = "nightly", target_arch = "arm"),
feature(
stdarch_arm_neon_intrinsics,
stdarch_arm_feature_detection,
arm_target_feature
)
)]
// Required for VSX until stable
#![cfg_attr(
all(
feature = "nightly",
any(target_arch = "powerpc64", target_arch = "powerpc")
),
feature(
stdarch_powerpc,
stdarch_powerpc_feature_detection,
powerpc_target_feature
)
)]
// Required for s390x vectors until stable
#![cfg_attr(
all(feature = "nightly", target_arch = "s390x"),
feature(stdarch_s390x, s390x_target_feature)
)]
// Required for LASX until stable
#![cfg_attr(
all(feature = "nightly", target_arch = "loongarch64"),
feature(stdarch_loongarch)
)]
#![cfg_attr(all(feature = "client", feature = "nightly"), feature(random))]
#![doc = include_str!("../README.md")]
// If the client or server implementation is enabled, at least one SHA1 backend
// is required.
#[cfg(all(
any(feature = "client", feature = "server"),
not(any(
feature = "ring",
feature = "aws_lc_rs",
feature = "openssl",
feature = "sha1_smol"
))
))]
compile_error!("client and server implementation require at least one SHA1 backend");
#[cfg(feature = "client")]
pub mod client;
pub mod error;
mod mask;
pub mod proto;
#[cfg(feature = "client")]
mod rand;
#[cfg(feature = "client")]
pub mod resolver;
#[cfg(feature = "server")]
pub mod server;
#[cfg(any(feature = "client", feature = "server"))]
mod sha;
pub mod tls;
#[cfg(any(feature = "client", feature = "server"))]
pub mod upgrade;
mod utf8;
#[cfg(feature = "client")]
pub use client::Builder as ClientBuilder;
pub use error::Error;
pub use proto::{CloseCode, Config, Limits, Message, Payload, WebSocketStream};
#[cfg(feature = "server")]
pub use server::Builder as ServerBuilder;
pub use tls::{Connector, MaybeTlsStream};
| rust | MIT | 618599d15e9ee2a9a7191d081a56d90eae4a811a | 2026-01-04T20:19:32.035255Z | false |
Gelbpunkt/tokio-websockets | https://github.com/Gelbpunkt/tokio-websockets/blob/618599d15e9ee2a9a7191d081a56d90eae4a811a/src/sha.rs | src/sha.rs | //! Unified abstraction over all supported SHA-1 backends.
#[cfg(all(feature = "aws_lc_rs", not(feature = "openssl")))]
use aws_lc_rs::digest;
#[cfg(feature = "openssl")]
use openssl::sha::Sha1;
#[cfg(all(feature = "ring", not(feature = "aws_lc_rs"), not(feature = "openssl")))]
use ring::digest;
#[cfg(all(
feature = "sha1_smol",
not(feature = "ring"),
not(feature = "aws_lc_rs"),
not(feature = "openssl")
))]
use sha1_smol::Sha1;
/// The Globally Unique Identifier (GUID) used in the WebSocket protocol (see [the RFC](https://datatracker.ietf.org/doc/html/rfc6455#section-1.3)).
const GUID: &str = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11";
/// Calculate the SHA-1 digest of a WebSocket key and the GUID using the
/// [`sha1_smol`] crate.
#[cfg(all(
feature = "sha1_smol",
not(feature = "aws_lc_rs"),
not(feature = "ring"),
not(feature = "openssl")
))]
pub fn digest(key: &[u8]) -> [u8; 20] {
let mut s = Sha1::new();
s.update(key);
s.update(GUID.as_bytes());
s.digest().bytes()
}
/// Calculate the SHA-1 digest of a WebSocket key and the GUID using the
/// [`ring`] crate.
#[cfg(all(feature = "ring", not(feature = "aws_lc_rs"), not(feature = "openssl")))]
pub fn digest(key: &[u8]) -> [u8; 20] {
let mut ctx = digest::Context::new(&digest::SHA1_FOR_LEGACY_USE_ONLY);
ctx.update(key);
ctx.update(GUID.as_bytes());
ctx.finish().as_ref().try_into().unwrap()
}
/// Calculate the SHA-1 digest of a WebSocket key and the GUID using the
/// [`aws-lc-rs`] crate.
#[cfg(all(feature = "aws_lc_rs", not(feature = "openssl")))]
pub fn digest(key: &[u8]) -> [u8; 20] {
let mut ctx = digest::Context::new(&digest::SHA1_FOR_LEGACY_USE_ONLY);
ctx.update(key);
ctx.update(GUID.as_bytes());
ctx.finish().as_ref().try_into().unwrap()
}
/// Calculate the SHA-1 digest of a WebSocket key and the GUID using the
/// [`openssl`] crate.
#[cfg(feature = "openssl")]
pub fn digest(key: &[u8]) -> [u8; 20] {
let mut hasher = Sha1::new();
hasher.update(key);
hasher.update(GUID.as_bytes());
hasher.finish()
}
| rust | MIT | 618599d15e9ee2a9a7191d081a56d90eae4a811a | 2026-01-04T20:19:32.035255Z | false |
Gelbpunkt/tokio-websockets | https://github.com/Gelbpunkt/tokio-websockets/blob/618599d15e9ee2a9a7191d081a56d90eae4a811a/src/mask.rs | src/mask.rs | //! This module contains six implementations of WebSocket frame masking and
//! unmasking, all of them using the same algorithm and methods:
//! - One AVX512-based implementation that masks 64 bytes per cycle (requires
//! nightly rust)
//! - One AVX2-based implementation that masks 32 bytes per cycle
//! - One SSE2-based implementation that masks 16 bytes per cycle
//! - One NEON-based implementation that masks 16 bytes per cycle (requires
//! nightly rust on 32-bit ARM)
//! - One AltiVec-based implementation that masks 16 bytes per cycle (requires
//! nightly rust)
//! - One IBM z13 vector facility based implementation that masks 16 bytes per
//! cycle (requires nightly rust)
//! - One LASX based implementation that masks 32 bytes per cycle (requires
//! nightly rust)
//! - A fallback implementation that masks 8 bytes per cycle
//!
//! The SIMD implementations will be used if CPU support for them is detected at
//! runtime or enabled at compile time via compiler flags.
/// (Un-)masks input bytes with the framing key using AVX512.
///
/// This will use a fallback implementation for less than 64 bytes. For
/// sufficiently large inputs, it masks in chunks of 64 bytes per
/// instruction, applying the fallback method on all remaining data.
#[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
#[allow(clippy::incompatible_msrv)] // nightly feature gated, stable since 1.89.0
#[target_feature(enable = "avx512f")]
unsafe fn frame_avx512(key: &mut [u8; 4], input: &mut [u8]) {
#[cfg(target_arch = "x86")]
use std::arch::x86::{__m512i, _mm512_set1_epi32, _mm512_xor_si512};
#[cfg(target_arch = "x86_64")]
use std::arch::x86_64::{__m512i, _mm512_set1_epi32, _mm512_xor_si512};
unsafe {
let (prefix, aligned_data, suffix) = input.align_to_mut::<__m512i>();
// Run fallback implementation on unaligned prefix data
if !prefix.is_empty() {
fallback_frame(key, prefix);
}
if !aligned_data.is_empty() {
let mask = _mm512_set1_epi32(i32::from_ne_bytes(*key));
for block in aligned_data {
*block = _mm512_xor_si512(*block, mask);
}
}
// Run fallback implementation on unaligned suffix data
if !suffix.is_empty() {
fallback_frame(key, suffix);
}
}
}
/// (Un-)masks input bytes with the framing key using AVX2.
///
/// This will use a fallback implementation for less than 32 bytes. For
/// sufficiently large inputs, it masks in chunks of 32 bytes per
/// instruction, applying the fallback method on all remaining data.
#[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
#[target_feature(enable = "avx2")]
unsafe fn frame_avx2(key: &mut [u8; 4], input: &mut [u8]) {
#[cfg(target_arch = "x86")]
use std::arch::x86::{__m256i, _mm256_set1_epi32, _mm256_xor_si256};
#[cfg(target_arch = "x86_64")]
use std::arch::x86_64::{__m256i, _mm256_set1_epi32, _mm256_xor_si256};
unsafe {
let (prefix, aligned_data, suffix) = input.align_to_mut::<__m256i>();
// Run fallback implementation on unaligned prefix data
if !prefix.is_empty() {
fallback_frame(key, prefix);
}
if !aligned_data.is_empty() {
let mask = _mm256_set1_epi32(i32::from_ne_bytes(*key));
for block in aligned_data {
*block = _mm256_xor_si256(*block, mask);
}
}
// Run fallback implementation on unaligned suffix data
if !suffix.is_empty() {
fallback_frame(key, suffix);
}
}
}
/// (Un-)masks input bytes with the framing key using SSE2.
///
/// This will use a fallback implementation for less than 16 bytes. For
/// sufficiently large inputs, it masks in chunks of 16 bytes per
/// instruction, applying the fallback method on all remaining data.
#[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
#[target_feature(enable = "sse2")]
unsafe fn frame_sse2(key: &mut [u8; 4], input: &mut [u8]) {
#[cfg(target_arch = "x86")]
use std::arch::x86::{__m128i, _mm_set1_epi32, _mm_xor_si128};
#[cfg(target_arch = "x86_64")]
use std::arch::x86_64::{__m128i, _mm_set1_epi32, _mm_xor_si128};
unsafe {
let (prefix, aligned_data, suffix) = input.align_to_mut::<__m128i>();
// Run fallback implementation on unaligned prefix data
if !prefix.is_empty() {
fallback_frame(key, prefix);
}
if !aligned_data.is_empty() {
let mask = _mm_set1_epi32(i32::from_ne_bytes(*key));
for block in aligned_data {
*block = _mm_xor_si128(*block, mask);
}
}
// Run fallback implementation on unaligned suffix data
if !suffix.is_empty() {
fallback_frame(key, suffix);
}
}
}
/// (Un-)masks input bytes with the framing key using NEON.
///
/// This will use a fallback implementation for less than 16 bytes. For
/// sufficiently large inputs, it masks in chunks of 16 bytes per
/// instruction, applying the fallback method on all remaining data.
#[cfg(any(all(feature = "nightly", target_arch = "arm"), target_arch = "aarch64"))]
#[target_feature(enable = "neon")]
unsafe fn frame_neon(key: &mut [u8; 4], input: &mut [u8]) {
#[cfg(target_arch = "aarch64")]
use std::arch::aarch64::{uint8x16_t, veorq_u8, vld1q_dup_s32, vreinterpretq_u8_s32};
#[cfg(target_arch = "arm")]
use std::arch::arm::{uint8x16_t, veorq_u8, vld1q_dup_s32, vreinterpretq_u8_s32};
unsafe {
let (prefix, aligned_data, suffix) = input.align_to_mut::<uint8x16_t>();
// Run fallback implementation on unaligned prefix data
if !prefix.is_empty() {
fallback_frame(key, prefix);
}
if !aligned_data.is_empty() {
let key_i32 = key.as_ptr().cast::<i32>().read_unaligned();
let mask = vreinterpretq_u8_s32(vld1q_dup_s32(&key_i32));
for block in aligned_data {
*block = veorq_u8(*block, mask);
}
}
// Run fallback implementation on unaligned suffix data
if !suffix.is_empty() {
fallback_frame(key, suffix);
}
}
}
/// (Un-)masks input bytes with the framing key using AltiVec.
///
/// This will use a fallback implementation for less than 16 bytes. For
/// sufficiently large inputs, it masks in chunks of 16 bytes per
/// instruction, applying the fallback method on all remaining data.
#[cfg(all(
feature = "nightly",
any(target_arch = "powerpc", target_arch = "powerpc64")
))]
#[target_feature(enable = "altivec")]
unsafe fn frame_altivec(key: &mut [u8; 4], input: &mut [u8]) {
#[cfg(target_arch = "powerpc")]
use std::arch::powerpc::{vec_splats, vec_xor, vector_unsigned_char};
#[cfg(target_arch = "powerpc64")]
use std::arch::powerpc64::{vec_splats, vec_xor, vector_unsigned_char};
use std::mem::transmute;
unsafe {
let (prefix, aligned_data, suffix) = input.align_to_mut::<vector_unsigned_char>();
// Run fallback implementation on unaligned prefix data
if !prefix.is_empty() {
fallback_frame(key, prefix);
}
if !aligned_data.is_empty() {
// SAFETY: 4x i32 to 16x u8 is safe
let mask: vector_unsigned_char = transmute(vec_splats(i32::from_ne_bytes(*key)));
for block in aligned_data {
*block = vec_xor(*block, mask);
}
}
// Run fallback implementation on unaligned suffix data
if !suffix.is_empty() {
fallback_frame(key, suffix);
}
}
}
/// (Un-)masks input bytes with the framing key using s390x vectors.
///
/// This will use a fallback implementation for less than 16 bytes. For
/// sufficiently large inputs, it masks in chunks of 16 bytes per
/// instruction, applying the fallback method on all remaining data.
#[cfg(all(feature = "nightly", target_arch = "s390x"))]
#[target_feature(enable = "vector")]
unsafe fn frame_s390x_vector(key: &mut [u8; 4], input: &mut [u8]) {
use std::{
arch::s390x::{vec_splats, vec_xor, vector_signed_int, vector_unsigned_char},
mem::transmute,
};
unsafe {
let (prefix, aligned_data, suffix) = input.align_to_mut::<vector_unsigned_char>();
// Run fallback implementation on unaligned prefix data
if !prefix.is_empty() {
fallback_frame(key, prefix);
}
if !aligned_data.is_empty() {
// SAFETY: 4x i32 to 16x u8 is safe
let mask: vector_unsigned_char = transmute(vec_splats::<i32, vector_signed_int>(
i32::from_ne_bytes(*key),
));
for block in aligned_data {
*block = vec_xor(*block, mask);
}
}
// Run fallback implementation on unaligned suffix data
if !suffix.is_empty() {
fallback_frame(key, suffix);
}
}
}
/// (Un-)masks input bytes with the framing key using LASX.
///
/// This will use a fallback implementation for less than 32 bytes. For
/// sufficiently large inputs, it masks in chunks of 32 bytes per
/// instruction, applying the fallback method on all remaining data.
///
/// # Safety
///
/// The LoongArch LASX extension must be available at runtime; [`frame`]
/// checks this via feature detection before dispatching here.
#[cfg(all(feature = "nightly", target_arch = "loongarch64"))]
#[target_feature(enable = "lasx")]
unsafe fn frame_lasx_vector(key: &mut [u8; 4], input: &mut [u8]) {
    use std::arch::loongarch64::{lasx_xvld, lasx_xvxor_v, m256i};
    unsafe {
        let (prefix, aligned_data, suffix) = input.align_to_mut::<m256i>();
        // Run fallback implementation on unaligned prefix data.
        // This also rotates `key`, so the vector mask below starts at the
        // correct offset.
        if !prefix.is_empty() {
            fallback_frame(key, prefix);
        }
        if !aligned_data.is_empty() {
            // Broadcast the 4-byte key eight times to fill a 256-bit vector.
            let key_vector = [i32::from_ne_bytes(*key); 8];
            let mask = lasx_xvld::<0>(key_vector.as_ptr().cast());
            // 32-byte blocks keep the key rotation unchanged (32 % 4 == 0).
            for block in aligned_data {
                *block = lasx_xvxor_v(*block, mask);
            }
        }
        // Run fallback implementation on unaligned suffix data
        if !suffix.is_empty() {
            fallback_frame(key, suffix);
        }
    }
}
/// (Un-)masks input bytes with the framing key using LSX.
///
/// This will use a fallback implementation for less than 16 bytes. For
/// sufficiently large inputs, it masks in chunks of 16 bytes per
/// instruction, applying the fallback method on all remaining data.
///
/// # Safety
///
/// The LoongArch LSX extension must be available at runtime; [`frame`]
/// checks this via feature detection before dispatching here.
#[cfg(all(feature = "nightly", target_arch = "loongarch64"))]
#[target_feature(enable = "lsx")]
unsafe fn frame_lsx_vector(key: &mut [u8; 4], input: &mut [u8]) {
    use std::arch::loongarch64::{lsx_vld, lsx_vxor_v, m128i};
    unsafe {
        let (prefix, aligned_data, suffix) = input.align_to_mut::<m128i>();
        // Run fallback implementation on unaligned prefix data.
        // This also rotates `key`, so the vector mask below starts at the
        // correct offset.
        if !prefix.is_empty() {
            fallback_frame(key, prefix);
        }
        if !aligned_data.is_empty() {
            // Broadcast the 4-byte key four times to fill a 128-bit vector.
            let key_vector = [i32::from_ne_bytes(*key); 4];
            let mask = lsx_vld(key_vector.as_ptr().cast(), 0);
            // 16-byte blocks keep the key rotation unchanged (16 % 4 == 0).
            for block in aligned_data {
                *block = lsx_vxor_v(*block, mask);
            }
        }
        // Run fallback implementation on unaligned suffix data
        if !suffix.is_empty() {
            fallback_frame(key, suffix);
        }
    }
}
/// Rotates the mask in-place by a certain amount of bytes.
#[allow(clippy::cast_possible_truncation)] // offset % 4 is within u32 bounds
fn rotate_mask(key: &mut [u8; 4], offset: usize) {
*key = u32::from_be_bytes(*key)
.rotate_left((offset % key.len()) as u32 * u8::BITS)
.to_be_bytes();
}
/// (Un-)masks input bytes with the framing key, one byte at once.
///
/// See [`fallback_frame`] for more details.
fn one_byte_at_once(key: &mut [u8; 4], input: &mut [u8]) {
    // XOR every input byte with the key byte at its position, cycling
    // through the four key bytes.
    for (byte, key_byte) in input.iter_mut().zip(key.iter().copied().cycle()) {
        *byte ^= key_byte;
    }
    // Advance the key so a subsequent call continues at the right offset.
    rotate_mask(key, input.len());
}
/// (Un-)masks input bytes with the framing key.
///
/// This is used as the internal implementation in non-SIMD builds and as a
/// fallback in SIMD builds.
fn fallback_frame(key: &mut [u8; 4], input: &mut [u8]) {
    // SAFETY: every bit pattern is a valid u64, so reinterpreting the
    // aligned middle of the buffer as u64 words is sound.
    let (head, words, tail) = unsafe { input.align_to_mut::<u64>() };
    // Byte-wise pass over the unaligned head; this also rotates the key.
    if !head.is_empty() {
        one_byte_at_once(key, head);
    }
    if !words.is_empty() {
        // Broadcast the (possibly rotated) 4-byte key into a u64 mask.
        let key_word = u64::from(u32::from_ne_bytes(*key));
        let mask = (key_word << u32::BITS) | key_word;
        // Word-sized chunks are a multiple of 4 bytes, so the key rotation
        // stays unchanged for the tail below.
        words.iter_mut().for_each(|word| *word ^= mask);
    }
    // Byte-wise pass over the unaligned tail.
    if !tail.is_empty() {
        one_byte_at_once(key, tail);
    }
}
/// (Un-)masks input bytes with the framing key.
///
/// Dispatches at runtime to the fastest SIMD implementation the CPU
/// supports and falls back to a portable `u64`-at-a-time implementation.
/// `key` is rotated in place by the number of processed bytes (mod 4), so
/// one payload can be masked across several calls.
#[inline]
pub fn frame(key: &mut [u8; 4], input: &mut [u8]) {
    #[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
    {
        use std::arch::is_x86_feature_detected;
        // Prefer the widest available vector extension.
        if is_x86_feature_detected!("avx512f") {
            return unsafe { frame_avx512(key, input) };
        } else if is_x86_feature_detected!("avx2") {
            return unsafe { frame_avx2(key, input) };
        } else if is_x86_feature_detected!("sse2") {
            return unsafe { frame_sse2(key, input) };
        }
    }
    #[cfg(all(feature = "nightly", target_arch = "arm"))]
    {
        use std::arch::is_arm_feature_detected;
        if is_arm_feature_detected!("neon") {
            return unsafe { frame_neon(key, input) };
        }
    }
    #[cfg(target_arch = "aarch64")]
    {
        use std::arch::is_aarch64_feature_detected;
        if is_aarch64_feature_detected!("neon") {
            return unsafe { frame_neon(key, input) };
        }
    }
    #[cfg(all(feature = "nightly", target_arch = "powerpc"))]
    {
        use std::arch::is_powerpc_feature_detected;
        if is_powerpc_feature_detected!("altivec") {
            return unsafe { frame_altivec(key, input) };
        }
    }
    #[cfg(all(feature = "nightly", target_arch = "powerpc64"))]
    {
        use std::arch::is_powerpc64_feature_detected;
        if is_powerpc64_feature_detected!("altivec") {
            return unsafe { frame_altivec(key, input) };
        }
    }
    #[cfg(all(feature = "nightly", target_arch = "s390x"))]
    {
        use std::arch::is_s390x_feature_detected;
        if is_s390x_feature_detected!("vector") {
            return unsafe { frame_s390x_vector(key, input) };
        }
    }
    #[cfg(all(feature = "nightly", target_arch = "loongarch64"))]
    {
        use std::arch::is_loongarch_feature_detected;
        // LASX (256-bit) is preferred over LSX (128-bit) when available.
        if is_loongarch_feature_detected!("lasx") {
            return unsafe { frame_lasx_vector(key, input) };
        } else if is_loongarch_feature_detected!("lsx") {
            return unsafe { frame_lsx_vector(key, input) };
        }
    }
    // No SIMD path available (or runtime detection failed): portable path.
    fallback_frame(key, input);
}
#[cfg(all(test, feature = "client", feature = "fastrand"))]
#[test]
fn test_mask() {
    use crate::rand::get_mask;
    // Random payload, sliced so that both ends are unaligned.
    let mut buffer: Vec<u8> = (0..1024).map(|_| fastrand::u8(..)).collect();
    let payload = &mut buffer[2..998];
    let mut expected = payload.to_vec();
    let mut key = [0; 4];
    get_mask(&mut key);
    let mut key_copy = key;
    // The SIMD-dispatched path must agree with the byte-at-a-time reference.
    frame(&mut key, payload);
    one_byte_at_once(&mut key_copy, &mut expected);
    assert_eq!(&payload, &expected);
}
| rust | MIT | 618599d15e9ee2a9a7191d081a56d90eae4a811a | 2026-01-04T20:19:32.035255Z | false |
Gelbpunkt/tokio-websockets | https://github.com/Gelbpunkt/tokio-websockets/blob/618599d15e9ee2a9a7191d081a56d90eae4a811a/src/client.rs | src/client.rs | //! Implementation of a WebSocket client.
//!
//! This can be used in three ways:
//! - By letting the library connect to a remote URI and performing a HTTP/1.1
//! Upgrade handshake, via [`Builder::connect`]
//! - By letting the library perform a HTTP/1.1 Upgrade handshake on an
//! established stream, via [`Builder::connect_on`]
//! - By performing the handshake yourself and then using
//! [`Builder::take_over`] to let it take over a WebSocket stream
use std::{future::poll_fn, io, pin::Pin, str::FromStr};
use base64::{Engine, engine::general_purpose};
use futures_core::Stream;
use http::{
HeaderMap, HeaderValue, Uri,
header::{self, HeaderName},
};
use tokio::{
io::{AsyncRead, AsyncWrite, AsyncWriteExt},
net::TcpStream,
};
use tokio_util::codec::FramedRead;
use crate::{
Connector, Error, MaybeTlsStream, WebSocketStream,
proto::{Config, Limits, Role},
resolver::{self, Resolver},
upgrade::{self, server_response},
};
/// Generates a new, random 16-byte WebSocket key and encodes it as base64.
pub(crate) fn make_key() -> [u8; 24] {
    let key_bytes = crate::rand::get_key();
    let mut encoded = [0; 24];
    // SAFETY: base64 always encodes 16 input bytes to exactly 24 output
    // bytes, so encoding into the fixed-size buffer cannot fail.
    unsafe {
        general_purpose::STANDARD
            .encode_slice(key_bytes, &mut encoded)
            .unwrap_unchecked()
    };
    encoded
}
/// Guesses the port to connect on for a URI. If none is specified, port 443
/// will be used for TLS, 80 for plain HTTP.
fn default_port(uri: &Uri) -> Option<u16> {
    // An explicit port always wins; otherwise derive one from the scheme.
    uri.port_u16().or_else(|| match uri.scheme_str() {
        Some("https" | "wss") => Some(443),
        Some("http" | "ws") => Some(80),
        _ => None,
    })
}
/// List of headers added by the client which will cause an error
/// if added by the user (via [`Builder::add_header`]):
///
/// - `host`
/// - `upgrade`
/// - `connection`
/// - `sec-websocket-key`
/// - `sec-websocket-version`
pub const DISALLOWED_HEADERS: &[HeaderName] = &[
    header::HOST,
    header::UPGRADE,
    header::CONNECTION,
    header::SEC_WEBSOCKET_KEY,
    header::SEC_WEBSOCKET_VERSION,
];
/// Builds a HTTP/1.1 Upgrade request for a URI with extra headers and a
/// WebSocket key.
fn build_request(uri: &Uri, key: &[u8], headers: &HeaderMap) -> Vec<u8> {
    let mut request = Vec::new();
    // Request line: GET <path>[?<query>] HTTP/1.1
    request.extend_from_slice(b"GET ");
    request.extend_from_slice(uri.path().as_bytes());
    if let Some(query) = uri.query() {
        request.push(b'?');
        request.extend_from_slice(query.as_bytes());
    }
    request.extend_from_slice(b" HTTP/1.1\r\n");
    // Host header; the port is appended only when the URI spells one out.
    if let Some(host) = uri.host() {
        request.extend_from_slice(b"Host: ");
        request.extend_from_slice(host.as_bytes());
        if let Some(port) = uri.port_u16() {
            request.push(b':');
            request.extend_from_slice(port.to_string().as_bytes());
        }
        request.extend_from_slice(b"\r\n");
    }
    // Fixed upgrade headers followed by the random key.
    request.extend_from_slice(b"Upgrade: websocket\r\nConnection: Upgrade\r\nSec-WebSocket-Key: ");
    request.extend_from_slice(key);
    request.extend_from_slice(b"\r\nSec-WebSocket-Version: 13\r\n");
    // User-supplied extra headers.
    for (name, value) in headers {
        request.extend_from_slice(name.as_str().as_bytes());
        request.extend_from_slice(b": ");
        request.extend_from_slice(value.as_bytes());
        request.extend_from_slice(b"\r\n");
    }
    // Empty line terminates the request head.
    request.extend_from_slice(b"\r\n");
    request
}
/// Builder for WebSocket client connections.
///
/// Construct via [`Builder::new`] or [`Builder::from_uri`], chain the
/// setters, then call one of [`Builder::connect`], [`Builder::connect_on`]
/// or [`Builder::take_over`].
pub struct Builder<'a, R: Resolver = resolver::Gai> {
    /// URI to connect to, required unless connecting to an established
    /// WebSocket stream.
    uri: Option<Uri>,
    /// A TLS connector to use for the connection. If not set and required, a
    /// new one will be created.
    connector: Option<&'a Connector>,
    /// A DNS resolver to use for looking up the hostname.
    resolver: R,
    /// Configuration for the WebSocket stream.
    config: Config,
    /// Limits to impose on the WebSocket stream.
    limits: Limits,
    /// Headers to be sent with the upgrade request.
    headers: HeaderMap,
}
impl Builder<'_> {
    /// Creates a [`Builder`] with all defaults that is not configured to
    /// connect to any server.
    #[must_use]
    pub fn new() -> Self {
        Self {
            uri: None,
            connector: None,
            resolver: resolver::Gai,
            config: Config::default(),
            limits: Limits::default(),
            headers: HeaderMap::new(),
        }
    }
    /// Creates a [`Builder`] that connects to a given URI. This URI must use
    /// the `ws` or `wss` schemes.
    ///
    /// This method never fails as the URI has already been parsed.
    #[must_use]
    pub fn from_uri(uri: Uri) -> Self {
        // Identical to `new`, with the URI filled in.
        Self {
            uri: Some(uri),
            ..Self::new()
        }
    }
}
impl<'a, R: Resolver> Builder<'a, R> {
    /// Sets the [`Uri`] to connect to. This URI must use the `ws` or `wss`
    /// schemes.
    ///
    /// # Errors
    ///
    /// This method returns a [`http::uri::InvalidUri`] error if URI parsing
    /// fails.
    pub fn uri(mut self, uri: &str) -> Result<Self, http::uri::InvalidUri> {
        self.uri = Some(Uri::from_str(uri)?);
        Ok(self)
    }
    /// Sets the TLS connector for the client.
    ///
    /// By default, the client will create a new one for each connection instead
    /// of reusing one.
    #[must_use]
    pub fn connector(mut self, connector: &'a Connector) -> Self {
        self.connector = Some(connector);
        self
    }
    /// Sets the DNS resolver for the client.
    ///
    /// By default, the client will use the [`Gai`] resolver, a wrapper around
    /// the blocking `getaddrinfo` syscall.
    ///
    /// [`Gai`]: resolver::Gai
    #[must_use]
    pub fn resolver<NewR: Resolver>(self, resolver: NewR) -> Builder<'a, NewR> {
        // Destructure and rebuild because the resolver type parameter
        // changes, so `self` cannot simply be mutated.
        let Builder {
            uri,
            connector,
            resolver: _,
            config,
            limits,
            headers,
        } = self;
        Builder {
            uri,
            connector,
            resolver,
            config,
            limits,
            headers,
        }
    }
    /// Sets the configuration for the WebSocket stream.
    #[must_use]
    pub fn config(mut self, config: Config) -> Self {
        self.config = config;
        self
    }
    /// Sets the limits for the WebSocket stream.
    #[must_use]
    pub fn limits(mut self, limits: Limits) -> Self {
        self.limits = limits;
        self
    }
    /// Adds an extra HTTP header to the handshake request.
    ///
    /// # Errors
    ///
    /// Returns [`Error::DisallowedHeader`] if the header is in
    /// the [`DISALLOWED_HEADERS`] list.
    pub fn add_header(mut self, name: HeaderName, value: HeaderValue) -> Result<Self, Error> {
        if DISALLOWED_HEADERS.contains(&name) {
            return Err(Error::DisallowedHeader);
        }
        self.headers.insert(name, value);
        Ok(self)
    }
    /// Establishes a connection to the WebSocket server. This requires a URI to
    /// be configured via [`Builder::uri`].
    ///
    /// # Errors
    ///
    /// This method returns an [`Error`] if connecting to the server fails or no
    /// URI has been configured.
    pub async fn connect(
        &self,
    ) -> Result<
        (
            WebSocketStream<MaybeTlsStream<TcpStream>>,
            upgrade::Response,
        ),
        Error,
    > {
        let uri = self.uri.as_ref().ok_or(Error::NoUriConfigured)?;
        // Uri::host contains square brackets around IPv6 addresses, which is required
        // by the RFC: https://datatracker.ietf.org/doc/html/rfc3986#section-3.2.2
        // These, however, do not resolve.
        let host = uri
            .host()
            .ok_or(Error::CannotResolveHost)?
            .trim_start_matches('[')
            .trim_end_matches(']');
        // NOTE(review): URIs with an unsupported scheme and no explicit port
        // fall back to port 80 here and only fail with UnsupportedScheme
        // after the TCP connect below — confirm this is intentional.
        let port = default_port(uri).unwrap_or(80);
        let addr = self.resolver.resolve(host, port).await?;
        let stream = TcpStream::connect(&addr).await?;
        let stream = if uri.scheme_str() == Some("wss") {
            // Reuse the configured connector when present, otherwise build a
            // one-off TLS connector for this connection.
            if let Some(connector) = self.connector {
                connector.wrap(host, stream).await?
            } else {
                let connector = Connector::new()?;
                connector.wrap(host, stream).await?
            }
        } else if uri.scheme_str() == Some("ws") {
            Connector::Plain.wrap(host, stream).await?
        } else {
            return Err(Error::UnsupportedScheme);
        };
        self.connect_on(stream).await
    }
    /// Takes over an already established stream and uses it to send and receive
    /// WebSocket messages. This requires a URI to be configured via
    /// [`Builder::uri`].
    ///
    /// This method assumes that the TLS connection has already been
    /// established, if needed. It sends an HTTP upgrade request and waits
    /// for an HTTP Switching Protocols response before proceeding.
    ///
    /// # Errors
    ///
    /// This method returns an [`Error`] if writing or reading from the stream
    /// fails or no URI has been configured.
    pub async fn connect_on<S: AsyncRead + AsyncWrite + Unpin>(
        &self,
        mut stream: S,
    ) -> Result<(WebSocketStream<S>, upgrade::Response), Error> {
        let uri = self.uri.as_ref().ok_or(Error::NoUriConfigured)?;
        let key_base64 = make_key();
        // The codec is handed our key — presumably to check the server's
        // Sec-WebSocket-Accept; see `upgrade::server_response` to confirm.
        let upgrade_codec = server_response::Codec::new(&key_base64);
        let request = build_request(uri, &key_base64, &self.headers);
        stream.write_all(&request).await?;
        stream.flush().await?;
        // Drive the codec until the server's handshake response is parsed;
        // EOF before a full response is an error.
        let mut framed = FramedRead::new(stream, upgrade_codec);
        let res = poll_fn(|cx| Pin::new(&mut framed).poll_next(cx))
            .await
            .ok_or(Error::Io(io::ErrorKind::UnexpectedEof.into()))??;
        Ok((
            WebSocketStream::from_framed(framed, Role::Client, self.config, self.limits),
            res,
        ))
    }
    /// Takes over an already established stream that has already performed a
    /// HTTP upgrade handshake and uses it to send and receive WebSocket
    /// messages.
    ///
    /// This method will not perform a TLS handshake or a HTTP upgrade
    /// handshake, it assumes the stream is ready to use for writing and
    /// reading the WebSocket protocol.
    pub fn take_over<S: AsyncRead + AsyncWrite + Unpin>(&self, stream: S) -> WebSocketStream<S> {
        WebSocketStream::from_raw_stream(stream, Role::Client, self.config, self.limits)
    }
}
impl Default for Builder<'_> {
    /// Equivalent to [`Builder::new`].
    fn default() -> Self {
        Self::new()
    }
}
#[cfg(test)]
mod tests {
use futures_util::StreamExt;
use static_assertions::assert_impl_all;
use super::Builder;
use crate::{Error, proto::ProtocolError};
assert_impl_all!(Builder: Send, Sync);
#[tokio::test]
async fn control_payload_limit_receive() {
#[rustfmt::skip]
let overlong: [&[u8]; 3]= [
// Ping with 126 byte payload.
&[137, 126, 0, 126, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
// Pong with 126 byte payload.
&[138, 126, 0, 126, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
// Close with 124 byte reason.
&[136, 126, 0, 126, 3, 232, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97]
];
for mut message in overlong {
let mut stream =
Builder::new().take_over(tokio::io::join(&mut message, tokio::io::empty()));
let result = stream.next().await.unwrap();
assert!(matches!(
result,
Err(Error::Protocol(ProtocolError::InvalidPayloadLength))
));
}
}
}
| rust | MIT | 618599d15e9ee2a9a7191d081a56d90eae4a811a | 2026-01-04T20:19:32.035255Z | false |
Gelbpunkt/tokio-websockets | https://github.com/Gelbpunkt/tokio-websockets/blob/618599d15e9ee2a9a7191d081a56d90eae4a811a/src/error.rs | src/error.rs | //! General error type used in the crate.
use std::{fmt, io};
#[cfg(any(
feature = "rustls-webpki-roots",
feature = "rustls-native-roots",
feature = "rustls-platform-verifier",
feature = "rustls-bring-your-own-connector"
))]
use rustls_pki_types::InvalidDnsNameError;
#[cfg(feature = "native-tls")]
use tokio_native_tls::native_tls;
use crate::proto::ProtocolError;
/// Generic error when using WebSockets with this crate.
#[derive(Debug)]
#[non_exhaustive]
pub enum Error {
    /// Attempted to read from or write to a closed stream.
    AlreadyClosed,
    /// DNS lookup failed.
    CannotResolveHost,
    /// Attempted to connect a client to a remote without configured URI.
    #[cfg(feature = "client")]
    NoUriConfigured,
    /// Attempted to add a disallowed header.
    #[cfg(any(feature = "client", feature = "server"))]
    DisallowedHeader,
    /// WebSocket protocol violation.
    Protocol(ProtocolError),
    /// Payload length limit was exceeded.
    PayloadTooLong {
        /// Actual length of the offending payload.
        len: usize,
        /// Configured maximum payload length.
        max_len: usize,
    },
    /// I/O error.
    Io(io::Error),
    /// TLS error originating in [`native_tls`].
    #[cfg(feature = "native-tls")]
    NativeTls(native_tls::Error),
    /// Attempted to connect to an invalid DNS name.
    #[cfg(any(
        feature = "rustls-webpki-roots",
        feature = "rustls-native-roots",
        feature = "rustls-platform-verifier",
        feature = "rustls-bring-your-own-connector"
    ))]
    InvalidDNSName(InvalidDnsNameError),
    /// A general rustls error.
    #[cfg(any(
        feature = "rustls-webpki-roots",
        feature = "rustls-native-roots",
        feature = "rustls-platform-verifier",
        feature = "rustls-bring-your-own-connector"
    ))]
    Rustls(tokio_rustls::rustls::Error),
    /// An unsupported, i.e. not `ws` or `wss`, or no URI scheme was specified.
    #[cfg(feature = "client")]
    UnsupportedScheme,
    /// The HTTP/1.1 Upgrade failed.
    #[cfg(any(feature = "client", feature = "server"))]
    Upgrade(crate::upgrade::Error),
    /// No native root certificates were found and no other root certificate
    /// source was enabled.
    #[cfg(all(
        not(feature = "rustls-webpki-roots"),
        feature = "rustls-native-roots",
        not(feature = "rustls-platform-verifier"),
    ))]
    NoNativeRootCertificatesFound(Vec<rustls_native_certs::Error>),
}
// Conversions from the wrapped error types, enabling `?` throughout the
// crate.
#[cfg(feature = "native-tls")]
impl From<native_tls::Error> for Error {
    fn from(err: native_tls::Error) -> Self {
        Self::NativeTls(err)
    }
}
impl From<ProtocolError> for Error {
    fn from(err: ProtocolError) -> Self {
        Self::Protocol(err)
    }
}
impl From<io::Error> for Error {
    fn from(err: io::Error) -> Self {
        Self::Io(err)
    }
}
#[cfg(any(
    feature = "rustls-webpki-roots",
    feature = "rustls-native-roots",
    feature = "rustls-platform-verifier",
    feature = "rustls-bring-your-own-connector"
))]
impl From<InvalidDnsNameError> for Error {
    fn from(err: InvalidDnsNameError) -> Self {
        Self::InvalidDNSName(err)
    }
}
#[cfg(any(
    feature = "rustls-webpki-roots",
    feature = "rustls-native-roots",
    feature = "rustls-platform-verifier",
    feature = "rustls-bring-your-own-connector"
))]
impl From<tokio_rustls::rustls::Error> for Error {
    fn from(err: tokio_rustls::rustls::Error) -> Self {
        Self::Rustls(err)
    }
}
#[cfg(any(feature = "client", feature = "server"))]
impl From<crate::upgrade::Error> for Error {
    fn from(err: crate::upgrade::Error) -> Self {
        Self::Upgrade(err)
    }
}
impl fmt::Display for Error {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Human-readable description for each variant; wrapped errors defer
        // to their own `Display` implementations.
        match self {
            Error::AlreadyClosed => {
                f.write_str("attempted to send message after closing connection")
            }
            Error::CannotResolveHost => f.write_str("client DNS lookup failed"),
            #[cfg(feature = "client")]
            Error::NoUriConfigured => f.write_str("client has no URI configured"),
            #[cfg(any(feature = "client", feature = "server"))]
            Error::DisallowedHeader => f.write_str("attempted to add disallowed header"),
            Error::Protocol(e) => e.fmt(f),
            Error::PayloadTooLong { len, max_len } => {
                // Format the numbers straight into the formatter, avoiding
                // an intermediate string allocation.
                f.write_str("payload length of ")?;
                len.fmt(f)?;
                f.write_str(" exceeds the limit of ")?;
                max_len.fmt(f)
            }
            Error::Io(e) => e.fmt(f),
            #[cfg(feature = "native-tls")]
            Error::NativeTls(e) => e.fmt(f),
            #[cfg(any(
                feature = "rustls-webpki-roots",
                feature = "rustls-native-roots",
                feature = "rustls-platform-verifier",
                feature = "rustls-bring-your-own-connector"
            ))]
            Error::InvalidDNSName(_) => f.write_str("invalid DNS name"),
            #[cfg(any(
                feature = "rustls-webpki-roots",
                feature = "rustls-native-roots",
                feature = "rustls-platform-verifier",
                feature = "rustls-bring-your-own-connector"
            ))]
            Error::Rustls(e) => e.fmt(f),
            #[cfg(feature = "client")]
            Error::UnsupportedScheme => f.write_str("unsupported or no URI scheme used"),
            #[cfg(any(feature = "client", feature = "server"))]
            Error::Upgrade(e) => e.fmt(f),
            #[cfg(all(
                not(feature = "rustls-webpki-roots"),
                feature = "rustls-native-roots",
                not(feature = "rustls-platform-verifier")
            ))]
            Error::NoNativeRootCertificatesFound(e) => {
                f.write_str("no native root certificates were found, errors encountered: ")?;
                std::fmt::Debug::fmt(e, f)
            }
        }
    }
}
impl std::error::Error for Error {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        // Variants wrapping another error expose it as the source; plain
        // variants have none.
        match self {
            Error::AlreadyClosed | Error::CannotResolveHost | Error::PayloadTooLong { .. } => None,
            #[cfg(feature = "client")]
            Error::NoUriConfigured => None,
            #[cfg(any(feature = "client", feature = "server"))]
            Error::DisallowedHeader => None,
            #[cfg(all(
                not(feature = "rustls-webpki-roots"),
                feature = "rustls-native-roots",
                not(feature = "rustls-platform-verifier")
            ))]
            // Only the first of the collected errors is exposed as source.
            Error::NoNativeRootCertificatesFound(e) => Some(e.first()?),
            #[cfg(feature = "client")]
            Error::UnsupportedScheme => None,
            Error::Protocol(e) => Some(e),
            Error::Io(e) => Some(e),
            #[cfg(feature = "native-tls")]
            Error::NativeTls(e) => Some(e),
            #[cfg(any(
                feature = "rustls-webpki-roots",
                feature = "rustls-native-roots",
                feature = "rustls-platform-verifier",
                feature = "rustls-bring-your-own-connector"
            ))]
            Error::InvalidDNSName(e) => Some(e),
            #[cfg(any(
                feature = "rustls-webpki-roots",
                feature = "rustls-native-roots",
                feature = "rustls-platform-verifier",
                feature = "rustls-bring-your-own-connector"
            ))]
            Error::Rustls(e) => Some(e),
            #[cfg(any(feature = "client", feature = "server"))]
            Error::Upgrade(e) => Some(e),
        }
    }
}
| rust | MIT | 618599d15e9ee2a9a7191d081a56d90eae4a811a | 2026-01-04T20:19:32.035255Z | false |
Gelbpunkt/tokio-websockets | https://github.com/Gelbpunkt/tokio-websockets/blob/618599d15e9ee2a9a7191d081a56d90eae4a811a/src/rand.rs | src/rand.rs | //! Random numbers generation utilities required in WebSocket clients.
#[cfg(not(any(
feature = "fastrand",
feature = "getrandom",
feature = "nightly",
feature = "rand"
)))]
compile_error!(
"Using the `client` feature requires enabling a random number generator implementation via one of the following features: `fastrand`, `getrandom`, `nightly` or `rand`."
);
/// Random numbers generation utilities using [`std::random`].
#[cfg(all(
    feature = "nightly",
    not(feature = "fastrand"),
    not(feature = "getrandom"),
    not(feature = "rand")
))]
mod imp {
    // The trait must be in scope for the `fill_bytes` method.
    use std::random::RandomSource;
    /// Generate a random 16-byte WebSocket key.
    pub fn get_key() -> [u8; 16] {
        let mut bytes = [0; 16];
        std::random::DefaultRandomSource.fill_bytes(&mut bytes);
        bytes
    }
    /// Generate a random 4-byte WebSocket mask.
    pub fn get_mask(dst: &mut [u8; 4]) {
        std::random::DefaultRandomSource.fill_bytes(dst);
    }
}
/// Random numbers generation utilities using [`fastrand`].
#[cfg(all(
    feature = "fastrand",
    not(feature = "getrandom"),
    not(feature = "rand")
))]
mod imp {
    /// Generate a random 16-byte WebSocket key.
    pub fn get_key() -> [u8; 16] {
        // A single u128 draw yields all 16 key bytes at once.
        fastrand::u128(..).to_ne_bytes()
    }
    /// Generate a random 4-byte WebSocket mask.
    pub fn get_mask(dst: &mut [u8; 4]) {
        fastrand::fill(dst);
    }
}
/// Random numbers generation utilities using [`getrandom`].
#[cfg(all(feature = "getrandom", not(feature = "rand")))]
mod imp {
    /// Generate a random 16-byte WebSocket key.
    pub fn get_key() -> [u8; 16] {
        let mut bytes = [0; 16];
        // getrandom can fail on some platforms; the message points users to
        // alternative RNG backends.
        getrandom::fill(&mut bytes).expect("Failed to get random bytes, consider using `rand` or `fastrand` instead of `getrandom` if this persists");
        bytes
    }
    /// Generate a random 4-byte WebSocket mask.
    pub fn get_mask(dst: &mut [u8; 4]) {
        getrandom::fill(dst).expect("Failed to get random bytes, consider using `rand` or `fastrand` instead of `getrandom` if this persists");
    }
}
/// Random numbers generation utilities using [`rand`].
#[cfg(feature = "rand")]
mod imp {
    /// Generate a random 16-byte WebSocket key.
    pub fn get_key() -> [u8; 16] {
        let mut bytes = [0; 16];
        // Delegates to rand's top-level `fill` helper.
        rand::fill(&mut bytes);
        bytes
    }
    /// Generate a random 4-byte WebSocket mask.
    pub fn get_mask(dst: &mut [u8; 4]) {
        rand::fill(dst);
    }
}
pub use imp::{get_key, get_mask};
| rust | MIT | 618599d15e9ee2a9a7191d081a56d90eae4a811a | 2026-01-04T20:19:32.035255Z | false |
Gelbpunkt/tokio-websockets | https://github.com/Gelbpunkt/tokio-websockets/blob/618599d15e9ee2a9a7191d081a56d90eae4a811a/src/resolver.rs | src/resolver.rs | //! Abstractions over DNS resolvers.
use std::{future::Future, net::SocketAddr};
use crate::Error;
/// Trait for a DNS resolver to resolve hostnames and ports to IP addresses.
pub trait Resolver: Send {
    /// Resolve a hostname and port to an IP address, asynchronously.
    ///
    /// # Errors
    ///
    /// Implementations should resolve to [`Error::CannotResolveHost`] if the
    /// lookup fails or yields no addresses.
    fn resolve(
        &self,
        host: &str,
        port: u16,
    ) -> impl Future<Output = Result<SocketAddr, Error>> + Send;
}
/// A [`Resolver`] that uses the blocking `getaddrinfo` syscall in the tokio
/// threadpool.
pub struct Gai;
impl Resolver for Gai {
async fn resolve(&self, host: &str, port: u16) -> Result<SocketAddr, Error> {
let host = host.to_owned();
tokio::net::lookup_host((host, port))
.await
.map_err(|_| Error::CannotResolveHost)?
.next()
.ok_or(Error::CannotResolveHost)
}
}
| rust | MIT | 618599d15e9ee2a9a7191d081a56d90eae4a811a | 2026-01-04T20:19:32.035255Z | false |
Gelbpunkt/tokio-websockets | https://github.com/Gelbpunkt/tokio-websockets/blob/618599d15e9ee2a9a7191d081a56d90eae4a811a/src/server.rs | src/server.rs | //! Implementation of a WebSocket server.
//!
//! This can be used in two ways:
//! - By letting the library perform a HTTP/1.1 Upgrade handshake on an
//! established stream, via [`Builder::accept`]
//! - By performing the handshake yourself and then using [`Builder::serve`]
//! to let it take over a WebSocket stream
use std::{future::poll_fn, io, pin::Pin};
use futures_core::Stream;
use http::{HeaderMap, HeaderName, HeaderValue, header};
use tokio::io::{AsyncRead, AsyncWrite, AsyncWriteExt};
use tokio_util::codec::FramedRead;
use crate::{
Error, WebSocketStream,
proto::{Config, Limits, Role},
upgrade::client_request,
};
/// HTTP/1.1 400 Bad Request response payload.
const BAD_REQUEST: &[u8] = b"HTTP/1.1 400 Bad Request\r\n\r\n";
/// List of headers added by the server which will cause an error
/// if added by the user (via [`Builder::add_header`]):
///
/// - `upgrade`
/// - `connection`
/// - `sec-websocket-accept`
pub const DISALLOWED_HEADERS: &[HeaderName] = &[
    header::UPGRADE,
    header::CONNECTION,
    header::SEC_WEBSOCKET_ACCEPT,
];
/// Builder for WebSocket server connections.
///
/// Construct via [`Builder::new`] (or [`Default`]), then call
/// [`Builder::accept`] or [`Builder::serve`].
pub struct Builder {
    /// Configuration for the WebSocket stream.
    config: Config,
    /// Limits to impose on the WebSocket stream.
    limits: Limits,
    /// Headers to be sent with the switching protocols response.
    headers: HeaderMap,
}
impl Default for Builder {
    /// Equivalent to [`Builder::new`].
    fn default() -> Self {
        Self::new()
    }
}
impl Builder {
    /// Creates a [`Builder`] that can be used to create a [`WebSocketStream`]
    /// to receive messages at the server end.
    #[must_use]
    pub fn new() -> Self {
        Self {
            config: Config::default(),
            limits: Limits::default(),
            headers: HeaderMap::new(),
        }
    }
    /// Sets the configuration for the WebSocket stream.
    #[must_use]
    pub fn config(mut self, config: Config) -> Self {
        self.config = config;
        self
    }
    /// Sets the limits for the WebSocket stream.
    #[must_use]
    pub fn limits(mut self, limits: Limits) -> Self {
        self.limits = limits;
        self
    }
    /// Adds an extra HTTP header to the switching protocols response.
    ///
    /// # Errors
    ///
    /// Returns [`Error::DisallowedHeader`] if the header is in
    /// the [`DISALLOWED_HEADERS`] list.
    pub fn add_header(mut self, name: HeaderName, value: HeaderValue) -> Result<Self, Error> {
        if DISALLOWED_HEADERS.contains(&name) {
            return Err(Error::DisallowedHeader);
        }
        self.headers.insert(name, value);
        Ok(self)
    }
    /// Perform a HTTP upgrade handshake on an already established stream and
    /// uses it to send and receive WebSocket messages.
    ///
    /// # Errors
    ///
    /// This method returns an [`Error`] if the handshake fails.
    pub async fn accept<S: AsyncRead + AsyncWrite + Unpin>(
        &self,
        stream: S,
    ) -> Result<(http::Request<()>, WebSocketStream<S>), Error> {
        let mut framed = FramedRead::new(
            stream,
            client_request::Codec {
                response_headers: &self.headers,
            },
        );
        // Drive the codec until the client's upgrade request is parsed.
        let reply = poll_fn(|cx| Pin::new(&mut framed).poll_next(cx)).await;
        match reply {
            Some(Ok((request, response))) => {
                // Valid handshake: send the switching protocols response and
                // hand the stream over to the WebSocket layer.
                framed.get_mut().write_all(&response).await?;
                Ok((
                    request,
                    WebSocketStream::from_framed(framed, Role::Server, self.config, self.limits),
                ))
            }
            Some(Err(e)) => {
                // Malformed handshake: reply with 400 Bad Request, then
                // surface the parse error to the caller.
                framed.get_mut().write_all(BAD_REQUEST).await?;
                Err(e)
            }
            None => Err(Error::Io(io::ErrorKind::UnexpectedEof.into())),
        }
    }
    /// Takes over an already established stream and uses it to send and receive
    /// WebSocket messages.
    ///
    /// This does not perform a HTTP upgrade handshake.
    pub fn serve<S: AsyncRead + AsyncWrite + Unpin>(&self, stream: S) -> WebSocketStream<S> {
        WebSocketStream::from_raw_stream(stream, Role::Server, self.config, self.limits)
    }
}
| rust | MIT | 618599d15e9ee2a9a7191d081a56d90eae4a811a | 2026-01-04T20:19:32.035255Z | false |
Gelbpunkt/tokio-websockets | https://github.com/Gelbpunkt/tokio-websockets/blob/618599d15e9ee2a9a7191d081a56d90eae4a811a/src/utf8.rs | src/utf8.rs | //! UTF-8 validation and parsing helpers that abstract over [`simdutf8`].
use std::hint::unreachable_unchecked;
use crate::proto::ProtocolError;
/// Converts a slice of bytes to a string slice. This will use SIMD acceleration
/// if available.
///
/// # Errors
///
/// Returns a [`ProtocolError`] if the input is invalid UTF-8.
#[inline]
pub fn parse_str(input: &[u8]) -> Result<&str, ProtocolError> {
    match simdutf8::basic::from_utf8(input) {
        Ok(text) => Ok(text),
        Err(_) => Err(ProtocolError::InvalidUtf8),
    }
}
/// A streaming UTF-8 validator.
#[derive(Debug)]
pub(crate) struct Validator {
    /// Buffer for a partial codepoint. This is four bytes large to copy the
    /// missing bytes into the buffer and reuse the allocation.
    // Only the leading bytes of an *incomplete* codepoint are stored, so at
    // most 3 bytes are buffered between calls to `feed`.
    partial_codepoint: [u8; 4],
    /// Length of the partial codepoint currently stored.
    partial_codepoint_len: usize,
}
impl Validator {
    /// Creates a new validator.
    #[cfg(any(feature = "client", feature = "server"))]
    pub fn new() -> Self {
        Self {
            partial_codepoint: [0; 4],
            partial_codepoint_len: 0,
        }
    }
    /// The length of the partial codepoint, once complete.
    ///
    /// Derived from the UTF-8 leading byte stored at index 0.
    #[inline]
    fn complete_codepoint_len(&self) -> usize {
        match self.partial_codepoint[0] {
            // 0b0xxxxxxx (single-byte code point)
            0b0000_0000..=0b0111_1111 => 1,
            // 0b110xxxxx (two-byte code point)
            0b1100_0000..=0b1101_1111 => 2,
            // 0b1110xxxx (three-byte code point)
            0b1110_0000..=0b1110_1111 => 3,
            // 0b11110xxx (four-byte code point)
            0b1111_0000..=0b1111_0111 => 4,
            // Invalid first byte.
            // SAFETY: Buffered bytes always come from an *incomplete* UTF-8
            // sequence (`error_len()` was `None` in `feed`), so the first
            // byte is a valid leading byte; a continuation or invalid byte
            // would have been rejected with `error_len() == Some(_)`.
            _ => unsafe { unreachable_unchecked() },
        }
    }
    /// Resets the validator state.
    #[inline]
    pub fn reset(&mut self) {
        self.partial_codepoint_len = 0;
    }
    /// Feeds bytes into the streaming validator. Returns `Ok` if the input is
    /// valid UTF-8, if `is_complete` is true, even if the input has incomplete
    /// codepoints. Subsequent calls will validate incomplete codepoints
    /// unless [`Self::reset`] is called in between.
    pub fn feed(&mut self, input: &[u8], is_complete: bool) -> Result<(), ProtocolError> {
        // If we have a partial codepoint, complete it
        let remaining_bytes = if self.partial_codepoint_len == 0 {
            input
        } else {
            let available_bytes = input.len();
            if available_bytes == 0 && !is_complete {
                return Ok(());
            }
            let missing_bytes = self.complete_codepoint_len() - self.partial_codepoint_len;
            let bytes_to_copy = available_bytes.min(missing_bytes);
            let codepoint_len_after_copy = self.partial_codepoint_len + bytes_to_copy;
            // Copy the missing codepoint bytes to the partial codepoint
            unsafe {
                self.partial_codepoint
                    .get_unchecked_mut(self.partial_codepoint_len..codepoint_len_after_copy)
            }
            .copy_from_slice(&input[..bytes_to_copy]);
            // If we know that the codepoint is complete, we can use the basic variant
            if available_bytes >= missing_bytes {
                if simdutf8::basic::from_utf8(unsafe {
                    self.partial_codepoint
                        .get_unchecked(..codepoint_len_after_copy)
                })
                .is_err()
                {
                    return Err(ProtocolError::InvalidUtf8);
                }
            } else {
                // Codepoint is still incomplete: the compat API tells truly
                // invalid bytes apart from merely truncated input.
                match simdutf8::compat::from_utf8(unsafe {
                    self.partial_codepoint
                        .get_unchecked(..codepoint_len_after_copy)
                }) {
                    Ok(_) => {}
                    Err(utf8_error) if utf8_error.error_len().is_some() => {
                        return Err(ProtocolError::InvalidUtf8);
                    }
                    Err(_) => {
                        // Still truncated: keep buffering, unless the input
                        // claims to be complete — then truncation is fatal.
                        self.partial_codepoint_len = codepoint_len_after_copy;
                        if is_complete {
                            return Err(ProtocolError::InvalidUtf8);
                        }
                        return Ok(());
                    }
                }
            }
            self.reset();
            &input[bytes_to_copy..]
        };
        // Validate the entire rest of the input
        if is_complete {
            self.reset();
            match simdutf8::basic::from_utf8(remaining_bytes) {
                Ok(_) => Ok(()),
                Err(_) => Err(ProtocolError::InvalidUtf8),
            }
        } else {
            match simdutf8::compat::from_utf8(remaining_bytes) {
                Ok(_) => Ok(()),
                Err(utf8_error) if utf8_error.error_len().is_some() => {
                    Err(ProtocolError::InvalidUtf8)
                }
                Err(utf8_error) => {
                    // Incomplete input, copy the partial codepoints to the validator
                    self.partial_codepoint_len = remaining_bytes.len() - utf8_error.valid_up_to();
                    unsafe {
                        self.partial_codepoint
                            .get_unchecked_mut(..self.partial_codepoint_len)
                            .copy_from_slice(
                                remaining_bytes.get_unchecked(utf8_error.valid_up_to()..),
                            );
                    }
                    Ok(())
                }
            }
        }
    }
}
| rust | MIT | 618599d15e9ee2a9a7191d081a56d90eae4a811a | 2026-01-04T20:19:32.035255Z | false |
Gelbpunkt/tokio-websockets | https://github.com/Gelbpunkt/tokio-websockets/blob/618599d15e9ee2a9a7191d081a56d90eae4a811a/src/upgrade/client_request.rs | src/upgrade/client_request.rs | //! A [`Codec`] to parse client HTTP Upgrade handshakes and validate them.
use std::str::FromStr;
use base64::{Engine, engine::general_purpose::STANDARD};
use bytes::{Buf, BytesMut};
use http::{HeaderMap, header::SET_COOKIE};
use httparse::Request;
use tokio_util::codec::Decoder;
use crate::{sha::digest, upgrade::Error};
/// A static HTTP/1.1 101 Switching Protocols response up until the
/// `Sec-WebSocket-Accept` header value.
///
/// [`Codec::decode`] appends the computed accept value, any configured extra
/// headers and the terminating blank line after this prefix.
const SWITCHING_PROTOCOLS_BODY: &[u8] = b"HTTP/1.1 101 Switching Protocols\r\nUpgrade: websocket\r\nConnection: Upgrade\r\nSec-WebSocket-Accept: ";
/// Returns whether an ASCII byte slice is contained in another one, ignoring
/// captalization.
fn contains_ignore_ascii_case(haystack: &[u8], needle: &[u8]) -> bool {
    // An empty needle is trivially contained (and `windows(0)` would panic).
    if needle.is_empty() {
        return true;
    }
    // Slide a needle-sized window over the haystack; `windows` yields nothing
    // when the haystack is shorter than the needle, giving `false`.
    haystack
        .windows(needle.len())
        .any(|window| window.eq_ignore_ascii_case(needle))
}
/// A client's opening handshake.
///
/// Only the value derived from the request is retained; the rest of the
/// request is not needed to build the server's reply.
struct ClientRequest {
    /// The SHA-1 digest of the `Sec-WebSocket-Key` header.
    ws_accept: [u8; 20],
}
impl ClientRequest {
/// Parses the client's opening handshake.
///
/// # Errors
///
/// This method fails when a header required for the WebSocket protocol is
/// missing in the handshake.
pub fn parse<'a, F>(header: F) -> Result<Self, Error>
where
F: Fn(&'static str) -> Option<&'a str> + 'a,
{
let find_header = |name| header(name).ok_or(super::Error::MissingHeader(name));
let check_header = |name, expected, err| {
let actual = find_header(name)?;
if actual.eq_ignore_ascii_case(expected) {
Ok(())
} else {
Err(err)
}
};
let check_header_contains = |name, expected: &str, err| {
let actual = find_header(name)?;
if contains_ignore_ascii_case(actual.as_bytes(), expected.as_bytes()) {
Ok(())
} else {
Err(err)
}
};
check_header("Upgrade", "websocket", Error::UpgradeNotWebSocket)?;
check_header_contains("Connection", "Upgrade", Error::ConnectionNotUpgrade)?;
check_header(
"Sec-WebSocket-Version",
"13",
Error::UnsupportedWebSocketVersion,
)?;
let key = find_header("Sec-WebSocket-Key")?;
let ws_accept = digest(key.as_bytes());
Ok(Self { ws_accept })
}
/// Returns the value that the client expects to see in the server's
/// `Sec-WebSocket-Accept` header.
#[must_use]
pub fn ws_accept(&self) -> String {
STANDARD.encode(self.ws_accept)
}
}
/// A codec that implements a [`Decoder`] for HTTP/1.1 upgrade requests and
/// yields the request and a HTTP/1.1 response to reply with.
///
/// It does not implement an [`Encoder`].
///
/// [`Encoder`]: tokio_util::codec::Encoder
pub struct Codec<'a> {
    /// List of headers to add to the Switching Protocols response.
    /// Serialized verbatim into the reply after the required upgrade headers.
    pub response_headers: &'a HeaderMap,
}
impl Decoder for Codec<'_> {
    type Error = crate::Error;
    type Item = (http::Request<()>, Vec<u8>);
    /// Parses a complete HTTP upgrade request from `src`, validates it as a
    /// WebSocket handshake, and returns it together with the serialized
    /// 101 Switching Protocols response bytes to send back.
    ///
    /// Returns `Ok(None)` while the request is not yet fully buffered.
    fn decode(&mut self, src: &mut BytesMut) -> Result<Option<Self::Item>, Self::Error> {
        // httparse needs a fixed-size scratch array; up to 64 headers.
        let mut headers = [httparse::EMPTY_HEADER; 64];
        let mut request = Request::new(&mut headers);
        let status = request.parse(src).map_err(Error::Parsing)?;
        if !status.is_complete() {
            return Ok(None);
        }
        // Number of bytes the parsed request occupies in `src`.
        let request_len = status.unwrap();
        let mut builder = http::request::Builder::new();
        if let Some(m) = request.method {
            let method =
                http::method::Method::from_bytes(m.as_bytes()).expect("httparse method is valid");
            builder = builder.method(method);
        }
        if let Some(uri) = request.path {
            builder = builder.uri(uri);
        }
        match request.version {
            Some(0) => builder = builder.version(http::Version::HTTP_10),
            Some(1) => builder = builder.version(http::Version::HTTP_11),
            _ => Err(Error::Parsing(httparse::Error::Version))?,
        }
        let mut header_map = http::HeaderMap::with_capacity(request.headers.len());
        for header in request.headers {
            let name = http::HeaderName::from_str(header.name)
                .map_err(|_| Error::Parsing(httparse::Error::HeaderName))?;
            let value = http::HeaderValue::from_bytes(header.value)
                .map_err(|_| Error::Parsing(httparse::Error::HeaderValue))?;
            // NOTE(review): `insert` keeps only the last value for duplicate
            // header names — confirm dropping earlier duplicates is intended.
            header_map.insert(name, value);
        }
        // You have to build the request before you can assign headers: https://github.com/hyperium/http/issues/91
        let mut request = builder
            .body(())
            .expect("httparse sees the request as valid");
        *request.headers_mut() = header_map;
        let ws_accept =
            ClientRequest::parse(|name| request.headers().get(name).and_then(|h| h.to_str().ok()))?
                .ws_accept();
        // Consume the parsed request bytes from the buffer.
        src.advance(request_len);
        // Preallocate the size without extra headers
        let mut resp = Vec::with_capacity(SWITCHING_PROTOCOLS_BODY.len() + ws_accept.len() + 4);
        resp.extend_from_slice(SWITCHING_PROTOCOLS_BODY);
        resp.extend_from_slice(ws_accept.as_bytes());
        resp.extend_from_slice(b"\r\n");
        for name in self.response_headers.keys() {
            let values = self.response_headers.get_all(name).iter();
            if name == SET_COOKIE {
                // Set-Cookie is treated differently because if multiple values are present,
                // multiple header entries should be used rather than one
                for value in values {
                    resp.extend_from_slice(name.as_str().as_bytes());
                    resp.extend_from_slice(b": ");
                    resp.extend_from_slice(value.as_bytes());
                    resp.extend_from_slice(b"\r\n");
                }
            } else {
                // All other header values of the same key should be concatenated with a comma
                resp.extend_from_slice(name.as_str().as_bytes());
                resp.extend_from_slice(b": ");
                let mut values = values.peekable();
                while let Some(value) = values.next() {
                    resp.extend_from_slice(value.as_bytes());
                    if values.peek().is_some() {
                        resp.push(b',');
                    }
                }
                resp.extend_from_slice(b"\r\n");
            }
        }
        // Blank line terminating the response head.
        resp.extend_from_slice(b"\r\n");
        Ok(Some((request, resp)))
    }
}
| rust | MIT | 618599d15e9ee2a9a7191d081a56d90eae4a811a | 2026-01-04T20:19:32.035255Z | false |
Gelbpunkt/tokio-websockets | https://github.com/Gelbpunkt/tokio-websockets/blob/618599d15e9ee2a9a7191d081a56d90eae4a811a/src/upgrade/mod.rs | src/upgrade/mod.rs | //! HTTP upgrade request and response generation and validation helpers.
use std::fmt;
#[cfg(feature = "server")]
pub(crate) mod client_request;
#[cfg(feature = "client")]
pub(crate) mod server_response;
/// A parsed HTTP/1.1 101 Switching Protocols response.
/// These responses typically do not contain a body, therefore it is omitted.
/// The body type is `()` to reflect that.
#[cfg(feature = "client")]
pub type Response = http::Response<()>;
/// Errors that occur during the HTTP upgrade handshake between client and
/// server.
///
/// Marked `#[non_exhaustive]` so new variants can be added without a breaking
/// change.
#[derive(Debug)]
#[non_exhaustive]
pub enum Error {
    /// Header required in the request or response is not present.
    MissingHeader(&'static str),
    /// `Upgrade` header sent by the client does not match "websocket".
    UpgradeNotWebSocket,
    /// `Connection` header sent by the client does not contain "Upgrade".
    ConnectionNotUpgrade,
    /// `Sec-WebSocket-Version` header sent by the client is not supported by
    /// the server.
    UnsupportedWebSocketVersion,
    /// Failed to parse client request or server response.
    Parsing(httparse::Error),
    /// Server did not return a HTTP Switching Protocols response.
    /// Carries the status code that was actually received.
    DidNotSwitchProtocols(u16),
    /// Server returned a `Sec-WebSocket-Accept` that is not compatible with the
    /// `Sec-WebSocket-Key` sent by the client.
    WrongWebSocketAccept,
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Error::MissingHeader(header) => {
f.write_str("missing required header: ")?;
f.write_str(header)
}
Error::UpgradeNotWebSocket => f.write_str("upgrade header value was not websocket"),
Error::ConnectionNotUpgrade => f.write_str("connection header value was not upgrade"),
Error::UnsupportedWebSocketVersion => f.write_str("unsupported WebSocket version"),
Error::Parsing(e) => e.fmt(f),
Error::DidNotSwitchProtocols(status) => {
f.write_str("expected HTTP 101 Switching Protocols, got status code ")?;
f.write_fmt(format_args!("{status}"))
}
Error::WrongWebSocketAccept => f.write_str("mismatching Sec-WebSocket-Accept header"),
}
}
}
impl std::error::Error for Error {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match self {
Error::MissingHeader(_)
| Error::UpgradeNotWebSocket
| Error::ConnectionNotUpgrade
| Error::UnsupportedWebSocketVersion
| Error::DidNotSwitchProtocols(_)
| Error::WrongWebSocketAccept => None,
Error::Parsing(e) => Some(e),
}
}
}
impl From<httparse::Error> for Error {
fn from(err: httparse::Error) -> Self {
Self::Parsing(err)
}
}
| rust | MIT | 618599d15e9ee2a9a7191d081a56d90eae4a811a | 2026-01-04T20:19:32.035255Z | false |
Gelbpunkt/tokio-websockets | https://github.com/Gelbpunkt/tokio-websockets/blob/618599d15e9ee2a9a7191d081a56d90eae4a811a/src/upgrade/server_response.rs | src/upgrade/server_response.rs | //! A [`Codec`] to perform a HTTP Upgrade handshake with a server and validate
//! the response.
use std::str::FromStr;
use base64::{Engine, engine::general_purpose::STANDARD};
use bytes::{Buf, BytesMut};
use http::{HeaderValue, StatusCode, header::HeaderName};
use httparse::{Header, Response};
use tokio_util::codec::Decoder;
use crate::{sha::digest, upgrade::Error};
/// HTTP status code for Switching Protocols.
/// Any other status code is surfaced as [`Error::DidNotSwitchProtocols`].
const SWITCHING_PROTOCOLS: u16 = 101;
/// Find a header in an array of headers by name, ignoring ASCII case.
fn header<'a, 'header: 'a>(
headers: &'a [Header<'header>],
name: &'static str,
) -> Result<&'header [u8], Error> {
let header = headers
.iter()
.find(|header| header.name.eq_ignore_ascii_case(name))
.ok_or(Error::MissingHeader(name))?;
Ok(header.value)
}
/// [`Decoder`] for parsing the server's response to the client's HTTP
/// `Connection: Upgrade` request.
pub struct Codec {
    /// The SHA-1 digest of the `Sec-WebSocket-Key` header.
    /// This is the value the server must echo back (base64-encoded) in its
    /// `Sec-WebSocket-Accept` header.
    ws_accept: [u8; 20],
}
impl Codec {
    /// Returns a new [`Codec`].
    ///
    /// The `key` parameter provides the string passed to the server via the
    /// HTTP `Sec-WebSocket-Key` header.
    #[must_use]
    pub fn new(key: &[u8]) -> Self {
        // Precompute the digest the server must echo back, so decoding only
        // has to compare 20 bytes.
        let ws_accept = digest(key);

        Self { ws_accept }
    }
}
impl Decoder for Codec {
    type Error = crate::Error;
    type Item = super::Response;
    /// Parses the server's handshake response from `src` and verifies that it
    /// switches protocols with a matching `Sec-WebSocket-Accept` value.
    ///
    /// Returns `Ok(None)` while the response is not yet fully buffered.
    fn decode(&mut self, src: &mut BytesMut) -> Result<Option<Self::Item>, Self::Error> {
        // httparse needs a fixed-size scratch array; up to 25 headers.
        let mut headers = [httparse::EMPTY_HEADER; 25];
        let mut response = Response::new(&mut headers);
        let status = response.parse(src).map_err(Error::Parsing)?;
        if !status.is_complete() {
            return Ok(None);
        }
        // Number of bytes the parsed response occupies in `src`.
        let response_len = status.unwrap();
        let code = response.code.unwrap();
        if code != SWITCHING_PROTOCOLS {
            return Err(crate::Error::Upgrade(Error::DidNotSwitchProtocols(code)));
        }
        let ws_accept_header = header(response.headers, "Sec-WebSocket-Accept")?;
        let mut ws_accept = [0; 20];
        // Any base64 decode failure (wrong length or characters) means the
        // accept value cannot possibly match ours.
        STANDARD
            .decode_slice_unchecked(ws_accept_header, &mut ws_accept)
            .map_err(|_| Error::WrongWebSocketAccept)?;
        if self.ws_accept != ws_accept {
            return Err(crate::Error::Upgrade(Error::WrongWebSocketAccept));
        }
        let mut parsed_response = http::Response::new(());
        *parsed_response.status_mut() =
            StatusCode::from_u16(code).map_err(|_| Error::Parsing(httparse::Error::Status))?;
        match response.version {
            Some(0) => *parsed_response.version_mut() = http::Version::HTTP_10,
            Some(1) => *parsed_response.version_mut() = http::Version::HTTP_11,
            _ => Err(Error::Parsing(httparse::Error::Version))?,
        }
        let header_map = parsed_response.headers_mut();
        header_map.reserve(response.headers.len());
        for header in response.headers {
            let name = HeaderName::from_str(header.name)
                .map_err(|_| Error::Parsing(httparse::Error::HeaderName))?;
            let value = HeaderValue::from_bytes(header.value)
                .map_err(|_| Error::Parsing(httparse::Error::HeaderValue))?;
            // NOTE(review): `insert` keeps only the last value for duplicate
            // header names — confirm dropping earlier duplicates is intended.
            header_map.insert(name, value);
        }
        // Consume the parsed response bytes from the buffer.
        src.advance(response_len);
        Ok(Some(parsed_response))
    }
}
| rust | MIT | 618599d15e9ee2a9a7191d081a56d90eae4a811a | 2026-01-04T20:19:32.035255Z | false |
Gelbpunkt/tokio-websockets | https://github.com/Gelbpunkt/tokio-websockets/blob/618599d15e9ee2a9a7191d081a56d90eae4a811a/src/proto/stream.rs | src/proto/stream.rs | //! Frame aggregating abstraction over the low-level [`super::codec`]
//! implementation that provides [`futures_sink::Sink`] and
//! [`futures_core::Stream`] implementations that take [`Message`] as a
//! parameter.
use std::{
collections::VecDeque,
io::{self, IoSlice},
mem::{replace, take},
pin::Pin,
task::{Context, Poll, Waker, ready},
};
use bytes::{Buf, BytesMut};
use futures_core::Stream;
use futures_sink::Sink;
use tokio::io::{AsyncRead, AsyncWrite};
use tokio_util::{codec::FramedRead, io::poll_write_buf};
#[cfg(any(feature = "client", feature = "server"))]
use super::types::Role;
use super::{
Config, Limits,
codec::WebSocketProtocol,
types::{Frame, Message, OpCode, Payload, StreamState},
};
use crate::{CloseCode, Error};
/// Helper struct for storing a frame header, the header size and payload.
///
/// The header buffer is 14 bytes: 2 (base header) + 8 (64-bit extended
/// length) + 4 (mask), the largest possible WebSocket frame header.
#[derive(Debug)]
struct EncodedFrame {
    /// Encoded frame header and mask.
    header: [u8; 14],
    /// Potentially masked message payload, ready for writing to the I/O.
    payload: Payload,
}
impl EncodedFrame {
    /// Whether or not this frame is masked.
    #[inline]
    fn is_masked(&self) -> bool {
        // The mask bit is the high bit of the second header byte.
        self.header[1] & 0x80 != 0
    }

    /// Returns the length of the combined header and mask in bytes.
    #[inline]
    fn header_len(&self) -> usize {
        // Four extra bytes are occupied by the mask when present.
        let mask_bytes = usize::from(self.is_masked()) * 4;
        // The 7-bit length field decides how many extended length bytes follow:
        // 127 -> 8 extra bytes, 126 -> 2 extra bytes, otherwise none.
        let base_len = match self.header[1] & 127 {
            127 => 10,
            126 => 4,
            _ => 2,
        };
        base_len + mask_bytes
    }

    /// Total length of the frame.
    fn len(&self) -> usize {
        self.header_len() + self.payload.len()
    }
}
/// Queued up frames that are being sent.
#[derive(Debug)]
struct FrameQueue {
    /// Queue of outgoing frames to send. Some parts of the first item may have
    /// been sent already.
    queue: VecDeque<EncodedFrame>,
    /// Amount of partial bytes written of the first frame in the queue.
    /// Counts header and payload bytes combined; always less than the front
    /// frame's total length.
    bytes_written: usize,
    /// Total amount of bytes remaining to be sent in the frame queue.
    pending_bytes: usize,
}
impl FrameQueue {
    /// Creates a new, empty [`FrameQueue`].
    #[cfg(any(feature = "client", feature = "server"))]
    fn new() -> Self {
        Self {
            // Most sends queue a single frame, so start with room for one.
            queue: VecDeque::with_capacity(1),
            bytes_written: 0,
            pending_bytes: 0,
        }
    }

    /// Queue a frame to be sent.
    fn push(&mut self, item: EncodedFrame) {
        // Record the frame's size before moving it into the queue so the
        // pending byte count stays in sync.
        let frame_len = item.len();
        self.queue.push_back(item);
        self.pending_bytes += frame_len;
    }
}
impl Buf for FrameQueue {
    // Total bytes left to write across all queued frames.
    fn remaining(&self) -> usize {
        self.pending_bytes
    }
    // Returns the next contiguous region to write: the unwritten rest of the
    // front frame's header, or (once the header is fully written) the
    // unwritten rest of its payload.
    fn chunk(&self) -> &[u8] {
        if let Some(frame) = self.queue.front() {
            if self.bytes_written >= frame.header_len() {
                // SAFETY: `advance` pops the frame before `bytes_written` can
                // reach `frame.len()`, so the payload index is in bounds.
                unsafe {
                    frame
                        .payload
                        .get_unchecked(self.bytes_written - frame.header_len()..)
                }
            } else {
                &frame.header[self.bytes_written..frame.header_len()]
            }
        } else {
            &[]
        }
    }
    // Marks `cnt` bytes as written, popping frames that are fully written and
    // tracking the partial progress into the new front frame.
    fn advance(&mut self, mut cnt: usize) {
        self.pending_bytes -= cnt;
        // Fold the previous partial progress into the count so the loop below
        // can work in whole-frame offsets.
        cnt += self.bytes_written;
        while cnt > 0 {
            let item = self
                .queue
                .front()
                .expect("advance called with too long count");
            let item_len = item.len();
            if cnt >= item_len {
                self.queue.pop_front();
                self.bytes_written = 0;
                cnt -= item_len;
            } else {
                self.bytes_written = cnt;
                return;
            }
        }
    }
    // Fills `dst` with IoSlices for vectored writes: the (possibly partially
    // written) front frame first, then each following frame's header and
    // payload.
    fn chunks_vectored<'a>(&'a self, dst: &mut [io::IoSlice<'a>]) -> usize {
        let mut n = 0;
        for (idx, frame) in self.queue.iter().enumerate() {
            if n >= dst.len() {
                break;
            }
            if idx == 0 {
                // The front frame may already be partially written, so skip
                // the bytes that went out previously.
                if frame.header_len() > self.bytes_written {
                    dst[n] = IoSlice::new(&frame.header[self.bytes_written..frame.header_len()]);
                    n += 1;
                }
                if !frame.payload.is_empty() && n < dst.len() {
                    // SAFETY: same invariant as in `chunk`; the saturating_sub
                    // yields 0 while the header is still being written.
                    dst[n] = IoSlice::new(unsafe {
                        frame
                            .payload
                            .get_unchecked(self.bytes_written.saturating_sub(frame.header_len())..)
                    });
                    n += 1;
                }
            } else {
                dst[n] = IoSlice::new(&frame.header[..frame.header_len()]);
                n += 1;
                if !frame.payload.is_empty() && n < dst.len() {
                    dst[n] = IoSlice::new(&frame.payload);
                    n += 1;
                }
            }
        }
        n
    }
}
/// A WebSocket stream that full messages can be read from and written to.
///
/// The stream implements [`futures_sink::Sink`] and [`futures_core::Stream`].
///
/// You must use a [`ClientBuilder`] or [`ServerBuilder`] to
/// obtain a WebSocket stream.
///
/// For usage examples, see the top level crate documentation, which showcases a
/// simple echo server and client.
///
/// [`ClientBuilder`]: crate::ClientBuilder
/// [`ServerBuilder`]: crate::ServerBuilder
#[allow(clippy::module_name_repetitions)]
#[derive(Debug)]
pub struct WebSocketStream<T> {
    /// The underlying stream using the [`WebSocketProtocol`] to read and write
    /// full frames.
    inner: FramedRead<T, WebSocketProtocol>,
    /// Configuration for the stream.
    config: Config,
    /// The [`StreamState`] of the current stream.
    state: StreamState,
    /// Payload of the full message that is being assembled.
    partial_payload: BytesMut,
    /// Opcode of the full message that is being assembled.
    partial_opcode: OpCode,
    /// Buffer that outgoing frame headers are formatted into.
    /// Reused scratch space; its contents are copied into each queued frame.
    header_buf: [u8; 14],
    /// Queue of outgoing frames to send.
    frame_queue: FrameQueue,
    /// Waker used for currently actively polling
    /// [`WebSocketStream::poll_flush`] until completion.
    flushing_waker: Option<Waker>,
}
impl<T> WebSocketStream<T>
where
    T: AsyncRead + AsyncWrite + Unpin,
{
    /// Create a new [`WebSocketStream`] from a raw stream.
    #[cfg(any(feature = "client", feature = "server"))]
    pub(crate) fn from_raw_stream(stream: T, role: Role, config: Config, limits: Limits) -> Self {
        Self {
            inner: FramedRead::new(stream, WebSocketProtocol::new(role, limits)),
            config,
            state: StreamState::Active,
            partial_payload: BytesMut::new(),
            partial_opcode: OpCode::Continuation,
            header_buf: [0; 14],
            frame_queue: FrameQueue::new(),
            flushing_waker: None,
        }
    }

    /// Create a new [`WebSocketStream`] from an existing [`FramedRead`]. This
    /// allows for reusing the internal buffer of the [`FramedRead`] object.
    #[cfg(any(feature = "client", feature = "server"))]
    pub(crate) fn from_framed<U>(
        framed: FramedRead<T, U>,
        role: Role,
        config: Config,
        limits: Limits,
    ) -> Self {
        Self {
            inner: framed.map_decoder(|_| WebSocketProtocol::new(role, limits)),
            config,
            state: StreamState::Active,
            partial_payload: BytesMut::new(),
            partial_opcode: OpCode::Continuation,
            header_buf: [0; 14],
            frame_queue: FrameQueue::new(),
            flushing_waker: None,
        }
    }

    /// Returns a reference to the underlying I/O stream wrapped by this stream.
    ///
    /// Care should be taken not to tamper with the stream of data to avoid
    /// corrupting the stream of frames.
    pub fn get_ref(&self) -> &T {
        self.inner.get_ref()
    }

    /// Returns a mutable reference to the underlying I/O stream wrapped by this
    /// stream.
    ///
    /// Care should be taken not to tamper with the stream of data to avoid
    /// corrupting the stream of frames.
    pub fn get_mut(&mut self) -> &mut T {
        self.inner.get_mut()
    }

    /// Returns a reference to the inner websocket limits.
    pub fn limits(&self) -> &Limits {
        &self.inner.decoder().limits
    }

    /// Returns a mutable reference to the inner websocket limits.
    pub fn limits_mut(&mut self) -> &mut Limits {
        &mut self.inner.decoder_mut().limits
    }

    /// Consumes the `WebSocketStream`, returning its underlying I/O stream.
    pub fn into_inner(self) -> T {
        self.inner.into_inner()
    }

    /// Attempt to pull out the next frame from the [`FramedRead`] this stream
    /// wraps and from that update the stream's internal state.
    ///
    /// # Errors
    ///
    /// This method returns an [`Error`] if reading from the stream fails or a
    /// protocol violation is encountered.
    fn poll_next_frame(
        mut self: Pin<&mut Self>,
        cx: &mut Context<'_>,
    ) -> Poll<Option<Result<Frame, Error>>> {
        // In the case of Active or ClosedByUs, we want to receive more messages from
        // the remote. In the case of ClosedByPeer, we have to flush to make sure our
        // close acknowledge goes through.
        if self.state == StreamState::CloseAcknowledged {
            return Poll::Ready(None);
        } else if self.state == StreamState::ClosedByPeer {
            ready!(self.as_mut().poll_flush(cx))?;
            self.state = StreamState::CloseAcknowledged;
            return Poll::Ready(None);
        }
        // If there are pending items, try to flush the sink.
        // Futures only store a single waker. If we use poll_flush(cx) here, the stored
        // waker (i.e. usually that of the write task) is replaced with our waker (i.e.
        // that of the read task) and our write task may never get woken up again. We
        // circumvent this by not calling poll_flush at all if poll_flush is polled by
        // another task at the moment.
        if self.frame_queue.has_remaining() {
            let waker = self.flushing_waker.clone();
            _ = self.as_mut().poll_flush(&mut Context::from_waker(
                waker.as_ref().unwrap_or(cx.waker()),
            ))?;
        }
        let frame = match ready!(Pin::new(&mut self.inner).poll_next(cx)) {
            Some(Ok(frame)) => frame,
            Some(Err(e)) => {
                if matches!(e, Error::Io(_)) || self.state == StreamState::ClosedByUs {
                    self.state = StreamState::CloseAcknowledged;
                } else {
                    self.state = StreamState::ClosedByPeer;
                    match &e {
                        Error::Protocol(e) => self.queue_frame(Frame::from(e)),
                        // Report the configured limit that was exceeded in the
                        // close reason instead of a hard-coded number.
                        Error::PayloadTooLong { max_len, .. } => self.queue_frame(
                            Message::close(
                                Some(CloseCode::MESSAGE_TOO_BIG),
                                &format!("max length: {max_len}"),
                            )
                            .into(),
                        ),
                        _ => {}
                    }
                }
                return Poll::Ready(Some(Err(e)));
            }
            None => return Poll::Ready(None),
        };
        match frame.opcode {
            OpCode::Close => match self.state {
                StreamState::Active => {
                    self.state = StreamState::ClosedByPeer;
                    // Echo only the close code (first two payload bytes) back,
                    // not the peer's close reason.
                    let mut frame = frame.clone();
                    frame.payload.truncate(2);
                    self.queue_frame(frame);
                }
                StreamState::ClosedByPeer | StreamState::CloseAcknowledged => {
                    debug_assert!(false, "unexpected StreamState");
                }
                StreamState::ClosedByUs => {
                    self.state = StreamState::CloseAcknowledged;
                }
            },
            OpCode::Ping if self.state == StreamState::Active => {
                // Answer pings with a pong carrying the identical payload.
                let mut frame = frame.clone();
                frame.opcode = OpCode::Pong;
                self.queue_frame(frame);
            }
            _ => {}
        }
        Poll::Ready(Some(Ok(frame)))
    }

    /// Masks and queues a frame for sending when [`poll_flush`] gets called.
    fn queue_frame(
        &mut self,
        #[cfg_attr(not(feature = "client"), allow(unused_mut))] mut frame: Frame,
    ) {
        if frame.opcode == OpCode::Close && self.state != StreamState::ClosedByPeer {
            self.state = StreamState::ClosedByUs;
        }
        #[cfg_attr(not(feature = "client"), allow(unused_variables))]
        let mask = frame.encode(&mut self.header_buf);
        #[cfg(feature = "client")]
        {
            if self.inner.decoder().role == Role::Client {
                let mut payload = BytesMut::from(frame.payload);
                crate::rand::get_mask(mask);
                // mask::frame will mutate the mask in-place, but we want to send the original
                // mask. This is essentially a u32, so copying it is cheap and easier than
                // special-casing this in the masking implementation.
                // &mut *mask won't work, the compiler will optimize the deref/copy away
                let mut mask_copy = *mask;
                crate::mask::frame(&mut mask_copy, &mut payload);
                frame.payload = Payload::from(payload);
                // Set the mask bit in the header.
                self.header_buf[1] |= 1 << 7;
            }
        }
        let item = EncodedFrame {
            header: self.header_buf,
            payload: frame.payload,
        };
        self.frame_queue.push(item);
    }

    /// Sets the waker that is currently flushing to a new one and does nothing
    /// if the waker is the same.
    fn set_flushing_waker(&mut self, waker: &Waker) {
        if !self
            .flushing_waker
            .as_ref()
            .is_some_and(|w| w.will_wake(waker))
        {
            self.flushing_waker = Some(waker.clone());
        }
    }
}
impl<T> Stream for WebSocketStream<T>
where
    T: AsyncRead + AsyncWrite + Unpin,
{
    type Item = Result<Message, Error>;
    // Assembles full messages from frames: non-final frames accumulate into
    // `partial_payload` until a final frame arrives.
    fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
        let max_len = self.inner.decoder().limits.max_payload_len;
        loop {
            let (opcode, payload, fin) = match ready!(self.as_mut().poll_next_frame(cx)?) {
                Some(frame) => (frame.opcode, frame.payload, frame.is_final),
                None => return Poll::Ready(None),
            };
            let len = self.partial_payload.len() + payload.len();
            if opcode != OpCode::Continuation {
                if fin {
                    // Unfragmented message: return it without buffering.
                    return Poll::Ready(Some(Ok(Message { opcode, payload })));
                }
                self.partial_opcode = opcode;
                self.partial_payload = BytesMut::from(payload);
            } else if len > max_len {
                // Enforce the payload limit across all fragments of the
                // message, not just per individual frame.
                return Poll::Ready(Some(Err(Error::PayloadTooLong { len, max_len })));
            } else {
                self.partial_payload.extend_from_slice(&payload);
            }
            if fin {
                break;
            }
        }
        let opcode = replace(&mut self.partial_opcode, OpCode::Continuation);
        let mut payload = Payload::from(take(&mut self.partial_payload));
        // Text payloads were already validated frame-by-frame by the decoder,
        // so mark them to avoid re-validating.
        payload.set_utf8_validated(opcode == OpCode::Text);
        Poll::Ready(Some(Ok(Message { opcode, payload })))
    }
}
// The tokio-util implementation of a sink uses a buffer which start_send
// appends to and poll_flush tries to write from. This makes sense, but comes
// with a hefty performance penalty when sending large payloads, since this adds
// a memmove from the payload to the buffer. We completely avoid that overhead
// by storing messages in a deque.
impl<T> Sink<Message> for WebSocketStream<T>
where
    T: AsyncRead + AsyncWrite + Unpin,
{
    type Error = Error;
    fn poll_ready(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
        // tokio-util calls poll_flush when more than 8096 bytes are pending, otherwise
        // it returns Ready. We will just replicate that behavior
        if self.frame_queue.remaining() >= self.config.flush_threshold {
            self.as_mut().poll_flush(cx)
        } else {
            Poll::Ready(Ok(()))
        }
    }
    fn start_send(mut self: Pin<&mut Self>, item: Message) -> Result<(), Self::Error> {
        if self.state != StreamState::Active {
            return Err(Error::AlreadyClosed);
        }
        // Control frames are never fragmented; data messages are only split
        // when they exceed the configured frame size.
        if item.opcode.is_control() || item.payload.len() <= self.config.frame_size {
            let frame: Frame = item.into();
            self.queue_frame(frame);
        } else {
            // Chunk the message into frames
            for frame in item.into_frames(self.config.frame_size) {
                self.queue_frame(frame);
            }
        }
        Ok(())
    }
    fn poll_flush(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
        // Borrow checker hacks... It needs this to understand that we can separately
        // borrow the fields of the struct mutably
        let this = self.get_mut();
        let frame_queue = &mut this.frame_queue;
        let io = this.inner.get_mut();
        let flushing_waker = &mut this.flushing_waker;
        while frame_queue.has_remaining() {
            let n = match poll_write_buf(Pin::new(io), cx, frame_queue) {
                Poll::Ready(Ok(n)) => n,
                Poll::Ready(Err(e)) => {
                    *flushing_waker = None;
                    this.state = StreamState::CloseAcknowledged;
                    return Poll::Ready(Err(Error::Io(e)));
                }
                Poll::Pending => {
                    // Remember which task is mid-flush so reads don't steal
                    // its wakeup (see poll_next_frame).
                    this.set_flushing_waker(cx.waker());
                    return Poll::Pending;
                }
            };
            if n == 0 {
                // A zero-length write means the peer will not accept more
                // data; treat it as a fatal I/O error.
                *flushing_waker = None;
                this.state = StreamState::CloseAcknowledged;
                return Poll::Ready(Err(Error::Io(io::ErrorKind::WriteZero.into())));
            }
        }
        match Pin::new(io).poll_flush(cx) {
            Poll::Ready(Ok(())) => {
                *flushing_waker = None;
                Poll::Ready(Ok(()))
            }
            Poll::Ready(Err(e)) => {
                *flushing_waker = None;
                this.state = StreamState::CloseAcknowledged;
                Poll::Ready(Err(Error::Io(e)))
            }
            Poll::Pending => {
                this.set_flushing_waker(cx.waker());
                Poll::Pending
            }
        }
    }
    fn poll_close(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
        if self.state == StreamState::Active {
            self.queue_frame(Frame::DEFAULT_CLOSE);
        }
        // Drain incoming frames so the close handshake can complete.
        while ready!(self.as_mut().poll_next(cx)).is_some() {}
        ready!(self.as_mut().poll_flush(cx))?;
        Pin::new(self.inner.get_mut())
            .poll_shutdown(cx)
            .map_err(Error::Io)
    }
}
| rust | MIT | 618599d15e9ee2a9a7191d081a56d90eae4a811a | 2026-01-04T20:19:32.035255Z | false |
Gelbpunkt/tokio-websockets | https://github.com/Gelbpunkt/tokio-websockets/blob/618599d15e9ee2a9a7191d081a56d90eae4a811a/src/proto/codec.rs | src/proto/codec.rs | //! Implementation of a tokio-util [`Decoder`] for WebSocket
//! frames. The [`Encoder`] is a placeholder and unreachable, since tokio-util's
//! internal buffer used in the encoder comes with a hefty performance penalty
//! for large payloads due to the required memmove. Instead, we implement our
//! own, zero-copy implementation in the sink implementation of the
//! [`WebSocketStream`].
//!
//! [`WebSocketStream`]: super::WebSocketStream
use bytes::{Buf, BytesMut};
use tokio_util::codec::Decoder;
use super::types::{Frame, Limits, OpCode, Role};
use crate::{
CloseCode, Error, Payload, mask,
proto::ProtocolError,
utf8::{self, Validator},
};
/// Maximum size of a frame header (2 + 8 + 4).
/// Base header plus 64-bit extended length plus mask.
const MAX_FRAME_HEADER_SIZE: usize = 14;
/// The actual implementation of the WebSocket byte-level protocol.
/// It provides a [`Decoder`] for single frames that must be assembled by a
/// client such as the [`WebSocketStream`] later.
///
/// [`WebSocketStream`]: super::WebSocketStream
#[derive(Debug)]
pub(super) struct WebSocketProtocol {
    /// The [`Role`] this implementation should assume for the stream.
    pub(super) role: Role,
    /// The [`Limits`] imposed on this stream.
    pub(super) limits: Limits,
    /// Opcode of the full message.
    /// `Continuation` doubles as the marker for "no fragmented message in
    /// flight".
    fragmented_message_opcode: OpCode,
    /// Index up to which the payload was processed (unmasked and validated).
    /// Allows incremental processing across repeated `decode` calls.
    payload_processed: usize,
    /// UTF-8 validator.
    validator: Validator,
}
impl WebSocketProtocol {
    /// Creates a new WebSocket codec.
    #[cfg(any(feature = "client", feature = "server"))]
    pub(super) fn new(role: Role, limits: Limits) -> Self {
        Self {
            // Fresh validator state and no fragmented message or partially
            // processed payload to begin with.
            validator: Validator::new(),
            payload_processed: 0,
            fragmented_message_opcode: OpCode::Continuation,
            role,
            limits,
        }
    }
}
/// Macro that gets a range of a buffer. It returns `Ok(None)` and reserves
/// missing capacity of the buffer if it is too small.
macro_rules! get_buf_if_space {
    ($buf:expr, $range:expr) => {
        if let Some(cont) = $buf.get($range) {
            cont
        } else {
            // Not enough bytes buffered yet: reserve up to a full frame
            // header's worth of extra capacity, then signal "need more data"
            // by returning early from the enclosing decode call.
            $buf.reserve(MAX_FRAME_HEADER_SIZE - $range.len());
            return Ok(None);
        }
    };
}
impl Decoder for WebSocketProtocol {
    type Error = Error;
    type Item = Frame;

    #[allow(clippy::cast_possible_truncation, clippy::too_many_lines)]
    fn decode(&mut self, src: &mut BytesMut) -> Result<Option<Self::Item>, Self::Error> {
        // The first two header bytes (FIN/RSV/opcode and MASK/length) must be
        // present before anything can be decoded.
        let first_two_bytes = get_buf_if_space!(src, 0..2);
        // Byte 1, bit 0: the FIN flag, set on the final frame of a message.
        let fin = first_two_bytes[0] >> 7 != 0;
        // Byte 1, bits 1-3: RSV1-3, used only by extensions, which are
        // unsupported — any non-zero value is a protocol violation.
        let rsv = first_two_bytes[0] & 0x70;

        if rsv != 0 {
            return Err(Error::Protocol(ProtocolError::InvalidRsv));
        }

        // Byte 1, bits 4-7: the frame opcode.
        let opcode = OpCode::try_from(first_two_bytes[0] & 0xF)?;

        // Validate the opcode against the fragmentation state: control frames
        // may be interleaved into a fragmented message but must not themselves
        // be fragmented; a continuation frame is only valid while a fragmented
        // message is in progress; and a new data frame must not start while
        // one is in progress.
        if opcode.is_control() {
            if !fin {
                return Err(Error::Protocol(ProtocolError::FragmentedControlFrame));
            }
        } else if self.fragmented_message_opcode == OpCode::Continuation {
            if opcode == OpCode::Continuation {
                return Err(Error::Protocol(ProtocolError::InvalidOpcode));
            }
        } else if opcode != OpCode::Continuation {
            return Err(Error::Protocol(ProtocolError::InvalidOpcode));
        }

        // Byte 2, bit 0: the MASK flag. Clients must receive unmasked frames,
        // servers must receive masked frames.
        let masked = first_two_bytes[1] >> 7 != 0;

        if masked && self.role == Role::Client {
            return Err(Error::Protocol(ProtocolError::UnexpectedMaskedFrame));
        } else if !masked && self.role == Role::Server {
            return Err(Error::Protocol(ProtocolError::UnexpectedUnmaskedFrame));
        }

        // Byte 2, bits 1-7: the 7-bit payload length, or a marker (126/127)
        // selecting an extended 16- or 64-bit length.
        let mut payload_length = (first_two_bytes[1] & 127) as usize;
        // Total header size (extended length and mask key included) in bytes.
        let mut offset = 2;

        // Close frames with a payload must carry at least the 2-byte code
        if opcode == OpCode::Close && payload_length == 1 {
            return Err(Error::Protocol(ProtocolError::InvalidPayloadLength));
        } else if payload_length > 125 {
            // Control frame payloads are capped at 125 bytes, so extended
            // length encodings are invalid for them.
            if opcode.is_control() {
                return Err(Error::Protocol(ProtocolError::InvalidPayloadLength));
            }

            if payload_length == 126 {
                // A conversion from 2 u8s to a u16 cannot fail
                payload_length =
                    u16::from_be_bytes(get_buf_if_space!(src, 2..4).try_into().unwrap()) as usize;

                // Extended lengths must use the minimal possible encoding
                if payload_length <= 125 {
                    return Err(Error::Protocol(ProtocolError::InvalidPayloadLength));
                }

                offset = 4;
            } else if payload_length == 127 {
                // A conversion from 8 u8s to a u64 cannot fail
                payload_length =
                    u64::from_be_bytes(get_buf_if_space!(src, 2..10).try_into().unwrap()) as usize;

                // A value that fits in 16 bits should have used the 16-bit form
                if u16::try_from(payload_length).is_ok() {
                    return Err(Error::Protocol(ProtocolError::InvalidPayloadLength));
                }

                offset = 10;
            } else {
                debug_assert!(false, "7 bit value expected to be <= 127");
            }
        }

        if payload_length > self.limits.max_payload_len {
            return Err(Error::PayloadTooLong {
                len: payload_length,
                max_len: self.limits.max_payload_len,
            });
        }

        // A masked frame carries a 4-byte masking key after the length.
        if masked {
            offset += 4;

            if src.len() < offset {
                src.reserve(MAX_FRAME_HEADER_SIZE - 4);
                return Ok(None);
            }
        }

        if payload_length != 0 {
            let is_text = opcode == OpCode::Text
                || (opcode == OpCode::Continuation
                    && self.fragmented_message_opcode == OpCode::Text);
            // How much of the payload has arrived so far, capped at the
            // declared length.
            let payload_available = (src.len() - offset).min(payload_length);
            let is_complete = payload_available == payload_length;
            // Unmask eagerly when either the whole frame has arrived or the
            // payload is text that will be incrementally UTF-8-validated.
            // `self.payload_processed` tracks how far previous decode calls
            // already unmasked/validated, so work is never repeated.
            let payload = if masked && (is_complete || is_text) {
                // SAFETY: offset <= src.len() was established above (the
                // header bytes were fetched and, when masked, src.len() >=
                // offset was checked).
                let (l, r) = unsafe { src.split_at_mut_unchecked(offset) };
                // SAFETY: when masked, the header ends with the 4-byte mask
                // key, so l.len() == offset >= 4.
                let mask = unsafe { l.get_unchecked_mut(l.len() - 4..).try_into().unwrap() };
                // SAFETY: self.payload_processed <= payload_length
                let payload =
                    unsafe { r.get_unchecked_mut(self.payload_processed..payload_available) };
                mask::frame(mask, payload);
                payload
            } else {
                // SAFETY: self.payload_processed <= payload_length
                unsafe {
                    src.get_unchecked_mut(
                        offset + self.payload_processed..offset + payload_available,
                    )
                }
            };

            // Feed newly arrived text bytes to the streaming UTF-8 validator;
            // the final chunk of the final frame must end on a full codepoint.
            if is_text {
                self.validator.feed(payload, is_complete && fin)?;
                self.payload_processed = payload_available;
            }

            // Not all payload bytes have arrived yet — wait for more data.
            if !is_complete {
                src.reserve(payload_length - payload_available);
                return Ok(None);
            }

            if opcode == OpCode::Close {
                // SAFETY: Close frames with a non-zero payload length are validated to not have
                // a length of 1
                // A conversion from two u8s to a u16 cannot fail
                let code = CloseCode::try_from(u16::from_be_bytes(unsafe {
                    src.get_unchecked(offset..offset + 2).try_into().unwrap()
                }))?;

                if code.is_reserved() {
                    return Err(Error::Protocol(ProtocolError::InvalidCloseCode));
                }

                // The close reason, if any, must be valid UTF-8.
                // SAFETY: payload_length <= src.len()
                let _reason = utf8::parse_str(unsafe {
                    src.get_unchecked(offset + 2..offset + payload_length)
                })?;
            }
        }

        // Drop the header bytes
        src.advance(offset);
        // Take the payload
        let mut payload = Payload::from(src.split_to(payload_length));
        payload.set_utf8_validated(opcode == OpCode::Text && fin);

        // It is possible to receive intermediate control frames between a large other
        // frame. We therefore can't simply reset the fragmented opcode after we receive
        // a "final" frame.
        if (fin && opcode == OpCode::Continuation) || (!fin && opcode != OpCode::Continuation) {
            // Full chunked message received (and opcode is Continuation)
            // or first frame of a multi-frame message received
            self.fragmented_message_opcode = opcode;
        }
        // In all other cases, we have either a continuation or control frame,
        // neither of which changes the opcode being assembled

        self.payload_processed = 0;

        Ok(Some(Frame {
            opcode,
            payload,
            is_final: fin,
        }))
    }
}
| rust | MIT | 618599d15e9ee2a9a7191d081a56d90eae4a811a | 2026-01-04T20:19:32.035255Z | false |
Gelbpunkt/tokio-websockets | https://github.com/Gelbpunkt/tokio-websockets/blob/618599d15e9ee2a9a7191d081a56d90eae4a811a/src/proto/error.rs | src/proto/error.rs | //! WebSocket protocol error type.
use std::fmt;
/// Error encountered on protocol violations by the other end of the connection.
///
/// A close frame describing the violation can be constructed from this error
/// via its `From<&ProtocolError> for Frame` implementation.
#[allow(clippy::module_name_repetitions)]
#[derive(Debug)]
#[non_exhaustive]
pub enum ProtocolError {
    /// A fragmented control frame was received.
    FragmentedControlFrame,
    /// An invalid close code has been received.
    InvalidCloseCode,
    /// An invalid opcode was received.
    InvalidOpcode,
    /// An invalid payload length was received.
    InvalidPayloadLength,
    /// An invalid RSV was received. This is used by extensions, which are
    /// currently unsupported.
    InvalidRsv,
    /// An invalid UTF-8 segment was received when valid UTF-8 was expected.
    InvalidUtf8,
    /// A masked frame was unexpectedly received.
    UnexpectedMaskedFrame,
    /// An unmasked frame was unexpectedly received.
    UnexpectedUnmaskedFrame,
}
impl ProtocolError {
    /// Returns a static, human-readable description of the violation, used
    /// both for `Display` and as the reason text of generated close frames.
    pub(super) const fn as_str(&self) -> &'static str {
        match self {
            Self::FragmentedControlFrame => "fragmented control frame",
            Self::InvalidCloseCode => "invalid close code",
            Self::InvalidOpcode => "invalid opcode",
            Self::InvalidPayloadLength => "invalid payload length",
            Self::InvalidRsv => "invalid extension",
            Self::InvalidUtf8 => "invalid utf-8",
            Self::UnexpectedMaskedFrame => "unexpected masked frame",
            Self::UnexpectedUnmaskedFrame => "unexpected unmasked frame",
        }
    }
}
impl fmt::Display for ProtocolError {
    /// Formats the error using its static description.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.as_str())
    }
}

impl std::error::Error for ProtocolError {}
| rust | MIT | 618599d15e9ee2a9a7191d081a56d90eae4a811a | 2026-01-04T20:19:32.035255Z | false |
Gelbpunkt/tokio-websockets | https://github.com/Gelbpunkt/tokio-websockets/blob/618599d15e9ee2a9a7191d081a56d90eae4a811a/src/proto/types.rs | src/proto/types.rs | //! Types required for the WebSocket protocol implementation.
use std::{fmt, mem::replace, num::NonZeroU16, ops::Deref};
use bytes::{BufMut, Bytes, BytesMut};
use super::error::ProtocolError;
use crate::utf8;
/// The opcode of a WebSocket frame. It denotes the type of the frame or an
/// assembled message.
///
/// A fully assembled [`Message`] will never have a continuation opcode.
///
/// The Rust discriminants deliberately differ from the on-the-wire values;
/// conversion happens via the `TryFrom<u8>`/`From<OpCode>` implementations.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub(super) enum OpCode {
    /// A continuation opcode. This will never be encountered in a full
    /// [`Message`].
    Continuation,
    /// A text opcode.
    Text,
    /// A binary opcode.
    Binary,
    /// A close opcode.
    Close,
    /// A ping opcode.
    Ping,
    /// A pong opcode.
    Pong,
}
impl OpCode {
    /// Whether this is a control opcode (i.e. close, ping or pong).
    pub(super) fn is_control(self) -> bool {
        match self {
            Self::Close | Self::Ping | Self::Pong => true,
            Self::Continuation | Self::Text | Self::Binary => false,
        }
    }
}
impl TryFrom<u8> for OpCode {
    type Error = ProtocolError;

    /// Decodes the 4-bit opcode from a frame header, rejecting values that
    /// are reserved by the protocol.
    fn try_from(value: u8) -> Result<Self, Self::Error> {
        let opcode = match value {
            0 => Self::Continuation,
            1 => Self::Text,
            2 => Self::Binary,
            8 => Self::Close,
            9 => Self::Ping,
            10 => Self::Pong,
            _ => return Err(ProtocolError::InvalidOpcode),
        };

        Ok(opcode)
    }
}
impl From<OpCode> for u8 {
    /// Encodes the opcode to its on-the-wire numeric value.
    ///
    /// Note that this cannot be a plain `as` cast: the enum's default
    /// discriminants (0-5) differ from the wire values (0-2 and 8-10).
    fn from(opcode: OpCode) -> Self {
        match opcode {
            OpCode::Continuation => 0,
            OpCode::Text => 1,
            OpCode::Binary => 2,
            OpCode::Close => 8,
            OpCode::Ping => 9,
            OpCode::Pong => 10,
        }
    }
}
/// Close status code.
///
/// Codes 1004, 1006 and 1015 are reserved and may not be sent over the wire,
/// so no constants are provided for them.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub struct CloseCode(NonZeroU16);

// rustfmt reorders these alphabetically
#[rustfmt::skip]
impl CloseCode {
    /// Normal closure, meaning that the purpose for which the connection was
    /// established has been fulfilled.
    pub const NORMAL_CLOSURE: Self = Self::constant(1000);
    /// Endpoint is "going away", such as a server going down or a browser
    /// having navigated away from a page.
    pub const GOING_AWAY: Self = Self::constant(1001);
    /// Endpoint is terminating the connection due to a protocol error.
    pub const PROTOCOL_ERROR: Self = Self::constant(1002);
    /// Endpoint is terminating the connection because it has received a type of
    /// data it cannot accept.
    pub const UNSUPPORTED_DATA: Self = Self::constant(1003);
    /// No status code was actually present.
    pub const NO_STATUS_RECEIVED: Self = Self::constant(1005);
    /// Endpoint is terminating the connection because it has received data
    /// within a message that was not consistent with the type of the message.
    pub const INVALID_FRAME_PAYLOAD_DATA: Self = Self::constant(1007);
    /// Endpoint is terminating the connection because it has received a message
    /// that violates its policy.
    pub const POLICY_VIOLATION: Self = Self::constant(1008);
    /// Endpoint is terminating the connection because it has received a message
    /// that is too big for it to process.
    pub const MESSAGE_TOO_BIG: Self = Self::constant(1009);
    /// Client is terminating the connection because it has expected the server
    /// to negotiate one or more extensions, but the server didn't return them in
    /// the response message of the WebSocket handshake.
    pub const MANDATORY_EXTENSION: Self = Self::constant(1010);
    /// Server is terminating the connection because it encountered an
    /// unexpected condition that prevented it from fulfilling the request.
    pub const INTERNAL_SERVER_ERROR: Self = Self::constant(1011);
    /// Service is restarted. A client may reconnect, and if it chooses to do,
    /// should reconnect using a randomized delay of 5--30s.
    pub const SERVICE_RESTART: Self = Self::constant(1012);
    /// Service is experiencing overload. A client should only connect to a
    /// different IP (when there are multiple for the target) or reconnect to
    /// the same IP upon user action.
    pub const SERVICE_OVERLOAD: Self = Self::constant(1013);
    /// The server was acting as a gateway or proxy and received an invalid
    /// response from the upstream server. This is similar to the HTTP 502
    /// status code.
    pub const BAD_GATEWAY: Self = Self::constant(1014);
}
impl CloseCode {
    /// Try to construct [`CloseCode`] from `u16`
    ///
    /// Returns `None` if `code` is not a valid `CloseCode`
    const fn try_from_u16(code: u16) -> Option<Self> {
        // Only 1000-1015 (defined codes) and 3000-4999 (registered and
        // private-use ranges) are accepted. The ranges exclude 0, so the
        // const unwrap on NonZeroU16::new can never panic.
        match code {
            1000..=1015 | 3000..=4999 => Some(Self(NonZeroU16::new(code).unwrap())),
            0..=999 | 1016..=2999 | 5000..=u16::MAX => None,
        }
    }

    /// Try to construct [`CloseCode`] from `u16`
    ///
    /// Panics if `code` is not a valid `CloseCode`; only used for the
    /// compile-time constants above.
    const fn constant(code: u16) -> Self {
        Self::try_from_u16(code).unwrap()
    }

    /// Whether the close code is reserved and cannot be sent over the wire.
    #[must_use]
    pub fn is_reserved(self) -> bool {
        match self.0.get() {
            1004 | 1005 | 1006 | 1015 => true,
            1000..=4999 => false,
            // `TryFrom` is the only way to acquire self and it errors for these values
            0..=999 | 5000..=u16::MAX => {
                debug_assert!(false, "unexpected CloseCode");
                false
            }
        }
    }
}
impl From<CloseCode> for u16 {
    /// Returns the numeric status code.
    fn from(code: CloseCode) -> Self {
        code.0.get()
    }
}

impl TryFrom<u16> for CloseCode {
    type Error = ProtocolError;

    /// Validates and wraps a raw status code received over the wire.
    fn try_from(code: u16) -> Result<Self, Self::Error> {
        match Self::try_from_u16(code) {
            Some(close_code) => Ok(close_code),
            None => Err(ProtocolError::InvalidCloseCode),
        }
    }
}
/// The websocket message payload storage.
///
/// Payloads can be created by using the `From<T>` implementations.
///
/// Sending the payloads or calling [`Into<BytesMut>`] is zero-copy, except when
/// sending a payload created from a static slice or when the payload buffer is
/// not unique. All conversions to other types are zero-cost.
///
/// [`Into<BytesMut>`]: #impl-From<Payload>-for-BytesMut
#[derive(Clone)]
pub struct Payload {
    /// The raw payload data.
    data: Bytes,
    /// Whether the payload data was validated to be valid UTF-8. Set for
    /// payloads created from strings or validated during decoding; used to
    /// skip re-validation when casting to `str`.
    utf8_validated: bool,
}
impl Payload {
    /// Creates a payload backed by a static byte slice, without allocating.
    const fn from_static(bytes: &'static [u8]) -> Self {
        let data = Bytes::from_static(bytes);

        Self {
            data,
            utf8_validated: false,
        }
    }

    /// Records whether the payload bytes have been checked to be valid UTF-8.
    pub(super) fn set_utf8_validated(&mut self, validated: bool) {
        self.utf8_validated = validated;
    }

    /// Keeps only the first `len` bytes of the payload, dropping the rest.
    pub(super) fn truncate(&mut self, len: usize) {
        self.data.truncate(len);
    }

    /// Splits off and returns the first `at` bytes as a new payload.
    fn split_to(&mut self, at: usize) -> Self {
        // Only used by the outgoing message frame iterator, where a UTF-8
        // codepoint might be cut in half; conservatively mark both halves as
        // unvalidated.
        self.utf8_validated = false;

        Self {
            data: self.data.split_to(at),
            utf8_validated: false,
        }
    }
}
impl Deref for Payload {
    type Target = [u8];

    fn deref(&self) -> &Self::Target {
        // Payloads transparently dereference to their raw bytes.
        &self.data
    }
}

impl fmt::Debug for Payload {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Only the data is shown; the utf8_validated flag is an internal
        // cache and intentionally omitted.
        f.debug_tuple("Payload").field(&self.data).finish()
    }
}
impl From<Bytes> for Payload {
    /// Wraps shared bytes without copying; UTF-8 status is unknown.
    fn from(value: Bytes) -> Self {
        Self {
            data: value,
            utf8_validated: false,
        }
    }
}

impl From<BytesMut> for Payload {
    /// Freezes the mutable buffer without copying; UTF-8 status is unknown.
    fn from(value: BytesMut) -> Self {
        Self {
            data: value.freeze(),
            utf8_validated: false,
        }
    }
}

impl From<Payload> for Bytes {
    /// Extracts the underlying shared bytes; zero-cost.
    fn from(value: Payload) -> Self {
        value.data
    }
}

impl From<Payload> for BytesMut {
    /// Converts back to a mutable buffer; copies only if the underlying
    /// buffer is shared or static.
    fn from(value: Payload) -> Self {
        value.data.into()
    }
}

impl From<Vec<u8>> for Payload {
    fn from(value: Vec<u8>) -> Self {
        // BytesMut::from_iter goes through a specialization in std if the iterator is a
        // Vec, effectively allowing us to use BytesMut::from_vec which isn't
        // exposed in bytes. See https://github.com/tokio-rs/bytes/issues/723 for details.
        Self {
            data: BytesMut::from_iter(value).freeze(),
            utf8_validated: false,
        }
    }
}

impl From<String> for Payload {
    /// A `String` is valid UTF-8 by construction, so the payload is marked
    /// as validated.
    fn from(value: String) -> Self {
        // See From<Vec<u8>> impl for reasoning behind this.
        Self {
            data: BytesMut::from_iter(value.into_bytes()).freeze(),
            utf8_validated: true,
        }
    }
}

impl From<&'static [u8]> for Payload {
    /// Wraps a static slice without allocating; UTF-8 status is unknown.
    fn from(value: &'static [u8]) -> Self {
        Self {
            data: Bytes::from_static(value),
            utf8_validated: false,
        }
    }
}

impl From<&'static str> for Payload {
    /// A `&str` is valid UTF-8 by construction, so the payload is marked
    /// as validated.
    fn from(value: &'static str) -> Self {
        Self {
            data: Bytes::from_static(value.as_bytes()),
            utf8_validated: true,
        }
    }
}
/// A WebSocket message. This is cheaply clonable and uses [`Payload`] as the
/// payload storage underneath.
///
/// Received messages are always validated prior to dealing with them, so all
/// the type casting methods are either almost or fully zero cost.
#[derive(Debug, Clone)]
pub struct Message {
    /// The [`OpCode`] of the message. Never `Continuation` for a fully
    /// assembled message.
    pub(super) opcode: OpCode,
    /// The payload of the message.
    pub(super) payload: Payload,
}
impl Message {
    /// Create a new text message. The payload contents must be valid UTF-8.
    #[must_use]
    pub fn text<P: Into<Payload>>(payload: P) -> Self {
        Self {
            opcode: OpCode::Text,
            payload: payload.into(),
        }
    }

    /// Create a new binary message.
    #[must_use]
    pub fn binary<P: Into<Payload>>(payload: P) -> Self {
        Self {
            opcode: OpCode::Binary,
            payload: payload.into(),
        }
    }

    /// Create a new close message. If a non-empty reason is specified, a
    /// [`CloseCode`] must be specified for it to be included.
    ///
    /// # Panics
    /// - If the `code` is reserved so it cannot be sent.
    /// - If `code` is present and the `reason` exceeds 123 bytes, the
    ///   protocol-imposed limit.
    #[must_use]
    #[track_caller]
    pub fn close(code: Option<CloseCode>, reason: &str) -> Self {
        // Capacity is zero when no code is given, since the reason is only
        // written alongside a code.
        let mut payload = BytesMut::with_capacity((2 + reason.len()) * usize::from(code.is_some()));

        if let Some(code) = code {
            assert!(!code.is_reserved());
            payload.put_u16(code.into());
            // Close payloads are capped at 125 bytes, 2 of which the code uses.
            assert!(reason.len() <= 123);
            payload.extend_from_slice(reason.as_bytes());
        }

        Self {
            opcode: OpCode::Close,
            payload: payload.into(),
        }
    }

    /// Create a new ping message.
    ///
    /// # Panics
    /// If the payload exceeds 125 bytes, the protocol-imposed limit.
    #[must_use]
    #[track_caller]
    pub fn ping<P: Into<Payload>>(payload: P) -> Self {
        let payload = payload.into();
        assert!(payload.len() <= 125);

        Self {
            opcode: OpCode::Ping,
            payload,
        }
    }

    /// Create a new pong message.
    ///
    /// # Panics
    /// If the payload exceeds 125 bytes, the protocol-imposed limit.
    #[must_use]
    #[track_caller]
    pub fn pong<P: Into<Payload>>(payload: P) -> Self {
        let payload = payload.into();
        assert!(payload.len() <= 125);

        Self {
            opcode: OpCode::Pong,
            payload,
        }
    }

    /// Whether the message is a text message.
    #[must_use]
    pub fn is_text(&self) -> bool {
        self.opcode == OpCode::Text
    }

    /// Whether the message is a binary message.
    #[must_use]
    pub fn is_binary(&self) -> bool {
        self.opcode == OpCode::Binary
    }

    /// Whether the message is a close message.
    #[must_use]
    pub fn is_close(&self) -> bool {
        self.opcode == OpCode::Close
    }

    /// Whether the message is a ping message.
    #[must_use]
    pub fn is_ping(&self) -> bool {
        self.opcode == OpCode::Ping
    }

    /// Whether the message is a pong message.
    #[must_use]
    pub fn is_pong(&self) -> bool {
        self.opcode == OpCode::Pong
    }

    /// Returns the message payload and consumes the message, regardless of
    /// type.
    #[must_use]
    pub fn into_payload(self) -> Payload {
        self.payload
    }

    /// Returns a reference to the message payload, regardless of message type.
    pub fn as_payload(&self) -> &Payload {
        &self.payload
    }

    /// Returns a reference to the message payload as a string if it is a text
    /// message.
    ///
    /// # Panics
    ///
    /// This method will panic when the message was created via
    /// [`Message::text`] with invalid UTF-8.
    pub fn as_text(&self) -> Option<&str> {
        // SAFETY: Received messages were validated to be valid UTF-8, otherwise
        // we check if it is valid UTF-8.
        (self.opcode == OpCode::Text).then(|| {
            assert!(
                self.payload.utf8_validated || utf8::parse_str(&self.payload).is_ok(),
                "called as_text on message created from payload with invalid utf-8"
            );
            unsafe { std::str::from_utf8_unchecked(&self.payload) }
        })
    }

    /// Returns the [`CloseCode`] and close reason if the message is a close
    /// message.
    ///
    /// An empty payload maps to [`CloseCode::NO_STATUS_RECEIVED`] with an
    /// empty reason.
    pub fn as_close(&self) -> Option<(CloseCode, &str)> {
        (self.opcode == OpCode::Close).then(|| {
            let code = if self.payload.is_empty() {
                CloseCode::NO_STATUS_RECEIVED
            } else {
                // SAFETY: Opcode is Close with a non-empty payload so it's at least 2 bytes
                // long
                unsafe {
                    CloseCode::try_from(u16::from_be_bytes(
                        self.payload
                            .get_unchecked(0..2)
                            .try_into()
                            .unwrap_unchecked(),
                    ))
                    .unwrap_unchecked()
                }
            };

            // SAFETY: Opcode is Close so the rest of the payload is valid UTF-8
            let reason =
                unsafe { std::str::from_utf8_unchecked(self.payload.get(2..).unwrap_or_default()) };

            (code, reason)
        })
    }

    /// Returns an iterator over frames of `frame_size` length to split this
    /// message into.
    pub(super) fn into_frames(self, frame_size: usize) -> MessageFrames {
        MessageFrames {
            frame_size,
            payload: self.payload,
            opcode: self.opcode,
        }
    }
}
/// Iterator over frames of a chunked message.
pub(super) struct MessageFrames {
    /// Maximum payload size of each emitted frame.
    frame_size: usize,
    /// The full message payload this iterates over.
    payload: Payload,
    /// Opcode for the next frame.
    opcode: OpCode,
}

impl Iterator for MessageFrames {
    type Item = Frame;

    fn next(&mut self) -> Option<Self::Item> {
        // Once the payload is drained and the opcode was already replaced by
        // Continuation, the message has been fully emitted. An empty message
        // still yields one (empty) frame carrying the original opcode.
        let is_empty = self.payload.is_empty() && self.opcode == OpCode::Continuation;

        (!is_empty).then(|| {
            let payload = self
                .payload
                .split_to(self.frame_size.min(self.payload.len()));

            Frame {
                // Only the first frame carries the message opcode; all
                // subsequent frames are continuations.
                opcode: replace(&mut self.opcode, OpCode::Continuation),
                is_final: self.payload.is_empty(),
                payload,
            }
        })
    }
}
/// Configuration for limitations on reading of [`Message`]s from a
/// [`WebSocketStream`] to prevent high memory usage caused by malicious actors.
///
/// [`WebSocketStream`]: super::WebSocketStream
#[derive(Debug, Clone, Copy)]
pub struct Limits {
    /// The maximum allowed payload length. The default is 64 MiB;
    /// `usize::MAX` is treated as unlimited.
    pub(super) max_payload_len: usize,
}
impl Limits {
    /// A limit configuration that places no restriction on payload length.
    #[must_use]
    pub fn unlimited() -> Self {
        Self {
            max_payload_len: usize::MAX,
        }
    }

    /// Builder-style setter for the maximum allowed payload length. `None`
    /// equals no limit.
    ///
    /// The default is 64 MiB.
    #[must_use]
    pub fn max_payload_len(mut self, size: Option<usize>) -> Self {
        self.set_max_payload_len(size);
        self
    }

    /// See [`max_payload_len`](Self::max_payload_len).
    pub fn set_max_payload_len(&mut self, size: Option<usize>) {
        self.max_payload_len = match size {
            Some(limit) => limit,
            // `usize::MAX` acts as the "unlimited" sentinel.
            None => usize::MAX,
        };
    }
}

impl Default for Limits {
    /// Defaults to a maximum payload length of 64 MiB.
    fn default() -> Self {
        Self {
            max_payload_len: 64 * 1024 * 1024,
        }
    }
}
/// Low-level configuration for a [`WebSocketStream`] that allows configuring
/// behavior for sending and receiving messages.
///
/// [`WebSocketStream`]: super::WebSocketStream
#[derive(Debug, Clone, Copy)]
pub struct Config {
    /// Frame payload size to split outgoing messages into.
    ///
    /// Consider decreasing this if the remote imposes a limit on the frame
    /// payload size. The default is 4MiB. Must be non-zero.
    pub(super) frame_size: usize,
    /// Threshold of queued up bytes after which the underlying I/O is flushed
    /// before the sink is declared ready. The default is 8 KiB.
    pub(super) flush_threshold: usize,
}
impl Config {
    /// Set the frame payload size to split outgoing messages into.
    ///
    /// Consider decreasing this if the remote imposes a limit on the frame
    /// payload size. The default is 4MiB.
    ///
    /// # Panics
    ///
    /// If `frame_size` is `0`.
    #[must_use]
    pub fn frame_size(mut self, frame_size: usize) -> Self {
        // A zero frame size would make chunking outgoing messages impossible.
        assert_ne!(frame_size, 0, "frame_size must be non-zero");
        self.frame_size = frame_size;
        self
    }

    /// Sets the threshold of queued up bytes after which the underlying I/O is
    /// flushed before the sink is declared ready. The default is 8 KiB.
    #[must_use]
    pub fn flush_threshold(mut self, threshold: usize) -> Self {
        self.flush_threshold = threshold;
        self
    }
}

impl Default for Config {
    /// Defaults to a 4 MiB frame size and an 8 KiB flush threshold.
    fn default() -> Self {
        Self {
            frame_size: 4 * 1024 * 1024,
            flush_threshold: 8 * 1024,
        }
    }
}
/// Role assumed by the [`WebSocketStream`] in a connection.
///
/// The role determines masking behavior: only client-to-server frames are
/// masked.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub(crate) enum Role {
    /// The client end.
    Client,
    /// The server end.
    Server,
}

/// The connection state of the stream, tracking close-handshake progress.
#[derive(Debug, PartialEq)]
pub(super) enum StreamState {
    /// The connection is fully active and no close has been initiated.
    Active,
    /// The connection has been closed by the peer, but not yet acknowledged by
    /// us.
    ClosedByPeer,
    /// The connection has been closed by us, but not yet acknowledged.
    ClosedByUs,
    /// The close has been acknowledged by the end that did not initiate the
    /// close.
    CloseAcknowledged,
}

/// A frame of a WebSocket [`Message`].
#[derive(Clone, Debug)]
pub(super) struct Frame {
    /// The [`OpCode`] of the frame.
    pub opcode: OpCode,
    /// Whether this is the last frame of a message.
    pub is_final: bool,
    /// The payload bytes of the frame.
    pub payload: Payload,
}
impl Frame {
    /// Default close frame: a final close frame whose payload is just the
    /// "normal closure" status code, with no reason text.
    #[allow(clippy::declare_interior_mutable_const)]
    pub const DEFAULT_CLOSE: Self = Self {
        opcode: OpCode::Close,
        is_final: true,
        payload: Payload::from_static(&CloseCode::NORMAL_CLOSURE.0.get().to_be_bytes()),
    };

    /// Encode the frame head into `out`, returning a subslice where the mask
    /// should be written to.
    pub fn encode<'a>(&self, out: &'a mut [u8; 14]) -> &'a mut [u8; 4] {
        // FIN flag in the most significant bit, opcode in the low nibble.
        out[0] = (u8::from(self.is_final) << 7) | u8::from(self.opcode);

        // Pick the shortest length encoding: lengths above u16::MAX use the
        // 64-bit form (marker 127), 126..=u16::MAX use the 16-bit form
        // (marker 126), and <=125 fit directly in the 7-bit field. The mask
        // key slot follows immediately after the length.
        let mask_slice = if u16::try_from(self.payload.len()).is_err() {
            out[1] = 127;
            let len = u64::try_from(self.payload.len()).unwrap();
            out[2..10].copy_from_slice(&len.to_be_bytes());
            &mut out[10..14]
        } else if self.payload.len() > 125 {
            out[1] = 126;
            let len = u16::try_from(self.payload.len()).expect("checked by previous branch");
            out[2..4].copy_from_slice(&len.to_be_bytes());
            &mut out[4..8]
        } else {
            out[1] = u8::try_from(self.payload.len()).expect("checked by previous branch");
            &mut out[2..6]
        };

        mask_slice.try_into().unwrap()
    }
}
impl From<Message> for Frame {
    /// Wraps a complete message into a single, final frame.
    fn from(message: Message) -> Self {
        Self {
            opcode: message.opcode,
            is_final: true,
            payload: message.payload,
        }
    }
}

impl From<&ProtocolError> for Frame {
    /// Builds the close frame to send in response to a protocol violation by
    /// the peer, choosing the close code based on the kind of violation.
    fn from(err: &ProtocolError) -> Self {
        let message = match err {
            ProtocolError::InvalidUtf8 => {
                Message::close(Some(CloseCode::INVALID_FRAME_PAYLOAD_DATA), "invalid utf8")
            }
            _ => Message::close(Some(CloseCode::PROTOCOL_ERROR), err.as_str()),
        };

        message.into()
    }
}
| rust | MIT | 618599d15e9ee2a9a7191d081a56d90eae4a811a | 2026-01-04T20:19:32.035255Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.