repo stringlengths 6 65 | file_url stringlengths 81 311 | file_path stringlengths 6 227 | content stringlengths 0 32.8k | language stringclasses 1 value | license stringclasses 7 values | commit_sha stringlengths 40 40 | retrieved_at stringdate 2026-01-04 15:31:58 2026-01-04 20:25:31 | truncated bool 2 classes |
|---|---|---|---|---|---|---|---|---|
lapce/lapce | https://github.com/lapce/lapce/blob/59ce6df700ce4efbe4498a719fe52195e083d2ee/lapce-rpc/src/lib.rs | lapce-rpc/src/lib.rs | #![allow(clippy::manual_clamp)]
pub mod buffer;
pub mod core;
pub mod counter;
pub mod dap_types;
pub mod file;
pub mod file_line;
mod parse;
pub mod plugin;
pub mod proxy;
pub mod source_control;
pub mod stdio;
pub mod style;
pub mod terminal;
pub use parse::{Call, RequestId, RpcObject};
use serde::{Deserialize, Serialize};
pub use stdio::stdio_transport;
/// A single message exchanged over the RPC transport, generic over the
/// request, notification, and response payload types.
#[derive(Debug)]
pub enum RpcMessage<Req, Notif, Resp> {
    /// An incoming/outgoing request, tagged with the id the reply must echo.
    Request(RequestId, Req),
    /// A successful reply to the request with the same id.
    Response(RequestId, Resp),
    /// A fire-and-forget message with no id and no reply.
    Notification(Notif),
    /// A failure reply to the request with the same id.
    Error(RequestId, RpcError),
}
/// Error payload carried in the `error` field of an RPC response.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RpcError {
    /// Numeric error code; the meaning is defined by the peer.
    pub code: i64,
    /// Human-readable description of the failure.
    pub message: String,
}
| rust | Apache-2.0 | 59ce6df700ce4efbe4498a719fe52195e083d2ee | 2026-01-04T15:32:17.267102Z | false |
lapce/lapce | https://github.com/lapce/lapce/blob/59ce6df700ce4efbe4498a719fe52195e083d2ee/lapce-rpc/src/source_control.rs | lapce-rpc/src/source_control.rs | use std::path::PathBuf;
use serde::{Deserialize, Serialize};
/// Snapshot of a repository's source-control state.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Default)]
pub struct DiffInfo {
    /// Name of the currently checked-out head (e.g. branch name).
    pub head: String,
    /// All branch names known to the repository.
    pub branches: Vec<String>,
    /// All tag names known to the repository.
    pub tags: Vec<String>,
    /// Per-file changes in the working tree.
    pub diffs: Vec<FileDiff>,
}
/// The change recorded for a single file in the working tree.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Hash)]
pub enum FileDiff {
    Modified(PathBuf),
    Added(PathBuf),
    Deleted(PathBuf),
    /// A rename; the payload order here is `(old_path, new_path)` — `path()`
    /// below returns the second field. NOTE(review): field meaning is
    /// inferred from `path()` returning the second element; confirm at the
    /// producer.
    Renamed(PathBuf, PathBuf),
}
impl FileDiff {
    /// The path this diff applies to; for `Renamed`, the second path.
    pub fn path(&self) -> &PathBuf {
        match self {
            Self::Modified(path)
            | Self::Added(path)
            | Self::Deleted(path)
            | Self::Renamed(_, path) => path,
        }
    }

    /// The kind of change, with the path payload stripped off.
    pub fn kind(&self) -> FileDiffKind {
        match self {
            Self::Modified(_) => FileDiffKind::Modified,
            Self::Added(_) => FileDiffKind::Added,
            Self::Deleted(_) => FileDiffKind::Deleted,
            Self::Renamed(..) => FileDiffKind::Renamed,
        }
    }
}
/// The kind of a [`FileDiff`], without any path payload.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum FileDiffKind {
    Modified,
    Added,
    Deleted,
    Renamed,
}
| rust | Apache-2.0 | 59ce6df700ce4efbe4498a719fe52195e083d2ee | 2026-01-04T15:32:17.267102Z | false |
lapce/lapce | https://github.com/lapce/lapce/blob/59ce6df700ce4efbe4498a719fe52195e083d2ee/lapce-rpc/src/parse.rs | lapce-rpc/src/parse.rs | use anyhow::{Result, anyhow};
use serde::de::DeserializeOwned;
use serde_json::Value;
/// A raw, not-yet-classified JSON-RPC style object.
#[derive(Debug, Clone)]
pub struct RpcObject(pub Value);
/// Request ids are plain unsigned integers on this protocol.
pub type RequestId = u64;
#[derive(Debug, Clone, PartialEq, Eq)]
/// An RPC call, which may be either a notification or a request.
pub enum Call<N, R> {
    /// An id and an RPC Request
    Request(RequestId, R),
    /// An RPC Notification
    Notification(N),
}
impl RpcObject {
    /// Extract the `id` field, if present and representable as `u64`.
    pub fn get_id(&self) -> Option<RequestId> {
        self.0.get("id").and_then(Value::as_u64)
    }

    /// A response carries an `id` but no `method`.
    pub fn is_response(&self) -> bool {
        self.0.get("id").is_some() && self.0.get("method").is_none()
    }

    /// Interpret this object as an incoming call: an object with an `id`
    /// deserializes as a request `R`, one without as a notification `N`.
    pub fn into_rpc<N, R>(self) -> Result<Call<N, R>>
    where
        N: DeserializeOwned,
        R: DeserializeOwned,
    {
        match self.get_id() {
            Some(id) => match serde_json::from_value::<R>(self.0) {
                // Renamed from `resp`: with an id present this is a request.
                Ok(req) => Ok(Call::Request(id, req)),
                Err(err) => Err(anyhow!(err)),
            },
            None => {
                let notif = serde_json::from_value::<N>(self.0)?;
                Ok(Call::Notification(notif))
            }
        }
    }

    /// Split a response into `Ok(result)` or `Err(error)`.
    ///
    /// Errors with a `String` when the object has no `id`, or when it does
    /// not contain exactly one of `result`/`error`.
    pub fn into_response(mut self) -> Result<Result<Value, Value>, String> {
        let _ = self
            .get_id()
            .ok_or_else(|| "Response requires 'id' field.".to_string())?;

        // Exactly one of `result`/`error` must be present. (This also
        // rejects non-object values: both lookups return None.)
        if self.0.get("result").is_some() == self.0.get("error").is_some() {
            // Fixed: the previous message used a `\` line continuation,
            // which strips the following whitespace and produced
            // "…exactly one of'error'…".
            return Err(
                "RPC response must contain exactly one of 'error' or 'result' fields."
                    .into(),
            );
        }

        let result = self.0.as_object_mut().and_then(|obj| obj.remove("result"));
        match result {
            Some(r) => Ok(Ok(r)),
            None => {
                // The XOR check above guarantees `error` exists when
                // `result` does not.
                let error = self
                    .0
                    .as_object_mut()
                    .and_then(|obj| obj.remove("error"))
                    .expect("checked above: exactly one of result/error");
                Ok(Err(error))
            }
        }
    }
}
impl From<Value> for RpcObject {
fn from(v: Value) -> RpcObject {
RpcObject(v)
}
}
| rust | Apache-2.0 | 59ce6df700ce4efbe4498a719fe52195e083d2ee | 2026-01-04T15:32:17.267102Z | false |
lapce/lapce | https://github.com/lapce/lapce/blob/59ce6df700ce4efbe4498a719fe52195e083d2ee/lapce-rpc/src/stdio.rs | lapce-rpc/src/stdio.rs | use std::{
io::{self, BufRead, Write},
thread,
};
use anyhow::Result;
use crossbeam_channel::{Receiver, Sender};
use serde::{Serialize, de::DeserializeOwned};
use serde_json::{Value, json};
use crate::{RpcError, RpcMessage, RpcObject};
/// Bridge an RPC peer over a pair of byte streams using two background
/// threads.
///
/// Messages arriving on `writer_receiver` are serialized (one JSON object
/// per line) to `writer`; lines read from `reader` are parsed and forwarded
/// to `reader_sender`. The `1`/`2` type-parameter suffixes distinguish the
/// inbound and outbound message vocabularies.
///
/// Each thread exits on failure: the writer thread when a write fails, the
/// reader thread when a read or channel send fails (via `?`).
pub fn stdio_transport<W, R, Req1, Notif1, Resp1, Req2, Notif2, Resp2>(
    mut writer: W,
    writer_receiver: Receiver<RpcMessage<Req2, Notif2, Resp2>>,
    mut reader: R,
    reader_sender: Sender<RpcMessage<Req1, Notif1, Resp1>>,
) where
    W: 'static + Write + Send,
    R: 'static + BufRead + Send,
    Req1: 'static + Serialize + DeserializeOwned + Send + Sync,
    Notif1: 'static + Serialize + DeserializeOwned + Send + Sync,
    Resp1: 'static + Serialize + DeserializeOwned + Send + Sync,
    Req2: 'static + Serialize + DeserializeOwned + Send + Sync,
    Notif2: 'static + Serialize + DeserializeOwned + Send + Sync,
    Resp2: 'static + Serialize + DeserializeOwned + Send + Sync,
{
    // Writer thread: drain the outgoing channel until it closes or a write
    // fails.
    thread::spawn(move || {
        for value in writer_receiver {
            if write_msg(&mut writer, value).is_err() {
                return;
            };
        }
    });
    // Reader thread: loop forever; `read_msg` returning Ok(None) means the
    // line was unparseable and is skipped, Err terminates the thread.
    thread::spawn(move || -> Result<()> {
        loop {
            if let Some(msg) = read_msg(&mut reader)? {
                reader_sender.send(msg)?;
            }
        }
    });
}
pub fn write_msg<W, Req, Notif, Resp>(
out: &mut W,
msg: RpcMessage<Req, Notif, Resp>,
) -> io::Result<()>
where
W: Write,
Req: Serialize,
Notif: Serialize,
Resp: Serialize,
{
let value = match msg {
RpcMessage::Request(id, req) => {
let mut msg = serde_json::to_value(&req)?;
msg.as_object_mut()
.ok_or(io::ErrorKind::NotFound)?
.insert("id".into(), id.into());
msg
}
RpcMessage::Response(id, resp) => {
json!({
"id": id,
"result": resp,
})
}
RpcMessage::Notification(n) => serde_json::to_value(n)?,
RpcMessage::Error(id, err) => {
json!({
"id": id,
"error": err,
})
}
};
let msg = format!("{}\n", serde_json::to_string(&value)?);
out.write_all(msg.as_bytes())?;
out.flush()?;
Ok(())
}
/// Read one newline-delimited JSON message from `inp`.
///
/// Returns `Ok(Some(msg))` for a well-formed message, `Ok(None)` when the
/// line parsed as JSON but could not be classified (logged and skipped),
/// and `Err` on I/O failure, EOF, or invalid JSON.
pub fn read_msg<R, Req, Notif, Resp>(
    inp: &mut R,
) -> io::Result<Option<RpcMessage<Req, Notif, Resp>>>
where
    R: BufRead,
    Req: DeserializeOwned,
    Notif: DeserializeOwned,
    Resp: DeserializeOwned,
{
    let mut buf = String::new();
    // `read_line` returns Ok(0) only at EOF. Previously the empty buffer
    // fell through to serde and surfaced as an opaque parse error; report
    // the stream's end explicitly instead.
    if inp.read_line(&mut buf)? == 0 {
        return Err(io::ErrorKind::UnexpectedEof.into());
    }
    let value: Value = serde_json::from_str(&buf)?;
    match parse_value(value) {
        Ok(msg) => Ok(Some(msg)),
        Err(e) => {
            tracing::error!("receive rpc from stdio error: {e:#}");
            Ok(None)
        }
    }
}
fn parse_value<Req, Notif, Resp>(
value: Value,
) -> io::Result<RpcMessage<Req, Notif, Resp>>
where
Req: DeserializeOwned,
Notif: DeserializeOwned,
Resp: DeserializeOwned,
{
let object = RpcObject(value);
let is_response = object.is_response();
let msg = if is_response {
let id = object.get_id().ok_or(io::ErrorKind::NotFound)?;
let resp = object
.into_response()
.map_err(|_| io::ErrorKind::NotFound)?;
match resp {
Ok(value) => {
let resp: Resp = serde_json::from_value(value)?;
RpcMessage::Response(id, resp)
}
Err(value) => {
let err: RpcError = serde_json::from_value(value)?;
RpcMessage::Error(id, err)
}
}
} else {
match object.get_id() {
Some(id) => {
let req: Req = serde_json::from_value(object.0)?;
RpcMessage::Request(id, req)
}
None => {
let notif: Notif = serde_json::from_value(object.0)?;
RpcMessage::Notification(notif)
}
}
};
Ok(msg)
}
| rust | Apache-2.0 | 59ce6df700ce4efbe4498a719fe52195e083d2ee | 2026-01-04T15:32:17.267102Z | false |
lapce/lapce | https://github.com/lapce/lapce/blob/59ce6df700ce4efbe4498a719fe52195e083d2ee/lapce-rpc/src/core.rs | lapce-rpc/src/core.rs | use std::{
collections::HashMap,
path::PathBuf,
sync::{
Arc,
atomic::{AtomicU64, Ordering},
},
};
use crossbeam_channel::{Receiver, Sender};
use lsp_types::{
CancelParams, CompletionResponse, LogMessageParams, ProgressParams,
PublishDiagnosticsParams, ShowMessageParams, SignatureHelp,
};
use parking_lot::Mutex;
use serde::{Deserialize, Serialize};
use crate::{
RequestId, RpcError, RpcMessage,
dap_types::{
self, DapId, RunDebugConfig, Scope, StackFrame, Stopped, ThreadId, Variable,
},
file::PathObject,
plugin::{PluginId, VoltInfo, VoltMetadata},
proxy::ProxyStatus,
source_control::DiffInfo,
terminal::TermId,
};
/// Messages carried on `CoreRpcHandler`'s internal channel.
pub enum CoreRpc {
    Request(RequestId, CoreRequest),
    Notification(Box<CoreNotification>), // Box it since clippy complains
    /// Asks `mainloop` to return.
    Shutdown,
}
/// What happened to an open file on disk.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum FileChanged {
    /// The file changed; the payload is its new content.
    Change(String),
    /// The file was deleted.
    Delete,
}
/// Notifications sent from the proxy to the core (editor frontend).
///
/// Serialized with `method`/`params` envelope keys and snake_case method
/// names, matching the stdio transport's flat-JSON message format.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
#[serde(tag = "method", content = "params")]
pub enum CoreNotification {
    // --- proxy / workspace state ---
    ProxyStatus {
        status: ProxyStatus,
    },
    /// An open file changed or disappeared on disk.
    OpenFileChanged {
        path: PathBuf,
        content: FileChanged,
    },
    // --- LSP-derived results ---
    CompletionResponse {
        // Correlates with the completion request issued by the core.
        request_id: usize,
        input: String,
        resp: CompletionResponse,
        plugin_id: PluginId,
    },
    SignatureHelpResponse {
        request_id: usize,
        resp: SignatureHelp,
        plugin_id: PluginId,
    },
    OpenPaths {
        paths: Vec<PathObject>,
    },
    /// Something in the workspace tree changed; no payload.
    WorkspaceFileChange,
    PublishDiagnostics {
        diagnostics: PublishDiagnosticsParams,
    },
    ServerStatus {
        params: ServerStatusParams,
    },
    WorkDoneProgress {
        progress: ProgressParams,
    },
    ShowMessage {
        title: String,
        message: ShowMessageParams,
    },
    LogMessage {
        message: LogMessageParams,
        target: String,
    },
    LspCancel {
        params: CancelParams,
    },
    HomeDir {
        path: PathBuf,
    },
    // --- plugin (volt) lifecycle ---
    VoltInstalled {
        volt: VoltMetadata,
        icon: Option<Vec<u8>>,
    },
    VoltInstalling {
        volt: VoltInfo,
        error: String,
    },
    VoltRemoving {
        volt: VoltMetadata,
        error: String,
    },
    VoltRemoved {
        volt: VoltInfo,
        only_installing: bool,
    },
    // --- source control ---
    DiffInfo {
        diff: DiffInfo,
    },
    // --- terminal lifecycle & output ---
    UpdateTerminal {
        term_id: TermId,
        content: Vec<u8>,
    },
    TerminalLaunchFailed {
        term_id: TermId,
        error: String,
    },
    TerminalProcessId {
        term_id: TermId,
        process_id: Option<u32>,
    },
    TerminalProcessStopped {
        term_id: TermId,
        exit_code: Option<i32>,
    },
    RunInTerminal {
        config: RunDebugConfig,
    },
    // --- logging ---
    Log {
        level: LogLevel,
        message: String,
        target: Option<String>,
    },
    // --- debug adapter protocol events ---
    DapStopped {
        dap_id: DapId,
        stopped: Stopped,
        stack_frames: HashMap<ThreadId, Vec<StackFrame>>,
        variables: Vec<(Scope, Vec<Variable>)>,
    },
    DapContinued {
        dap_id: DapId,
    },
    DapBreakpointsResp {
        dap_id: DapId,
        path: PathBuf,
        breakpoints: Vec<dap_types::Breakpoint>,
    },
}
/// Requests sent to the core; none are currently defined.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum CoreRequest {}
/// Responses produced by the core; none are currently defined.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
#[serde(tag = "method", content = "params")]
pub enum CoreResponse {}
/// Wire-level message type for the core side of the transport.
pub type CoreMessage = RpcMessage<CoreRequest, CoreNotification, CoreResponse>;
/// Implemented by the core's event loop; receives messages dispatched by
/// [`CoreRpcHandler::mainloop`].
pub trait CoreHandler {
    fn handle_notification(&mut self, rpc: CoreNotification);
    fn handle_request(&mut self, id: RequestId, rpc: CoreRequest);
}
/// Cloneable handle for sending RPC to the core and for running its
/// receive loop. All clones share one channel, one id counter, and one
/// pending-request map.
#[derive(Clone)]
pub struct CoreRpcHandler {
    tx: Sender<CoreRpc>,
    rx: Receiver<CoreRpc>,
    // Monotonically increasing request-id source shared by all clones.
    id: Arc<AtomicU64>,
    #[allow(clippy::type_complexity)]
    // One-shot response channels for in-flight requests, keyed by id.
    pending: Arc<Mutex<HashMap<u64, Sender<Result<CoreResponse, RpcError>>>>>,
}
impl CoreRpcHandler {
    /// Create a handler with an unbounded internal channel, id counter at
    /// zero, and no outstanding requests.
    pub fn new() -> Self {
        let (tx, rx) = crossbeam_channel::unbounded();
        Self {
            tx,
            rx,
            id: Arc::new(AtomicU64::new(0)),
            pending: Arc::new(Mutex::new(HashMap::new())),
        }
    }

    /// Dispatch messages to `handler` until `Shutdown` arrives or the
    /// channel closes.
    pub fn mainloop<H>(&self, handler: &mut H)
    where
        H: CoreHandler,
    {
        for msg in &self.rx {
            match msg {
                CoreRpc::Request(id, rpc) => {
                    handler.handle_request(id, rpc);
                }
                CoreRpc::Notification(rpc) => {
                    handler.handle_notification(*rpc);
                }
                CoreRpc::Shutdown => {
                    return;
                }
            }
        }
    }

    /// The receiving end of the internal channel.
    pub fn rx(&self) -> &Receiver<CoreRpc> {
        &self.rx
    }

    /// Complete a pending [`request`](Self::request) by delivering its
    /// response to the blocked caller.
    pub fn handle_response(
        &self,
        id: RequestId,
        response: Result<CoreResponse, RpcError>,
    ) {
        // Remove the sender in a short-lived lock scope so the map lock is
        // not held across the send.
        let tx = { self.pending.lock().remove(&id) };
        if let Some(tx) = tx {
            if let Err(err) = tx.send(response) {
                tracing::error!("{:?}", err);
            }
        }
    }

    /// Send a request and block until its response arrives.
    pub fn request(&self, request: CoreRequest) -> Result<CoreResponse, RpcError> {
        let (tx, rx) = crossbeam_channel::bounded(1);
        let id = self.id.fetch_add(1, Ordering::Relaxed);
        {
            let mut pending = self.pending.lock();
            pending.insert(id, tx);
        }
        if let Err(err) = self.tx.send(CoreRpc::Request(id, request)) {
            tracing::error!("{:?}", err);
            // Fix: the request never reached the mainloop, so no response
            // will ever be delivered. Previously the response sender was
            // left in `pending`, keeping it alive and making the
            // `rx.recv()` below block forever. Drop the entry and fail
            // immediately instead.
            self.pending.lock().remove(&id);
            return Err(RpcError {
                code: 0,
                message: "io error".to_string(),
            });
        }
        rx.recv().unwrap_or_else(|_| {
            Err(RpcError {
                code: 0,
                message: "io error".to_string(),
            })
        })
    }

    /// Ask `mainloop` to return.
    pub fn shutdown(&self) {
        if let Err(err) = self.tx.send(CoreRpc::Shutdown) {
            tracing::error!("{:?}", err);
        }
    }

    /// Enqueue a notification for the mainloop; send failures are logged
    /// and otherwise ignored.
    pub fn notification(&self, notification: CoreNotification) {
        if let Err(err) = self.tx.send(CoreRpc::Notification(Box::new(notification)))
        {
            tracing::error!("{:?}", err);
        }
    }

    // The methods below are thin convenience wrappers, each sending one
    // specific `CoreNotification` variant.

    pub fn workspace_file_change(&self) {
        self.notification(CoreNotification::WorkspaceFileChange);
    }

    pub fn diff_info(&self, diff: DiffInfo) {
        self.notification(CoreNotification::DiffInfo { diff });
    }

    pub fn open_file_changed(&self, path: PathBuf, content: FileChanged) {
        self.notification(CoreNotification::OpenFileChanged { path, content });
    }

    pub fn completion_response(
        &self,
        request_id: usize,
        input: String,
        resp: CompletionResponse,
        plugin_id: PluginId,
    ) {
        self.notification(CoreNotification::CompletionResponse {
            request_id,
            input,
            resp,
            plugin_id,
        });
    }

    pub fn signature_help_response(
        &self,
        request_id: usize,
        resp: SignatureHelp,
        plugin_id: PluginId,
    ) {
        self.notification(CoreNotification::SignatureHelpResponse {
            request_id,
            resp,
            plugin_id,
        });
    }

    pub fn volt_installed(&self, volt: VoltMetadata, icon: Option<Vec<u8>>) {
        self.notification(CoreNotification::VoltInstalled { volt, icon });
    }

    pub fn volt_installing(&self, volt: VoltInfo, error: String) {
        self.notification(CoreNotification::VoltInstalling { volt, error });
    }

    pub fn volt_removing(&self, volt: VoltMetadata, error: String) {
        self.notification(CoreNotification::VoltRemoving { volt, error });
    }

    pub fn volt_removed(&self, volt: VoltInfo, only_installing: bool) {
        self.notification(CoreNotification::VoltRemoved {
            volt,
            only_installing,
        });
    }

    pub fn run_in_terminal(&self, config: RunDebugConfig) {
        self.notification(CoreNotification::RunInTerminal { config });
    }

    pub fn log(&self, level: LogLevel, message: String, target: Option<String>) {
        self.notification(CoreNotification::Log {
            level,
            message,
            target,
        });
    }

    pub fn publish_diagnostics(&self, diagnostics: PublishDiagnosticsParams) {
        self.notification(CoreNotification::PublishDiagnostics { diagnostics });
    }

    pub fn server_status(&self, params: ServerStatusParams) {
        self.notification(CoreNotification::ServerStatus { params });
    }

    pub fn work_done_progress(&self, progress: ProgressParams) {
        self.notification(CoreNotification::WorkDoneProgress { progress });
    }

    pub fn show_message(&self, title: String, message: ShowMessageParams) {
        self.notification(CoreNotification::ShowMessage { title, message });
    }

    pub fn log_message(&self, message: LogMessageParams, target: String) {
        self.notification(CoreNotification::LogMessage { message, target });
    }

    pub fn cancel(&self, params: CancelParams) {
        self.notification(CoreNotification::LspCancel { params });
    }

    pub fn terminal_process_id(&self, term_id: TermId, process_id: Option<u32>) {
        self.notification(CoreNotification::TerminalProcessId {
            term_id,
            process_id,
        });
    }

    pub fn terminal_process_stopped(&self, term_id: TermId, exit_code: Option<i32>) {
        self.notification(CoreNotification::TerminalProcessStopped {
            term_id,
            exit_code,
        });
    }

    pub fn terminal_launch_failed(&self, term_id: TermId, error: String) {
        self.notification(CoreNotification::TerminalLaunchFailed { term_id, error });
    }

    pub fn update_terminal(&self, term_id: TermId, content: Vec<u8>) {
        self.notification(CoreNotification::UpdateTerminal { term_id, content });
    }

    pub fn dap_stopped(
        &self,
        dap_id: DapId,
        stopped: Stopped,
        stack_frames: HashMap<ThreadId, Vec<StackFrame>>,
        variables: Vec<(Scope, Vec<Variable>)>,
    ) {
        self.notification(CoreNotification::DapStopped {
            dap_id,
            stopped,
            stack_frames,
            variables,
        });
    }

    pub fn dap_continued(&self, dap_id: DapId) {
        self.notification(CoreNotification::DapContinued { dap_id });
    }

    pub fn dap_breakpoints_resp(
        &self,
        dap_id: DapId,
        path: PathBuf,
        breakpoints: Vec<dap_types::Breakpoint>,
    ) {
        self.notification(CoreNotification::DapBreakpointsResp {
            dap_id,
            path,
            breakpoints,
        });
    }

    pub fn home_dir(&self, path: PathBuf) {
        self.notification(CoreNotification::HomeDir { path });
    }
}
impl Default for CoreRpcHandler {
    /// Same as [`CoreRpcHandler::new`].
    fn default() -> Self {
        Self::new()
    }
}
/// Severity attached to `CoreNotification::Log` messages.
// NOTE(review): the numeric discriminants order Info < Warn < Error < Debug
// < Trace, which is not monotonic in severity — confirm whether anything
// relies on the numeric values before reordering.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum LogLevel {
    Info = 0,
    Warn = 1,
    Error = 2,
    Debug = 3,
    Trace = 4,
}
/// Health report from a language server.
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct ServerStatusParams {
    // Private: consumers query health via `is_ok()`.
    health: String,
    quiescent: bool,
    pub message: Option<String>,
}
impl ServerStatusParams {
    /// True when the server reported the `"ok"` health value.
    pub fn is_ok(&self) -> bool {
        self.health == "ok"
    }
}
| rust | Apache-2.0 | 59ce6df700ce4efbe4498a719fe52195e083d2ee | 2026-01-04T15:32:17.267102Z | false |
lapce/lapce | https://github.com/lapce/lapce/blob/59ce6df700ce4efbe4498a719fe52195e083d2ee/lapce-rpc/src/file.rs | lapce-rpc/src/file.rs | use std::{
cmp::{Ord, Ordering, PartialOrd},
collections::HashMap,
path::{Path, PathBuf},
};
use serde::{Deserialize, Serialize};
/// UTF8 line and column-offset
// NOTE(review): whether line/column are 0- or 1-based is not established
// here — confirm at the producers before documenting a base.
#[derive(
    Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize,
)]
pub struct LineCol {
    pub line: usize,
    pub column: usize,
}
/// A path to open, optionally carrying a target line/column position.
#[derive(
    Default, Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize,
)]
pub struct PathObject {
    pub path: PathBuf,
    /// Position to jump to after opening, if any.
    pub linecol: Option<LineCol>,
    pub is_dir: bool,
}
impl PathObject {
    /// Build a `PathObject` carrying an explicit line/column position.
    pub fn new(
        path: PathBuf,
        is_dir: bool,
        line: usize,
        column: usize,
    ) -> PathObject {
        Self {
            linecol: Some(LineCol { line, column }),
            path,
            is_dir,
        }
    }

    /// Build a `PathObject` with no associated position.
    pub fn from_path(path: PathBuf, is_dir: bool) -> PathObject {
        Self {
            linecol: None,
            path,
            is_dir,
        }
    }
}
/// What a row in the file-explorer view represents.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum FileNodeViewKind {
    /// An actual file/directory
    Path(PathBuf),
    /// We are renaming the file at this path
    Renaming { path: PathBuf, err: Option<String> },
    /// We are naming a new file/directory
    Naming { err: Option<String> },
    Duplicating {
        /// The path that is being duplicated
        source: PathBuf,
        err: Option<String>,
    },
}
impl FileNodeViewKind {
    /// The on-disk path backing this row, if there is one.
    /// `Naming` has no path yet; `Duplicating` reports the source path.
    pub fn path(&self) -> Option<&Path> {
        match self {
            Self::Naming { .. } => None,
            Self::Path(path)
            | Self::Renaming { path, .. }
            | Self::Duplicating { source: path, .. } => Some(path),
        }
    }
}
/// Progress of an in-flight rename/new-file/duplicate operation.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum NamingState {
    /// Actively naming
    Naming,
    /// Application of the naming is pending
    Pending,
    /// There's an active error with the typed name
    Err { err: String },
}
impl NamingState {
    /// Whether the naming editor should currently accept typing.
    pub fn is_accepting_input(&self) -> bool {
        matches!(self, Self::Naming | Self::Err { .. })
    }

    /// Whether the typed name has been rejected with an error.
    pub fn is_err(&self) -> bool {
        matches!(self, Self::Err { .. })
    }

    /// The active error message, if any.
    pub fn err(&self) -> Option<&str> {
        if let Self::Err { err } = self {
            Some(err.as_str())
        } else {
            None
        }
    }

    /// Clear any error/pending marker and resume naming.
    pub fn set_ok(&mut self) {
        *self = Self::Naming;
    }

    /// Mark the name as submitted and awaiting the result.
    pub fn set_pending(&mut self) {
        *self = Self::Pending;
    }

    /// Record an error for the currently typed name.
    pub fn set_err(&mut self, err: String) {
        *self = Self::Err { err };
    }
}
/// Stores the state of any in progress rename of a path.
///
/// The `editor_needs_reset` field is `true` if the rename editor should have its contents reset
/// when the view function next runs.
#[derive(Debug, Clone)]
pub struct Renaming {
    pub state: NamingState,
    /// Original file's path
    pub path: PathBuf,
    pub editor_needs_reset: bool,
}
/// State of an in-progress "create new file/directory" operation.
#[derive(Debug, Clone)]
pub struct NewNode {
    pub state: NamingState,
    /// If true, then we are creating a directory
    pub is_dir: bool,
    /// The folder that the file/directory is being created within
    pub base_path: PathBuf,
    /// Reset the editor contents on the next view pass.
    pub editor_needs_reset: bool,
}
/// State of an in-progress duplicate operation.
#[derive(Debug, Clone)]
pub struct Duplicating {
    pub state: NamingState,
    /// Path to the item being duplicated
    pub path: PathBuf,
    /// Reset the editor contents on the next view pass.
    pub editor_needs_reset: bool,
}
/// Which naming operation, if any, is currently active in the explorer.
#[derive(Debug, Clone)]
pub enum Naming {
    None,
    Renaming(Renaming),
    NewNode(NewNode),
    Duplicating(Duplicating),
}
impl Naming {
    /// The state of the active operation, if any.
    pub fn state(&self) -> Option<&NamingState> {
        match self {
            Naming::None => None,
            Naming::Renaming(r) => Some(&r.state),
            Naming::NewNode(n) => Some(&n.state),
            Naming::Duplicating(d) => Some(&d.state),
        }
    }

    /// Mutable access to the state of the active operation, if any.
    pub fn state_mut(&mut self) -> Option<&mut NamingState> {
        match self {
            Naming::None => None,
            Naming::Renaming(r) => Some(&mut r.state),
            Naming::NewNode(n) => Some(&mut n.state),
            Naming::Duplicating(d) => Some(&mut d.state),
        }
    }

    /// Whether the naming editor should currently accept typing.
    pub fn is_accepting_input(&self) -> bool {
        self.state().is_some_and(NamingState::is_accepting_input)
    }

    /// Whether the editor contents should be reset on the next view pass.
    pub fn editor_needs_reset(&self) -> bool {
        match self {
            Naming::None => false,
            Naming::Renaming(r) => r.editor_needs_reset,
            Naming::NewNode(n) => n.editor_needs_reset,
            Naming::Duplicating(d) => d.editor_needs_reset,
        }
    }

    /// Flag (or clear) the pending editor reset.
    pub fn set_editor_needs_reset(&mut self, needs_reset: bool) {
        match self {
            Naming::None => {}
            Naming::Renaming(r) => r.editor_needs_reset = needs_reset,
            Naming::NewNode(n) => n.editor_needs_reset = needs_reset,
            Naming::Duplicating(d) => d.editor_needs_reset = needs_reset,
        }
    }

    /// Clear any error on the active operation.
    pub fn set_ok(&mut self) {
        if let Some(state) = self.state_mut() {
            state.set_ok();
        }
    }

    /// Mark the active operation as submitted.
    pub fn set_pending(&mut self) {
        if let Some(state) = self.state_mut() {
            state.set_pending();
        }
    }

    /// Record an error on the active operation.
    pub fn set_err(&mut self, err: String) {
        if let Some(state) = self.state_mut() {
            state.set_err(err);
        }
    }

    /// The rename operation, if that is what is active.
    pub fn as_renaming(&self) -> Option<&Renaming> {
        if let Naming::Renaming(r) = self {
            Some(r)
        } else {
            None
        }
    }

    /// The extra node that should be added after the node at `path`
    pub fn extra_node(
        &self,
        is_dir: bool,
        level: usize,
        path: &Path,
    ) -> Option<FileNodeViewData> {
        let node = match self {
            // A new file/dir being created inside `path`.
            Naming::NewNode(n) if n.base_path == path => FileNodeViewData {
                kind: FileNodeViewKind::Naming {
                    err: n.state.err().map(ToString::to_string),
                },
                is_dir: n.is_dir,
                is_root: false,
                open: false,
                level: level + 1,
            },
            // A duplicate of the item at `path`.
            Naming::Duplicating(d) if d.path == path => FileNodeViewData {
                kind: FileNodeViewKind::Duplicating {
                    source: d.path.to_path_buf(),
                    err: d.state.err().map(ToString::to_string),
                },
                is_dir,
                is_root: false,
                open: false,
                level: level + 1,
            },
            _ => return None,
        };
        Some(node)
    }
}
/// One renderable row of the file-explorer list.
#[derive(Debug, Clone)]
pub struct FileNodeViewData {
    pub kind: FileNodeViewKind,
    pub is_dir: bool,
    /// True for the workspace root row (level 1).
    pub is_root: bool,
    pub open: bool,
    /// Nesting depth used for indentation.
    pub level: usize,
}
/// A node in the file-explorer tree; directories own their children.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct FileNodeItem {
    pub path: PathBuf,
    pub is_dir: bool,
    /// Whether the directory's children have been read.
    /// Does nothing if not a directory.
    pub read: bool,
    /// Whether the directory is open in the explorer view.
    pub open: bool,
    pub children: HashMap<PathBuf, FileNodeItem>,
    /// The number of child (directories) that are open themselves
    /// Used for sizing of the explorer list
    pub children_open_count: usize,
}
impl PartialOrd for FileNodeItem {
    // Delegates to `Ord` so the two orderings cannot disagree.
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
impl Ord for FileNodeItem {
fn cmp(&self, other: &Self) -> Ordering {
match (self.is_dir, other.is_dir) {
(true, false) => Ordering::Less,
(false, true) => Ordering::Greater,
_ => {
let [self_file_name, other_file_name] = [&self.path, &other.path]
.map(|path| {
path.file_name()
.unwrap_or_default()
.to_string_lossy()
.to_lowercase()
});
human_sort::compare(&self_file_name, &other_file_name)
}
}
}
}
impl FileNodeItem {
    /// Collect the children, sorted by name.
    /// Note: this will be empty if the directory has not been read.
    pub fn sorted_children(&self) -> Vec<&FileNodeItem> {
        let mut children = self.children.values().collect::<Vec<&FileNodeItem>>();
        children.sort();
        children
    }

    /// Collect the children, sorted by name.
    /// Note: this will be empty if the directory has not been read.
    pub fn sorted_children_mut(&mut self) -> Vec<&mut FileNodeItem> {
        let mut children = self
            .children
            .values_mut()
            .collect::<Vec<&mut FileNodeItem>>();
        children.sort();
        children
    }

    /// Returns an iterator over the ancestors of `path`, starting with the first descendant of `prefix`.
    ///
    /// # Example:
    /// (ignored because the function is private but I promise this passes)
    /// ```rust,ignore
    /// # use lapce_rpc::file::FileNodeItem;
    /// # use std::path::{Path, PathBuf};
    /// # use std::collections::HashMap;
    /// #
    /// let node_item = FileNodeItem {
    ///     path_buf: PathBuf::from("/pre/fix"),
    ///     // ...
    /// #     is_dir: true,
    /// #     read: false,
    /// #     open: false,
    /// #     children: HashMap::new(),
    /// #     children_open_count: 0,
    ///};
    /// let mut iter = node_item.ancestors_rev(Path::new("/pre/fix/foo/bar")).unwrap();
    /// assert_eq!(Some(Path::new("/pre/fix/foo")), iter.next());
    /// assert_eq!(Some(Path::new("/pre/fix/foo/bar")), iter.next());
    /// ```
    fn ancestors_rev<'a>(
        &self,
        path: &'a Path,
    ) -> Option<impl Iterator<Item = &'a Path> + use<'a>> {
        // Number of components below this node's own path.
        let take = if let Ok(suffix) = path.strip_prefix(&self.path) {
            suffix.components().count()
        } else {
            return None;
        };
        #[allow(clippy::needless_collect)] // Ancestors is not reversible
        let ancestors = path.ancestors().take(take).collect::<Vec<&Path>>();
        Some(ancestors.into_iter().rev())
    }

    /// Recursively get the node at `path`.
    pub fn get_file_node(&self, path: &Path) -> Option<&FileNodeItem> {
        self.ancestors_rev(path)?
            .try_fold(self, |node, path| node.children.get(path))
    }

    /// Recursively get the (mutable) node at `path`.
    pub fn get_file_node_mut(&mut self, path: &Path) -> Option<&mut FileNodeItem> {
        self.ancestors_rev(path)?
            .try_fold(self, |node, path| node.children.get_mut(path))
    }

    /// Remove a specific child from the node.
    /// The path is recursive and will remove the child from parent indicated by the path.
    pub fn remove_child(&mut self, path: &Path) -> Option<FileNodeItem> {
        let parent = path.parent()?;
        let node = self.get_file_node_mut(parent)?;
        let node = node.children.remove(path)?;
        // Refresh open counts along the whole ancestor chain.
        for p in path.ancestors() {
            self.update_node_count(p);
        }
        Some(node)
    }

    /// Add a new (unread & unopened) child to the node.
    pub fn add_child(&mut self, path: &Path, is_dir: bool) -> Option<()> {
        let parent = path.parent()?;
        let node = self.get_file_node_mut(parent)?;
        node.children.insert(
            PathBuf::from(path),
            FileNodeItem {
                path: PathBuf::from(path),
                is_dir,
                read: false,
                open: false,
                children: HashMap::new(),
                children_open_count: 0,
            },
        );
        // Refresh open counts along the whole ancestor chain.
        for p in path.ancestors() {
            self.update_node_count(p);
        }
        Some(())
    }

    /// Set the children of the node.
    /// Note: this opens the node.
    pub fn set_item_children(
        &mut self,
        path: &Path,
        children: HashMap<PathBuf, FileNodeItem>,
    ) {
        if let Some(node) = self.get_file_node_mut(path) {
            node.open = true;
            node.read = true;
            node.children = children;
        }
        for p in path.ancestors() {
            self.update_node_count(p);
        }
    }

    /// Refresh `children_open_count` for `path` and all of its ancestors.
    pub fn update_node_count_recursive(&mut self, path: &Path) {
        for current_path in path.ancestors() {
            self.update_node_count(current_path);
        }
    }

    /// Recompute `children_open_count` for the node at `path`.
    ///
    /// Returns `Some(())` when the node exists, `None` when it does not.
    /// (Fixed: this previously returned `None` unconditionally, making the
    /// `Option<()>` return value meaningless; callers that discard the
    /// result are unaffected.)
    pub fn update_node_count(&mut self, path: &Path) -> Option<()> {
        let node = self.get_file_node_mut(path)?;
        if node.is_dir {
            node.children_open_count = if node.open {
                // Each direct child contributes itself plus its own open
                // descendants.
                node.children
                    .values()
                    .map(|item| item.children_open_count + 1)
                    .sum::<usize>()
            } else {
                0
            };
        }
        Some(())
    }

    /// Append this node (and recursively its children) to `view_items` for
    /// the visible row range `min..=max`; returns the row index reached.
    pub fn append_view_slice(
        &self,
        view_items: &mut Vec<FileNodeViewData>,
        naming: &Naming,
        min: usize,
        max: usize,
        current: usize,
        level: usize,
    ) -> usize {
        if current > max {
            return current;
        }
        // Entire subtree is above the window: skip it wholesale.
        if current + self.children_open_count < min {
            return current + self.children_open_count;
        }

        if current >= min {
            // Render this row as "renaming" when it is the rename target.
            let kind = if let Naming::Renaming(r) = &naming {
                if r.path == self.path {
                    FileNodeViewKind::Renaming {
                        path: self.path.clone(),
                        err: r.state.err().map(ToString::to_string),
                    }
                } else {
                    FileNodeViewKind::Path(self.path.clone())
                }
            } else {
                FileNodeViewKind::Path(self.path.clone())
            };
            view_items.push(FileNodeViewData {
                kind,
                is_dir: self.is_dir,
                is_root: level == 1,
                open: self.open,
                level,
            });
        }

        self.append_children_view_slice(view_items, naming, min, max, current, level)
    }

    /// Calculate the row where the file resides
    pub fn find_file_at_line(&self, file_path: &Path) -> (bool, f64) {
        let mut line = 0.0;
        if !self.open {
            return (false, line);
        }
        for item in self.sorted_children() {
            line += 1.0;
            match (item.is_dir, item.open, item.path == file_path) {
                (_, _, true) => {
                    return (true, line);
                }
                (true, true, _) => {
                    // Descend into open directories, accumulating rows.
                    let (found, item_position) = item.find_file_at_line(file_path);
                    line += item_position;
                    if found {
                        return (true, line);
                    }
                }
                _ => {}
            }
        }
        (false, line)
    }

    /// Append the children of this item with the given level
    pub fn append_children_view_slice(
        &self,
        view_items: &mut Vec<FileNodeViewData>,
        naming: &Naming,
        min: usize,
        max: usize,
        mut i: usize,
        level: usize,
    ) -> usize {
        let mut naming_extra = naming.extra_node(self.is_dir, level, &self.path);
        if !self.open {
            // If the folder isn't open, then we just put it right at the top
            if i >= min {
                if let Some(naming_extra) = naming_extra {
                    view_items.push(naming_extra);
                    i += 1;
                }
            }
            return i;
        }

        let naming_is_dir = naming_extra.as_ref().map(|n| n.is_dir).unwrap_or(false);
        // Immediately put the naming entry first if it's a directory
        if naming_is_dir {
            if let Some(node) = naming_extra.take() {
                // Actually add the node if it's within the range
                if i >= min {
                    view_items.push(node);
                    i += 1;
                }
            }
        }

        let mut after_dirs = false;
        for item in self.sorted_children() {
            // If we're naming a file at the root, then wait until we've added the directories
            // before adding the input node
            if naming_extra.is_some()
                && !naming_is_dir
                && !item.is_dir
                && !after_dirs
            {
                after_dirs = true;
                // If we're creating a new file node, then we show it after the directories
                // TODO(minor): should this be i >= min or i + 1 >= min?
                if i >= min {
                    if let Some(node) = naming_extra.take() {
                        view_items.push(node);
                        i += 1;
                    }
                }
            }

            i = item.append_view_slice(
                view_items,
                naming,
                min,
                max,
                i + 1,
                level + 1,
            );
            if i > max {
                return i;
            }
        }

        // If it has not been added yet, add it now.
        if i >= min {
            if let Some(node) = naming_extra {
                view_items.push(node);
                i += 1;
            }
        }

        i
    }
}
| rust | Apache-2.0 | 59ce6df700ce4efbe4498a719fe52195e083d2ee | 2026-01-04T15:32:17.267102Z | false |
lapce/lapce | https://github.com/lapce/lapce/blob/59ce6df700ce4efbe4498a719fe52195e083d2ee/lapce-rpc/src/terminal.rs | lapce-rpc/src/terminal.rs | use std::collections::HashMap;
use serde::{Deserialize, Serialize};
use crate::counter::Counter;
/// Process-unique identifier for a terminal instance.
#[derive(Eq, PartialEq, Hash, Copy, Clone, Debug, Serialize, Deserialize)]
pub struct TermId(pub u64);
impl TermId {
pub fn next() -> Self {
static TERMINAL_ID_COUNTER: Counter = Counter::new();
Self(TERMINAL_ID_COUNTER.next())
}
}
/// Configuration describing how to launch a terminal.
#[derive(Eq, PartialEq, Clone, Debug, Serialize, Deserialize, Default)]
pub struct TerminalProfile {
    pub name: String,
    /// Shell/program to run; `None` falls back to a default elsewhere.
    pub command: Option<String>,
    pub arguments: Option<Vec<String>>,
    pub workdir: Option<url::Url>,
    /// Extra environment variables for the spawned process.
    pub environment: Option<HashMap<String, String>>,
}
// NOTE(review): empty impl block — candidate for removal.
impl TerminalProfile {}
| rust | Apache-2.0 | 59ce6df700ce4efbe4498a719fe52195e083d2ee | 2026-01-04T15:32:17.267102Z | false |
lapce/lapce | https://github.com/lapce/lapce/blob/59ce6df700ce4efbe4498a719fe52195e083d2ee/lapce-rpc/src/style.rs | lapce-rpc/src/style.rs | use std::{collections::HashMap, path::PathBuf, sync::Arc};
use serde::{Deserialize, Serialize};
/// Per-line style runs, keyed by line number and shared via `Arc`.
pub type LineStyles = HashMap<usize, Arc<Vec<LineStyle>>>;
/// A styled span within a line.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct LineStyle {
    pub start: usize,
    pub end: usize,
    pub style: Style,
}
/// Visual attributes applied to a span; currently only a foreground color.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct Style {
    pub fg_color: Option<String>,
}
/// Semantic-token styles for a whole document at a given revision.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct SemanticStyles {
    /// Buffer revision these styles were computed against.
    pub rev: u64,
    pub path: PathBuf,
    /// Length of the document the styles apply to.
    pub len: usize,
    pub styles: Vec<LineStyle>,
}
| rust | Apache-2.0 | 59ce6df700ce4efbe4498a719fe52195e083d2ee | 2026-01-04T15:32:17.267102Z | false |
lapce/lapce | https://github.com/lapce/lapce/blob/59ce6df700ce4efbe4498a719fe52195e083d2ee/lapce-rpc/src/counter.rs | lapce-rpc/src/counter.rs | use std::sync::atomic::{self, AtomicU64};
/// Thread-safe monotonically increasing id source; the first `next()`
/// returns 1.
pub struct Counter(AtomicU64);
impl Counter {
    /// Create a counter whose first `next()` yields 1.
    pub const fn new() -> Counter {
        Self(AtomicU64::new(1))
    }
    /// Return the current value and advance by one.
    /// Relaxed ordering suffices: only uniqueness matters, not ordering
    /// relative to other memory operations.
    pub fn next(&self) -> u64 {
        self.0.fetch_add(1, atomic::Ordering::Relaxed)
    }
}
impl Default for Counter {
    fn default() -> Self {
        Self::new()
    }
}
| rust | Apache-2.0 | 59ce6df700ce4efbe4498a719fe52195e083d2ee | 2026-01-04T15:32:17.267102Z | false |
lapce/lapce | https://github.com/lapce/lapce/blob/59ce6df700ce4efbe4498a719fe52195e083d2ee/lapce-rpc/src/buffer.rs | lapce-rpc/src/buffer.rs | use serde::{Deserialize, Serialize};
use crate::counter::Counter;
#[derive(Eq, PartialEq, Hash, Copy, Clone, Debug, Serialize, Deserialize)]
pub struct BufferId(pub u64);
impl BufferId {
pub fn next() -> Self {
static BUFFER_ID_COUNTER: Counter = Counter::new();
Self(BUFFER_ID_COUNTER.next())
}
}
/// Response payload for a new-buffer request: the file's current text.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct NewBufferResponse {
    pub content: String,
}
/// Response carrying the version-control head state of a buffer.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BufferHeadResponse {
    /// Identifier of the head revision — presumably a VCS version string;
    /// confirm with the producer.
    pub version: String,
    /// File content at head.
    pub content: String,
}
| rust | Apache-2.0 | 59ce6df700ce4efbe4498a719fe52195e083d2ee | 2026-01-04T15:32:17.267102Z | false |
lapce/lapce | https://github.com/lapce/lapce/blob/59ce6df700ce4efbe4498a719fe52195e083d2ee/lapce-rpc/src/file_line.rs | lapce-rpc/src/file_line.rs | use std::path::PathBuf;
use lsp_types::Position;
use serde::{Deserialize, Serialize};
/// A single line of a file together with its location, e.g. for carrying
/// resolved reference results (see `ReferencesResolveResponse`).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FileLine {
    /// File the line belongs to.
    pub path: PathBuf,
    /// Position of the line/match within the file.
    pub position: Position,
    /// Text of the line.
    pub content: String,
}
| rust | Apache-2.0 | 59ce6df700ce4efbe4498a719fe52195e083d2ee | 2026-01-04T15:32:17.267102Z | false |
lapce/lapce | https://github.com/lapce/lapce/blob/59ce6df700ce4efbe4498a719fe52195e083d2ee/lapce-rpc/src/proxy.rs | lapce-rpc/src/proxy.rs | use std::{
collections::HashMap,
path::PathBuf,
sync::{
Arc,
atomic::{AtomicU64, Ordering},
},
};
use crossbeam_channel::{Receiver, Sender};
use indexmap::IndexMap;
use lapce_xi_rope::RopeDelta;
use lsp_types::{
CallHierarchyIncomingCall, CallHierarchyItem, CodeAction, CodeActionResponse,
CodeLens, CompletionItem, Diagnostic, DocumentSymbolResponse, FoldingRange,
GotoDefinitionResponse, Hover, InlayHint, InlineCompletionResponse,
InlineCompletionTriggerKind, Location, Position, PrepareRenameResponse,
SelectionRange, SymbolInformation, TextDocumentItem, TextEdit, WorkspaceEdit,
request::{GotoImplementationResponse, GotoTypeDefinitionResponse},
};
use parking_lot::Mutex;
use serde::{Deserialize, Serialize};
use super::plugin::VoltID;
use crate::{
RequestId, RpcError, RpcMessage,
buffer::BufferId,
dap_types::{self, DapId, RunDebugConfig, SourceBreakpoint, ThreadId},
file::{FileNodeItem, PathObject},
file_line::FileLine,
plugin::{PluginId, VoltInfo, VoltMetadata},
source_control::FileDiff,
style::SemanticStyles,
terminal::{TermId, TerminalProfile},
};
/// Message consumed by the proxy main loop: a request awaiting a response,
/// a fire-and-forget notification, or a signal to stop the loop.
// Variants can be large (ProxyRequest embeds sizable LSP types); boxing is
// not worth the indirection here, so the lint is silenced.
#[allow(clippy::large_enum_variant)]
pub enum ProxyRpc {
    Request(RequestId, ProxyRequest),
    Notification(ProxyNotification),
    Shutdown,
}
/// Connection state of the proxy, as reported to the UI.
#[derive(Clone, Copy, Debug, Serialize, Deserialize)]
pub enum ProxyStatus {
    Connecting,
    Connected,
    Disconnected,
}
/// A single global-search hit within a file.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SearchMatch {
    /// Line number of the match.
    pub line: usize,
    /// Start offset of the match within the line.
    pub start: usize,
    /// End offset of the match within the line.
    pub end: usize,
    /// Full text of the matching line.
    pub line_content: String,
}
/// Requests sent to the proxy; each expects a matching [`ProxyResponse`].
///
/// Serialized as an adjacently tagged `method`/`params` envelope in
/// snake_case, so variant and field names are part of the wire protocol —
/// renaming them is a breaking protocol change.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
#[serde(tag = "method", content = "params")]
pub enum ProxyRequest {
    NewBuffer {
        buffer_id: BufferId,
        path: PathBuf,
    },
    BufferHead {
        path: PathBuf,
    },
    GlobalSearch {
        pattern: String,
        case_sensitive: bool,
        whole_word: bool,
        is_regex: bool,
    },
    CompletionResolve {
        plugin_id: PluginId,
        completion_item: Box<CompletionItem>,
    },
    CodeActionResolve {
        plugin_id: PluginId,
        action_item: Box<CodeAction>,
    },
    GetHover {
        request_id: usize,
        path: PathBuf,
        position: Position,
    },
    GetSignature {
        buffer_id: BufferId,
        position: Position,
    },
    GetSelectionRange {
        path: PathBuf,
        positions: Vec<Position>,
    },
    GitGetRemoteFileUrl {
        file: PathBuf,
    },
    GetReferences {
        path: PathBuf,
        position: Position,
    },
    GotoImplementation {
        path: PathBuf,
        position: Position,
    },
    GetDefinition {
        request_id: usize,
        path: PathBuf,
        position: Position,
    },
    ShowCallHierarchy {
        path: PathBuf,
        position: Position,
    },
    CallHierarchyIncoming {
        path: PathBuf,
        call_hierarchy_item: CallHierarchyItem,
    },
    GetTypeDefinition {
        request_id: usize,
        path: PathBuf,
        position: Position,
    },
    GetInlayHints {
        path: PathBuf,
    },
    GetInlineCompletions {
        path: PathBuf,
        position: Position,
        trigger_kind: InlineCompletionTriggerKind,
    },
    GetSemanticTokens {
        path: PathBuf,
    },
    LspFoldingRange {
        path: PathBuf,
    },
    PrepareRename {
        path: PathBuf,
        position: Position,
    },
    Rename {
        path: PathBuf,
        position: Position,
        new_name: String,
    },
    GetCodeActions {
        path: PathBuf,
        position: Position,
        diagnostics: Vec<Diagnostic>,
    },
    GetCodeLens {
        path: PathBuf,
    },
    GetCodeLensResolve {
        code_lens: CodeLens,
        path: PathBuf,
    },
    GetDocumentSymbols {
        path: PathBuf,
    },
    GetWorkspaceSymbols {
        /// The search query
        query: String,
    },
    GetDocumentFormatting {
        path: PathBuf,
    },
    GetOpenFilesContent {},
    GetFiles {
        path: String,
    },
    ReadDir {
        path: PathBuf,
    },
    Save {
        rev: u64,
        path: PathBuf,
        /// Whether to create the parent directories if they do not exist.
        create_parents: bool,
    },
    SaveBufferAs {
        buffer_id: BufferId,
        path: PathBuf,
        rev: u64,
        content: String,
        /// Whether to create the parent directories if they do not exist.
        create_parents: bool,
    },
    CreateFile {
        path: PathBuf,
    },
    CreateDirectory {
        path: PathBuf,
    },
    TrashPath {
        path: PathBuf,
    },
    DuplicatePath {
        existing_path: PathBuf,
        new_path: PathBuf,
    },
    RenamePath {
        from: PathBuf,
        to: PathBuf,
    },
    TestCreateAtPath {
        path: PathBuf,
    },
    DapVariable {
        dap_id: DapId,
        reference: usize,
    },
    DapGetScopes {
        dap_id: DapId,
        frame_id: usize,
    },
    ReferencesResolve {
        items: Vec<Location>,
    },
}
/// Fire-and-forget messages sent to the proxy; no response is produced.
///
/// Serialized as an adjacently tagged `method`/`params` envelope in
/// snake_case — variant and field names are part of the wire protocol.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
#[serde(tag = "method", content = "params")]
pub enum ProxyNotification {
    Initialize {
        workspace: Option<PathBuf>,
        disabled_volts: Vec<VoltID>,
        /// Paths to extra plugins that should be loaded
        extra_plugin_paths: Vec<PathBuf>,
        plugin_configurations: HashMap<String, HashMap<String, serde_json::Value>>,
        window_id: usize,
        tab_id: usize,
    },
    OpenFileChanged {
        path: PathBuf,
    },
    OpenPaths {
        paths: Vec<PathObject>,
    },
    Shutdown {},
    Completion {
        request_id: usize,
        path: PathBuf,
        input: String,
        position: Position,
    },
    SignatureHelp {
        request_id: usize,
        path: PathBuf,
        position: Position,
    },
    /// Incremental buffer edit at revision `rev`.
    Update {
        path: PathBuf,
        delta: RopeDelta,
        rev: u64,
    },
    UpdatePluginConfigs {
        configs: HashMap<String, HashMap<String, serde_json::Value>>,
    },
    NewTerminal {
        term_id: TermId,
        profile: TerminalProfile,
    },
    InstallVolt {
        volt: VoltInfo,
    },
    RemoveVolt {
        volt: VoltMetadata,
    },
    ReloadVolt {
        volt: VoltMetadata,
    },
    DisableVolt {
        volt: VoltInfo,
    },
    EnableVolt {
        volt: VoltInfo,
    },
    GitCommit {
        message: String,
        diffs: Vec<FileDiff>,
    },
    GitCheckout {
        reference: String,
    },
    GitDiscardFilesChanges {
        files: Vec<PathBuf>,
    },
    GitDiscardWorkspaceChanges {},
    GitInit {},
    LspCancel {
        id: i32,
    },
    TerminalWrite {
        term_id: TermId,
        content: String,
    },
    TerminalResize {
        term_id: TermId,
        width: usize,
        height: usize,
    },
    TerminalClose {
        term_id: TermId,
    },
    DapStart {
        config: RunDebugConfig,
        breakpoints: HashMap<PathBuf, Vec<SourceBreakpoint>>,
    },
    DapProcessId {
        dap_id: DapId,
        process_id: Option<u32>,
        term_id: TermId,
    },
    DapContinue {
        dap_id: DapId,
        thread_id: ThreadId,
    },
    DapStepOver {
        dap_id: DapId,
        thread_id: ThreadId,
    },
    DapStepInto {
        dap_id: DapId,
        thread_id: ThreadId,
    },
    DapStepOut {
        dap_id: DapId,
        thread_id: ThreadId,
    },
    DapPause {
        dap_id: DapId,
        thread_id: ThreadId,
    },
    DapStop {
        dap_id: DapId,
    },
    DapDisconnect {
        dap_id: DapId,
    },
    DapRestart {
        dap_id: DapId,
        breakpoints: HashMap<PathBuf, Vec<SourceBreakpoint>>,
    },
    DapSetBreakpoints {
        dap_id: DapId,
        path: PathBuf,
        breakpoints: Vec<SourceBreakpoint>,
    },
}
/// Responses to [`ProxyRequest`]s, using the same adjacently tagged
/// `method`/`params` snake_case envelope — names are part of the wire
/// protocol, so they cannot be renamed without breaking compatibility.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
#[serde(tag = "method", content = "params")]
pub enum ProxyResponse {
    GitGetRemoteFileUrl {
        file_url: String,
    },
    NewBufferResponse {
        content: String,
        read_only: bool,
    },
    BufferHeadResponse {
        version: String,
        content: String,
    },
    ReadDirResponse {
        items: Vec<FileNodeItem>,
    },
    CompletionResolveResponse {
        item: Box<CompletionItem>,
    },
    CodeActionResolveResponse {
        item: Box<CodeAction>,
    },
    HoverResponse {
        request_id: usize,
        hover: Hover,
    },
    GetDefinitionResponse {
        request_id: usize,
        definition: GotoDefinitionResponse,
    },
    ShowCallHierarchyResponse {
        items: Option<Vec<CallHierarchyItem>>,
    },
    CallHierarchyIncomingResponse {
        items: Option<Vec<CallHierarchyIncomingCall>>,
    },
    GetTypeDefinition {
        request_id: usize,
        definition: GotoTypeDefinitionResponse,
    },
    GetReferencesResponse {
        references: Vec<Location>,
    },
    GetCodeActionsResponse {
        plugin_id: PluginId,
        resp: CodeActionResponse,
    },
    LspFoldingRangeResponse {
        plugin_id: PluginId,
        resp: Option<Vec<FoldingRange>>,
    },
    GetCodeLensResponse {
        plugin_id: PluginId,
        resp: Option<Vec<CodeLens>>,
    },
    GetCodeLensResolveResponse {
        plugin_id: PluginId,
        resp: CodeLens,
    },
    GotoImplementationResponse {
        plugin_id: PluginId,
        resp: Option<GotoImplementationResponse>,
    },
    GetFilesResponse {
        items: Vec<PathBuf>,
    },
    GetDocumentFormatting {
        edits: Vec<TextEdit>,
    },
    GetDocumentSymbols {
        resp: DocumentSymbolResponse,
    },
    GetWorkspaceSymbols {
        symbols: Vec<SymbolInformation>,
    },
    GetSelectionRange {
        ranges: Vec<SelectionRange>,
    },
    GetInlayHints {
        hints: Vec<InlayHint>,
    },
    GetInlineCompletions {
        completions: InlineCompletionResponse,
    },
    GetSemanticTokens {
        styles: SemanticStyles,
    },
    PrepareRename {
        resp: PrepareRenameResponse,
    },
    Rename {
        edit: WorkspaceEdit,
    },
    GetOpenFilesContentResponse {
        items: Vec<TextDocumentItem>,
    },
    GlobalSearchResponse {
        matches: IndexMap<PathBuf, Vec<SearchMatch>>,
    },
    DapVariableResponse {
        // NOTE(review): field name is a typo of "variables", but it is part
        // of both the public API and the serialized protocol — renaming it
        // would be a breaking change and must be coordinated with callers.
        varialbes: Vec<dap_types::Variable>,
    },
    DapGetScopesResponse {
        scopes: Vec<(dap_types::Scope, Vec<dap_types::Variable>)>,
    },
    CreatePathResponse {
        path: PathBuf,
    },
    Success {},
    SaveResponse {},
    ReferencesResolveResponse {
        items: Vec<FileLine>,
    },
}
/// Wire-level message type carrying proxy traffic over the RPC transport.
pub type ProxyMessage = RpcMessage<ProxyRequest, ProxyNotification, ProxyResponse>;
/// Directory listing keyed by entry path.
///
/// NOTE(review): distinct from `ProxyResponse::ReadDirResponse`, which
/// carries a `Vec<FileNodeItem>` — confirm which shape callers expect.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ReadDirResponse {
    pub items: HashMap<PathBuf, FileNodeItem>,
}
/// Callback invoked once with the outcome of an asynchronous proxy request.
pub trait ProxyCallback: Send + FnOnce(Result<ProxyResponse, RpcError>) {}
// Blanket impl: any sendable one-shot closure with the right signature
// qualifies as a ProxyCallback.
impl<F: Send + FnOnce(Result<ProxyResponse, RpcError>)> ProxyCallback for F {}
/// How a pending request's result is delivered back to its initiator:
/// either through a boxed one-shot callback or over a channel.
enum ResponseHandler {
    Callback(Box<dyn ProxyCallback>),
    Chan(Sender<Result<ProxyResponse, RpcError>>),
}

impl ResponseHandler {
    /// Consumes the handler and delivers `result` to whoever is waiting.
    fn invoke(self, result: Result<ProxyResponse, RpcError>) {
        match self {
            Self::Chan(sender) => {
                // The receiving side may already be gone; just log it.
                if let Err(err) = sender.send(result) {
                    tracing::error!("{:?}", err);
                }
            }
            Self::Callback(callback) => callback(result),
        }
    }
}
/// Implemented by the proxy side to service incoming RPC traffic.
pub trait ProxyHandler {
    /// Handles a fire-and-forget notification.
    fn handle_notification(&mut self, rpc: ProxyNotification);
    /// Handles a request; the implementation is responsible for eventually
    /// producing a response for `id`.
    fn handle_request(&mut self, id: RequestId, rpc: ProxyRequest);
}
/// Cloneable handle for submitting RPC to the proxy and routing responses
/// back to in-flight requests.
#[derive(Clone)]
pub struct ProxyRpcHandler {
    // Sending half feeding the proxy main loop.
    tx: Sender<ProxyRpc>,
    // Receiving half drained by `mainloop`.
    rx: Receiver<ProxyRpc>,
    // Next request id; shared across clones so ids stay unique.
    id: Arc<AtomicU64>,
    // In-flight requests awaiting a response, keyed by request id.
    pending: Arc<Mutex<HashMap<u64, ResponseHandler>>>,
}
impl ProxyRpcHandler {
    /// Creates a handler with an unbounded internal queue and no
    /// in-flight requests.
    pub fn new() -> Self {
        let (tx, rx) = crossbeam_channel::unbounded();
        Self {
            tx,
            rx,
            id: Arc::new(AtomicU64::new(0)),
            pending: Arc::new(Mutex::new(HashMap::new())),
        }
    }
    /// Receiver side of the internal queue, drained by [`Self::mainloop`].
    pub fn rx(&self) -> &Receiver<ProxyRpc> {
        &self.rx
    }
    /// Drains the internal queue, dispatching each message to `handler`,
    /// until a `Shutdown` message arrives or the channel closes.
    pub fn mainloop<H>(&self, handler: &mut H)
    where
        H: ProxyHandler,
    {
        use ProxyRpc::*;
        for msg in &self.rx {
            match msg {
                Request(id, request) => {
                    handler.handle_request(id, request);
                }
                Notification(notification) => {
                    handler.handle_notification(notification);
                }
                Shutdown => {
                    return;
                }
            }
        }
    }
    /// Allocates a request id, registers `rh` as the pending response
    /// handler, and queues the request.
    fn request_common(&self, request: ProxyRequest, rh: ResponseHandler) {
        let id = self.id.fetch_add(1, Ordering::Relaxed);
        // Register before sending so a fast response cannot race the insert.
        self.pending.lock().insert(id, rh);
        if let Err(err) = self.tx.send(ProxyRpc::Request(id, request)) {
            tracing::error!("{:?}", err);
        }
    }
    /// Sends `request` and blocks until its response (or an error) arrives.
    fn request(&self, request: ProxyRequest) -> Result<ProxyResponse, RpcError> {
        let (tx, rx) = crossbeam_channel::bounded(1);
        self.request_common(request, ResponseHandler::Chan(tx));
        // A dropped sender (e.g. the handler was discarded without ever
        // being invoked) surfaces as a generic io error.
        rx.recv().unwrap_or_else(|_| {
            Err(RpcError {
                code: 0,
                message: "io error".to_string(),
            })
        })
    }
    /// Sends `request` and invokes `f` with the result once it arrives.
    pub fn request_async(
        &self,
        request: ProxyRequest,
        f: impl ProxyCallback + 'static,
    ) {
        self.request_common(request, ResponseHandler::Callback(Box::new(f)))
    }
    /// Routes a received response to the pending handler for `id`, if any;
    /// responses with no registered handler are silently dropped.
    pub fn handle_response(
        &self,
        id: RequestId,
        result: Result<ProxyResponse, RpcError>,
    ) {
        // Take the handler out of the map in its own scope so the lock is
        // not held while the handler runs.
        let handler = { self.pending.lock().remove(&id) };
        if let Some(handler) = handler {
            handler.invoke(result);
        }
    }
    /// Queues a fire-and-forget notification for the proxy.
    pub fn notification(&self, notification: ProxyNotification) {
        if let Err(err) = self.tx.send(ProxyRpc::Notification(notification)) {
            tracing::error!("{:?}", err);
        }
    }
pub fn lsp_cancel(&self, id: i32) {
self.notification(ProxyNotification::LspCancel { id });
}
pub fn git_init(&self) {
self.notification(ProxyNotification::GitInit {});
}
pub fn git_commit(&self, message: String, diffs: Vec<FileDiff>) {
self.notification(ProxyNotification::GitCommit { message, diffs });
}
pub fn git_checkout(&self, reference: String) {
self.notification(ProxyNotification::GitCheckout { reference });
}
pub fn install_volt(&self, volt: VoltInfo) {
self.notification(ProxyNotification::InstallVolt { volt });
}
pub fn reload_volt(&self, volt: VoltMetadata) {
self.notification(ProxyNotification::ReloadVolt { volt });
}
pub fn remove_volt(&self, volt: VoltMetadata) {
self.notification(ProxyNotification::RemoveVolt { volt });
}
pub fn disable_volt(&self, volt: VoltInfo) {
self.notification(ProxyNotification::DisableVolt { volt });
}
pub fn enable_volt(&self, volt: VoltInfo) {
self.notification(ProxyNotification::EnableVolt { volt });
}
    /// Requests shutdown: first as a notification for the remote side,
    /// then as a local queue message that stops [`Self::mainloop`].
    pub fn shutdown(&self) {
        self.notification(ProxyNotification::Shutdown {});
        if let Err(err) = self.tx.send(ProxyRpc::Shutdown) {
            tracing::error!("{:?}", err);
        }
    }
pub fn initialize(
&self,
workspace: Option<PathBuf>,
disabled_volts: Vec<VoltID>,
extra_plugin_paths: Vec<PathBuf>,
plugin_configurations: HashMap<String, HashMap<String, serde_json::Value>>,
window_id: usize,
tab_id: usize,
) {
self.notification(ProxyNotification::Initialize {
workspace,
disabled_volts,
extra_plugin_paths,
plugin_configurations,
window_id,
tab_id,
});
}
pub fn completion(
&self,
request_id: usize,
path: PathBuf,
input: String,
position: Position,
) {
self.notification(ProxyNotification::Completion {
request_id,
path,
input,
position,
});
}
pub fn signature_help(
&self,
request_id: usize,
path: PathBuf,
position: Position,
) {
self.notification(ProxyNotification::SignatureHelp {
request_id,
path,
position,
});
}
pub fn new_terminal(&self, term_id: TermId, profile: TerminalProfile) {
self.notification(ProxyNotification::NewTerminal { term_id, profile })
}
pub fn terminal_close(&self, term_id: TermId) {
self.notification(ProxyNotification::TerminalClose { term_id });
}
pub fn terminal_resize(&self, term_id: TermId, width: usize, height: usize) {
self.notification(ProxyNotification::TerminalResize {
term_id,
width,
height,
});
}
pub fn terminal_write(&self, term_id: TermId, content: String) {
self.notification(ProxyNotification::TerminalWrite { term_id, content });
}
pub fn new_buffer(
&self,
buffer_id: BufferId,
path: PathBuf,
f: impl ProxyCallback + 'static,
) {
self.request_async(ProxyRequest::NewBuffer { buffer_id, path }, f);
}
pub fn get_buffer_head(&self, path: PathBuf, f: impl ProxyCallback + 'static) {
self.request_async(ProxyRequest::BufferHead { path }, f);
}
pub fn create_file(&self, path: PathBuf, f: impl ProxyCallback + 'static) {
self.request_async(ProxyRequest::CreateFile { path }, f);
}
pub fn create_directory(&self, path: PathBuf, f: impl ProxyCallback + 'static) {
self.request_async(ProxyRequest::CreateDirectory { path }, f);
}
pub fn trash_path(&self, path: PathBuf, f: impl ProxyCallback + 'static) {
self.request_async(ProxyRequest::TrashPath { path }, f);
}
pub fn duplicate_path(
&self,
existing_path: PathBuf,
new_path: PathBuf,
f: impl ProxyCallback + 'static,
) {
self.request_async(
ProxyRequest::DuplicatePath {
existing_path,
new_path,
},
f,
);
}
pub fn rename_path(
&self,
from: PathBuf,
to: PathBuf,
f: impl ProxyCallback + 'static,
) {
self.request_async(ProxyRequest::RenamePath { from, to }, f);
}
pub fn test_create_at_path(
&self,
path: PathBuf,
f: impl ProxyCallback + 'static,
) {
self.request_async(ProxyRequest::TestCreateAtPath { path }, f);
}
pub fn save_buffer_as(
&self,
buffer_id: BufferId,
path: PathBuf,
rev: u64,
content: String,
create_parents: bool,
f: impl ProxyCallback + 'static,
) {
self.request_async(
ProxyRequest::SaveBufferAs {
buffer_id,
path,
rev,
content,
create_parents,
},
f,
);
}
pub fn global_search(
&self,
pattern: String,
case_sensitive: bool,
whole_word: bool,
is_regex: bool,
f: impl ProxyCallback + 'static,
) {
self.request_async(
ProxyRequest::GlobalSearch {
pattern,
case_sensitive,
whole_word,
is_regex,
},
f,
);
}
pub fn save(
&self,
rev: u64,
path: PathBuf,
create_parents: bool,
f: impl ProxyCallback + 'static,
) {
self.request_async(
ProxyRequest::Save {
rev,
path,
create_parents,
},
f,
);
}
    /// Requests the workspace file listing.
    pub fn get_files(&self, f: impl ProxyCallback + 'static) {
        self.request_async(
            ProxyRequest::GetFiles {
                // NOTE(review): hard-coded placeholder — the server side
                // presumably ignores `path`; confirm before relying on it.
                path: "path".into(),
            },
            f,
        );
    }
    /// Blocks until the contents of all open files are returned.
    pub fn get_open_files_content(&self) -> Result<ProxyResponse, RpcError> {
        self.request(ProxyRequest::GetOpenFilesContent {})
    }
pub fn read_dir(&self, path: PathBuf, f: impl ProxyCallback + 'static) {
self.request_async(ProxyRequest::ReadDir { path }, f);
}
pub fn completion_resolve(
&self,
plugin_id: PluginId,
completion_item: CompletionItem,
f: impl ProxyCallback + 'static,
) {
self.request_async(
ProxyRequest::CompletionResolve {
plugin_id,
completion_item: Box::new(completion_item),
},
f,
);
}
pub fn code_action_resolve(
&self,
action_item: CodeAction,
plugin_id: PluginId,
f: impl ProxyCallback + 'static,
) {
self.request_async(
ProxyRequest::CodeActionResolve {
action_item: Box::new(action_item),
plugin_id,
},
f,
);
}
pub fn get_hover(
&self,
request_id: usize,
path: PathBuf,
position: Position,
f: impl ProxyCallback + 'static,
) {
self.request_async(
ProxyRequest::GetHover {
request_id,
path,
position,
},
f,
);
}
pub fn get_definition(
&self,
request_id: usize,
path: PathBuf,
position: Position,
f: impl ProxyCallback + 'static,
) {
self.request_async(
ProxyRequest::GetDefinition {
request_id,
path,
position,
},
f,
);
}
pub fn show_call_hierarchy(
&self,
path: PathBuf,
position: Position,
f: impl ProxyCallback + 'static,
) {
self.request_async(ProxyRequest::ShowCallHierarchy { path, position }, f);
}
pub fn call_hierarchy_incoming(
&self,
path: PathBuf,
call_hierarchy_item: CallHierarchyItem,
f: impl ProxyCallback + 'static,
) {
self.request_async(
ProxyRequest::CallHierarchyIncoming {
path,
call_hierarchy_item,
},
f,
);
}
pub fn get_type_definition(
&self,
request_id: usize,
path: PathBuf,
position: Position,
f: impl ProxyCallback + 'static,
) {
self.request_async(
ProxyRequest::GetTypeDefinition {
request_id,
path,
position,
},
f,
);
}
pub fn get_lsp_folding_range(
&self,
path: PathBuf,
f: impl ProxyCallback + 'static,
) {
self.request_async(ProxyRequest::LspFoldingRange { path }, f);
}
pub fn get_references(
&self,
path: PathBuf,
position: Position,
f: impl ProxyCallback + 'static,
) {
self.request_async(ProxyRequest::GetReferences { path, position }, f);
}
pub fn references_resolve(
&self,
items: Vec<Location>,
f: impl ProxyCallback + 'static,
) {
self.request_async(ProxyRequest::ReferencesResolve { items }, f);
}
pub fn go_to_implementation(
&self,
path: PathBuf,
position: Position,
f: impl ProxyCallback + 'static,
) {
self.request_async(ProxyRequest::GotoImplementation { path, position }, f);
}
pub fn get_code_actions(
&self,
path: PathBuf,
position: Position,
diagnostics: Vec<Diagnostic>,
f: impl ProxyCallback + 'static,
) {
self.request_async(
ProxyRequest::GetCodeActions {
path,
position,
diagnostics,
},
f,
);
}
pub fn get_code_lens(&self, path: PathBuf, f: impl ProxyCallback + 'static) {
self.request_async(ProxyRequest::GetCodeLens { path }, f);
}
pub fn get_code_lens_resolve(
&self,
code_lens: CodeLens,
path: PathBuf,
f: impl ProxyCallback + 'static,
) {
self.request_async(ProxyRequest::GetCodeLensResolve { code_lens, path }, f);
}
pub fn get_document_formatting(
&self,
path: PathBuf,
f: impl ProxyCallback + 'static,
) {
self.request_async(ProxyRequest::GetDocumentFormatting { path }, f);
}
pub fn get_semantic_tokens(
&self,
path: PathBuf,
f: impl ProxyCallback + 'static,
) {
self.request_async(ProxyRequest::GetSemanticTokens { path }, f);
}
pub fn get_document_symbols(
&self,
path: PathBuf,
f: impl ProxyCallback + 'static,
) {
self.request_async(ProxyRequest::GetDocumentSymbols { path }, f);
}
pub fn get_workspace_symbols(
&self,
query: String,
f: impl ProxyCallback + 'static,
) {
self.request_async(ProxyRequest::GetWorkspaceSymbols { query }, f);
}
pub fn prepare_rename(
&self,
path: PathBuf,
position: Position,
f: impl ProxyCallback + 'static,
) {
self.request_async(ProxyRequest::PrepareRename { path, position }, f);
}
pub fn git_get_remote_file_url(
&self,
file: PathBuf,
f: impl ProxyCallback + 'static,
) {
self.request_async(ProxyRequest::GitGetRemoteFileUrl { file }, f);
}
pub fn rename(
&self,
path: PathBuf,
position: Position,
new_name: String,
f: impl ProxyCallback + 'static,
) {
self.request_async(
ProxyRequest::Rename {
path,
position,
new_name,
},
f,
);
}
pub fn get_inlay_hints(&self, path: PathBuf, f: impl ProxyCallback + 'static) {
self.request_async(ProxyRequest::GetInlayHints { path }, f);
}
pub fn get_inline_completions(
&self,
path: PathBuf,
position: Position,
trigger_kind: InlineCompletionTriggerKind,
f: impl ProxyCallback + 'static,
) {
self.request_async(
ProxyRequest::GetInlineCompletions {
path,
position,
trigger_kind,
},
f,
);
}
pub fn update(&self, path: PathBuf, delta: RopeDelta, rev: u64) {
self.notification(ProxyNotification::Update { path, delta, rev });
}
pub fn update_plugin_configs(
&self,
configs: HashMap<String, HashMap<String, serde_json::Value>>,
) {
self.notification(ProxyNotification::UpdatePluginConfigs { configs });
}
pub fn git_discard_files_changes(&self, files: Vec<PathBuf>) {
self.notification(ProxyNotification::GitDiscardFilesChanges { files });
}
pub fn git_discard_workspace_changes(&self) {
self.notification(ProxyNotification::GitDiscardWorkspaceChanges {});
}
pub fn get_selection_range(
&self,
path: PathBuf,
positions: Vec<Position>,
f: impl ProxyCallback + 'static,
) {
self.request_async(ProxyRequest::GetSelectionRange { path, positions }, f);
}
pub fn dap_start(
&self,
config: RunDebugConfig,
breakpoints: HashMap<PathBuf, Vec<SourceBreakpoint>>,
) {
self.notification(ProxyNotification::DapStart {
config,
breakpoints,
})
}
pub fn dap_process_id(
&self,
dap_id: DapId,
process_id: Option<u32>,
term_id: TermId,
) {
self.notification(ProxyNotification::DapProcessId {
dap_id,
process_id,
term_id,
})
}
pub fn dap_restart(
&self,
dap_id: DapId,
breakpoints: HashMap<PathBuf, Vec<SourceBreakpoint>>,
) {
self.notification(ProxyNotification::DapRestart {
dap_id,
breakpoints,
})
}
pub fn dap_continue(&self, dap_id: DapId, thread_id: ThreadId) {
self.notification(ProxyNotification::DapContinue { dap_id, thread_id })
}
pub fn dap_step_over(&self, dap_id: DapId, thread_id: ThreadId) {
self.notification(ProxyNotification::DapStepOver { dap_id, thread_id })
}
pub fn dap_step_into(&self, dap_id: DapId, thread_id: ThreadId) {
self.notification(ProxyNotification::DapStepInto { dap_id, thread_id })
}
pub fn dap_step_out(&self, dap_id: DapId, thread_id: ThreadId) {
self.notification(ProxyNotification::DapStepOut { dap_id, thread_id })
}
pub fn dap_pause(&self, dap_id: DapId, thread_id: ThreadId) {
self.notification(ProxyNotification::DapPause { dap_id, thread_id })
}
pub fn dap_stop(&self, dap_id: DapId) {
self.notification(ProxyNotification::DapStop { dap_id })
}
pub fn dap_disconnect(&self, dap_id: DapId) {
self.notification(ProxyNotification::DapDisconnect { dap_id })
}
pub fn dap_set_breakpoints(
&self,
dap_id: DapId,
path: PathBuf,
breakpoints: Vec<SourceBreakpoint>,
) {
self.notification(ProxyNotification::DapSetBreakpoints {
dap_id,
path,
breakpoints,
})
}
pub fn dap_variable(
&self,
dap_id: DapId,
reference: usize,
f: impl ProxyCallback + 'static,
) {
self.request_async(ProxyRequest::DapVariable { dap_id, reference }, f);
}
pub fn dap_get_scopes(
&self,
dap_id: DapId,
frame_id: usize,
f: impl ProxyCallback + 'static,
) {
self.request_async(ProxyRequest::DapGetScopes { dap_id, frame_id }, f);
}
}
impl Default for ProxyRpcHandler {
fn default() -> Self {
Self::new()
}
}
| rust | Apache-2.0 | 59ce6df700ce4efbe4498a719fe52195e083d2ee | 2026-01-04T15:32:17.267102Z | false |
lapce/lapce | https://github.com/lapce/lapce/blob/59ce6df700ce4efbe4498a719fe52195e083d2ee/lapce-rpc/src/plugin.rs | lapce-rpc/src/plugin.rs | use core::fmt;
use std::{collections::HashMap, path::PathBuf};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use crate::counter::Counter;
/// Unique identifier for a loaded plugin instance, allocated process-wide.
#[derive(Eq, PartialEq, Hash, Clone, Copy, Debug, Serialize, Deserialize)]
pub struct PluginId(pub u64);
impl PluginId {
    /// Allocates the next process-wide unique plugin id (ids start at 1).
    pub fn next() -> Self {
        static PLUGIN_ID_COUNTER: Counter = Counter::new();
        Self(PLUGIN_ID_COUNTER.next())
    }
}
/// One configuration option declared by a plugin.
#[derive(Deserialize, Clone, Debug, Serialize)]
pub struct PluginConfiguration {
    /// Option type; deserialized from the manifest key `type`
    /// (serializes back out as `kind`).
    #[serde(rename(deserialize = "type"))]
    pub kind: String,
    /// Default value for this option.
    pub default: Value,
    /// Human-readable description of the option.
    pub description: String,
}
/// Summary information about a plugin ("volt").
#[derive(Deserialize, Clone, Debug, Serialize, PartialEq, Eq)]
pub struct VoltInfo {
    pub name: String,
    pub version: String,
    pub display_name: String,
    pub author: String,
    pub description: String,
    pub repository: Option<String>,
    /// Whether the plugin ships a wasm payload.
    pub wasm: bool,
    /// Last-update timestamp — unit (seconds vs millis) not visible here;
    /// confirm with the producer.
    pub updated_at_ts: i64,
}
impl VoltInfo {
    /// Stable identity of the volt (`author` + `name`).
    pub fn id(&self) -> VoltID {
        VoltID::from(self)
    }
}
/// Conditions under which a volt is activated (kebab-case on the wire).
#[derive(Deserialize, Clone, Debug, Serialize, PartialEq, Eq)]
#[serde(rename_all = "kebab-case")]
pub struct VoltActivation {
    /// Activate when one of these languages is in use.
    pub language: Option<Vec<String>>,
    /// Activate when the workspace contains one of these entries —
    /// exact matching semantics live in the loader; confirm there.
    pub workspace_contains: Option<Vec<String>>,
}
/// A single configuration option exposed by a volt.
#[derive(Deserialize, Clone, Debug, Serialize, PartialEq, Eq)]
pub struct VoltConfig {
    /// Default value for the option.
    pub default: Value,
    /// Human-readable description of the option.
    pub description: String,
}
/// Full manifest metadata of a volt (kebab-case on the wire).
#[derive(Deserialize, Clone, Debug, Serialize, PartialEq, Eq)]
#[serde(rename_all = "kebab-case")]
pub struct VoltMetadata {
    pub name: String,
    pub version: String,
    pub display_name: String,
    pub author: String,
    pub description: String,
    pub icon: Option<String>,
    pub repository: Option<String>,
    /// Wasm entry (presumably a path) — present iff the volt ships wasm.
    pub wasm: Option<String>,
    pub color_themes: Option<Vec<String>>,
    pub icon_themes: Option<Vec<String>>,
    /// Directory the volt is installed in, when known.
    pub dir: Option<PathBuf>,
    /// Activation conditions, if any.
    pub activation: Option<VoltActivation>,
    /// Declared configuration options keyed by name.
    pub config: Option<HashMap<String, VoltConfig>>,
}
impl VoltMetadata {
    /// Stable identity of the volt (`author` + `name`).
    pub fn id(&self) -> VoltID {
        VoltID::from(self)
    }
    /// Projects the metadata down to a [`VoltInfo`] summary.
    pub fn info(&self) -> VoltInfo {
        VoltInfo {
            name: self.name.clone(),
            version: self.version.clone(),
            display_name: self.display_name.clone(),
            author: self.author.clone(),
            description: self.description.clone(),
            repository: self.repository.clone(),
            wasm: self.wasm.is_some(),
            // Local metadata carries no update timestamp, so use 0.
            updated_at_ts: 0,
        }
    }
}
/// Identity of a volt: the `author` + `name` pair.
#[derive(Clone, Debug, Hash, PartialEq, Eq, Serialize, Deserialize)]
pub struct VoltID {
    pub author: String,
    pub name: String,
}
impl fmt::Display for VoltID {
    /// Formats as `author.name`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}.{}", self.author, self.name)
    }
}
// Conversions into VoltID from both owned and borrowed volt types; the
// borrowed variants clone only the two identity fields.
impl From<VoltMetadata> for VoltID {
    fn from(volt: VoltMetadata) -> Self {
        Self {
            author: volt.author,
            name: volt.name,
        }
    }
}
impl From<&VoltMetadata> for VoltID {
    fn from(volt: &VoltMetadata) -> Self {
        Self {
            author: volt.author.clone(),
            name: volt.name.clone(),
        }
    }
}
impl From<VoltInfo> for VoltID {
    fn from(volt: VoltInfo) -> Self {
        Self {
            author: volt.author,
            name: volt.name,
        }
    }
}
impl From<&VoltInfo> for VoltID {
    fn from(volt: &VoltInfo) -> Self {
        Self {
            author: volt.author.clone(),
            name: volt.name.clone(),
        }
    }
}
#[cfg(test)]
mod tests {
use super::{VoltID, VoltInfo, VoltMetadata};
#[test]
fn test_volt_metadata_id() {
let volt_metadata = VoltMetadata {
name: "plugin".to_string(),
version: "0.1".to_string(),
display_name: "Plugin".to_string(),
author: "Author".to_string(),
description: "Useful plugin".to_string(),
icon: None,
repository: None,
wasm: None,
color_themes: None,
icon_themes: None,
dir: std::env::current_dir().unwrap().canonicalize().ok(),
activation: None,
config: None,
};
let volt_id = VoltID {
author: "Author".to_string(),
name: "plugin".to_string(),
};
assert_eq!(volt_metadata.id(), volt_id);
assert_eq!(
<VoltID as From<&VoltMetadata>>::from(&volt_metadata),
volt_id
);
assert_eq!(
<VoltID as From<VoltMetadata>>::from(volt_metadata.clone()),
volt_id
);
assert_eq!(
<&VoltMetadata as Into<VoltID>>::into(&volt_metadata),
volt_id
);
assert_eq!(<VoltMetadata as Into<VoltID>>::into(volt_metadata), volt_id);
}
#[test]
fn test_volt_metadata_info() {
let volt_metadata = VoltMetadata {
name: "plugin".to_string(),
version: "0.1".to_string(),
display_name: "Plugin".to_string(),
author: "Author".to_string(),
description: "Useful plugin".to_string(),
icon: None,
repository: None,
wasm: None,
color_themes: None,
icon_themes: None,
dir: std::env::current_dir().unwrap().canonicalize().ok(),
activation: None,
config: None,
};
let volt_info = VoltInfo {
name: "plugin".to_string(),
version: "0.1".to_string(),
display_name: "Plugin".to_string(),
author: "Author".to_string(),
description: "Useful plugin".to_string(),
repository: None,
wasm: false,
updated_at_ts: 0,
};
assert_eq!(volt_metadata.info(), volt_info);
}
#[test]
fn test_volt_info_id() {
let volt_info = VoltInfo {
name: "plugin".to_string(),
version: "0.1".to_string(),
display_name: "Plugin".to_string(),
author: "Author".to_string(),
description: "Useful plugin".to_string(),
repository: None,
wasm: false,
updated_at_ts: 0,
};
let volt_id = VoltID {
author: "Author".to_string(),
name: "plugin".to_string(),
};
assert_eq!(volt_info.id(), volt_id);
assert_eq!(<VoltID as From<&VoltInfo>>::from(&volt_info), volt_id);
assert_eq!(<VoltID as From<VoltInfo>>::from(volt_info.clone()), volt_id);
assert_eq!(<&VoltInfo as Into<VoltID>>::into(&volt_info), volt_id);
assert_eq!(<VoltInfo as Into<VoltID>>::into(volt_info), volt_id);
}
}
| rust | Apache-2.0 | 59ce6df700ce4efbe4498a719fe52195e083d2ee | 2026-01-04T15:32:17.267102Z | false |
lapce/lapce | https://github.com/lapce/lapce/blob/59ce6df700ce4efbe4498a719fe52195e083d2ee/lapce-proxy/src/lib.rs | lapce-proxy/src/lib.rs | #![allow(clippy::manual_clamp)]
pub mod buffer;
pub mod cli;
pub mod dispatch;
pub mod plugin;
pub mod terminal;
pub mod watcher;
use std::{
io::{BufReader, stdin, stdout},
process::exit,
sync::Arc,
thread,
};
use anyhow::{Result, anyhow};
use clap::Parser;
use dispatch::Dispatcher;
use lapce_core::{directory::Directory, meta};
use lapce_rpc::{
RpcMessage,
core::{CoreRpc, CoreRpcHandler},
file::PathObject,
proxy::{ProxyMessage, ProxyNotification, ProxyRpcHandler},
stdio::stdio_transport,
};
use tracing::error;
// Command-line interface of the proxy binary. Note: `//` comments are used
// deliberately — `///` doc comments on clap fields become help text.
#[derive(Parser)]
#[clap(name = "Lapce-proxy")]
#[clap(version = meta::VERSION)]
struct Cli {
    // Hidden flag set when the editor spawns this binary as the proxy;
    // without it, the binary acts as a CLI front end (see `mainloop`).
    #[clap(short, long, action, hide = true)]
    proxy: bool,
    /// Paths to file(s) and/or folder(s) to open.
    /// When path is a file (that exists or not),
    /// it accepts `path:line:column` syntax
    /// to specify line and column at which it should open the file
    #[clap(value_parser = cli::parse_file_line_column)]
    #[clap(value_hint = clap::ValueHint::AnyPath)]
    paths: Vec<PathObject>,
}
/// Entry point of the proxy binary: wires stdio RPC transport, core and
/// proxy RPC handlers, and the local-socket listener, then blocks in the
/// proxy main loop until shutdown.
pub fn mainloop() {
    let cli = Cli::parse();
    if !cli.proxy {
        // Not spawned as the proxy: act as a CLI front end and forward the
        // given paths to an already-running editor process.
        if let Err(e) = cli::try_open_in_existing_process(&cli.paths) {
            error!("failed to open path(s): {e}");
        };
        // NOTE(review): exits with status 1 even when forwarding succeeds —
        // confirm whether anything depends on this exit code.
        exit(1);
    }
    let core_rpc = CoreRpcHandler::new();
    let proxy_rpc = ProxyRpcHandler::new();
    let mut dispatcher = Dispatcher::new(core_rpc.clone(), proxy_rpc.clone());
    // stdin/stdout carry the RPC stream to/from the editor core.
    let (writer_tx, writer_rx) = crossbeam_channel::unbounded();
    let (reader_tx, reader_rx) = crossbeam_channel::unbounded();
    stdio_transport(stdout(), writer_rx, BufReader::new(stdin()), reader_tx);
    // Thread 1: forward outgoing core RPC onto the stdio writer.
    let local_core_rpc = core_rpc.clone();
    let local_writer_tx = writer_tx.clone();
    thread::spawn(move || {
        for msg in local_core_rpc.rx() {
            match msg {
                CoreRpc::Request(id, rpc) => {
                    if let Err(err) =
                        local_writer_tx.send(RpcMessage::Request(id, rpc))
                    {
                        tracing::error!("{:?}", err);
                    }
                }
                CoreRpc::Notification(rpc) => {
                    if let Err(err) =
                        local_writer_tx.send(RpcMessage::Notification(rpc))
                    {
                        tracing::error!("{:?}", err);
                    }
                }
                CoreRpc::Shutdown => {
                    return;
                }
            }
        }
    });
    // Thread 2: dispatch incoming stdio messages; requests are answered
    // asynchronously back over the writer channel.
    let local_proxy_rpc = proxy_rpc.clone();
    let writer_tx = Arc::new(writer_tx);
    thread::spawn(move || {
        for msg in reader_rx {
            match msg {
                RpcMessage::Request(id, req) => {
                    let writer_tx = writer_tx.clone();
                    local_proxy_rpc.request_async(req, move |result| match result {
                        Ok(resp) => {
                            if let Err(err) =
                                writer_tx.send(RpcMessage::Response(id, resp))
                            {
                                tracing::error!("{:?}", err);
                            }
                        }
                        Err(e) => {
                            if let Err(err) =
                                writer_tx.send(RpcMessage::Error(id, e))
                            {
                                tracing::error!("{:?}", err);
                            }
                        }
                    });
                }
                RpcMessage::Notification(n) => {
                    local_proxy_rpc.notification(n);
                }
                RpcMessage::Response(id, resp) => {
                    core_rpc.handle_response(id, Ok(resp));
                }
                RpcMessage::Error(id, err) => {
                    core_rpc.handle_response(id, Err(err));
                }
            }
        }
        // Reader channel closed: the editor went away, stop the proxy loop.
        local_proxy_rpc.shutdown();
    });
    // Thread 3: accept "open path" requests from other local processes.
    let local_proxy_rpc = proxy_rpc.clone();
    std::thread::spawn(move || {
        if let Err(err) = listen_local_socket(local_proxy_rpc) {
            tracing::error!("{:?}", err);
        }
    });
    if let Err(err) = register_lapce_path() {
        tracing::error!("{:?}", err);
    }
    // Block servicing proxy RPC until shutdown.
    proxy_rpc.mainloop(&mut dispatcher);
}
pub fn register_lapce_path() -> Result<()> {
let exedir = std::env::current_exe()?
.parent()
.ok_or(anyhow!("can't get parent dir of exe"))?
.canonicalize()?;
let current_path = std::env::var("PATH")?;
let paths = std::env::split_paths(¤t_path);
for path in paths {
if exedir == path.canonicalize()? {
return Ok(());
}
}
let paths = std::env::split_paths(¤t_path);
let paths = std::env::join_paths(std::iter::once(exedir).chain(paths))?;
unsafe {
std::env::set_var("PATH", paths);
}
Ok(())
}
/// Listen on the local IPC socket for `OpenPaths` notifications sent by
/// other invocations of the binary and forward them to the proxy.
///
/// Blocks forever accepting connections; each connection is serviced on
/// its own thread until its stream errors or closes.
///
/// # Errors
/// Fails if the socket path cannot be determined or the listener cannot
/// be bound.
fn listen_local_socket(proxy_rpc: ProxyRpcHandler) -> Result<()> {
    let local_socket = Directory::local_socket()
        .ok_or_else(|| anyhow!("can't get local socket folder"))?;
    // Remove a stale socket file left by a previous run. A missing file
    // is the normal first-run case, so don't log that as an error.
    if let Err(err) = std::fs::remove_file(&local_socket) {
        if err.kind() != std::io::ErrorKind::NotFound {
            tracing::error!("{:?}", err);
        }
    }
    let socket =
        interprocess::local_socket::LocalSocketListener::bind(local_socket)?;
    for stream in socket.incoming().flatten() {
        let mut reader = BufReader::new(stream);
        let proxy_rpc = proxy_rpc.clone();
        thread::spawn(move || -> Result<()> {
            // Loop ends when read_msg returns Err (peer disconnected).
            loop {
                let msg: Option<ProxyMessage> =
                    lapce_rpc::stdio::read_msg(&mut reader)?;
                // Only OpenPaths is accepted over this channel; anything
                // else is silently ignored.
                if let Some(RpcMessage::Notification(
                    ProxyNotification::OpenPaths { paths },
                )) = msg
                {
                    proxy_rpc.notification(ProxyNotification::OpenPaths { paths });
                }
            }
        });
    }
    Ok(())
}
/// Perform a blocking GET of `url`, retrying transient failures with a
/// short linear backoff.
///
/// Respects the `https_proxy` / `HTTPS_PROXY` environment variables and
/// applies a 10-second request timeout. `user_agent`, when given, is set
/// on the client.
///
/// # Errors
/// Returns the last request error after all retries are exhausted, or a
/// client-construction error.
pub fn get_url<T: reqwest::IntoUrl + Clone>(
    url: T,
    user_agent: Option<&str>,
) -> Result<reqwest::blocking::Response> {
    /// Number of retries after the initial attempt.
    const MAX_RETRIES: u32 = 3;
    let mut builder = reqwest::blocking::Client::builder()
        .timeout(std::time::Duration::from_secs(10));
    // Honour both common spellings of the proxy environment variable.
    if let Ok(proxy) =
        std::env::var("https_proxy").or_else(|_| std::env::var("HTTPS_PROXY"))
    {
        builder = builder.proxy(reqwest::Proxy::all(proxy)?);
    }
    if let Some(user_agent) = user_agent {
        builder = builder.user_agent(user_agent);
    }
    let client = builder.build()?;
    let mut attempt: u32 = 0;
    loop {
        match client.get(url.clone()).send() {
            Ok(resp) => return Ok(resp),
            Err(e) if attempt >= MAX_RETRIES => return Err(e.into()),
            Err(_) => {
                attempt += 1;
                // Linear backoff instead of hammering the server.
                std::thread::sleep(std::time::Duration::from_millis(
                    100 * u64::from(attempt),
                ));
            }
        }
    }
}
| rust | Apache-2.0 | 59ce6df700ce4efbe4498a719fe52195e083d2ee | 2026-01-04T15:32:17.267102Z | false |
lapce/lapce | https://github.com/lapce/lapce/blob/59ce6df700ce4efbe4498a719fe52195e083d2ee/lapce-proxy/src/cli.rs | lapce-proxy/src/cli.rs | use std::path::PathBuf;
use anyhow::{Error, Result, anyhow};
use lapce_core::directory::Directory;
use lapce_rpc::{
RpcMessage,
file::{LineCol, PathObject},
proxy::{ProxyMessage, ProxyNotification},
};
/// Kind of filesystem object a parsed CLI path refers to.
// NOTE(review): this enum appears unused within this module — confirm it
// has external users before removing.
#[derive(Default, Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub enum PathObjectType {
    /// Default variant: treat the path as a directory.
    #[default]
    Directory,
    File,
}
/// Parse a CLI path argument of the form `path`, `path:line` or
/// `path:line:column` into a [`PathObject`].
///
/// If the literal string names an existing filesystem entry it is taken
/// verbatim (no line/column split, so files whose names contain `:` still
/// open). Otherwise trailing `:<num>` segments are peeled off from the
/// right: one number means a line (column defaults to 1), two numbers
/// mean line and column. Relative paths that don't exist are anchored at
/// the current working directory.
///
/// # Errors
/// Currently always returns `Ok`; the `Result` signature is kept for the
/// clap `value_parser` contract.
pub fn parse_file_line_column(path: &str) -> Result<PathObject, Error> {
    // Fast path: the argument as-is names something on disk.
    if let Ok(path) = PathBuf::from(path).canonicalize() {
        return Ok(PathObject {
            is_dir: path.is_dir(),
            path,
            linecol: None,
        });
    }
    let pwd = std::env::current_dir().unwrap_or_default();
    let mut splits = path.rsplit(':').peekable();
    let (path, linecol) = if let Some(first_rhs) =
        splits.peek().and_then(|s| s.parse::<usize>().ok())
    {
        splits.next();
        if let Some(second_rhs) = splits.peek().and_then(|s| s.parse::<usize>().ok())
        {
            splits.next();
            // Two numeric suffixes: `path:line:column`.
            let path = resolve_or_join(&pwd, &rejoin(splits));
            (
                path,
                Some(LineCol {
                    line: second_rhs,
                    column: first_rhs,
                }),
            )
        } else {
            // One numeric suffix: `path:line`, column defaults to 1.
            let path = resolve_or_join(&pwd, &rejoin(splits));
            (
                path,
                Some(LineCol {
                    line: first_rhs,
                    column: 1,
                }),
            )
        }
    } else {
        // No numeric suffix: treat the whole string as a path.
        (pwd.join(path), None)
    };
    Ok(PathObject {
        is_dir: path.is_dir(),
        path,
        linecol,
    })
}

/// Re-assemble the path part left after peeling numeric suffixes off a
/// right-to-left `:` splitter.
fn rejoin<'a, I: DoubleEndedIterator<Item = &'a str>>(splits: I) -> String {
    splits.rev().collect::<Vec<&str>>().join(":")
}

/// Canonicalize `path` if it exists, otherwise anchor it at `pwd`.
fn resolve_or_join(pwd: &std::path::Path, path: &str) -> PathBuf {
    let path = PathBuf::from(path);
    match path.canonicalize() {
        Ok(path) => path,
        Err(_) => pwd.join(&path),
    }
}
/// Send the given paths to an already-running proxy over the local IPC
/// socket as an `OpenPaths` notification.
///
/// # Errors
/// Fails if the socket path cannot be determined, the connection cannot
/// be established (e.g. no instance is running), or the message cannot
/// be written.
pub fn try_open_in_existing_process(paths: &[PathObject]) -> Result<()> {
    let local_socket = Directory::local_socket()
        .ok_or_else(|| anyhow!("can't get local socket folder"))?;
    let mut socket =
        interprocess::local_socket::LocalSocketStream::connect(local_socket)?;
    let msg: ProxyMessage = RpcMessage::Notification(ProxyNotification::OpenPaths {
        paths: paths.to_vec(),
    });
    lapce_rpc::stdio::write_msg(&mut socket, msg)?;
    Ok(())
}
#[cfg(test)]
mod tests {
    use std::{env, path::PathBuf};
    use super::parse_file_line_column;
    // NOTE(review): `PathObject` is imported from `crate::cli` here but
    // from `lapce_rpc::file` above — presumably a re-export; confirm.
    use crate::cli::PathObject;
    // Windows absolute paths contain a drive-letter `:`; the parser must
    // not confuse it with a line/column separator.
    #[test]
    #[cfg(windows)]
    fn test_absolute_path() {
        assert_eq!(
            parse_file_line_column("C:\\Cargo.toml:55").unwrap(),
            PathObject::new(PathBuf::from("C:\\Cargo.toml"), false, 55, 1),
        );
    }
    #[test]
    #[cfg(windows)]
    fn test_relative_path() {
        assert_eq!(
            parse_file_line_column(".\\..\\Cargo.toml:55").unwrap(),
            PathObject::new(
                PathBuf::from(".\\..\\Cargo.toml").canonicalize().unwrap(),
                false,
                55,
                1
            ),
        );
    }
    // A trailing separator must still resolve to the underlying entry.
    #[test]
    #[cfg(windows)]
    fn test_directory_looking_like_file() {
        assert_eq!(
            parse_file_line_column(".\\Cargo.toml\\").unwrap(),
            PathObject::from_path(
                env::current_dir().unwrap().join("Cargo.toml"),
                false
            ),
        );
    }
    #[test]
    #[cfg(unix)]
    fn test_absolute_path() {
        assert_eq!(
            parse_file_line_column("/tmp/Cargo.toml:55").unwrap(),
            PathObject::new(PathBuf::from("/tmp/Cargo.toml"), false, 55, 1),
        );
    }
    #[test]
    #[cfg(unix)]
    fn test_relative_path() {
        assert_eq!(
            parse_file_line_column("./../Cargo.toml").unwrap(),
            PathObject::from_path(
                PathBuf::from("./../Cargo.toml").canonicalize().unwrap(),
                false,
            ),
        );
    }
    #[test]
    #[cfg(unix)]
    fn test_directory_looking_like_file() {
        assert_eq!(
            parse_file_line_column("./Cargo.toml/").unwrap(),
            PathObject::from_path(
                env::current_dir().unwrap().join("Cargo.toml"),
                false
            ),
        );
    }
    #[test]
    fn test_current_dir() {
        assert_eq!(
            parse_file_line_column(".").unwrap(),
            PathObject::from_path(
                env::current_dir().unwrap().canonicalize().unwrap(),
                true
            ),
        );
    }
    #[test]
    fn test_relative_path_with_line() {
        assert_eq!(
            parse_file_line_column("Cargo.toml:55").unwrap(),
            PathObject::new(
                PathBuf::from("Cargo.toml").canonicalize().unwrap(),
                false,
                55,
                1
            ),
        );
    }
    #[test]
    fn test_relative_path_with_linecol() {
        assert_eq!(
            parse_file_line_column("Cargo.toml:55:3").unwrap(),
            PathObject::new(
                PathBuf::from("Cargo.toml").canonicalize().unwrap(),
                false,
                55,
                3
            ),
        );
    }
    // With three numeric suffixes only the last two are consumed; the
    // remainder (`Cargo.toml:12`) is kept as the (nonexistent) path.
    #[test]
    fn test_relative_path_with_none() {
        assert_eq!(
            parse_file_line_column("Cargo.toml:12:623:352").unwrap(),
            PathObject::new(
                env::current_dir()
                    .unwrap()
                    .join(PathBuf::from("Cargo.toml:12")),
                false,
                623,
                352
            ),
        );
    }
}
| rust | Apache-2.0 | 59ce6df700ce4efbe4498a719fe52195e083d2ee | 2026-01-04T15:32:17.267102Z | false |
lapce/lapce | https://github.com/lapce/lapce/blob/59ce6df700ce4efbe4498a719fe52195e083d2ee/lapce-proxy/src/dispatch.rs | lapce-proxy/src/dispatch.rs | use std::{
collections::{HashMap, HashSet},
fs, io,
path::{Path, PathBuf},
sync::{
Arc,
atomic::{AtomicU64, Ordering},
},
thread,
time::Duration,
};
use alacritty_terminal::{event::WindowSize, event_loop::Msg};
use anyhow::{Context, Result, anyhow};
use crossbeam_channel::Sender;
use git2::{
DiffOptions, ErrorCode::NotFound, Oid, Repository, build::CheckoutBuilder,
};
use grep_matcher::Matcher;
use grep_regex::RegexMatcherBuilder;
use grep_searcher::{SearcherBuilder, sinks::UTF8};
use indexmap::IndexMap;
use lapce_rpc::{
RequestId, RpcError,
buffer::BufferId,
core::{CoreNotification, CoreRpcHandler, FileChanged},
file::FileNodeItem,
file_line::FileLine,
proxy::{
ProxyHandler, ProxyNotification, ProxyRequest, ProxyResponse,
ProxyRpcHandler, SearchMatch,
},
source_control::{DiffInfo, FileDiff},
style::{LineStyle, SemanticStyles},
terminal::TermId,
};
use lapce_xi_rope::Rope;
use lsp_types::{
CancelParams, MessageType, NumberOrString, Position, Range, ShowMessageParams,
TextDocumentItem, Url,
notification::{Cancel, Notification},
};
use parking_lot::Mutex;
use crate::{
buffer::{Buffer, get_mod_time, load_file},
plugin::{PluginCatalogRpcHandler, catalog::PluginCatalog},
terminal::{Terminal, TerminalSender},
watcher::{FileWatcher, Notify, WatchToken},
};
/// Watch token routing events for files explicitly opened in the editor.
const OPEN_FILE_EVENT_TOKEN: WatchToken = WatchToken(1);
/// Watch token routing events for the (recursively watched) workspace.
const WORKSPACE_EVENT_TOKEN: WatchToken = WatchToken(2);
/// Proxy-side request/notification dispatcher: owns the open buffers,
/// running terminals and the file watcher, and bridges between the
/// editor core RPC and the plugin catalog RPC.
pub struct Dispatcher {
    // Root of the currently opened workspace, if any.
    workspace: Option<PathBuf>,
    pub proxy_rpc: ProxyRpcHandler,
    core_rpc: CoreRpcHandler,
    catalog_rpc: PluginCatalogRpcHandler,
    // Open documents keyed by absolute path.
    buffers: HashMap<PathBuf, Buffer>,
    // Channels to running terminal event loops.
    terminals: HashMap<TermId, TerminalSender>,
    file_watcher: FileWatcher,
    window_id: usize,
    tab_id: usize,
}
impl ProxyHandler for Dispatcher {
    /// Handle a fire-and-forget notification from the editor core.
    ///
    /// Long-running work (plugin catalog startup, terminal event loops)
    /// is moved to dedicated threads; everything else runs inline on the
    /// proxy dispatch thread.
    fn handle_notification(&mut self, rpc: ProxyNotification) {
        use ProxyNotification::*;
        match rpc {
            // One-time session setup: remember ids/workspace, start the
            // file watcher and the plugin catalog main loop.
            Initialize {
                workspace,
                disabled_volts,
                extra_plugin_paths,
                plugin_configurations,
                window_id,
                tab_id,
            } => {
                self.window_id = window_id;
                self.tab_id = tab_id;
                self.workspace = workspace;
                self.file_watcher.notify(FileWatchNotifier::new(
                    self.workspace.clone(),
                    self.core_rpc.clone(),
                    self.proxy_rpc.clone(),
                ));
                if let Some(workspace) = self.workspace.as_ref() {
                    self.file_watcher
                        .watch(workspace, true, WORKSPACE_EVENT_TOKEN);
                }
                let plugin_rpc = self.catalog_rpc.clone();
                let workspace = self.workspace.clone();
                thread::spawn(move || {
                    let mut plugin = PluginCatalog::new(
                        workspace,
                        disabled_volts,
                        extra_plugin_paths,
                        plugin_configurations,
                        plugin_rpc.clone(),
                    );
                    plugin_rpc.mainloop(&mut plugin);
                });
                self.core_rpc.notification(CoreNotification::ProxyStatus {
                    status: lapce_rpc::proxy::ProxyStatus::Connected,
                });
                // send home directory for initial filepicker dir
                let dirs = directories::UserDirs::new();
                if let Some(dirs) = dirs {
                    self.core_rpc.home_dir(dirs.home_dir().into());
                }
            }
            OpenPaths { paths } => {
                self.core_rpc
                    .notification(CoreNotification::OpenPaths { paths });
            }
            // File-watcher callback for an open document: reload it if
            // its mtime changed, or report deletion if it vanished.
            OpenFileChanged { path } => {
                if path.exists() {
                    if let Some(buffer) = self.buffers.get(&path) {
                        if get_mod_time(&buffer.path) == buffer.mod_time {
                            return;
                        }
                        match load_file(&buffer.path) {
                            Ok(content) => {
                                self.core_rpc.open_file_changed(
                                    path,
                                    FileChanged::Change(content),
                                );
                            }
                            Err(err) => {
                                tracing::event!(
                                    tracing::Level::ERROR,
                                    "Failed to re-read file after change notification: {err}"
                                );
                            }
                        }
                    }
                } else {
                    self.buffers.remove(&path);
                    self.core_rpc.open_file_changed(path, FileChanged::Delete);
                }
            }
            Completion {
                request_id,
                path,
                input,
                position,
            } => {
                self.catalog_rpc
                    .completion(request_id, &path, input, position);
            }
            SignatureHelp {
                request_id,
                path,
                position,
            } => {
                self.catalog_rpc.signature_help(request_id, &path, position);
            }
            // Orderly shutdown: plugins first, then terminals, then the
            // proxy loop itself.
            Shutdown {} => {
                self.catalog_rpc.shutdown();
                for (_, sender) in self.terminals.iter() {
                    sender.send(Msg::Shutdown);
                }
                self.proxy_rpc.shutdown();
            }
            // Apply an edit delta to the in-memory buffer and forward the
            // change to language plugins.
            Update { path, delta, rev } => {
                let buffer = self.buffers.get_mut(&path).unwrap();
                let old_text = buffer.rope.clone();
                buffer.update(&delta, rev);
                self.catalog_rpc.did_change_text_document(
                    &path,
                    rev,
                    delta,
                    old_text,
                    buffer.rope.clone(),
                );
            }
            UpdatePluginConfigs { configs } => {
                if let Err(err) = self.catalog_rpc.update_plugin_configs(configs) {
                    tracing::error!("{:?}", err);
                }
            }
            // Spawn a PTY, report its child pid, and run its event loop
            // on a dedicated thread.
            NewTerminal { term_id, profile } => {
                let mut terminal = match Terminal::new(term_id, profile, 50, 10) {
                    Ok(terminal) => terminal,
                    Err(e) => {
                        self.core_rpc.terminal_launch_failed(term_id, e.to_string());
                        return;
                    }
                };
                #[allow(unused)]
                let mut child_id = None;
                #[cfg(target_os = "windows")]
                {
                    child_id = terminal.pty.child_watcher().pid().map(|x| x.get());
                }
                #[cfg(not(target_os = "windows"))]
                {
                    child_id = Some(terminal.pty.child().id());
                }
                self.core_rpc.terminal_process_id(term_id, child_id);
                let tx = terminal.tx.clone();
                let poller = terminal.poller.clone();
                let sender = TerminalSender::new(tx, poller);
                self.terminals.insert(term_id, sender);
                let rpc = self.core_rpc.clone();
                thread::spawn(move || {
                    terminal.run(rpc);
                });
            }
            TerminalWrite { term_id, content } => {
                if let Some(tx) = self.terminals.get(&term_id) {
                    tx.send(Msg::Input(content.into_bytes().into()));
                }
            }
            TerminalResize {
                term_id,
                width,
                height,
            } => {
                if let Some(tx) = self.terminals.get(&term_id) {
                    let size = WindowSize {
                        num_lines: height as u16,
                        num_cols: width as u16,
                        cell_width: 1,
                        cell_height: 1,
                    };
                    tx.send(Msg::Resize(size));
                }
            }
            TerminalClose { term_id } => {
                if let Some(tx) = self.terminals.remove(&term_id) {
                    tx.send(Msg::Shutdown);
                }
            }
            // DAP (debug adapter) controls: all forwarded to the plugin
            // catalog; failures are logged, not surfaced.
            DapStart {
                config,
                breakpoints,
            } => {
                if let Err(err) = self.catalog_rpc.dap_start(config, breakpoints) {
                    tracing::error!("{:?}", err);
                }
            }
            DapProcessId {
                dap_id,
                process_id,
                term_id,
            } => {
                if let Err(err) =
                    self.catalog_rpc.dap_process_id(dap_id, process_id, term_id)
                {
                    tracing::error!("{:?}", err);
                }
            }
            DapContinue { dap_id, thread_id } => {
                if let Err(err) = self.catalog_rpc.dap_continue(dap_id, thread_id) {
                    tracing::error!("{:?}", err);
                }
            }
            DapPause { dap_id, thread_id } => {
                if let Err(err) = self.catalog_rpc.dap_pause(dap_id, thread_id) {
                    tracing::error!("{:?}", err);
                }
            }
            DapStepOver { dap_id, thread_id } => {
                if let Err(err) = self.catalog_rpc.dap_step_over(dap_id, thread_id) {
                    tracing::error!("{:?}", err);
                }
            }
            DapStepInto { dap_id, thread_id } => {
                if let Err(err) = self.catalog_rpc.dap_step_into(dap_id, thread_id) {
                    tracing::error!("{:?}", err);
                }
            }
            DapStepOut { dap_id, thread_id } => {
                if let Err(err) = self.catalog_rpc.dap_step_out(dap_id, thread_id) {
                    tracing::error!("{:?}", err);
                }
            }
            DapStop { dap_id } => {
                if let Err(err) = self.catalog_rpc.dap_stop(dap_id) {
                    tracing::error!("{:?}", err);
                }
            }
            DapDisconnect { dap_id } => {
                if let Err(err) = self.catalog_rpc.dap_disconnect(dap_id) {
                    tracing::error!("{:?}", err);
                }
            }
            DapRestart {
                dap_id,
                breakpoints,
            } => {
                if let Err(err) = self.catalog_rpc.dap_restart(dap_id, breakpoints) {
                    tracing::error!("{:?}", err);
                }
            }
            DapSetBreakpoints {
                dap_id,
                path,
                breakpoints,
            } => {
                if let Err(err) =
                    self.catalog_rpc
                        .dap_set_breakpoints(dap_id, path, breakpoints)
                {
                    tracing::error!("{:?}", err);
                }
            }
            // Plugin ("volt") lifecycle management.
            InstallVolt { volt } => {
                let catalog_rpc = self.catalog_rpc.clone();
                if let Err(err) = catalog_rpc.install_volt(volt) {
                    tracing::error!("{:?}", err);
                }
            }
            ReloadVolt { volt } => {
                if let Err(err) = self.catalog_rpc.reload_volt(volt) {
                    tracing::error!("{:?}", err);
                }
            }
            RemoveVolt { volt } => {
                self.catalog_rpc.remove_volt(volt);
            }
            DisableVolt { volt } => {
                self.catalog_rpc.stop_volt(volt);
            }
            EnableVolt { volt } => {
                if let Err(err) = self.catalog_rpc.enable_volt(volt) {
                    tracing::error!("{:?}", err);
                }
            }
            // Source-control operations; all are no-ops without a
            // workspace. Only commit failures are shown to the user.
            GitCommit { message, diffs } => {
                if let Some(workspace) = self.workspace.as_ref() {
                    match git_commit(workspace, &message, diffs) {
                        Ok(()) => (),
                        Err(e) => {
                            self.core_rpc.show_message(
                                "Git Commit failure".to_owned(),
                                ShowMessageParams {
                                    typ: MessageType::ERROR,
                                    message: e.to_string(),
                                },
                            );
                        }
                    }
                }
            }
            GitCheckout { reference } => {
                if let Some(workspace) = self.workspace.as_ref() {
                    match git_checkout(workspace, &reference) {
                        Ok(()) => (),
                        Err(e) => eprintln!("{e:?}"),
                    }
                }
            }
            GitDiscardFilesChanges { files } => {
                if let Some(workspace) = self.workspace.as_ref() {
                    match git_discard_files_changes(
                        workspace,
                        files.iter().map(AsRef::as_ref),
                    ) {
                        Ok(()) => (),
                        Err(e) => eprintln!("{e:?}"),
                    }
                }
            }
            GitDiscardWorkspaceChanges {} => {
                if let Some(workspace) = self.workspace.as_ref() {
                    match git_discard_workspace_changes(workspace) {
                        Ok(()) => (),
                        Err(e) => eprintln!("{e:?}"),
                    }
                }
            }
            GitInit {} => {
                if let Some(workspace) = self.workspace.as_ref() {
                    match git_init(workspace) {
                        Ok(()) => (),
                        Err(e) => eprintln!("{e:?}"),
                    }
                }
            }
            // Forward an LSP `$/cancelRequest` to all plugins.
            LspCancel { id } => {
                self.catalog_rpc.send_notification(
                    None,
                    Cancel::METHOD,
                    CancelParams {
                        id: NumberOrString::Number(id),
                    },
                    None,
                    None,
                    false,
                );
            }
        }
    }
fn handle_request(&mut self, id: RequestId, rpc: ProxyRequest) {
use ProxyRequest::*;
match rpc {
NewBuffer { buffer_id, path } => {
let buffer = Buffer::new(buffer_id, path.clone());
let content = buffer.rope.to_string();
let read_only = buffer.read_only;
self.catalog_rpc.did_open_document(
&path,
buffer.language_id.to_string(),
buffer.rev as i32,
content.clone(),
);
self.file_watcher.watch(&path, false, OPEN_FILE_EVENT_TOKEN);
self.buffers.insert(path, buffer);
self.respond_rpc(
id,
Ok(ProxyResponse::NewBufferResponse { content, read_only }),
);
}
BufferHead { path } => {
let result = if let Some(workspace) = self.workspace.as_ref() {
let result = file_get_head(workspace, &path);
if let Ok((_blob_id, content)) = result {
Ok(ProxyResponse::BufferHeadResponse {
version: "head".to_string(),
content,
})
} else {
Err(RpcError {
code: 0,
message: "can't get file head".to_string(),
})
}
} else {
Err(RpcError {
code: 0,
message: "no workspace set".to_string(),
})
};
self.respond_rpc(id, result);
}
GlobalSearch {
pattern,
case_sensitive,
whole_word,
is_regex,
} => {
static WORKER_ID: AtomicU64 = AtomicU64::new(0);
let our_id = WORKER_ID.fetch_add(1, Ordering::SeqCst) + 1;
let workspace = self.workspace.clone();
let buffers = self
.buffers
.iter()
.map(|p| p.0)
.cloned()
.collect::<Vec<PathBuf>>();
let proxy_rpc = self.proxy_rpc.clone();
// Perform the search on another thread to avoid blocking the proxy thread
thread::spawn(move || {
proxy_rpc.handle_response(
id,
search_in_path(
our_id,
&WORKER_ID,
workspace
.iter()
.flat_map(|w| ignore::Walk::new(w).flatten())
.chain(
buffers.iter().flat_map(|p| {
ignore::Walk::new(p).flatten()
}),
)
.map(|p| p.into_path()),
&pattern,
case_sensitive,
whole_word,
is_regex,
),
);
});
}
CompletionResolve {
plugin_id,
completion_item,
} => {
let proxy_rpc = self.proxy_rpc.clone();
self.catalog_rpc.completion_resolve(
plugin_id,
*completion_item,
move |result| {
let result = result.map(|item| {
ProxyResponse::CompletionResolveResponse {
item: Box::new(item),
}
});
proxy_rpc.handle_response(id, result);
},
);
}
GetHover {
request_id,
path,
position,
} => {
let proxy_rpc = self.proxy_rpc.clone();
self.catalog_rpc.hover(&path, position, move |_, result| {
let result = result.map(|hover| ProxyResponse::HoverResponse {
request_id,
hover,
});
proxy_rpc.handle_response(id, result);
});
}
GetSignature { .. } => {}
GetReferences { path, position } => {
let proxy_rpc = self.proxy_rpc.clone();
self.catalog_rpc.get_references(
&path,
position,
move |_, result| {
let result = result.map(|references| {
ProxyResponse::GetReferencesResponse { references }
});
proxy_rpc.handle_response(id, result);
},
);
}
GitGetRemoteFileUrl { file } => {
if let Some(workspace) = self.workspace.as_ref() {
match git_get_remote_file_url(workspace, &file) {
Ok(s) => self.proxy_rpc.handle_response(
id,
Ok(ProxyResponse::GitGetRemoteFileUrl { file_url: s }),
),
Err(e) => eprintln!("{e:?}"),
}
}
}
GetDefinition {
request_id,
path,
position,
} => {
let proxy_rpc = self.proxy_rpc.clone();
self.catalog_rpc.get_definition(
&path,
position,
move |_, result| {
let result = result.map(|definition| {
ProxyResponse::GetDefinitionResponse {
request_id,
definition,
}
});
proxy_rpc.handle_response(id, result);
},
);
}
GetTypeDefinition {
request_id,
path,
position,
} => {
let proxy_rpc = self.proxy_rpc.clone();
self.catalog_rpc.get_type_definition(
&path,
position,
move |_, result| {
let result = result.map(|definition| {
ProxyResponse::GetTypeDefinition {
request_id,
definition,
}
});
proxy_rpc.handle_response(id, result);
},
);
}
ShowCallHierarchy { path, position } => {
let proxy_rpc = self.proxy_rpc.clone();
self.catalog_rpc.show_call_hierarchy(
&path,
position,
move |_, result| {
let result = result.map(|items| {
ProxyResponse::ShowCallHierarchyResponse { items }
});
proxy_rpc.handle_response(id, result);
},
);
}
CallHierarchyIncoming {
path,
call_hierarchy_item,
} => {
let proxy_rpc = self.proxy_rpc.clone();
self.catalog_rpc.call_hierarchy_incoming(
&path,
call_hierarchy_item,
move |_, result| {
let result = result.map(|items| {
ProxyResponse::CallHierarchyIncomingResponse { items }
});
proxy_rpc.handle_response(id, result);
},
);
}
GetInlayHints { path } => {
let proxy_rpc = self.proxy_rpc.clone();
let buffer = self.buffers.get(&path).unwrap();
let range = Range {
start: Position::new(0, 0),
end: buffer.offset_to_position(buffer.len()),
};
self.catalog_rpc
.get_inlay_hints(&path, range, move |_, result| {
let result = result
.map(|hints| ProxyResponse::GetInlayHints { hints });
proxy_rpc.handle_response(id, result);
});
}
GetInlineCompletions {
path,
position,
trigger_kind,
} => {
let proxy_rpc = self.proxy_rpc.clone();
self.catalog_rpc.get_inline_completions(
&path,
position,
trigger_kind,
move |_, result| {
let result = result.map(|completions| {
ProxyResponse::GetInlineCompletions { completions }
});
proxy_rpc.handle_response(id, result);
},
);
}
GetSemanticTokens { path } => {
let buffer = self.buffers.get(&path).unwrap();
let text = buffer.rope.clone();
let rev = buffer.rev;
let len = buffer.len();
let local_path = path.clone();
let proxy_rpc = self.proxy_rpc.clone();
let catalog_rpc = self.catalog_rpc.clone();
let handle_tokens =
move |result: Result<Vec<LineStyle>, RpcError>| match result {
Ok(styles) => {
proxy_rpc.handle_response(
id,
Ok(ProxyResponse::GetSemanticTokens {
styles: SemanticStyles {
rev,
path: local_path,
styles,
len,
},
}),
);
}
Err(e) => {
proxy_rpc.handle_response(id, Err(e));
}
};
let proxy_rpc = self.proxy_rpc.clone();
self.catalog_rpc.get_semantic_tokens(
&path,
move |plugin_id, result| match result {
Ok(result) => {
catalog_rpc.format_semantic_tokens(
plugin_id,
result,
text,
Box::new(handle_tokens),
);
}
Err(e) => {
proxy_rpc.handle_response(id, Err(e));
}
},
);
}
GetCodeActions {
path,
position,
diagnostics,
} => {
let proxy_rpc = self.proxy_rpc.clone();
self.catalog_rpc.get_code_actions(
&path,
position,
diagnostics,
move |plugin_id, result| {
let result = result.map(|resp| {
ProxyResponse::GetCodeActionsResponse { plugin_id, resp }
});
proxy_rpc.handle_response(id, result);
},
);
}
GetDocumentSymbols { path } => {
let proxy_rpc = self.proxy_rpc.clone();
self.catalog_rpc
.get_document_symbols(&path, move |_, result| {
let result = result
.map(|resp| ProxyResponse::GetDocumentSymbols { resp });
proxy_rpc.handle_response(id, result);
});
}
GetWorkspaceSymbols { query } => {
let proxy_rpc = self.proxy_rpc.clone();
self.catalog_rpc
.get_workspace_symbols(query, move |_, result| {
let result = result.map(|symbols| {
ProxyResponse::GetWorkspaceSymbols { symbols }
});
proxy_rpc.handle_response(id, result);
});
}
GetDocumentFormatting { path } => {
let proxy_rpc = self.proxy_rpc.clone();
self.catalog_rpc
.get_document_formatting(&path, move |_, result| {
let result = result.map(|edits| {
ProxyResponse::GetDocumentFormatting { edits }
});
proxy_rpc.handle_response(id, result);
});
}
PrepareRename { path, position } => {
let proxy_rpc = self.proxy_rpc.clone();
self.catalog_rpc.prepare_rename(
&path,
position,
move |_, result| {
let result =
result.map(|resp| ProxyResponse::PrepareRename { resp });
proxy_rpc.handle_response(id, result);
},
);
}
Rename {
path,
position,
new_name,
} => {
let proxy_rpc = self.proxy_rpc.clone();
self.catalog_rpc.rename(
&path,
position,
new_name,
move |_, result| {
let result =
result.map(|edit| ProxyResponse::Rename { edit });
proxy_rpc.handle_response(id, result);
},
);
}
GetFiles { .. } => {
let workspace = self.workspace.clone();
let proxy_rpc = self.proxy_rpc.clone();
thread::spawn(move || {
let result = if let Some(workspace) = workspace {
let git_folder =
ignore::overrides::OverrideBuilder::new(&workspace)
.add("!.git/")
.map(|git_folder| git_folder.build());
let walker = match git_folder {
Ok(Ok(git_folder)) => {
ignore::WalkBuilder::new(&workspace)
.hidden(false)
.parents(false)
.require_git(false)
.overrides(git_folder)
.build()
}
_ => ignore::WalkBuilder::new(&workspace)
.parents(false)
.require_git(false)
.build(),
};
let mut items = Vec::new();
for path in walker.flatten() {
if let Some(file_type) = path.file_type() {
if file_type.is_file() {
items.push(path.into_path());
}
}
}
Ok(ProxyResponse::GetFilesResponse { items })
} else {
Ok(ProxyResponse::GetFilesResponse { items: Vec::new() })
};
proxy_rpc.handle_response(id, result);
});
}
GetOpenFilesContent {} => {
let items = self
.buffers
.iter()
.map(|(path, buffer)| TextDocumentItem {
uri: Url::from_file_path(path).unwrap(),
language_id: buffer.language_id.to_string(),
version: buffer.rev as i32,
text: buffer.get_document(),
})
.collect();
let resp = ProxyResponse::GetOpenFilesContentResponse { items };
self.proxy_rpc.handle_response(id, Ok(resp));
}
ReadDir { path } => {
let proxy_rpc = self.proxy_rpc.clone();
thread::spawn(move || {
let result = fs::read_dir(path)
.map(|entries| {
let mut items = entries
.into_iter()
.filter_map(|entry| {
entry
.map(|e| FileNodeItem {
path: e.path(),
is_dir: e.path().is_dir(),
open: false,
read: false,
children: HashMap::new(),
children_open_count: 0,
})
.ok()
| rust | Apache-2.0 | 59ce6df700ce4efbe4498a719fe52195e083d2ee | 2026-01-04T15:32:17.267102Z | true |
lapce/lapce | https://github.com/lapce/lapce/blob/59ce6df700ce4efbe4498a719fe52195e083d2ee/lapce-proxy/src/watcher.rs | lapce-proxy/src/watcher.rs | use std::{
collections::VecDeque,
path::{Path, PathBuf},
sync::Arc,
};
use crossbeam_channel::{Receiver, unbounded};
use notify::{
Event, EventKind, RecommendedWatcher, RecursiveMode, Watcher,
event::{ModifyKind, RenameMode},
recommended_watcher,
};
use parking_lot::Mutex;
/// Wrapper around a `notify::Watcher`. It runs the inner watcher
/// in a separate thread, and communicates with it via a [crossbeam channel].
/// [crossbeam channel]: https://docs.rs/crossbeam-channel
pub struct FileWatcher {
    // Raw event stream from the inner watcher; taken (once) by `notify`
    // when the delivery thread is started, hence the Option.
    rx_event: Option<Receiver<Result<Event, notify::Error>>>,
    inner: RecommendedWatcher,
    // Registered watchees (and event queue), shared with the delivery
    // thread.
    state: Arc<Mutex<WatcherState>>,
}
/// Shared mutable state behind the watcher's mutex.
#[derive(Debug, Default)]
struct WatcherState {
    // Pending (token, event) pairs; drained by `take_events`.
    // NOTE(review): no code visible here ever pushes into this queue —
    // confirm whether `events`/`take_events` are still used.
    events: EventQueue,
    watchees: Vec<Watchee>,
}
/// Tracks a registered 'that-which-is-watched'.
#[doc(hidden)]
struct Watchee {
    // Canonicalized path this registration covers.
    path: PathBuf,
    recursive: bool,
    token: WatchToken,
    // Optional per-registration predicate filtering delivered paths.
    filter: Option<Box<PathFilter>>,
}
/// Token provided to `FileWatcher`, to associate events with
/// interested parties.
///
/// Note: `WatchToken`s are assumed to correspond with an
/// 'area of interest'; that is, they are used to route delivery
/// of events.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct WatchToken(pub usize);
/// A trait for types which can be notified of new events.
/// New events are accessible through the `FileWatcher` instance.
pub trait Notify: Send {
    fn notify(&self, events: Vec<(WatchToken, Event)>);
}
/// FIFO of routed events, each tagged with the token it matched.
pub type EventQueue = VecDeque<(WatchToken, Event)>;
/// Predicate deciding whether an event path should be delivered.
pub type PathFilter = dyn Fn(&Path) -> bool + Send + 'static;
impl FileWatcher {
    /// Create a watcher with an empty registration list.
    ///
    /// # Panics
    /// Panics if the platform watcher cannot be constructed.
    pub fn new() -> Self {
        let (tx_event, rx_event) = unbounded();
        let state = Arc::new(Mutex::new(WatcherState::default()));
        let inner = recommended_watcher(tx_event).expect("watcher should spawn");
        FileWatcher {
            rx_event: Some(rx_event),
            inner,
            state,
        }
    }
    /// Start the delivery thread: every raw watcher event is matched
    /// against the registered watchees and the matching (token, event)
    /// pairs are handed to `peer`.
    ///
    /// # Panics
    /// Panics if called more than once (the receiver is `take`n).
    pub fn notify<T: Notify + 'static>(&mut self, peer: T) {
        let rx_event = self.rx_event.take().unwrap();
        let state = self.state.clone();
        std::thread::spawn(move || {
            while let Ok(Ok(event)) = rx_event.recv() {
                let mut events = Vec::new();
                {
                    // Scope the lock so it's released before peer.notify.
                    let mut state = state.lock();
                    let WatcherState {
                        ref mut watchees, ..
                    } = *state;
                    watchees
                        .iter()
                        .filter(|w| w.wants_event(&event))
                        .map(|w| w.token)
                        .for_each(|t| events.push((t, event.clone())));
                }
                peer.notify(events);
            }
        });
    }
    /// Begin watching `path`. As `Event`s (documented in the
    /// [notify](https://docs.rs/notify) crate) arrive, they are stored
    /// with the associated `token` and a task is added to the runloop's
    /// idle queue.
    ///
    /// Delivery of events then requires that the runloop's handler
    /// correctly forward the `handle_idle` call to the interested party.
    pub fn watch(&mut self, path: &Path, recursive: bool, token: WatchToken) {
        self.watch_impl(path, recursive, token, None);
    }
    /// Like `watch`, but taking a predicate function that filters delivery
    /// of events based on their path.
    pub fn watch_filtered<F>(
        &mut self,
        path: &Path,
        recursive: bool,
        token: WatchToken,
        filter: F,
    ) where
        F: Fn(&Path) -> bool + Send + 'static,
    {
        let filter = Box::new(filter) as Box<PathFilter>;
        self.watch_impl(path, recursive, token, Some(filter));
    }
    // Shared registration: canonicalize, register with the OS watcher if
    // the path isn't already watched, then record the watchee.
    fn watch_impl(
        &mut self,
        path: &Path,
        recursive: bool,
        token: WatchToken,
        filter: Option<Box<PathFilter>>,
    ) {
        let path = match path.canonicalize() {
            Ok(ref p) => p.to_owned(),
            // Silently ignore paths that don't exist.
            Err(_) => {
                return;
            }
        };
        let mut state = self.state.lock();
        let w = Watchee {
            path,
            recursive,
            token,
            filter,
        };
        let mode = mode_from_bool(w.recursive);
        if !state.watchees.iter().any(|w2| w.path == w2.path) {
            if let Err(err) = self.inner.watch(&w.path, mode) {
                tracing::error!("{:?}", err);
            }
        }
        state.watchees.push(w);
    }
    /// Removes the provided token/path pair from the watch list.
    /// Does not stop watching this path, if it is associated with
    /// other tokens.
    pub fn unwatch(&mut self, path: &Path, token: WatchToken) {
        let mut state = self.state.lock();
        let idx = state
            .watchees
            .iter()
            .position(|w| w.token == token && w.path == path);
        if let Some(idx) = idx {
            let removed = state.watchees.remove(idx);
            // Only unregister from the OS watcher when no other watchee
            // still covers the same path.
            if !state.watchees.iter().any(|w| w.path == removed.path) {
                if let Err(err) = self.inner.unwatch(&removed.path) {
                    tracing::error!("{:?}", err);
                }
            }
            //TODO: Ideally we would be tracking what paths we're watching with
            // some prefix-tree-like structure, which would let us keep track
            // of when some child path might need to be reregistered. How this
            // works and when registration would be required is dependent on
            // the underlying notification mechanism, however. There's an
            // in-progress rewrite of the Notify crate which use under the
            // hood, and a component of that rewrite is adding this
            // functionality; so until that lands we're using a fairly coarse
            // heuristic to determine if we need to re-watch subpaths.
            // if this was recursive, check if any child paths need to be
            // manually re-added
            if removed.recursive {
                // do this in two steps because we've borrowed mutably up top
                let to_add = state
                    .watchees
                    .iter()
                    .filter(|w| w.path.starts_with(&removed.path))
                    .map(|w| (w.path.to_owned(), mode_from_bool(w.recursive)))
                    .collect::<Vec<_>>();
                for (path, mode) in to_add {
                    if let Err(err) = self.inner.watch(&path, mode) {
                        tracing::error!("{:?}", err);
                    }
                }
            }
        }
    }
    /// Takes ownership of this `Watcher`'s current event queue.
    pub fn take_events(&self) -> VecDeque<(WatchToken, Event)> {
        let mut state = self.state.lock();
        let WatcherState { ref mut events, .. } = *state;
        std::mem::take(events)
    }
}
impl Default for FileWatcher {
fn default() -> Self {
Self::new()
}
}
impl Watchee {
    /// Decide whether `event` should be routed to this registration.
    ///
    /// A both-sides rename (two paths: from, to) matches when either
    /// endpoint applies; create/remove/other modifications match on
    /// their single path. Everything else is ignored.
    fn wants_event(&self, event: &Event) -> bool {
        match &event.kind {
            EventKind::Modify(ModifyKind::Name(RenameMode::Both)) => {
                event.paths.len() == 2
                    && (self.applies_to_path(&event.paths[0])
                        || self.applies_to_path(&event.paths[1]))
            }
            EventKind::Create(_) | EventKind::Remove(_) | EventKind::Modify(_) => {
                event.paths.len() == 1 && self.applies_to_path(&event.paths[0])
            }
            _ => false,
        }
    }
    /// True when `path` falls inside this registration's scope: the
    /// watched path itself, any descendant (recursive), or a direct
    /// child (non-recursive) — further restricted by the filter, if any.
    fn applies_to_path(&self, path: &Path) -> bool {
        let in_scope = path.starts_with(&self.path)
            && (self.recursive
                || self.path == path
                || path.parent() == Some(self.path.as_path()));
        match &self.filter {
            Some(filter) => in_scope && filter(path),
            None => in_scope,
        }
    }
}
impl std::fmt::Debug for Watchee {
    /// Compact one-line debug form; the filter closure is only shown as
    /// a presence flag.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        let has_filter = self.filter.is_some();
        write!(
            f,
            "Watchee path: {:?}, r {}, t {} f {}",
            self.path, self.recursive, self.token.0, has_filter
        )
    }
}
/// Map a recursion flag to the corresponding `notify` watch mode.
fn mode_from_bool(is_recursive: bool) -> RecursiveMode {
    match is_recursive {
        true => RecursiveMode::Recursive,
        false => RecursiveMode::NonRecursive,
    }
}
| rust | Apache-2.0 | 59ce6df700ce4efbe4498a719fe52195e083d2ee | 2026-01-04T15:32:17.267102Z | false |
lapce/lapce | https://github.com/lapce/lapce/blob/59ce6df700ce4efbe4498a719fe52195e083d2ee/lapce-proxy/src/terminal.rs | lapce-proxy/src/terminal.rs | use std::{
borrow::Cow,
collections::VecDeque,
io::{self, ErrorKind, Read, Write},
num::NonZeroUsize,
path::PathBuf,
sync::Arc,
time::Duration,
};
use alacritty_terminal::{
event::{OnResize, WindowSize},
event_loop::Msg,
tty::{self, EventedPty, EventedReadWrite, Options, Shell, setup_env},
};
use anyhow::Result;
use crossbeam_channel::{Receiver, Sender};
use directories::BaseDirs;
use lapce_rpc::{
core::CoreRpcHandler,
terminal::{TermId, TerminalProfile},
};
use polling::PollMode;
/// Size of the scratch buffer used when draining PTY output (1 MiB).
const READ_BUFFER_SIZE: usize = 0x10_0000;
// Poller keys used to tell PTY I/O readiness apart from child-process
// lifecycle events. NOTE(review): the values differ per platform —
// presumably they match the keys the tty backend registers on each OS;
// confirm against `alacritty_terminal::tty` before changing them.
#[cfg(any(target_os = "linux", target_os = "macos"))]
const PTY_READ_WRITE_TOKEN: usize = 0;
#[cfg(any(target_os = "linux", target_os = "macos"))]
const PTY_CHILD_EVENT_TOKEN: usize = 1;
#[cfg(target_os = "windows")]
const PTY_READ_WRITE_TOKEN: usize = 2;
#[cfg(target_os = "windows")]
const PTY_CHILD_EVENT_TOKEN: usize = 1;
/// Cheap cloneable handle for pushing messages into a terminal's event loop
/// from other threads.
pub struct TerminalSender {
    // Channel feeding the queue drained by `Terminal::run`.
    tx: Sender<Msg>,
    // Poller shared with the event loop; notified so a blocked `wait` wakes
    // up and drains the channel.
    poller: Arc<polling::Poller>,
}
impl TerminalSender {
pub fn new(tx: Sender<Msg>, poller: Arc<polling::Poller>) -> Self {
Self { tx, poller }
}
pub fn send(&self, msg: Msg) {
if let Err(err) = self.tx.send(msg) {
tracing::error!("{:?}", err);
}
if let Err(err) = self.poller.notify() {
tracing::error!("{:?}", err);
}
}
}
/// A single proxy-side terminal: the PTY plus the plumbing its event loop
/// needs.
pub struct Terminal {
    term_id: TermId,
    pub(crate) poller: Arc<polling::Poller>,
    pub(crate) pty: alacritty_terminal::tty::Pty,
    // Receiving end drained inside `run`'s event loop.
    rx: Receiver<Msg>,
    // Sending end handed out to callers that want to drive this terminal.
    pub tx: Sender<Msg>,
}
impl Terminal {
    /// Spawn a new PTY running the profile's command (falling back to the
    /// platform default when none is set), sized to `width` x `height` cells.
    pub fn new(
        term_id: TermId,
        profile: TerminalProfile,
        width: usize,
        height: usize,
    ) -> Result<Terminal> {
        let poll = polling::Poller::new()?.into();
        let options = Options {
            shell: Terminal::program(&profile),
            working_directory: Terminal::workdir(&profile),
            hold: false,
            env: profile.environment.unwrap_or_default(),
        };
        setup_env();
        #[cfg(target_os = "macos")]
        set_locale_environment();
        // Cell dimensions of 1x1: only the grid size is meaningful here;
        // pixel sizes are not tracked by the proxy.
        let size = WindowSize {
            num_lines: height as u16,
            num_cols: width as u16,
            cell_width: 1,
            cell_height: 1,
        };
        let pty = alacritty_terminal::tty::new(&options, size, 0)?;
        let (tx, rx) = crossbeam_channel::unbounded();
        Ok(Terminal {
            term_id,
            poller: poll,
            pty,
            tx,
            rx,
        })
    }

    /// Run the terminal's blocking event loop until the child exits or an
    /// unrecoverable I/O error occurs. Child output is forwarded to the
    /// editor via `core_rpc`; input/resize/shutdown arrive over `self.rx`.
    ///
    /// # Panics
    ///
    /// Panics on poller errors other than `EINTR` and on failures to
    /// (re-)register the PTY with the poller.
    pub fn run(&mut self, core_rpc: CoreRpcHandler) {
        let mut state = State::default();
        let mut buf = [0u8; READ_BUFFER_SIZE];
        let poll_opts = PollMode::Level;
        let mut interest = polling::Event::readable(0);
        // Register TTY through EventedRW interface.
        unsafe {
            self.pty
                .register(&self.poller, interest, poll_opts)
                .unwrap();
        }
        let mut events =
            polling::Events::with_capacity(NonZeroUsize::new(1024).unwrap());
        // Wake up periodically even with no events so the channel is drained.
        let timeout = Some(Duration::from_secs(6));
        let mut exit_code = None;
        'event_loop: loop {
            events.clear();
            if let Err(err) = self.poller.wait(&mut events, timeout) {
                match err.kind() {
                    ErrorKind::Interrupted => continue,
                    _ => panic!("EventLoop polling error: {err:?}"),
                }
            }
            // Handle channel events, if there are any.
            if !self.drain_recv_channel(&mut state) {
                break;
            }
            for event in events.iter() {
                match event.key {
                    PTY_CHILD_EVENT_TOKEN => {
                        if let Some(tty::ChildEvent::Exited(exited_code)) =
                            self.pty.next_child_event()
                        {
                            // Drain any final output before reporting exit.
                            if let Err(err) = self.pty_read(&core_rpc, &mut buf) {
                                tracing::error!("{:?}", err);
                            }
                            exit_code = exited_code;
                            break 'event_loop;
                        }
                    }
                    PTY_READ_WRITE_TOKEN => {
                        if event.is_interrupt() {
                            // Don't try to do I/O on a dead PTY.
                            continue;
                        }
                        if event.readable {
                            if let Err(err) = self.pty_read(&core_rpc, &mut buf) {
                                // On Linux, a `read` on the master side of a PTY can fail
                                // with `EIO` if the client side hangs up. In that case,
                                // just loop back round for the inevitable `Exited` event.
                                // This sucks, but checking the process is either racy or
                                // blocking.
                                #[cfg(target_os = "linux")]
                                if err.raw_os_error() == Some(libc::EIO) {
                                    continue;
                                }
                                tracing::error!(
                                    "Error reading from PTY in event loop: {}",
                                    err
                                );
                                break 'event_loop;
                            }
                        }
                        if event.writable {
                            if let Err(_err) = self.pty_write(&mut state) {
                                // error!(
                                //     "Error writing to PTY in event loop: {}",
                                //     err
                                // );
                                break 'event_loop;
                            }
                        }
                    }
                    _ => (),
                }
            }
            // Register write interest if necessary.
            let needs_write = state.needs_write();
            if needs_write != interest.writable {
                interest.writable = needs_write;
                // Re-register with new interest.
                self.pty
                    .reregister(&self.poller, interest, poll_opts)
                    .unwrap();
            }
        }
        core_rpc.terminal_process_stopped(self.term_id, exit_code);
        if let Err(err) = self.pty.deregister(&self.poller) {
            tracing::error!("{:?}", err);
        }
    }

    /// Drain the channel.
    ///
    /// Returns `false` when a shutdown message was received.
    fn drain_recv_channel(&mut self, state: &mut State) -> bool {
        while let Ok(msg) = self.rx.try_recv() {
            match msg {
                Msg::Input(input) => state.write_list.push_back(input),
                Msg::Shutdown => return false,
                Msg::Resize(size) => self.pty.on_resize(size),
            }
        }
        true
    }

    /// Read everything currently available from the PTY into `buf`, chunk by
    /// chunk, forwarding each chunk to the editor. `Interrupted`/`WouldBlock`
    /// end the drain without error; anything else is propagated.
    #[inline]
    fn pty_read(
        &mut self,
        core_rpc: &CoreRpcHandler,
        buf: &mut [u8],
    ) -> io::Result<()> {
        loop {
            match self.pty.reader().read(buf) {
                Ok(0) => break,
                Ok(n) => {
                    core_rpc.update_terminal(self.term_id, buf[..n].to_vec());
                }
                Err(err) => match err.kind() {
                    ErrorKind::Interrupted | ErrorKind::WouldBlock => {
                        break;
                    }
                    _ => return Err(err),
                },
            }
        }
        Ok(())
    }

    /// Flush as much queued input to the PTY as it will accept. Partially
    /// written buffers are put back as "current" so the next writable event
    /// resumes where this one stopped.
    #[inline]
    fn pty_write(&mut self, state: &mut State) -> io::Result<()> {
        state.ensure_next();
        'write_many: while let Some(mut current) = state.take_current() {
            'write_one: loop {
                match self.pty.writer().write(current.remaining_bytes()) {
                    Ok(0) => {
                        // PTY accepted nothing; keep the buffer and stop.
                        state.set_current(Some(current));
                        break 'write_many;
                    }
                    Ok(n) => {
                        current.advance(n);
                        if current.finished() {
                            state.goto_next();
                            break 'write_one;
                        }
                    }
                    Err(err) => {
                        state.set_current(Some(current));
                        match err.kind() {
                            ErrorKind::Interrupted | ErrorKind::WouldBlock => {
                                break 'write_many;
                            }
                            _ => return Err(err),
                        }
                    }
                }
            }
        }
        Ok(())
    }

    /// Resolve the working directory for a new terminal: the profile's
    /// directory when it exists, otherwise the user's home directory.
    fn workdir(profile: &TerminalProfile) -> Option<PathBuf> {
        if let Some(cwd) = &profile.workdir {
            match cwd.to_file_path() {
                Ok(cwd) => {
                    if cwd.exists() {
                        return Some(cwd);
                    }
                }
                Err(err) => {
                    tracing::error!("{:?}", err);
                }
            }
        }
        BaseDirs::new().map(|d| PathBuf::from(d.home_dir()))
    }

    /// Build the shell command from the profile, if one is configured;
    /// `None` lets the tty layer pick the platform default shell.
    fn program(profile: &TerminalProfile) -> Option<Shell> {
        if let Some(command) = &profile.command {
            if let Some(arguments) = &profile.arguments {
                Some(Shell::new(command.to_owned(), arguments.to_owned()))
            } else {
                Some(Shell::new(command.to_owned(), Vec::new()))
            }
        } else {
            None
        }
    }
}
/// One buffer queued for writing to the PTY, tracking how much of it has
/// already been flushed.
struct Writing {
    source: Cow<'static, [u8]>,
    written: usize,
}

impl Writing {
    #[inline]
    fn new(c: Cow<'static, [u8]>) -> Writing {
        Writing { source: c, written: 0 }
    }

    /// Record that `n` more bytes of `source` have been written out.
    #[inline]
    fn advance(&mut self, n: usize) {
        self.written += n;
    }

    /// The suffix of `source` that still needs to be written.
    #[inline]
    fn remaining_bytes(&self) -> &[u8] {
        &self.source[self.written..]
    }

    /// Whether every byte of `source` has been flushed.
    #[inline]
    fn finished(&self) -> bool {
        self.source.len() <= self.written
    }
}

/// Pending-output state for the PTY writer: a FIFO of queued buffers plus
/// the buffer currently being drained.
#[derive(Default)]
pub struct State {
    write_list: VecDeque<Cow<'static, [u8]>>,
    writing: Option<Writing>,
}

impl State {
    /// Promote the next queued buffer to "current" if nothing is in flight.
    #[inline]
    fn ensure_next(&mut self) {
        if self.writing.is_none() {
            self.goto_next();
        }
    }

    /// Unconditionally start on the next queued buffer (or none if the
    /// queue is empty).
    #[inline]
    fn goto_next(&mut self) {
        self.writing = self.write_list.pop_front().map(Writing::new);
    }

    /// Move the in-flight buffer out, leaving `None` behind.
    #[inline]
    fn take_current(&mut self) -> Option<Writing> {
        self.writing.take()
    }

    /// Whether there is anything at all left to write.
    #[inline]
    fn needs_write(&self) -> bool {
        !(self.writing.is_none() && self.write_list.is_empty())
    }

    /// Put a (possibly partially written) buffer back as the current one.
    #[inline]
    fn set_current(&mut self, new: Option<Writing>) {
        self.writing = new;
    }
}
#[cfg(target_os = "macos")]
// macOS GUI apps don't inherit a locale from a login shell, so derive one
// from the system settings and normalise the "en-US" style tag to the POSIX
// "en_US" form before exporting it as `LC_ALL`.
fn set_locale_environment() {
    let locale = locale_config::Locale::global_default()
        .to_string()
        .replace('-', "_");
    unsafe {
        // NOTE(review): `set_var` is unsafe because mutating the environment
        // is not thread-safe; presumably no other thread touches the
        // environment concurrently at this point — confirm at call sites.
        std::env::set_var("LC_ALL", locale + ".UTF-8");
    }
}
| rust | Apache-2.0 | 59ce6df700ce4efbe4498a719fe52195e083d2ee | 2026-01-04T15:32:17.267102Z | false |
lapce/lapce | https://github.com/lapce/lapce/blob/59ce6df700ce4efbe4498a719fe52195e083d2ee/lapce-proxy/src/buffer.rs | lapce-proxy/src/buffer.rs | use std::{
borrow::Cow,
ffi::OsString,
fs,
fs::File,
io::{Read, Write},
path::{Path, PathBuf},
time::SystemTime,
};
use anyhow::{Result, anyhow};
use floem_editor_core::buffer::rope_text::CharIndicesJoin;
use lapce_core::encoding::offset_utf8_to_utf16;
use lapce_rpc::buffer::BufferId;
use lapce_xi_rope::{RopeDelta, interval::IntervalBounds, rope::Rope};
use lsp_types::*;
/// An open document tracked by the proxy: file contents plus the metadata
/// needed to keep LSP servers and the on-disk copy in sync.
#[derive(Clone)]
pub struct Buffer {
    // LSP language identifier derived from the file extension ("" if unknown).
    pub language_id: &'static str,
    // Set when the file exists but could not be read (e.g. permission denied);
    // `save` refuses to write such buffers.
    pub read_only: bool,
    pub id: BufferId,
    // Document text as a rope for cheap edits and slicing.
    pub rope: Rope,
    pub path: PathBuf,
    // Revision counter; `update` and `save` reject calls with a stale rev.
    pub rev: u64,
    // Disk modification time recorded at load/save time; presumably used by
    // callers to detect external modification — confirm against callers.
    pub mod_time: Option<SystemTime>,
}
impl Buffer {
    /// Load the file at `path` into a new buffer. Read failures do not
    /// propagate: the error text becomes the buffer content, and
    /// `read_only` is set for the cases where editing would be meaningless
    /// (permission denied, unsupported file). A missing file yields an
    /// empty, writable buffer.
    pub fn new(id: BufferId, path: PathBuf) -> Buffer {
        let (s, read_only) = match load_file(&path) {
            Ok(s) => (s, false),
            Err(err) => {
                use std::io::ErrorKind;
                match err.downcast_ref::<std::io::Error>() {
                    Some(err) => match err.kind() {
                        ErrorKind::PermissionDenied => {
                            ("Permission Denied".to_string(), true)
                        }
                        ErrorKind::NotFound => ("".to_string(), false),
                        ErrorKind::OutOfMemory => {
                            ("File too big (out of memory)".to_string(), false)
                        }
                        _ => (format!("Not supported: {err}"), true),
                    },
                    None => (format!("Not supported: {err}"), true),
                }
            }
        };
        let rope = Rope::from(s);
        // Non-empty content starts at rev 1, empty content at rev 0.
        let rev = u64::from(!rope.is_empty());
        let language_id = language_id_from_path(&path).unwrap_or("");
        let mod_time = get_mod_time(&path);
        Buffer {
            id,
            rope,
            read_only,
            path,
            language_id,
            rev,
            mod_time,
        }
    }

    /// Write the buffer to disk, guarded by a `.bak` backup copy that is
    /// removed again on success.
    ///
    /// # Errors
    ///
    /// Fails if the buffer is read-only, if `rev` does not match the
    /// buffer's current revision, or on any filesystem error.
    pub fn save(&mut self, rev: u64, create_parents: bool) -> Result<()> {
        if self.read_only {
            return Err(anyhow!("can't save to read only file"));
        }
        if self.rev != rev {
            return Err(anyhow!("not the right rev"));
        }
        // Backup name appends ".bak" to the existing extension (or uses
        // plain "bak" when there is none).
        let bak_extension = self.path.extension().map_or_else(
            || OsString::from("bak"),
            |ext| {
                let mut ext = ext.to_os_string();
                ext.push(".bak");
                ext
            },
        );
        // Write through symlinks to the real target.
        let path = if self.path.is_symlink() {
            self.path.canonicalize()?
        } else {
            self.path.clone()
        };
        let new_file = !path.exists();
        let bak_file_path = &path.with_extension(bak_extension);
        if !new_file {
            fs::copy(&path, bak_file_path)?;
        }
        if create_parents {
            if let Some(parent) = path.parent() {
                fs::create_dir_all(parent)?;
            }
        }
        let mut f = fs::OpenOptions::new()
            .create(true)
            .write(true)
            .truncate(true)
            .open(&path)?;
        // Stream the rope chunk by chunk; no full-document allocation.
        for chunk in self.rope.iter_chunks(..self.rope.len()) {
            f.write_all(chunk.as_bytes())?;
        }
        self.mod_time = get_mod_time(&path);
        if !new_file {
            fs::remove_file(bak_file_path)?;
        }
        Ok(())
    }

    /// Apply `delta` if `rev` is exactly the next revision; returns the LSP
    /// change event to forward (incremental when cheap to compute, full
    /// document otherwise) or `None` when the revision is out of sequence.
    pub fn update(
        &mut self,
        delta: &RopeDelta,
        rev: u64,
    ) -> Option<TextDocumentContentChangeEvent> {
        if self.rev + 1 != rev {
            return None;
        }
        self.rev += 1;
        // Positions must be computed against the text *before* the delta is
        // applied, so do this first.
        let content_change = get_document_content_changes(delta, self);
        self.rope = delta.apply(&self.rope);
        Some(
            content_change.unwrap_or_else(|| TextDocumentContentChangeEvent {
                range: None,
                range_length: None,
                text: self.get_document(),
            }),
        )
    }

    /// The full document text as an owned `String`.
    pub fn get_document(&self) -> String {
        self.rope.to_string()
    }

    /// UTF-8 offset of the start of `line`.
    pub fn offset_of_line(&self, line: usize) -> usize {
        self.rope.offset_of_line(line)
    }

    /// Line index containing the UTF-8 `offset`.
    pub fn line_of_offset(&self, offset: usize) -> usize {
        self.rope.line_of_offset(offset)
    }

    /// Split a UTF-8 `offset` into `(line, column-in-bytes)`.
    pub fn offset_to_line_col(&self, offset: usize) -> (usize, usize) {
        let line = self.line_of_offset(offset);
        (line, offset - self.offset_of_line(line))
    }

    /// Converts a UTF8 offset to a UTF16 LSP position
    pub fn offset_to_position(&self, offset: usize) -> Position {
        let (line, col) = self.offset_to_line_col(offset);
        // Get the offset of line to make the conversion cheaper, rather than working
        // from the very start of the document to `offset`
        let line_offset = self.offset_of_line(line);
        let utf16_col =
            offset_utf8_to_utf16(self.char_indices_iter(line_offset..), col);
        Position {
            line: line as u32,
            character: utf16_col as u32,
        }
    }

    /// Borrow (or, when the range crosses rope chunks, copy) a slice of the
    /// document.
    pub fn slice_to_cow<T: IntervalBounds>(&self, range: T) -> Cow<'_, str> {
        self.rope.slice_to_cow(range)
    }

    /// The text of `line`, including its trailing newline if present.
    pub fn line_to_cow(&self, line: usize) -> Cow<'_, str> {
        self.rope
            .slice_to_cow(self.offset_of_line(line)..self.offset_of_line(line + 1))
    }

    /// Iterate over (utf8_offset, char) values in the given range
    /// This uses `iter_chunks` and so does not allocate, compared to `slice_to_cow` which can
    pub fn char_indices_iter<T: IntervalBounds>(
        &self,
        range: T,
    ) -> impl Iterator<Item = (usize, char)> + '_ {
        CharIndicesJoin::new(self.rope.iter_chunks(range).map(str::char_indices))
    }

    /// Document length in bytes.
    pub fn len(&self) -> usize {
        self.rope.len()
    }

    /// Whether the document contains no text.
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }
}
/// Read the file at `path` into a UTF-8 `String`.
/// Thin alias kept for callers that hold a `&Path` rather than `AsRef<Path>`.
pub fn load_file(path: &Path) -> Result<String> {
    read_path_to_string(path)
}
/// Read the file at `path` and decode its contents as UTF-8.
///
/// # Errors
///
/// Returns an error if the file cannot be opened or read, or if its bytes
/// are not valid UTF-8.
pub fn read_path_to_string<P: AsRef<Path>>(path: P) -> Result<String> {
    let path = path.as_ref();
    let mut file = File::open(path)?;
    // Read raw bytes first so invalid UTF-8 surfaces as a conversion error
    // rather than partially-decoded text.
    let mut buffer = Vec::new();
    file.read_to_end(&mut buffer)?;
    let contents = String::from_utf8(buffer)?;
    // `contents` is already an owned String; the previous
    // `contents.to_string()` duplicated the whole allocation for nothing.
    Ok(contents)
}
/// Map a file path to the LSP `languageId` string sent to language servers,
/// based on its extension (or, for a few well-known files, its name).
/// Returns `None` when the path matches nothing.
pub fn language_id_from_path(path: &Path) -> Option<&'static str> {
    // recommended language_id values
    // https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocumentItem
    Some(match path.extension() {
        Some(ext) => {
            match ext.to_str()? {
                // Case-sensitive matches first: uppercase C/H are C++ by
                // convention, uppercase M is Objective-C.
                "C" | "H" => "cpp",
                "M" => "objective-c",
                // stop case-sensitive matching
                ext => match ext.to_lowercase().as_str() {
                    "bat" => "bat",
                    "clj" | "cljs" | "cljc" | "edn" => "clojure",
                    "coffee" => "coffeescript",
                    "c" | "h" => "c",
                    "cpp" | "hpp" | "cxx" | "hxx" | "c++" | "h++" | "cc" | "hh" => {
                        "cpp"
                    }
                    "cs" | "csx" => "csharp",
                    "css" => "css",
                    "d" | "di" | "dlang" => "dlang",
                    "diff" | "patch" => "diff",
                    "dart" => "dart",
                    "dockerfile" => "dockerfile",
                    "elm" => "elm",
                    "ex" | "exs" => "elixir",
                    "erl" | "hrl" => "erlang",
                    "fs" | "fsi" | "fsx" | "fsscript" => "fsharp",
                    "git-commit" | "git-rebase" => "git",
                    "go" => "go",
                    "groovy" | "gvy" | "gy" | "gsh" => "groovy",
                    "hbs" => "handlebars",
                    "htm" | "html" | "xhtml" => "html",
                    "ini" => "ini",
                    "java" | "class" => "java",
                    "js" => "javascript",
                    "jsx" => "javascriptreact",
                    "json" => "json",
                    "jl" => "julia",
                    "kt" => "kotlin",
                    "kts" => "kotlinbuildscript",
                    "less" => "less",
                    "lua" => "lua",
                    "makefile" | "gnumakefile" => "makefile",
                    "md" | "markdown" => "markdown",
                    "m" => "objective-c",
                    "mm" => "objective-cpp",
                    "plx" | "pl" | "pm" | "xs" | "t" | "pod" | "cgi" => "perl",
                    "p6" | "pm6" | "pod6" | "t6" | "raku" | "rakumod"
                    | "rakudoc" | "rakutest" => "perl6",
                    "php" | "phtml" | "pht" | "phps" => "php",
                    "proto" => "proto",
                    "ps1" | "ps1xml" | "psc1" | "psm1" | "psd1" | "pssc"
                    | "psrc" => "powershell",
                    "py" | "pyi" | "pyc" | "pyd" | "pyw" => "python",
                    "r" => "r",
                    "rb" => "ruby",
                    "rs" => "rust",
                    "scss" | "sass" => "scss",
                    "sc" | "scala" => "scala",
                    "sh" | "bash" | "zsh" => "shellscript",
                    "sql" => "sql",
                    "swift" => "swift",
                    "svelte" => "svelte",
                    "thrift" => "thrift",
                    "toml" => "toml",
                    "ts" => "typescript",
                    "tsx" => "typescriptreact",
                    "tex" => "tex",
                    "vb" => "vb",
                    "xml" | "csproj" => "xml",
                    "xsl" => "xsl",
                    "yml" | "yaml" => "yaml",
                    "zig" => "zig",
                    "vue" => "vue",
                    _ => return None,
                },
            }
        }
        // Handle paths without extension
        #[allow(clippy::match_single_binding)]
        None => match path.file_name()?.to_str()? {
            // case-insensitive matching
            filename => match filename.to_lowercase().as_str() {
                "dockerfile" => "dockerfile",
                "makefile" | "gnumakefile" => "makefile",
                _ => return None,
            },
        },
    })
}
/// Convert a `RopeDelta` into a minimal LSP incremental change event.
///
/// Returns `Some` for the two cheap cases — a simple insertion or a simple
/// deletion — and `None` otherwise, in which case the caller falls back to
/// sending the whole document.
///
/// `buffer` must still hold the text from *before* the delta is applied,
/// since positions are resolved against it.
fn get_document_content_changes(
    delta: &RopeDelta,
    buffer: &Buffer,
) -> Option<TextDocumentContentChangeEvent> {
    let (interval, _) = delta.summary();
    let (start, end) = interval.start_end();
    // TODO: Handle more trivial cases like typing when there's a selection or transpose
    if let Some(node) = delta.as_simple_insert() {
        // (the previous redundant re-destructuring of `interval.start_end()`
        // here shadowed the identical bindings above and has been removed)
        let start = buffer.offset_to_position(start);
        let end = buffer.offset_to_position(end);
        Some(TextDocumentContentChangeEvent {
            range: Some(Range { start, end }),
            range_length: None,
            text: String::from(node),
        })
    }
    // Or a simple delete
    else if delta.is_simple_delete() {
        let end_position = buffer.offset_to_position(end);
        let start = buffer.offset_to_position(start);
        Some(TextDocumentContentChangeEvent {
            range: Some(Range {
                start,
                end: end_position,
            }),
            range_length: None,
            text: String::new(),
        })
    } else {
        None
    }
}
/// Returns the modification timestamp for the file at a given path,
/// if present.
///
/// Any failure along the way (open, metadata, or a platform without
/// mtime support) collapses to `None`.
pub fn get_mod_time<P: AsRef<Path>>(path: P) -> Option<SystemTime> {
    let file = File::open(path).ok()?;
    let metadata = file.metadata().ok()?;
    metadata.modified().ok()
}
| rust | Apache-2.0 | 59ce6df700ce4efbe4498a719fe52195e083d2ee | 2026-01-04T15:32:17.267102Z | false |
lapce/lapce | https://github.com/lapce/lapce/blob/59ce6df700ce4efbe4498a719fe52195e083d2ee/lapce-proxy/src/plugin/catalog.rs | lapce-proxy/src/plugin/catalog.rs | use std::{
borrow::Cow,
collections::HashMap,
path::PathBuf,
sync::{
Arc,
atomic::{AtomicUsize, Ordering},
},
thread,
};
use lapce_rpc::{
RpcError,
dap_types::{self, DapId, DapServer, SetBreakpointsResponse},
plugin::{PluginId, VoltID, VoltInfo, VoltMetadata},
proxy::ProxyResponse,
style::LineStyle,
};
use lapce_xi_rope::{Rope, RopeDelta};
use lsp_types::{
DidOpenTextDocumentParams, MessageType, SemanticTokens, ShowMessageParams,
TextDocumentIdentifier, TextDocumentItem, VersionedTextDocumentIdentifier,
notification::DidOpenTextDocument, request::Request,
};
use parking_lot::Mutex;
use psp_types::Notification;
use serde_json::Value;
use super::{
PluginCatalogNotification, PluginCatalogRpcHandler,
dap::{DapClient, DapRpcHandler, DebuggerData},
psp::{ClonableCallback, PluginServerRpc, PluginServerRpcHandler, RpcCallback},
wasi::{load_all_volts, start_volt},
};
use crate::plugin::{
install_volt, psp::PluginHandlerNotification, wasi::enable_volt,
};
/// Owns every plugin server and debug-adapter connection for one proxy
/// session, and routes catalog-level RPC traffic between them.
pub struct PluginCatalog {
    // Root of the currently opened workspace, if any.
    workspace: Option<PathBuf>,
    plugin_rpc: PluginCatalogRpcHandler,
    // Running plugin servers, keyed by their per-instance id.
    plugins: HashMap<PluginId, PluginServerRpcHandler>,
    // Active debug-adapter sessions.
    daps: HashMap<DapId, DapRpcHandler>,
    // Debugger launchers registered by plugins, keyed by debugger type.
    debuggers: HashMap<String, DebuggerData>,
    // User configuration per plugin name.
    plugin_configurations: HashMap<String, HashMap<String, serde_json::Value>>,
    // Installed volts waiting for their activation condition to be met.
    unactivated_volts: HashMap<VoltID, VoltMetadata>,
    // Currently open files and their language ids, used for activation checks.
    open_files: HashMap<PathBuf, String>,
}
impl PluginCatalog {
    /// Create the catalog and kick off plugin discovery on a background
    /// thread; discovered volts are reported back through `plugin_rpc`.
    pub fn new(
        workspace: Option<PathBuf>,
        disabled_volts: Vec<VoltID>,
        extra_plugin_paths: Vec<PathBuf>,
        plugin_configurations: HashMap<String, HashMap<String, serde_json::Value>>,
        plugin_rpc: PluginCatalogRpcHandler,
    ) -> Self {
        let plugin = Self {
            workspace,
            plugin_rpc: plugin_rpc.clone(),
            plugin_configurations,
            plugins: HashMap::new(),
            daps: HashMap::new(),
            debuggers: HashMap::new(),
            unactivated_volts: HashMap::new(),
            open_files: HashMap::new(),
        };
        // Loading volts can touch the filesystem/network, so keep it off the
        // catalog's own thread.
        thread::spawn(move || {
            load_all_volts(plugin_rpc, &extra_plugin_paths, disabled_volts);
        });
        plugin
    }
    /// Forward a request to plugin server(s).
    ///
    /// With a concrete `plugin_id` the request goes to that plugin only, and
    /// `f` fires once (with an error if the plugin no longer exists).
    /// Otherwise the request is broadcast and `f` fires once per plugin;
    /// `request_sent`, when provided, is bumped by the number of callbacks
    /// the caller should expect.
    #[allow(clippy::too_many_arguments)]
    pub fn handle_server_request(
        &mut self,
        plugin_id: Option<PluginId>,
        request_sent: Option<Arc<AtomicUsize>>,
        method: Cow<'static, str>,
        params: Value,
        language_id: Option<String>,
        path: Option<PathBuf>,
        check: bool,
        f: Box<dyn ClonableCallback<Value, RpcError>>,
    ) {
        if let Some(plugin_id) = plugin_id {
            if let Some(plugin) = self.plugins.get(&plugin_id) {
                plugin.server_request_async(
                    method,
                    params,
                    language_id,
                    path,
                    check,
                    move |result| {
                        f(plugin_id, result);
                    },
                );
            } else {
                f(
                    plugin_id,
                    Err(RpcError {
                        code: 0,
                        message: "plugin doesn't exist".to_string(),
                    }),
                );
            }
            return;
        }
        if let Some(request_sent) = request_sent {
            // if there are no plugins installed the callback of the client is not called
            // so check if plugins list is empty
            if self.plugins.is_empty() {
                // Add a request
                request_sent.fetch_add(1, Ordering::Relaxed);
                // make a direct callback with an "error"
                f(
                    lapce_rpc::plugin::PluginId(0),
                    Err(RpcError {
                        code: 0,
                        message: "no available plugin could make a callback, because the plugins list is empty".to_string(),
                    }),
                );
                return;
            } else {
                request_sent.fetch_add(self.plugins.len(), Ordering::Relaxed);
            }
        }
        // Broadcast: each plugin gets its own clone of the callback and of
        // the request payload.
        for (plugin_id, plugin) in self.plugins.iter() {
            let f = dyn_clone::clone_box(&*f);
            let plugin_id = *plugin_id;
            plugin.server_request_async(
                method.clone(),
                params.clone(),
                language_id.clone(),
                path.clone(),
                check,
                move |result| {
                    f(plugin_id, result);
                },
            );
        }
    }
    /// Forward a notification to one plugin (when `plugin_id` is `Some`) or
    /// broadcast it to all running plugins. Unknown plugin ids are silently
    /// ignored — notifications have no reply channel.
    #[allow(clippy::too_many_arguments)]
    pub fn handle_server_notification(
        &mut self,
        plugin_id: Option<PluginId>,
        method: impl Into<Cow<'static, str>>,
        params: Value,
        language_id: Option<String>,
        path: Option<PathBuf>,
        check: bool,
    ) {
        if let Some(plugin_id) = plugin_id {
            if let Some(plugin) = self.plugins.get(&plugin_id) {
                plugin.server_notification(method, params, language_id, path, check);
            }
            return;
        }
        // Otherwise send it to all plugins
        let method = method.into();
        for (_, plugin) in self.plugins.iter() {
            plugin.server_notification(
                method.clone(),
                params.clone(),
                language_id.clone(),
                path.clone(),
                check,
            );
        }
    }
    /// Send an LSP `shutdown` request to every plugin instance belonging to
    /// `volt`, then tell each instance to shut down its server loop. `f` is
    /// invoked once per matching instance with the shutdown response.
    pub fn shutdown_volt(
        &mut self,
        volt: VoltInfo,
        f: Box<dyn ClonableCallback<Value, RpcError>>,
    ) {
        let id = volt.id();
        for (plugin_id, plugin) in self.plugins.iter() {
            if plugin.volt_id == id {
                let f = dyn_clone::clone_box(&*f);
                let plugin_id = *plugin_id;
                plugin.server_request_async(
                    lsp_types::request::Shutdown::METHOD,
                    Value::Null,
                    None,
                    None,
                    false,
                    move |result| {
                        f(plugin_id, result);
                    },
                );
                plugin.shutdown();
            }
        }
    }
    /// Move each listed volt out of the unactivated set and start it on its
    /// own thread with its user configuration. Ids not present in the set
    /// are skipped.
    fn start_unactivated_volts(&mut self, to_be_activated: Vec<VoltID>) {
        for id in to_be_activated.iter() {
            let workspace = self.workspace.clone();
            if let Some(meta) = self.unactivated_volts.remove(id) {
                let configurations =
                    self.plugin_configurations.get(&meta.name).cloned();
                tracing::debug!("{:?} {:?}", id, configurations);
                let plugin_rpc = self.plugin_rpc.clone();
                // Starting a volt is potentially slow; never block the
                // catalog thread on it.
                thread::spawn(move || {
                    if let Err(err) =
                        start_volt(workspace, configurations, plugin_rpc, meta)
                    {
                        tracing::error!("{:?}", err);
                    }
                });
            }
        }
    }
    /// Re-evaluate the activation conditions of every unactivated volt and
    /// start those that now qualify: either a language they activate on is
    /// open, or one of their `workspace_contains` globs matches a file in
    /// the workspace.
    fn check_unactivated_volts(&mut self) {
        let to_be_activated: Vec<VoltID> = self
            .unactivated_volts
            .iter()
            .filter_map(|(id, meta)| {
                // Activate when any open file's language id is listed in the
                // volt's activation languages.
                let contains = meta
                    .activation
                    .as_ref()
                    .and_then(|a| a.language.as_ref())
                    .map(|l| {
                        self.open_files
                            .iter()
                            .any(|(_, language_id)| l.contains(language_id))
                    })
                    .unwrap_or(false);
                if contains {
                    return Some(id.clone());
                }
                // Otherwise, activate when a `workspace_contains` glob hits
                // any file under the workspace root. NOTE(review): this walks
                // the whole workspace per volt — potentially expensive on
                // large trees.
                if let Some(workspace) = self.workspace.as_ref() {
                    if let Some(globs) = meta
                        .activation
                        .as_ref()
                        .and_then(|a| a.workspace_contains.as_ref())
                    {
                        let mut builder = globset::GlobSetBuilder::new();
                        for glob in globs {
                            match globset::Glob::new(glob) {
                                Ok(glob) => {
                                    builder.add(glob);
                                }
                                Err(err) => {
                                    tracing::error!("{:?}", err);
                                }
                            }
                        }
                        match builder.build() {
                            Ok(matcher) => {
                                if !matcher.is_empty() {
                                    for entry in walkdir::WalkDir::new(workspace)
                                        .into_iter()
                                        .flatten()
                                    {
                                        if matcher.is_match(entry.path()) {
                                            return Some(id.clone());
                                        }
                                    }
                                }
                            }
                            Err(err) => {
                                tracing::error!("{:?}", err);
                            }
                        }
                    }
                }
                None
            })
            .collect();
        self.start_unactivated_volts(to_be_activated);
    }
    /// Record a newly opened document, activate any volts whose language
    /// condition it satisfies, and broadcast the LSP `didOpen` notification
    /// to all running plugins.
    pub fn handle_did_open_text_document(&mut self, document: TextDocumentItem) {
        match document.uri.to_file_path() {
            Ok(path) => {
                self.open_files.insert(path, document.language_id.clone());
            }
            Err(err) => {
                tracing::error!("{:?}", err);
            }
        }
        // Language-based activation only; glob-based activation is handled
        // in `check_unactivated_volts`.
        let to_be_activated: Vec<VoltID> = self
            .unactivated_volts
            .iter()
            .filter_map(|(id, meta)| {
                let contains = meta
                    .activation
                    .as_ref()
                    .and_then(|a| a.language.as_ref())
                    .map(|l| l.contains(&document.language_id))?;
                if contains { Some(id.clone()) } else { None }
            })
            .collect();
        self.start_unactivated_volts(to_be_activated);
        let path = document.uri.to_file_path().ok();
        for (_, plugin) in self.plugins.iter() {
            plugin.server_notification(
                DidOpenTextDocument::METHOD,
                DidOpenTextDocumentParams {
                    text_document: document.clone(),
                },
                Some(document.language_id.clone()),
                path.clone(),
                true,
            );
        }
    }
pub fn handle_did_save_text_document(
&mut self,
language_id: String,
path: PathBuf,
text_document: TextDocumentIdentifier,
text: Rope,
) {
for (_, plugin) in self.plugins.iter() {
plugin.handle_rpc(PluginServerRpc::DidSaveTextDocument {
language_id: language_id.clone(),
path: path.clone(),
text_document: text_document.clone(),
text: text.clone(),
});
}
}
    /// Broadcast a document change to every running plugin server.
    ///
    /// `change` is a single shared memo cell, so the (possibly expensive)
    /// conversion of the delta into LSP change events can be computed once
    /// and reused by all plugin handlers.
    pub fn handle_did_change_text_document(
        &mut self,
        language_id: String,
        document: VersionedTextDocumentIdentifier,
        delta: RopeDelta,
        text: Rope,
        new_text: Rope,
    ) {
        let change = Arc::new(Mutex::new((None, None)));
        for (_, plugin) in self.plugins.iter() {
            plugin.handle_rpc(PluginServerRpc::DidChangeTextDocument {
                language_id: language_id.clone(),
                document: document.clone(),
                delta: delta.clone(),
                text: text.clone(),
                new_text: new_text.clone(),
                change: change.clone(),
            });
        }
    }
    /// Ask the plugin that produced `tokens` to convert them into line
    /// styles for `text`. `f` receives an error if the plugin is gone.
    pub fn format_semantic_tokens(
        &self,
        plugin_id: PluginId,
        tokens: SemanticTokens,
        text: Rope,
        f: Box<dyn RpcCallback<Vec<LineStyle>, RpcError>>,
    ) {
        if let Some(plugin) = self.plugins.get(&plugin_id) {
            plugin.handle_rpc(PluginServerRpc::FormatSemanticTokens {
                tokens,
                text,
                f,
            });
        } else {
            f.call(Err(RpcError {
                code: 0,
                message: "plugin doesn't exist".to_string(),
            }));
        }
    }
    /// Fetch the child variables for a DAP variables `reference` from the
    /// given debug session, invoking `f` with the variable list (or an
    /// error if the session is unknown).
    pub fn dap_variable(
        &self,
        dap_id: DapId,
        reference: usize,
        f: Box<dyn RpcCallback<Vec<dap_types::Variable>, RpcError>>,
    ) {
        if let Some(dap) = self.daps.get(&dap_id) {
            dap.variables_async(
                reference,
                |result: Result<dap_types::VariablesResponse, RpcError>| {
                    f.call(result.map(|resp| resp.variables))
                },
            );
        } else {
            f.call(Err(RpcError {
                code: 0,
                message: "plugin doesn't exist".to_string(),
            }));
        }
    }
    /// Fetch the scopes of a stack `frame_id` from a debug session, and
    /// eagerly expand the variables of the *first* scope only; the other
    /// scopes are returned with empty variable lists for lazy expansion.
    pub fn dap_get_scopes(
        &self,
        dap_id: DapId,
        frame_id: usize,
        f: Box<
            dyn RpcCallback<
                Vec<(dap_types::Scope, Vec<dap_types::Variable>)>,
                RpcError,
            >,
        >,
    ) {
        if let Some(dap) = self.daps.get(&dap_id) {
            let local_dap = dap.clone();
            dap.scopes_async(
                frame_id,
                move |result: Result<dap_types::ScopesResponse, RpcError>| {
                    match result {
                        Ok(resp) => {
                            let scopes = resp.scopes.clone();
                            if let Some(scope) = resp.scopes.first() {
                                let scope = scope.to_owned();
                                // The follow-up variables request runs on its
                                // own thread so this callback returns quickly.
                                thread::spawn(move || {
                                    local_dap.variables_async(
                                        scope.variables_reference,
                                        move |result: Result<
                                            dap_types::VariablesResponse,
                                            RpcError,
                                        >| {
                                            // Pair every scope with its
                                            // variables: index 0 gets the
                                            // fetched list, the rest start
                                            // empty.
                                            let resp: Vec<(
                                                dap_types::Scope,
                                                Vec<dap_types::Variable>,
                                            )> = scopes
                                                .iter()
                                                .enumerate()
                                                .map(|(index, s)| {
                                                    (
                                                        s.clone(),
                                                        if index == 0 {
                                                            result
                                                                .as_ref()
                                                                .map(|resp| {
                                                                    resp.variables
                                                                        .clone()
                                                                })
                                                                .unwrap_or_default()
                                                        } else {
                                                            Vec::new()
                                                        },
                                                    )
                                                })
                                                .collect();
                                            f.call(Ok(resp));
                                        },
                                    );
                                });
                            } else {
                                f.call(Ok(Vec::new()));
                            }
                        }
                        Err(e) => {
                            f.call(Err(e));
                        }
                    }
                },
            );
        } else {
            f.call(Err(RpcError {
                code: 0,
                message: "plugin doesn't exist".to_string(),
            }));
        }
    }
    /// Central dispatcher for catalog-level notifications: volt lifecycle
    /// (load, install, reload, stop, enable), plugin-server bookkeeping, and
    /// every debug-adapter (DAP) control message. Long-running work is moved
    /// onto spawned threads so this handler never blocks the catalog.
    pub fn handle_notification(&mut self, notification: PluginCatalogNotification) {
        use PluginCatalogNotification::*;
        match notification {
            UnactivatedVolts(volts) => {
                tracing::debug!("UnactivatedVolts {:?}", volts);
                for volt in volts {
                    let id = volt.id();
                    self.unactivated_volts.insert(id, volt);
                }
                // Some of the new volts may already satisfy their activation
                // condition.
                self.check_unactivated_volts();
            }
            UpdatePluginConfigs(configs) => {
                tracing::debug!("UpdatePluginConfigs {:?}", configs);
                self.plugin_configurations = configs;
            }
            PluginServerLoaded(plugin) => {
                // Replay `didOpen` for already-open files so a late-starting
                // server sees the current document set.
                // TODO: check if the server has did open registered
                match self.plugin_rpc.proxy_rpc.get_open_files_content() {
                    Ok(ProxyResponse::GetOpenFilesContentResponse { items }) => {
                        for item in items {
                            let language_id = Some(item.language_id.clone());
                            let path = item.uri.to_file_path().ok();
                            plugin.server_notification(
                                DidOpenTextDocument::METHOD,
                                DidOpenTextDocumentParams {
                                    text_document: item,
                                },
                                language_id,
                                path,
                                true,
                            );
                        }
                    }
                    Ok(_) => {}
                    Err(err) => {
                        tracing::error!("{:?}", err);
                    }
                }
                let plugin_id = plugin.plugin_id;
                let spawned_by = plugin.spawned_by;
                self.plugins.insert(plugin.plugin_id, plugin);
                // If another plugin spawned this server, tell it the child
                // is now up.
                if let Some(spawned_by) = spawned_by {
                    if let Some(plugin) = self.plugins.get(&spawned_by) {
                        plugin.handle_rpc(PluginServerRpc::Handler(
                            PluginHandlerNotification::SpawnedPluginLoaded {
                                plugin_id,
                            },
                        ));
                    }
                }
            }
            InstallVolt(volt) => {
                tracing::debug!("InstallVolt {:?}", volt);
                let workspace = self.workspace.clone();
                let configurations =
                    self.plugin_configurations.get(&volt.name).cloned();
                let catalog_rpc = self.plugin_rpc.clone();
                // Stop any running instance before (re)installing.
                catalog_rpc.stop_volt(volt.clone());
                thread::spawn(move || {
                    if let Err(err) =
                        install_volt(catalog_rpc, workspace, configurations, volt)
                    {
                        tracing::error!("{:?}", err);
                    }
                });
            }
            ReloadVolt(volt) => {
                tracing::debug!("ReloadVolt {:?}", volt);
                let volt_id = volt.id();
                // Shut down every instance of this volt, then queue it for
                // re-activation.
                let ids: Vec<PluginId> = self.plugins.keys().cloned().collect();
                for id in ids {
                    if self.plugins.get(&id).unwrap().volt_id == volt_id {
                        let plugin = self.plugins.remove(&id).unwrap();
                        plugin.shutdown();
                    }
                }
                if let Err(err) = self.plugin_rpc.unactivated_volts(vec![volt]) {
                    tracing::error!("{:?}", err);
                }
            }
            StopVolt(volt) => {
                tracing::debug!("StopVolt {:?}", volt);
                let volt_id = volt.id();
                let ids: Vec<PluginId> = self.plugins.keys().cloned().collect();
                for id in ids {
                    if self.plugins.get(&id).unwrap().volt_id == volt_id {
                        let plugin = self.plugins.remove(&id).unwrap();
                        plugin.shutdown();
                    }
                }
            }
            EnableVolt(volt) => {
                tracing::debug!("EnableVolt {:?}", volt);
                let volt_id = volt.id();
                // Already running — nothing to do.
                for (_, volt) in self.plugins.iter() {
                    if volt.volt_id == volt_id {
                        return;
                    }
                }
                let plugin_rpc = self.plugin_rpc.clone();
                thread::spawn(move || {
                    if let Err(err) = enable_volt(plugin_rpc, volt) {
                        tracing::error!("{:?}", err);
                    }
                });
            }
            DapLoaded(dap_rpc) => {
                self.daps.insert(dap_rpc.dap_id, dap_rpc);
            }
            DapDisconnected(dap_id) => {
                self.daps.remove(&dap_id);
            }
            DapStart {
                config,
                breakpoints,
            } => {
                let workspace = self.workspace.clone();
                let plugin_rpc = self.plugin_rpc.clone();
                // Look up the launcher a plugin registered for this debugger
                // type; without one, debugging cannot start.
                if let Some(debugger) = config
                    .ty
                    .as_ref()
                    .and_then(|ty| self.debuggers.get(ty).cloned())
                {
                    thread::spawn(move || {
                        match DapClient::start(
                            DapServer {
                                program: debugger.program,
                                args: debugger.args.unwrap_or_default(),
                                cwd: workspace,
                            },
                            config.clone(),
                            breakpoints,
                            plugin_rpc.clone(),
                        ) {
                            Ok(dap_rpc) => {
                                if let Err(err) =
                                    plugin_rpc.dap_loaded(dap_rpc.clone())
                                {
                                    tracing::error!("{:?}", err);
                                }
                                if let Err(err) = dap_rpc.launch(&config) {
                                    tracing::error!("{:?}", err);
                                }
                            }
                            Err(err) => {
                                tracing::error!("{:?}", err);
                            }
                        }
                    });
                } else {
                    self.plugin_rpc.core_rpc.show_message(
                        "debug fail".to_owned(),
                        ShowMessageParams {
                            typ: MessageType::ERROR,
                            message: "Debugger not found. Please install the appropriate plugin.".to_owned(),
                        },
                    )
                }
            }
            DapProcessId {
                dap_id,
                process_id,
                term_id,
            } => {
                if let Some(dap) = self.daps.get(&dap_id) {
                    if let Err(err) =
                        dap.termain_process_tx.send((term_id, process_id))
                    {
                        tracing::error!("{:?}", err);
                    }
                }
            }
            DapContinue { dap_id, thread_id } => {
                if let Some(dap) = self.daps.get(&dap_id).cloned() {
                    let plugin_rpc = self.plugin_rpc.clone();
                    thread::spawn(move || {
                        // Only report "continued" to the editor when the
                        // adapter accepted the request.
                        if dap.continue_thread(thread_id).is_ok() {
                            plugin_rpc.core_rpc.dap_continued(dap_id);
                        }
                    });
                }
            }
            DapPause { dap_id, thread_id } => {
                if let Some(dap) = self.daps.get(&dap_id).cloned() {
                    thread::spawn(move || {
                        if let Err(err) = dap.pause_thread(thread_id) {
                            tracing::error!("{:?}", err);
                        }
                    });
                }
            }
            DapStepOver { dap_id, thread_id } => {
                if let Some(dap) = self.daps.get(&dap_id).cloned() {
                    dap.next(thread_id);
                }
            }
            DapStepInto { dap_id, thread_id } => {
                if let Some(dap) = self.daps.get(&dap_id).cloned() {
                    dap.step_in(thread_id);
                }
            }
            DapStepOut { dap_id, thread_id } => {
                if let Some(dap) = self.daps.get(&dap_id).cloned() {
                    dap.step_out(thread_id);
                }
            }
            DapStop { dap_id } => {
                if let Some(dap) = self.daps.get(&dap_id) {
                    dap.stop();
                }
            }
            DapDisconnect { dap_id } => {
                if let Some(dap) = self.daps.get(&dap_id).cloned() {
                    thread::spawn(move || {
                        if let Err(err) = dap.disconnect() {
                            tracing::error!("{:?}", err);
                        }
                    });
                }
            }
            DapRestart {
                dap_id,
                breakpoints,
            } => {
                if let Some(dap) = self.daps.get(&dap_id) {
                    dap.restart(breakpoints);
                }
            }
            DapSetBreakpoints {
                dap_id,
                path,
                breakpoints,
            } => {
                if let Some(dap) = self.daps.get(&dap_id) {
                    let core_rpc = self.plugin_rpc.core_rpc.clone();
                    dap.set_breakpoints_async(
                        path.clone(),
                        breakpoints,
                        move |result: Result<SetBreakpointsResponse, RpcError>| {
                            match result {
                                Ok(resp) => {
                                    core_rpc.dap_breakpoints_resp(
                                        dap_id,
                                        path,
                                        resp.breakpoints.unwrap_or_default(),
                                    );
                                }
                                Err(err) => {
                                    tracing::error!("{:?}", err);
                                }
                            }
                        },
                    );
                }
            }
            RegisterDebuggerType {
                debugger_type,
                program,
                args,
            } => {
                self.debuggers.insert(
                    debugger_type.clone(),
                    DebuggerData {
                        debugger_type,
                        program,
                        args,
                    },
                );
            }
            Shutdown => {
                for (_, plugin) in self.plugins.iter() {
                    plugin.shutdown();
                }
            }
        }
    }
}
| rust | Apache-2.0 | 59ce6df700ce4efbe4498a719fe52195e083d2ee | 2026-01-04T15:32:17.267102Z | false |
lapce/lapce | https://github.com/lapce/lapce/blob/59ce6df700ce4efbe4498a719fe52195e083d2ee/lapce-proxy/src/plugin/wasi.rs | lapce-proxy/src/plugin/wasi.rs | #[cfg(test)]
mod tests;
use std::{
collections::{HashMap, VecDeque},
fs,
io::{Read, Seek, Write},
path::{Path, PathBuf},
process,
sync::{Arc, RwLock},
thread,
};
use anyhow::{Result, anyhow};
use jsonrpc_lite::{Id, Params};
use lapce_core::directory::Directory;
use lapce_rpc::{
RpcError,
plugin::{PluginId, VoltID, VoltInfo, VoltMetadata},
style::LineStyle,
};
use lapce_xi_rope::{Rope, RopeDelta};
use lsp_types::{
DocumentFilter, InitializeParams, InitializedParams,
TextDocumentContentChangeEvent, TextDocumentIdentifier, Url,
VersionedTextDocumentIdentifier, WorkDoneProgressParams, WorkspaceFolder,
notification::Initialized, request::Initialize,
};
use parking_lot::Mutex;
use psp_types::{Notification, Request};
use serde_json::Value;
use wasi_experimental_http_wasmtime::{HttpCtx, HttpState};
use wasmtime_wasi::WasiCtxBuilder;
use super::{
PluginCatalogRpcHandler, client_capabilities,
psp::{
PluginHandlerNotification, PluginHostHandler, PluginServerHandler,
PluginServerRpc, ResponseSender, RpcCallback, handle_plugin_server_message,
},
volt_icon,
};
use crate::plugin::psp::PluginServerRpcHandler;
/// An in-memory FIFO byte pipe used to bridge stdio between the host and a
/// WASI plugin instance: writes append to an internal queue, reads drain
/// from the front.
#[derive(Default)]
pub struct WasiPipe {
    buffer: VecDeque<u8>,
}

impl WasiPipe {
    /// Create an empty pipe.
    pub fn new() -> Self {
        Self::default()
    }
}

impl Read for WasiPipe {
    fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
        // Drain at most `buf.len()` queued bytes into the caller's buffer.
        let count = buf.len().min(self.buffer.len());
        for (slot, byte) in buf.iter_mut().zip(self.buffer.drain(..count)) {
            *slot = byte;
        }
        Ok(count)
    }
}

impl Write for WasiPipe {
    fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
        // An in-memory pipe never short-writes: queue every byte.
        self.buffer.extend(buf.iter().copied());
        Ok(buf.len())
    }

    fn flush(&mut self) -> std::io::Result<()> {
        // Nothing is buffered outside `self.buffer`; flushing is a no-op.
        Ok(())
    }
}

impl Seek for WasiPipe {
    fn seek(&mut self, _pos: std::io::SeekFrom) -> std::io::Result<u64> {
        // A pipe has no position to seek to.
        Err(std::io::Error::other("can not seek in a pipe"))
    }
}
/// A running WASI plugin instance, driven by the plugin-server RPC mainloop.
pub struct Plugin {
    #[allow(dead_code)]
    id: PluginId,
    // Host-side handler: capability checks, document filters, and dispatch
    // of requests/notifications arriving from the plugin.
    host: PluginHostHandler,
    // User configuration with flattened `a.b.c` keys; un-flattened by
    // `unflatten_map` and sent as `initialization_options` on initialize.
    configurations: Option<HashMap<String, serde_json::Value>>,
}
// Wires the generic plugin-server mainloop to this WASI plugin: every
// callback delegates to the shared `PluginHostHandler` except the
// lifecycle notifications, which drive `initialize`/`shutdown`.
impl PluginServerHandler for Plugin {
    fn method_registered(&mut self, method: &str) -> bool {
        self.host.method_registered(method)
    }

    fn document_supported(
        &mut self,
        language_id: Option<&str>,
        path: Option<&Path>,
    ) -> bool {
        self.host.document_supported(language_id, path)
    }

    // Lifecycle events posted by the RPC handler itself (not by the plugin).
    fn handle_handler_notification(
        &mut self,
        notification: PluginHandlerNotification,
    ) {
        use PluginHandlerNotification::*;
        match notification {
            Initialize => {
                self.initialize();
            }
            InitializeResult(result) => {
                // Record what the plugin said it can do; `method_registered`
                // consults these capabilities from now on.
                self.host.server_capabilities = result.capabilities;
            }
            Shutdown => {
                self.shutdown();
            }
            SpawnedPluginLoaded { plugin_id } => {
                self.host.handle_spawned_plugin_loaded(plugin_id);
            }
        }
    }

    fn handle_host_notification(
        &mut self,
        method: String,
        params: Params,
        from: String,
    ) {
        if let Err(err) = self.host.handle_notification(method, params, from) {
            tracing::error!("{:?}", err);
        }
    }

    fn handle_host_request(
        &mut self,
        id: Id,
        method: String,
        params: Params,
        resp: ResponseSender,
    ) {
        self.host.handle_request(id, method, params, resp);
    }

    fn handle_did_save_text_document(
        &self,
        language_id: String,
        path: PathBuf,
        text_document: TextDocumentIdentifier,
        text: Rope,
    ) {
        self.host.handle_did_save_text_document(
            language_id,
            path,
            text_document,
            text,
        );
    }

    fn handle_did_change_text_document(
        &mut self,
        language_id: String,
        document: VersionedTextDocumentIdentifier,
        delta: RopeDelta,
        text: Rope,
        new_text: Rope,
        change: Arc<
            Mutex<(
                Option<TextDocumentContentChangeEvent>,
                Option<TextDocumentContentChangeEvent>,
            )>,
        >,
    ) {
        self.host.handle_did_change_text_document(
            language_id,
            document,
            delta,
            text,
            new_text,
            change,
        );
    }

    fn format_semantic_tokens(
        &self,
        tokens: lsp_types::SemanticTokens,
        text: Rope,
        f: Box<dyn RpcCallback<Vec<LineStyle>, RpcError>>,
    ) {
        self.host.format_semantic_tokens(tokens, text, f);
    }
}
impl Plugin {
    /// Perform the LSP-style handshake with the plugin: send `initialize`
    /// asynchronously, record the returned capabilities, then send the
    /// `initialized` notification.
    fn initialize(&mut self) {
        let workspace = self.host.workspace.clone();
        // Un-flatten `a.b.c` keys into nested JSON for initialization_options.
        let configurations = self.configurations.as_ref().map(unflatten_map);
        // NOTE(review): unwrap assumes the workspace path is absolute —
        // `Url::from_directory_path` fails on relative paths; confirm callers
        // always pass absolute workspace roots.
        let root_uri = workspace.map(|p| Url::from_directory_path(p).unwrap());
        let server_rpc = self.host.server_rpc.clone();
        self.host.server_rpc.server_request_async(
            Initialize::METHOD,
            #[allow(deprecated)]
            InitializeParams {
                process_id: Some(process::id()),
                root_path: None,
                root_uri: root_uri.clone(),
                capabilities: client_capabilities(),
                trace: None,
                client_info: None,
                locale: None,
                initialization_options: configurations,
                workspace_folders: root_uri.map(|uri| {
                    vec![WorkspaceFolder {
                        name: uri.as_str().to_string(),
                        uri,
                    }]
                }),
                work_done_progress_params: WorkDoneProgressParams::default(),
            },
            None,
            None,
            // check=false: the initialize request must go out before any
            // capabilities are known.
            false,
            move |value| match value {
                Ok(value) => {
                    if let Ok(result) = serde_json::from_value(value) {
                        // Store capabilities first, then acknowledge with
                        // `initialized` — the LSP-mandated order.
                        server_rpc.handle_rpc(PluginServerRpc::Handler(
                            PluginHandlerNotification::InitializeResult(result),
                        ));
                        server_rpc.server_notification(
                            Initialized::METHOD,
                            InitializedParams {},
                            None,
                            None,
                            false,
                        );
                    }
                }
                Err(err) => {
                    tracing::error!("{:?}", err);
                }
            },
        );
    }

    // WASI plugins need no teardown beyond dropping the instance.
    fn shutdown(&self) {}
}
/// Discover every installed volt, report each wasm-backed one to the core
/// (so the UI shows it as installed), and queue those not disabled for
/// activation.
pub fn load_all_volts(
    plugin_rpc: PluginCatalogRpcHandler,
    extra_plugin_paths: &[PathBuf],
    disabled_volts: Vec<VoltID>,
) {
    let mut enabled = Vec::new();
    for meta in find_all_volts(extra_plugin_paths) {
        // Only wasm-backed volts can actually run.
        if meta.wasm.is_none() {
            continue;
        }
        let icon = volt_icon(&meta);
        // Disabled volts are still reported as installed; they are just
        // not queued for activation.
        plugin_rpc.core_rpc.volt_installed(meta.clone(), icon);
        if !disabled_volts.contains(&meta.id()) {
            enabled.push(meta);
        }
    }
    if let Err(err) = plugin_rpc.unactivated_volts(enabled) {
        tracing::error!("{:?}", err);
    }
}
/// Find all installed volts.
/// `plugin_dev_path` allows launching Lapce with a plugin on your local system for testing
/// purposes.
/// As well, this function skips any volt in the typical plugin directory that match the name
/// of the dev plugin so as to support developing a plugin you actively use.
pub fn find_all_volts(extra_plugin_paths: &[PathBuf]) -> Vec<VoltMetadata> {
    let Some(plugin_dir) = Directory::plugins_directory() else {
        return Vec::new();
    };
    // Each subdirectory of the plugin directory is a candidate volt;
    // unreadable entries are silently skipped.
    let mut plugins: Vec<VoltMetadata> = plugin_dir
        .read_dir()
        .ok()
        .into_iter()
        .flatten()
        .filter_map(|result| {
            let entry = result.ok()?;
            let metadata = entry.metadata().ok()?;
            // Ignore any loose files or '.' prefixed hidden directories
            if metadata.is_file() || entry.file_name().to_str()?.starts_with('.') {
                return None;
            }
            Some(entry.path())
        })
        .filter_map(|path| match load_volt(&path) {
            Ok(metadata) => Some(metadata),
            Err(e) => {
                tracing::error!("Failed to load plugin: {:?}", e);
                None
            }
        })
        .collect();
    // Dev/extra plugins override an installed volt of the same name+author,
    // so a locally-developed copy shadows the installed one.
    for plugin_path in extra_plugin_paths {
        let mut metadata = match load_volt(plugin_path) {
            Ok(metadata) => metadata,
            Err(e) => {
                tracing::error!("Failed to load extra plugin: {:?}", e);
                continue;
            }
        };
        let pos = plugins.iter().position(|meta| {
            meta.name == metadata.name && meta.author == metadata.author
        });
        if let Some(pos) = pos {
            // Replace in place, keeping the original list position.
            std::mem::swap(&mut plugins[pos], &mut metadata);
        } else {
            plugins.push(metadata);
        }
    }
    plugins
}
/// Returns an instance of "VoltMetadata" or an error if there is no file in the path,
/// the contents of the file cannot be read into a string, or the content read cannot
/// be converted to an instance of "VoltMetadata".
///
/// # Examples
///
/// ```
/// use std::fs::File;
/// use std::io::Write;
/// use lapce_proxy::plugin::wasi::load_volt;
/// use lapce_rpc::plugin::VoltMetadata;
///
/// let parent_path = std::env::current_dir().unwrap();
/// let mut file = File::create(parent_path.join("volt.toml")).unwrap();
/// let _ = writeln!(file, "name = \"plugin\" \n version = \"0.1\"");
/// let _ = writeln!(file, "display-name = \"Plugin\" \n author = \"Author\"");
/// let _ = writeln!(file, "description = \"Useful plugin\"");///
/// let volt_metadata = match load_volt(&parent_path) {
/// Ok(volt_metadata) => volt_metadata,
/// Err(error) => panic!("{}", error),
/// };
/// assert_eq!(
/// volt_metadata,
/// VoltMetadata {
/// name: "plugin".to_string(),
/// version: "0.1".to_string(),
/// display_name: "Plugin".to_string(),
/// author: "Author".to_string(),
/// description: "Useful plugin".to_string(),
/// icon: None,
/// repository: None,
/// wasm: None,
/// color_themes: None,
/// icon_themes: None,
/// dir: parent_path.canonicalize().ok(),
/// activation: None,
/// config: None
/// }
/// );
/// let _ = std::fs::remove_file(parent_path.join("volt.toml"));
/// ```
pub fn load_volt(path: &Path) -> Result<VoltMetadata> {
    let path = path.canonicalize()?;
    let mut file = fs::File::open(path.join("volt.toml"))?;
    let mut contents = String::new();
    file.read_to_string(&mut contents)?;
    let mut meta: VoltMetadata = toml::from_str(&contents)?;
    // Remember where the volt lives so relative assets can be resolved later.
    meta.dir = Some(path.clone());
    // Resolve the wasm entry point to an absolute path string; dropped if
    // the file does not exist or the path is not valid UTF-8.
    meta.wasm = meta.wasm.as_ref().and_then(|wasm| {
        Some(path.join(wasm).canonicalize().ok()?.to_str()?.to_string())
    });
    // FIXME: This does `meta.color_themes = Some([])` in case, for example,
    // it cannot find matching files, but in that case it should do `meta.color_themes = None`
    meta.color_themes = meta.color_themes.as_ref().map(|themes| {
        themes
            .iter()
            .filter_map(|theme| {
                Some(path.join(theme).canonicalize().ok()?.to_str()?.to_string())
            })
            .collect()
    });
    // FIXME: This does `meta.icon_themes = Some([])` in case, for example,
    // it cannot find matching files, but in that case it should do `meta.icon_themes = None`
    meta.icon_themes = meta.icon_themes.as_ref().map(|themes| {
        themes
            .iter()
            .filter_map(|theme| {
                Some(path.join(theme).canonicalize().ok()?.to_str()?.to_string())
            })
            .collect()
    });
    Ok(meta)
}
/// Re-enable a previously disabled volt: reload its metadata from the
/// standard plugin directory and queue it for activation.
pub fn enable_volt(
    plugin_rpc: PluginCatalogRpcHandler,
    volt: VoltInfo,
) -> Result<()> {
    let plugins_dir = Directory::plugins_directory()
        .ok_or_else(|| anyhow!("can't get plugin directory"))?;
    let volt_dir = plugins_dir.join(volt.id().to_string());
    let meta = load_volt(&volt_dir)?;
    plugin_rpc.unactivated_volts(vec![meta])?;
    Ok(())
}
/// Instantiate and run a WASI volt: compile its wasm module, wire stdio
/// through in-memory pipes, expose host callbacks, and spawn the threads
/// that pump RPC messages into the instance.
pub fn start_volt(
    workspace: Option<PathBuf>,
    configurations: Option<HashMap<String, serde_json::Value>>,
    plugin_rpc: PluginCatalogRpcHandler,
    meta: VoltMetadata,
) -> Result<()> {
    let engine = wasmtime::Engine::default();
    let module = wasmtime::Module::from_file(
        &engine,
        meta.wasm
            .as_ref()
            .ok_or_else(|| anyhow!("no wasm in plugin"))?,
    )?;
    let mut linker = wasmtime::Linker::new(&engine);
    wasmtime_wasi::add_to_linker(&mut linker, |s| s)?;
    // Give plugins outbound HTTP; hosts are unrestricted by design here.
    HttpState::new()?.add_to_linker(&mut linker, |_| HttpCtx {
        allowed_hosts: Some(vec!["insecure:allow-all".to_string()]),
        max_concurrent_requests: Some(100),
    })?;
    let volt_path = meta
        .dir
        .as_ref()
        .ok_or_else(|| anyhow!("plugin meta doesn't have dir"))?;
    // Detect the host libc flavor so plugins can pick matching binaries;
    // falls back to "glibc" when `ldd` is unavailable.
    #[cfg(target_os = "linux")]
    let volt_libc = {
        match std::process::Command::new("ldd").arg("--version").output() {
            Ok(cmd) => {
                if String::from_utf8_lossy(&cmd.stdout)
                    .to_lowercase()
                    .split_terminator('\n')
                    .next()
                    .unwrap_or("")
                    .contains("musl")
                {
                    "musl"
                } else {
                    "glibc"
                }
            }
            _ => "glibc",
        }
    };
    #[cfg(not(target_os = "linux"))]
    let volt_libc = "";
    // stdin/stdout carry the JSON-RPC stream; stderr carries plugin logs.
    let stdin = Arc::new(RwLock::new(WasiPipe::new()));
    let stdout = Arc::new(RwLock::new(WasiPipe::new()));
    let stderr = Arc::new(RwLock::new(WasiPipe::new()));
    let wasi = WasiCtxBuilder::new()
        .inherit_env()?
        .env("VOLT_OS", std::env::consts::OS)?
        .env("VOLT_ARCH", std::env::consts::ARCH)?
        .env("VOLT_LIBC", volt_libc)?
        .env(
            "VOLT_URI",
            Url::from_directory_path(volt_path)
                .map_err(|_| anyhow!("can't convert folder path to uri"))?
                .as_ref(),
        )?
        .stdin(Box::new(wasi_common::pipe::ReadPipe::from_shared(
            stdin.clone(),
        )))
        .stdout(Box::new(wasi_common::pipe::WritePipe::from_shared(
            stdout.clone(),
        )))
        .stderr(Box::new(wasi_common::pipe::WritePipe::from_shared(
            stderr.clone(),
        )))
        // The volt's own directory is mounted as its root filesystem.
        .preopened_dir(
            wasmtime_wasi::Dir::open_ambient_dir(
                volt_path,
                wasmtime_wasi::ambient_authority(),
            )?,
            "/",
        )?
        .build();
    let mut store = wasmtime::Store::new(&engine, wasi);
    let (io_tx, io_rx) = crossbeam_channel::unbounded();
    let rpc = PluginServerRpcHandler::new(meta.id(), None, None, io_tx);
    let local_rpc = rpc.clone();
    let local_stdin = stdin.clone();
    let volt_name = format!("volt {}", meta.name);
    // Host import: the plugin calls this after writing a message to its
    // stdout; any reply is written back onto its stdin.
    linker.func_wrap("lapce", "host_handle_rpc", move || {
        if let Ok(msg) = wasi_read_string(&stdout) {
            if let Some(resp) =
                handle_plugin_server_message(&local_rpc, &msg, &volt_name)
            {
                if let Ok(msg) = serde_json::to_string(&resp) {
                    if let Err(err) = writeln!(local_stdin.write().unwrap(), "{msg}")
                    {
                        tracing::error!("{:?}", err);
                    }
                }
            }
        }
    })?;
    let plugin_meta = meta.clone();
    // Host import: forward anything the plugin printed to stderr into the
    // host log, attributed to the plugin.
    linker.func_wrap("lapce", "host_handle_stderr", move || {
        if let Ok(msg) = wasi_read_string(&stderr) {
            tracing_log::log::log!(target: &format!("lapce_proxy::plugin::wasi::{}::{}", plugin_meta.author, plugin_meta.name), tracing_log::log::Level::Debug, "{msg}");
        }
    })?;
    linker.module(&mut store, "", &module)?;
    let local_rpc = rpc.clone();
    // Pump thread: feed outgoing messages into the plugin's stdin and call
    // its exported `handle_rpc` to let it process each one.
    thread::spawn(move || {
        let mut exist_id = None;
        {
            let instance = linker.instantiate(&mut store, &module).unwrap();
            let handle_rpc = instance
                .get_func(&mut store, "handle_rpc")
                .ok_or_else(|| anyhow!("can't convet to function"))
                .unwrap()
                .typed::<(), ()>(&mut store)
                .unwrap();
            for msg in io_rx {
                // A Shutdown request ends the loop; remember its id so we
                // can fake a success response after the instance is dropped.
                if msg
                    .get_method()
                    .map(|x| x == lsp_types::request::Shutdown::METHOD)
                    .unwrap_or_default()
                {
                    exist_id = msg.get_id();
                    break;
                }
                if let Ok(msg) = serde_json::to_string(&msg) {
                    if let Err(err) = writeln!(stdin.write().unwrap(), "{msg}") {
                        tracing::error!("{:?}", err);
                    }
                }
                if let Err(err) = handle_rpc.call(&mut store, ()) {
                    tracing::error!("{:?}", err);
                }
            }
        }
        if let Some(id) = exist_id {
            local_rpc.handle_server_response(id, Ok(Value::Null));
        }
    });
    let id = PluginId::next();
    let mut plugin = Plugin {
        id,
        host: PluginHostHandler::new(
            workspace,
            meta.dir.clone(),
            meta.id(),
            meta.display_name.clone(),
            // Activation events become document filters: language ids plus
            // workspace-contains glob patterns.
            meta.activation
                .iter()
                .flat_map(|m| m.language.iter().flatten())
                .cloned()
                .map(|s| DocumentFilter {
                    language: Some(s),
                    pattern: None,
                    scheme: None,
                })
                .chain(
                    meta.activation
                        .iter()
                        .flat_map(|m| m.workspace_contains.iter().flatten())
                        .cloned()
                        .map(|s| DocumentFilter {
                            language: None,
                            pattern: Some(s),
                            scheme: None,
                        }),
                )
                .collect(),
            plugin_rpc.core_rpc.clone(),
            rpc.clone(),
            plugin_rpc.clone(),
        ),
        configurations,
    };
    let local_rpc = rpc.clone();
    // Handler thread: drives the plugin's RPC mainloop.
    thread::spawn(move || {
        local_rpc.mainloop(&mut plugin);
    });
    if plugin_rpc.plugin_server_loaded(rpc.clone()).is_err() {
        rpc.shutdown();
    }
    Ok(())
}
/// Drain everything currently queued in the pipe into a `String`.
///
/// Takes the write lock because reading from a `WasiPipe` consumes its
/// buffered bytes; fails if the bytes are not valid UTF-8.
fn wasi_read_string(stdout: &Arc<RwLock<WasiPipe>>) -> Result<String> {
    let mut contents = String::new();
    let mut guard = stdout.write().unwrap();
    guard.read_to_string(&mut contents)?;
    Ok(contents)
}
/// Convert a flat map with dotted keys (`"a.b.c": v`) into nested JSON
/// objects (`{"a": {"b": {"c": v}}}`).
///
/// An intermediate segment that already holds a non-object value is
/// replaced with an empty object so the remaining path can be created.
fn unflatten_map(map: &HashMap<String, serde_json::Value>) -> serde_json::Value {
    let mut root = serde_json::json!({});
    for (flat_key, value) in map.iter() {
        let segments: Vec<&str> = flat_key.split('.').collect();
        // `split` always yields at least one segment, so this cannot fail.
        let (last, parents) = segments.split_last().unwrap();
        let mut node = root.as_object_mut().unwrap();
        for seg in parents {
            let is_object =
                node.get(*seg).map(|v| v.is_object()).unwrap_or(false);
            if !is_object {
                node.insert((*seg).to_string(), serde_json::json!({}));
            }
            node = node.get_mut(*seg).unwrap().as_object_mut().unwrap();
        }
        node.insert((*last).to_string(), value.clone());
    }
    root
}
| rust | Apache-2.0 | 59ce6df700ce4efbe4498a719fe52195e083d2ee | 2026-01-04T15:32:17.267102Z | false |
lapce/lapce | https://github.com/lapce/lapce/blob/59ce6df700ce4efbe4498a719fe52195e083d2ee/lapce-proxy/src/plugin/psp.rs | lapce-proxy/src/plugin/psp.rs | use std::{
borrow::Cow,
collections::HashMap,
path::{Path, PathBuf},
sync::{
Arc,
atomic::{AtomicU64, Ordering},
},
thread,
};
use anyhow::{Result, anyhow};
use crossbeam_channel::{Receiver, Sender};
use dyn_clone::DynClone;
use floem_editor_core::buffer::rope_text::{RopeText, RopeTextRef};
use jsonrpc_lite::{Id, JsonRpc, Params};
use lapce_core::{encoding::offset_utf16_to_utf8, rope_text_pos::RopeTextPosition};
use lapce_rpc::{
RpcError,
core::{CoreRpcHandler, ServerStatusParams},
plugin::{PluginId, VoltID},
style::{LineStyle, Style},
};
use lapce_xi_rope::{Rope, RopeDelta};
use lsp_types::{
CancelParams, CodeActionProviderCapability, DidChangeTextDocumentParams,
DidSaveTextDocumentParams, DocumentSelector, FoldingRangeProviderCapability,
HoverProviderCapability, ImplementationProviderCapability, InitializeResult,
LogMessageParams, MessageType, OneOf, ProgressParams, PublishDiagnosticsParams,
Range, Registration, RegistrationParams, SemanticTokens, SemanticTokensLegend,
SemanticTokensServerCapabilities, ServerCapabilities, ShowMessageParams,
TextDocumentContentChangeEvent, TextDocumentIdentifier,
TextDocumentSaveRegistrationOptions, TextDocumentSyncCapability,
TextDocumentSyncKind, TextDocumentSyncSaveOptions,
VersionedTextDocumentIdentifier,
notification::{
Cancel, DidChangeTextDocument, DidOpenTextDocument, DidSaveTextDocument,
Initialized, LogMessage, Notification, Progress, PublishDiagnostics,
ShowMessage,
},
request::{
CallHierarchyIncomingCalls, CallHierarchyPrepare, CodeActionRequest,
CodeActionResolveRequest, CodeLensRequest, CodeLensResolve, Completion,
DocumentSymbolRequest, FoldingRangeRequest, Formatting, GotoDefinition,
GotoImplementation, GotoTypeDefinition, HoverRequest, Initialize,
InlayHintRequest, InlineCompletionRequest, PrepareRenameRequest, References,
RegisterCapability, Rename, ResolveCompletionItem, SelectionRangeRequest,
SemanticTokensFullRequest, SignatureHelpRequest, WorkDoneProgressCreate,
WorkspaceSymbolRequest,
},
};
use parking_lot::Mutex;
use psp_types::{
ExecuteProcess, ExecuteProcessParams, ExecuteProcessResult,
RegisterDebuggerType, RegisterDebuggerTypeParams, Request, SendLspNotification,
SendLspNotificationParams, SendLspRequest, SendLspRequestParams,
SendLspRequestResult, StartLspServer, StartLspServerParams,
StartLspServerResult,
};
use serde::Serialize;
use serde_json::Value;
use super::{
PluginCatalogRpcHandler,
lsp::{DocumentFilter, LspClient},
};
/// How the response to an outgoing server request is delivered back to the
/// caller: over a channel (synchronous callers block on `recv`) or through
/// a one-shot callback.
pub enum ResponseHandler<Resp, Error> {
    Chan(Sender<Result<Resp, Error>>),
    Callback(Box<dyn RpcCallback<Resp, Error>>),
}

impl<Resp, Error> ResponseHandler<Resp, Error> {
    /// Deliver `result` to the waiter. Consumes `self`, so a response can
    /// only be delivered once.
    pub fn invoke(self, result: Result<Resp, Error>) {
        match self {
            ResponseHandler::Chan(tx) => {
                // Send fails only if the requester already gave up waiting.
                if let Err(err) = tx.send(result) {
                    tracing::error!("{:?}", err);
                }
            }
            ResponseHandler::Callback(f) => f.call(result),
        }
    }
}
/// A one-shot response callback that can also be cloned, so the same
/// handler can be fanned out to multiple plugins (each invocation receives
/// the responding plugin's id).
pub trait ClonableCallback<Resp, Error>:
    FnOnce(PluginId, Result<Resp, Error>) + Send + DynClone
{
}

// Blanket impl: any suitable cloneable closure qualifies.
impl<Resp, Error, F: Send + FnOnce(PluginId, Result<Resp, Error>) + DynClone>
    ClonableCallback<Resp, Error> for F
{
}

/// A boxed one-shot response callback. `call` takes `Box<Self>` so the
/// trait stays object-safe while still consuming the closure.
pub trait RpcCallback<Resp, Error>: Send {
    fn call(self: Box<Self>, result: Result<Resp, Error>);
}

// Blanket impl: any sendable `FnOnce(Result<...>)` qualifies.
impl<Resp, Error, F: Send + FnOnce(Result<Resp, Error>)> RpcCallback<Resp, Error>
    for F
{
    fn call(self: Box<F>, result: Result<Resp, Error>) {
        (*self)(result)
    }
}
/// Lifecycle events delivered to a plugin's handler by the RPC machinery
/// itself (as opposed to messages coming from the plugin process).
#[allow(clippy::large_enum_variant)]
pub enum PluginHandlerNotification {
    // Kick off the initialize handshake.
    Initialize,
    // The server answered `initialize`; carries its capabilities.
    InitializeResult(InitializeResult),
    // Tear the plugin down.
    Shutdown,
    // A language server this plugin spawned has finished loading.
    SpawnedPluginLoaded { plugin_id: PluginId },
}
/// Messages processed by `PluginServerRpcHandler::mainloop`. "Server" here
/// is the plugin/language server; "host" is Lapce.
#[allow(clippy::large_enum_variant)]
pub enum PluginServerRpc {
    // Ends the mainloop.
    Shutdown,
    // Lifecycle event for the handler itself.
    Handler(PluginHandlerNotification),
    // Host -> server request; `language_id`/`path` let the mainloop check
    // document support and capability registration before sending.
    ServerRequest {
        id: Id,
        method: Cow<'static, str>,
        params: Params,
        language_id: Option<String>,
        path: Option<PathBuf>,
        rh: ResponseHandler<Value, RpcError>,
    },
    // Host -> server notification, with the same pre-send checks.
    ServerNotification {
        method: Cow<'static, str>,
        params: Params,
        language_id: Option<String>,
        path: Option<PathBuf>,
    },
    // Server -> host request; the answer goes back through `resp`.
    HostRequest {
        id: Id,
        method: String,
        params: Params,
        resp: ResponseSender,
    },
    // Server -> host notification; `from` names the originating server.
    HostNotification {
        method: String,
        params: Params,
        from: String,
    },
    DidSaveTextDocument {
        language_id: String,
        path: PathBuf,
        text_document: TextDocumentIdentifier,
        text: Rope,
    },
    // Carries both incremental (`delta`) and full-text forms; `change`
    // caches the computed LSP change events so they are built only once.
    DidChangeTextDocument {
        language_id: String,
        document: VersionedTextDocumentIdentifier,
        delta: RopeDelta,
        text: Rope,
        new_text: Rope,
        change: Arc<
            Mutex<(
                Option<TextDocumentContentChangeEvent>,
                Option<TextDocumentContentChangeEvent>,
            )>,
        >,
    },
    // Convert raw semantic tokens into line styles, off the main thread.
    FormatSemanticTokens {
        tokens: SemanticTokens,
        text: Rope,
        f: Box<dyn RpcCallback<Vec<LineStyle>, RpcError>>,
    },
}
/// Shared handle driving one plugin/language server: queues work for the
/// mainloop, writes JSON-RPC to the server, and matches responses to
/// pending requests. Cheap to clone (all state is behind `Arc`/channels).
#[derive(Clone)]
pub struct PluginServerRpcHandler {
    // Set when this server was spawned by another plugin.
    pub spawned_by: Option<PluginId>,
    pub plugin_id: PluginId,
    pub volt_id: VoltID,
    // Work queue consumed by `mainloop`.
    rpc_tx: Sender<PluginServerRpc>,
    rpc_rx: Receiver<PluginServerRpc>,
    // Outgoing JSON-RPC wire to the server process/instance.
    io_tx: Sender<JsonRpc>,
    // Monotonic request-id counter.
    id: Arc<AtomicU64>,
    // Requests awaiting a response, keyed by request id.
    server_pending: Arc<Mutex<HashMap<Id, ResponseHandler<Value, RpcError>>>>,
}
/// Sending half of the one-shot channel used to answer a host-side request
/// coming from a plugin.
#[derive(Clone)]
pub struct ResponseSender {
    tx: Sender<Result<Value, RpcError>>,
}

impl ResponseSender {
    pub fn new(tx: Sender<Result<Value, RpcError>>) -> Self {
        Self { tx }
    }

    /// Serialize `result` and send it; a serialization failure is turned
    /// into an `RpcError` reply rather than silently dropped.
    pub fn send(&self, result: impl Serialize) {
        let payload = match serde_json::to_value(result) {
            Ok(value) => Ok(value),
            Err(e) => Err(RpcError {
                code: 0,
                message: e.to_string(),
            }),
        };
        if let Err(err) = self.tx.send(payload) {
            tracing::error!("{:?}", err);
        }
    }

    /// Reply with a JSON `null` success value.
    pub fn send_null(&self) {
        if let Err(err) = self.tx.send(Ok(Value::Null)) {
            tracing::error!("{:?}", err);
        }
    }

    /// Reply with an error carrying `code` and `message`.
    pub fn send_err(&self, code: i64, message: impl Into<String>) {
        let reply = Err(RpcError {
            code,
            message: message.into(),
        });
        if let Err(err) = self.tx.send(reply) {
            tracing::error!("{:?}", err);
        }
    }
}
/// Callbacks a plugin backend (WASI plugin or spawned LSP) implements; the
/// `PluginServerRpcHandler` mainloop drives these for every queued message.
pub trait PluginServerHandler {
    /// Whether this server's document filters accept the given document.
    fn document_supported(
        &mut self,
        language_id: Option<&str>,
        path: Option<&Path>,
    ) -> bool;
    /// Whether the server advertised the capability for `method`.
    fn method_registered(&mut self, method: &str) -> bool;
    /// A notification arriving from the server; `from` names its origin.
    fn handle_host_notification(
        &mut self,
        method: String,
        params: Params,
        from: String,
    );
    /// A request arriving from the server; answer via `chan`.
    fn handle_host_request(
        &mut self,
        id: Id,
        method: String,
        params: Params,
        chan: ResponseSender,
    );
    /// A lifecycle event (initialize/shutdown/...) from the RPC machinery.
    fn handle_handler_notification(
        &mut self,
        notification: PluginHandlerNotification,
    );
    fn handle_did_save_text_document(
        &self,
        language_id: String,
        path: PathBuf,
        text_document: TextDocumentIdentifier,
        text: Rope,
    );
    // `change` caches the computed LSP change events shared across servers.
    fn handle_did_change_text_document(
        &mut self,
        language_id: String,
        document: VersionedTextDocumentIdentifier,
        delta: RopeDelta,
        text: Rope,
        new_text: Rope,
        change: Arc<
            Mutex<(
                Option<TextDocumentContentChangeEvent>,
                Option<TextDocumentContentChangeEvent>,
            )>,
        >,
    );
    /// Convert semantic tokens into line styles and deliver them via `f`.
    fn format_semantic_tokens(
        &self,
        tokens: SemanticTokens,
        text: Rope,
        f: Box<dyn RpcCallback<Vec<LineStyle>, RpcError>>,
    );
}
impl PluginServerRpcHandler {
    /// Create a handler and immediately queue the `Initialize` lifecycle
    /// event so the handshake starts as soon as `mainloop` runs.
    pub fn new(
        volt_id: VoltID,
        spawned_by: Option<PluginId>,
        plugin_id: Option<PluginId>,
        io_tx: Sender<JsonRpc>,
    ) -> Self {
        let (rpc_tx, rpc_rx) = crossbeam_channel::unbounded();
        let rpc = Self {
            spawned_by,
            volt_id,
            plugin_id: plugin_id.unwrap_or_else(PluginId::next),
            rpc_tx,
            rpc_rx,
            io_tx,
            id: Arc::new(AtomicU64::new(0)),
            server_pending: Arc::new(Mutex::new(HashMap::new())),
        };
        rpc.initialize();
        rpc
    }

    fn initialize(&self) {
        self.handle_rpc(PluginServerRpc::Handler(
            PluginHandlerNotification::Initialize,
        ));
    }

    // Register the response handler, then write the request to the wire.
    fn send_server_request(
        &self,
        id: Id,
        method: &str,
        params: Params,
        rh: ResponseHandler<Value, RpcError>,
    ) {
        {
            // Insert before sending so a fast response can't race the
            // pending-map registration.
            let mut pending = self.server_pending.lock();
            pending.insert(id.clone(), rh);
        }
        let msg = JsonRpc::request_with_params(id, method, params);
        self.send_server_rpc(msg);
    }

    fn send_server_notification(&self, method: &str, params: Params) {
        let msg = JsonRpc::notification_with_params(method, params);
        self.send_server_rpc(msg);
    }

    fn send_server_rpc(&self, msg: JsonRpc) {
        if let Err(err) = self.io_tx.send(msg) {
            tracing::error!("{:?}", err);
        }
    }

    /// Queue a message for the mainloop.
    pub fn handle_rpc(&self, rpc: PluginServerRpc) {
        if let Err(err) = self.rpc_tx.send(rpc) {
            tracing::error!("{:?}", err);
        }
    }

    /// Send a notification.
    /// The callback is called when the function is actually sent.
    pub fn server_notification<P: Serialize>(
        &self,
        method: impl Into<Cow<'static, str>>,
        params: P,
        language_id: Option<String>,
        path: Option<PathBuf>,
        check: bool,
    ) {
        let params = Params::from(serde_json::to_value(params).unwrap());
        let method = method.into();
        if check {
            // Route through the mainloop so document support and capability
            // registration are verified before the send.
            if let Err(err) = self.rpc_tx.send(PluginServerRpc::ServerNotification {
                method,
                params,
                language_id,
                path,
            }) {
                tracing::error!("{:?}", err);
            }
        } else {
            self.send_server_notification(&method, params);
        }
    }

    /// Make a request to plugin/language server and get the response.
    ///
    /// When check is true, the request will be in the handler mainloop to
    /// do checks like if the server has the capability of the request.
    ///
    /// When check is false, the request will be sent out straight away.
    pub fn server_request<P: Serialize>(
        &self,
        method: impl Into<Cow<'static, str>>,
        params: P,
        language_id: Option<String>,
        path: Option<PathBuf>,
        check: bool,
    ) -> Result<Value, RpcError> {
        let (tx, rx) = crossbeam_channel::bounded(1);
        self.server_request_common(
            method.into(),
            params,
            language_id,
            path,
            check,
            ResponseHandler::Chan(tx),
        );
        // A dropped sender (e.g. server died) surfaces as a generic error.
        rx.recv().unwrap_or_else(|_| {
            Err(RpcError {
                code: 0,
                message: "io error".to_string(),
            })
        })
    }

    /// Like `server_request` but non-blocking: the response is delivered to `f`.
    pub fn server_request_async<P: Serialize>(
        &self,
        method: impl Into<Cow<'static, str>>,
        params: P,
        language_id: Option<String>,
        path: Option<PathBuf>,
        check: bool,
        f: impl RpcCallback<Value, RpcError> + 'static,
    ) {
        self.server_request_common(
            method.into(),
            params,
            language_id,
            path,
            check,
            ResponseHandler::Callback(Box::new(f)),
        );
    }

    fn server_request_common<P: Serialize>(
        &self,
        method: Cow<'static, str>,
        params: P,
        language_id: Option<String>,
        path: Option<PathBuf>,
        check: bool,
        rh: ResponseHandler<Value, RpcError>,
    ) {
        // Allocate a fresh request id from the shared counter.
        let id = self.id.fetch_add(1, Ordering::Relaxed);
        let params = Params::from(serde_json::to_value(params).unwrap());
        if check {
            if let Err(err) = self.rpc_tx.send(PluginServerRpc::ServerRequest {
                id: Id::Num(id as i64),
                method,
                params,
                language_id,
                path,
                rh,
            }) {
                tracing::error!("{:?}", err);
            }
        } else {
            self.send_server_request(Id::Num(id as i64), &method, params, rh);
        }
    }

    /// Resolve the pending request with matching `id`; unknown ids (e.g.
    /// duplicate responses) are ignored.
    pub fn handle_server_response(&self, id: Id, result: Result<Value, RpcError>) {
        if let Some(handler) = { self.server_pending.lock().remove(&id) } {
            handler.invoke(result);
        }
    }

    pub fn shutdown(&self) {
        // to kill lsp
        self.handle_rpc(PluginServerRpc::Handler(
            PluginHandlerNotification::Shutdown,
        ));
        // to end PluginServerRpcHandler::mainloop
        self.handle_rpc(PluginServerRpc::Shutdown);
    }

    /// Process queued messages until a `Shutdown` arrives. Requests and
    /// notifications flagged for checking are only sent when the handler
    /// supports the document and the server registered the method.
    pub fn mainloop<H>(&self, handler: &mut H)
    where
        H: PluginServerHandler,
    {
        for msg in &self.rpc_rx {
            match msg {
                PluginServerRpc::ServerRequest {
                    id,
                    method,
                    params,
                    language_id,
                    path,
                    rh,
                } => {
                    if handler
                        .document_supported(language_id.as_deref(), path.as_deref())
                        && handler.method_registered(&method)
                    {
                        self.send_server_request(id, &method, params, rh);
                    } else {
                        // Fail fast so the requester isn't left waiting.
                        rh.invoke(Err(RpcError {
                            code: 0,
                            message: "server not capable".to_string(),
                        }));
                    }
                }
                PluginServerRpc::ServerNotification {
                    method,
                    params,
                    language_id,
                    path,
                } => {
                    // Notifications have no reply channel; unsupported ones
                    // are silently dropped.
                    if handler
                        .document_supported(language_id.as_deref(), path.as_deref())
                        && handler.method_registered(&method)
                    {
                        self.send_server_notification(&method, params);
                    }
                }
                PluginServerRpc::HostRequest {
                    id,
                    method,
                    params,
                    resp,
                } => {
                    handler.handle_host_request(id, method, params, resp);
                }
                PluginServerRpc::HostNotification {
                    method,
                    params,
                    from,
                } => {
                    handler.handle_host_notification(method, params, from);
                }
                PluginServerRpc::DidSaveTextDocument {
                    language_id,
                    path,
                    text_document,
                    text,
                } => {
                    handler.handle_did_save_text_document(
                        language_id,
                        path,
                        text_document,
                        text,
                    );
                }
                PluginServerRpc::DidChangeTextDocument {
                    language_id,
                    document,
                    delta,
                    text,
                    new_text,
                    change,
                } => {
                    handler.handle_did_change_text_document(
                        language_id,
                        document,
                        delta,
                        text,
                        new_text,
                        change,
                    );
                }
                PluginServerRpc::FormatSemanticTokens { tokens, text, f } => {
                    handler.format_semantic_tokens(tokens, text, f);
                }
                PluginServerRpc::Handler(notification) => {
                    handler.handle_handler_notification(notification)
                }
                PluginServerRpc::Shutdown => {
                    return;
                }
            }
        }
    }
}
/// Parse one raw JSON-RPC message coming from a plugin/server and route it.
/// Returns the reply to write back for requests; `None` for notifications,
/// responses, and parse failures. `from` labels the origin for diagnostics.
pub fn handle_plugin_server_message(
    server_rpc: &PluginServerRpcHandler,
    message: &str,
    from: &str,
) -> Option<JsonRpc> {
    match JsonRpc::parse(message) {
        Ok(value @ JsonRpc::Request(_)) => {
            // Block until the handler answers via the ResponseSender.
            let (tx, rx) = crossbeam_channel::bounded(1);
            let id = value.get_id().unwrap();
            let rpc = PluginServerRpc::HostRequest {
                id: id.clone(),
                method: value.get_method().unwrap().to_string(),
                params: value.get_params().unwrap(),
                resp: ResponseSender::new(tx),
            };
            server_rpc.handle_rpc(rpc);
            // NOTE(review): panics if the handler drops the sender without
            // responding — relies on every host-request path always replying;
            // confirm that invariant holds for all handlers.
            let result = rx.recv().unwrap();
            let resp = match result {
                Ok(v) => JsonRpc::success(id, &v),
                Err(e) => JsonRpc::error(
                    id,
                    jsonrpc_lite::Error {
                        code: e.code,
                        message: e.message,
                        data: None,
                    },
                ),
            };
            Some(resp)
        }
        Ok(value @ JsonRpc::Notification(_)) => {
            let rpc = PluginServerRpc::HostNotification {
                method: value.get_method().unwrap().to_string(),
                params: value.get_params().unwrap(),
                from: from.to_string(),
            };
            server_rpc.handle_rpc(rpc);
            None
        }
        Ok(value @ JsonRpc::Success(_)) => {
            // Response to a request we sent earlier; resolve it by id.
            let result = value.get_result().unwrap().clone();
            server_rpc.handle_server_response(value.get_id().unwrap(), Ok(result));
            None
        }
        Ok(value @ JsonRpc::Error(_)) => {
            let error = value.get_error().unwrap();
            server_rpc.handle_server_response(
                value.get_id().unwrap(),
                Err(RpcError {
                    code: error.code,
                    message: error.message.clone(),
                }),
            );
            None
        }
        Err(err) => {
            eprintln!("parse error {err} message {message}");
            None
        }
    }
}
// A dynamic `textDocument/didSave` registration: which documents it covers
// and whether the full text must be included in the notification.
struct SaveRegistration {
    include_text: bool,
    filters: Vec<DocumentFilter>,
}

// Capabilities the server registered dynamically (via `client/registerCapability`)
// after initialization, as opposed to those in the static `ServerCapabilities`.
#[derive(Default)]
struct ServerRegistrations {
    save: Option<SaveRegistration>,
}
/// Host-side state shared by plugin backends: identifies the volt, knows
/// which documents it covers, tracks the server's capabilities, and holds
/// handles back into the host (core/catalog RPC).
pub struct PluginHostHandler {
    volt_id: VoltID,
    volt_display_name: String,
    // Working directory for the plugin (the volt's install dir).
    pwd: Option<PathBuf>,
    pub(crate) workspace: Option<PathBuf>,
    // Filters deciding which documents this plugin applies to.
    document_selector: Vec<DocumentFilter>,
    core_rpc: CoreRpcHandler,
    catalog_rpc: PluginCatalogRpcHandler,
    pub server_rpc: PluginServerRpcHandler,
    // Static capabilities from the initialize response.
    pub server_capabilities: ServerCapabilities,
    // Capabilities registered dynamically after initialization.
    server_registrations: ServerRegistrations,
    /// Language servers that this plugin has spawned.
    /// Note that these plugin ids could be 'dead' if the LSP died/exited.
    spawned_lsp: HashMap<PluginId, SpawnedLspInfo>,
}
impl PluginHostHandler {
    /// Build a host handler. The raw LSP `DocumentSelector` is converted to
    /// this module's looser `DocumentFilter` form up front; capabilities and
    /// registrations start empty until the server's initialize response.
    #[allow(clippy::too_many_arguments)]
    pub fn new(
        workspace: Option<PathBuf>,
        pwd: Option<PathBuf>,
        volt_id: VoltID,
        volt_display_name: String,
        document_selector: DocumentSelector,
        core_rpc: CoreRpcHandler,
        server_rpc: PluginServerRpcHandler,
        catalog_rpc: PluginCatalogRpcHandler,
    ) -> Self {
        let document_selector = document_selector
            .iter()
            .map(DocumentFilter::from_lsp_filter_loose)
            .collect();
        Self {
            pwd,
            workspace,
            volt_id,
            volt_display_name,
            document_selector,
            core_rpc,
            catalog_rpc,
            server_rpc,
            server_capabilities: ServerCapabilities::default(),
            server_registrations: ServerRegistrations::default(),
            spawned_lsp: HashMap::new(),
        }
    }
pub fn document_supported(
&self,
language_id: Option<&str>,
path: Option<&Path>,
) -> bool {
match language_id {
Some(language_id) => {
for filter in self.document_selector.iter() {
if (filter.language_id.is_none()
|| filter.language_id.as_deref() == Some(language_id))
&& (path.is_none()
|| filter.pattern.is_none()
|| filter
.pattern
.as_ref()
.unwrap()
.is_match(path.as_ref().unwrap()))
{
return true;
}
}
false
}
None => true,
}
}
    /// Whether the server's (static) capabilities cover the given LSP
    /// method, per the rules of the LSP spec: boolean capabilities gate
    /// directly, `OneOf`/options forms count as supported when present.
    pub fn method_registered(&mut self, method: &str) -> bool {
        match method {
            // The handshake itself is always allowed.
            Initialize::METHOD => true,
            Initialized::METHOD => true,
            Completion::METHOD => {
                self.server_capabilities.completion_provider.is_some()
            }
            ResolveCompletionItem::METHOD => self
                .server_capabilities
                .completion_provider
                .as_ref()
                .and_then(|c| c.resolve_provider)
                .unwrap_or(false),
            // Document sync: a bare Kind of NONE means no sync at all; the
            // Options form falls back from `open_close` to `change`.
            DidOpenTextDocument::METHOD => {
                match &self.server_capabilities.text_document_sync {
                    Some(TextDocumentSyncCapability::Kind(kind)) => {
                        kind != &TextDocumentSyncKind::NONE
                    }
                    Some(TextDocumentSyncCapability::Options(options)) => options
                        .open_close
                        .or_else(|| {
                            options
                                .change
                                .map(|kind| kind != TextDocumentSyncKind::NONE)
                        })
                        .unwrap_or(false),
                    None => false,
                }
            }
            DidChangeTextDocument::METHOD => {
                match &self.server_capabilities.text_document_sync {
                    Some(TextDocumentSyncCapability::Kind(kind)) => {
                        kind != &TextDocumentSyncKind::NONE
                    }
                    Some(TextDocumentSyncCapability::Options(options)) => options
                        .change
                        .map(|kind| kind != TextDocumentSyncKind::NONE)
                        .unwrap_or(false),
                    None => false,
                }
            }
            SignatureHelpRequest::METHOD => {
                self.server_capabilities.signature_help_provider.is_some()
            }
            HoverRequest::METHOD => self
                .server_capabilities
                .hover_provider
                .as_ref()
                .map(|c| match c {
                    HoverProviderCapability::Simple(is_capable) => *is_capable,
                    HoverProviderCapability::Options(_) => true,
                })
                .unwrap_or(false),
            GotoDefinition::METHOD => self
                .server_capabilities
                .definition_provider
                .as_ref()
                .map(|d| match d {
                    OneOf::Left(is_capable) => *is_capable,
                    OneOf::Right(_) => true,
                })
                .unwrap_or(false),
            GotoTypeDefinition::METHOD => {
                self.server_capabilities.type_definition_provider.is_some()
            }
            References::METHOD => self
                .server_capabilities
                .references_provider
                .as_ref()
                .map(|r| match r {
                    OneOf::Left(is_capable) => *is_capable,
                    OneOf::Right(_) => true,
                })
                .unwrap_or(false),
            GotoImplementation::METHOD => self
                .server_capabilities
                .implementation_provider
                .as_ref()
                .map(|r| match r {
                    ImplementationProviderCapability::Simple(is_capable) => {
                        *is_capable
                    }
                    ImplementationProviderCapability::Options(_) => {
                        // todo
                        false
                    }
                })
                .unwrap_or(false),
            FoldingRangeRequest::METHOD => self
                .server_capabilities
                .folding_range_provider
                .as_ref()
                .map(|r| match r {
                    FoldingRangeProviderCapability::Simple(support) => *support,
                    FoldingRangeProviderCapability::FoldingProvider(_) => {
                        // todo
                        true
                    }
                    FoldingRangeProviderCapability::Options(_) => {
                        // todo
                        true
                    }
                })
                .unwrap_or(false),
            CodeActionRequest::METHOD => self
                .server_capabilities
                .code_action_provider
                .as_ref()
                .map(|a| match a {
                    CodeActionProviderCapability::Simple(is_capable) => *is_capable,
                    CodeActionProviderCapability::Options(_) => true,
                })
                .unwrap_or(false),
            Formatting::METHOD => self
                .server_capabilities
                .document_formatting_provider
                .as_ref()
                .map(|f| match f {
                    OneOf::Left(is_capable) => *is_capable,
                    OneOf::Right(_) => true,
                })
                .unwrap_or(false),
            SemanticTokensFullRequest::METHOD => {
                self.server_capabilities.semantic_tokens_provider.is_some()
            }
            InlayHintRequest::METHOD => {
                self.server_capabilities.inlay_hint_provider.is_some()
            }
            InlineCompletionRequest::METHOD => self
                .server_capabilities
                .inline_completion_provider
                .is_some(),
            DocumentSymbolRequest::METHOD => {
                self.server_capabilities.document_symbol_provider.is_some()
            }
            WorkspaceSymbolRequest::METHOD => {
                self.server_capabilities.workspace_symbol_provider.is_some()
            }
            PrepareRenameRequest::METHOD => {
                self.server_capabilities.rename_provider.is_some()
            }
            Rename::METHOD => self.server_capabilities.rename_provider.is_some(),
            SelectionRangeRequest::METHOD => {
                self.server_capabilities.selection_range_provider.is_some()
            }
            CodeActionResolveRequest::METHOD => {
                self.server_capabilities.code_action_provider.is_some()
            }
            CodeLensRequest::METHOD => {
                self.server_capabilities.code_lens_provider.is_some()
            }
            CodeLensResolve::METHOD => self
                .server_capabilities
                .code_lens_provider
                .as_ref()
                .and_then(|x| x.resolve_provider)
                .unwrap_or(false),
            CallHierarchyPrepare::METHOD => {
                self.server_capabilities.call_hierarchy_provider.is_some()
            }
            CallHierarchyIncomingCalls::METHOD => {
                self.server_capabilities.call_hierarchy_provider.is_some()
            }
            // Anything else is assumed unsupported.
            _ => false,
        }
    }
/// Decide whether a `textDocument/didSave` notification should be sent for
/// this document, and whether it must carry the full document text.
///
/// Returns `(should_send, include_text)`. Statically declared server
/// capabilities take precedence; dynamically registered save filters (see
/// `register_capability`) are only consulted when the document is not
/// covered by the static capabilities.
fn check_save_capability(&self, language_id: &str, path: &Path) -> (bool, bool) {
    if self.document_supported(Some(language_id), Some(path)) {
        let (should_send, include_text) = self
            .server_capabilities
            .text_document_sync
            .as_ref()
            .and_then(|sync| match sync {
                // A bare sync kind says nothing about save behavior.
                TextDocumentSyncCapability::Kind(_) => None,
                TextDocumentSyncCapability::Options(options) => Some(options),
            })
            .and_then(|o| o.save.as_ref())
            .map(|o| match o {
                TextDocumentSyncSaveOptions::Supported(is_supported) => {
                    // NOTE(review): `include_text` defaults to true when the
                    // server only said "supported" — confirm intentional.
                    (*is_supported, true)
                }
                TextDocumentSyncSaveOptions::SaveOptions(options) => {
                    (true, options.include_text.unwrap_or(false))
                }
            })
            .unwrap_or((false, false));
        return (should_send, include_text);
    }
    // Fall back to dynamically registered save filters; a filter matches
    // when both its language and pattern constraints (if present) match.
    if let Some(options) = self.server_registrations.save.as_ref() {
        for filter in options.filters.iter() {
            if (filter.language_id.is_none()
                || filter.language_id.as_deref() == Some(language_id))
                && (filter.pattern.is_none()
                    || filter.pattern.as_ref().unwrap().is_match(path))
            {
                return (true, options.include_text);
            }
        }
    }
    (false, false)
}
/// Apply a batch of dynamic capability registrations, logging any that fail.
fn register_capabilities(&mut self, registrations: Vec<Registration>) {
    registrations.into_iter().for_each(|registration| {
        if let Err(err) = self.register_capability(registration) {
            tracing::error!("{:?}", err);
        }
    });
}
fn register_capability(&mut self, registration: Registration) -> Result<()> {
match registration.method.as_str() {
DidSaveTextDocument::METHOD => {
let options = registration
.register_options
.ok_or_else(|| anyhow!("don't have options"))?;
let options: TextDocumentSaveRegistrationOptions =
serde_json::from_value(options)?;
self.server_registrations.save = Some(SaveRegistration {
include_text: options.include_text.unwrap_or(false),
filters: options
.text_document_registration_options
.document_selector
.map(|s| {
s.iter()
.map(DocumentFilter::from_lsp_filter_loose)
.collect()
})
.unwrap_or_default(),
});
}
_ => {
eprintln!(
"don't handle register capability for {}",
registration.method
);
}
}
Ok(())
}
/// Entry point for a request from the host side; on failure the error is
/// reported back through `resp` with code 0.
pub fn handle_request(
    &mut self,
    _id: Id,
    method: String,
    params: Params,
    resp: ResponseSender,
) {
    let sender = resp.clone();
    if let Err(err) = self.process_request(method, params, sender) {
        resp.send_err(0, err.to_string());
    }
}
pub fn process_request(
&mut self,
method: String,
params: Params,
resp: ResponseSender,
) -> Result<()> {
match method.as_str() {
WorkDoneProgressCreate::METHOD => {
resp.send_null();
}
RegisterCapability::METHOD => {
let params: RegistrationParams =
serde_json::from_value(serde_json::to_value(params)?)?;
self.register_capabilities(params.registrations);
resp.send_null();
}
ExecuteProcess::METHOD => {
let params: ExecuteProcessParams =
serde_json::from_value(serde_json::to_value(params)?)?;
let output = std::process::Command::new(params.program)
.args(params.args)
.output()?;
resp.send(ExecuteProcessResult {
success: output.status.success(),
stdout: Some(output.stdout),
stderr: Some(output.stderr),
});
}
RegisterDebuggerType::METHOD => {
let params: RegisterDebuggerTypeParams =
serde_json::from_value(serde_json::to_value(params)?)?;
| rust | Apache-2.0 | 59ce6df700ce4efbe4498a719fe52195e083d2ee | 2026-01-04T15:32:17.267102Z | true |
#[cfg(target_os = "windows")]
use std::os::windows::process::CommandExt;
use std::{
io::{BufRead, BufReader, BufWriter, Write},
path::{Path, PathBuf},
process::{self, Child, Command, Stdio},
sync::Arc,
thread,
};
use anyhow::{Result, anyhow};
use jsonrpc_lite::{Id, Params};
use lapce_core::meta;
use lapce_rpc::{
RpcError,
plugin::{PluginId, VoltID},
style::LineStyle,
};
use lapce_xi_rope::Rope;
use lsp_types::{
notification::{Initialized, Notification},
request::{Initialize, Request},
*,
};
use parking_lot::Mutex;
use serde_json::Value;
use super::{
client_capabilities,
psp::{
PluginHandlerNotification, PluginHostHandler, PluginServerHandler,
PluginServerRpcHandler, ResponseSender, RpcCallback,
handle_plugin_server_message,
},
};
use crate::{buffer::Buffer, plugin::PluginCatalogRpcHandler};
// LSP framing header names, lowercased to match the normalization done in
// `parse_header`.
const HEADER_CONTENT_LENGTH: &str = "content-length";
const HEADER_CONTENT_TYPE: &str = "content-type";
/// A JSON-RPC message exchanged with the language-server process.
pub enum LspRpc {
    /// A request from us that expects a response with the same `id`.
    Request {
        id: u64,
        method: String,
        params: Params,
    },
    /// A one-way notification; no response is expected.
    Notification {
        method: String,
        params: Params,
    },
    /// A successful response to a previously sent request.
    Response {
        id: u64,
        result: Value,
    },
    /// An error response to a previously sent request.
    Error {
        id: u64,
        error: RpcError,
    },
}
/// State for a single spawned language-server process.
pub struct LspClient {
    /// Handle back into the plugin catalog.
    plugin_rpc: PluginCatalogRpcHandler,
    /// RPC handler used to talk to this specific server.
    server_rpc: PluginServerRpcHandler,
    /// The spawned language-server child process.
    process: Child,
    /// Workspace root the server was started for, if any.
    workspace: Option<PathBuf>,
    /// Host-side handling of requests/notifications from the server.
    host: PluginHostHandler,
    /// `initializationOptions` forwarded in the `initialize` request.
    options: Option<Value>,
}
/// All methods here simply delegate to the inner `PluginHostHandler`,
/// except the lifecycle notifications which drive this client directly.
impl PluginServerHandler for LspClient {
    /// Whether the server declared (statically or dynamically) that it
    /// handles `method`.
    fn method_registered(&mut self, method: &str) -> bool {
        self.host.method_registered(method)
    }
    /// Whether this server is responsible for documents with the given
    /// language id / path.
    fn document_supported(
        &mut self,
        // NOTE(review): "lanaguage_id" is a typo; it matches the trait
        // declaration elsewhere, so fix it there first.
        lanaguage_id: Option<&str>,
        path: Option<&Path>,
    ) -> bool {
        self.host.document_supported(lanaguage_id, path)
    }
    /// React to lifecycle events from the RPC handler.
    fn handle_handler_notification(
        &mut self,
        notification: PluginHandlerNotification,
    ) {
        use PluginHandlerNotification::*;
        match notification {
            Initialize => {
                self.initialize();
            }
            InitializeResult(result) => {
                self.host.server_capabilities = result.capabilities;
            }
            Shutdown => {
                self.shutdown();
            }
            SpawnedPluginLoaded { .. } => {}
        }
    }
    fn handle_host_request(
        &mut self,
        id: Id,
        method: String,
        params: Params,
        resp: ResponseSender,
    ) {
        self.host.handle_request(id, method, params, resp);
    }
    /// Notifications from the server; failures are logged, not propagated.
    fn handle_host_notification(
        &mut self,
        method: String,
        params: Params,
        from: String,
    ) {
        if let Err(err) = self.host.handle_notification(method, params, from) {
            tracing::error!("{:?}", err);
        }
    }
    fn handle_did_save_text_document(
        &self,
        language_id: String,
        path: PathBuf,
        text_document: TextDocumentIdentifier,
        text: lapce_xi_rope::Rope,
    ) {
        self.host.handle_did_save_text_document(
            language_id,
            path,
            text_document,
            text,
        );
    }
    fn handle_did_change_text_document(
        &mut self,
        language_id: String,
        document: lsp_types::VersionedTextDocumentIdentifier,
        delta: lapce_xi_rope::RopeDelta,
        text: lapce_xi_rope::Rope,
        new_text: lapce_xi_rope::Rope,
        change: Arc<
            Mutex<(
                Option<TextDocumentContentChangeEvent>,
                Option<TextDocumentContentChangeEvent>,
            )>,
        >,
    ) {
        self.host.handle_did_change_text_document(
            language_id,
            document,
            delta,
            text,
            new_text,
            change,
        );
    }
    fn format_semantic_tokens(
        &self,
        tokens: SemanticTokens,
        text: Rope,
        f: Box<dyn RpcCallback<Vec<LineStyle>, RpcError>>,
    ) {
        self.host.format_semantic_tokens(tokens, text, f);
    }
}
impl LspClient {
#[allow(clippy::too_many_arguments)]
fn new(
plugin_rpc: PluginCatalogRpcHandler,
document_selector: DocumentSelector,
workspace: Option<PathBuf>,
volt_id: VoltID,
volt_display_name: String,
spawned_by: Option<PluginId>,
plugin_id: Option<PluginId>,
pwd: Option<PathBuf>,
server_uri: Url,
args: Vec<String>,
options: Option<Value>,
) -> Result<Self> {
let server = match server_uri.scheme() {
"file" => {
let path = server_uri.to_file_path().map_err(|_| anyhow!(""))?;
#[cfg(unix)]
if let Err(err) = std::process::Command::new("chmod")
.arg("+x")
.arg(&path)
.output()
{
tracing::error!("{:?}", err);
}
path.to_str().ok_or_else(|| anyhow!(""))?.to_string()
}
"urn" => server_uri.path().to_string(),
_ => return Err(anyhow!("uri not supported")),
};
let mut process = Self::process(workspace.as_ref(), &server, &args)?;
let stdin = process.stdin.take().unwrap();
let stdout = process.stdout.take().unwrap();
let stderr = process.stderr.take().unwrap();
let mut writer = Box::new(BufWriter::new(stdin));
let (io_tx, io_rx) = crossbeam_channel::unbounded();
let server_rpc = PluginServerRpcHandler::new(
volt_id.clone(),
spawned_by,
plugin_id,
io_tx.clone(),
);
thread::spawn(move || {
for msg in io_rx {
if msg
.get_method()
.map(|x| x == lsp_types::request::Shutdown::METHOD)
.unwrap_or_default()
{
break;
}
if let Ok(msg) = serde_json::to_string(&msg) {
tracing::debug!("write to lsp: {}", msg);
let msg =
format!("Content-Length: {}\r\n\r\n{}", msg.len(), msg);
if let Err(err) = writer.write(msg.as_bytes()) {
tracing::error!("{:?}", err);
}
if let Err(err) = writer.flush() {
tracing::error!("{:?}", err);
}
}
}
});
let local_server_rpc = server_rpc.clone();
let core_rpc = plugin_rpc.core_rpc.clone();
let volt_id_closure = volt_id.clone();
let name = volt_display_name.clone();
thread::spawn(move || {
let mut reader = Box::new(BufReader::new(stdout));
loop {
match read_message(&mut reader) {
Ok(message_str) => {
if !message_str.contains("$/progress") {
tracing::debug!("read from lsp: {}", message_str);
}
if let Some(resp) = handle_plugin_server_message(
&local_server_rpc,
&message_str,
&name,
) {
if let Err(err) = io_tx.send(resp) {
tracing::error!("{:?}", err);
}
}
}
Err(_err) => {
core_rpc.log(
lapce_rpc::core::LogLevel::Error,
format!("lsp server {server} stopped!"),
Some(format!(
"lapce_proxy::plugin::lsp::{}::{}::stopped",
volt_id_closure.author, volt_id_closure.name
)),
);
return;
}
};
}
});
let core_rpc = plugin_rpc.core_rpc.clone();
let volt_id_closure = volt_id.clone();
thread::spawn(move || {
let mut reader = Box::new(BufReader::new(stderr));
loop {
let mut line = String::new();
match reader.read_line(&mut line) {
Ok(n) => {
if n == 0 {
return;
}
core_rpc.log(
lapce_rpc::core::LogLevel::Trace,
line.trim_end().to_string(),
Some(format!(
"lapce_proxy::plugin::lsp::{}::{}::stderr",
volt_id_closure.author, volt_id_closure.name
)),
);
}
Err(_) => {
return;
}
}
}
});
let host = PluginHostHandler::new(
workspace.clone(),
pwd,
volt_id,
volt_display_name,
document_selector,
plugin_rpc.core_rpc.clone(),
server_rpc.clone(),
plugin_rpc.clone(),
);
Ok(Self {
plugin_rpc,
server_rpc,
process,
workspace,
host,
options,
})
}
#[allow(clippy::too_many_arguments)]
pub fn start(
plugin_rpc: PluginCatalogRpcHandler,
document_selector: DocumentSelector,
workspace: Option<PathBuf>,
volt_id: VoltID,
volt_display_name: String,
spawned_by: Option<PluginId>,
plugin_id: Option<PluginId>,
pwd: Option<PathBuf>,
server_uri: Url,
args: Vec<String>,
options: Option<Value>,
) -> Result<PluginId> {
let mut lsp = Self::new(
plugin_rpc,
document_selector,
workspace,
volt_id,
volt_display_name,
spawned_by,
plugin_id,
pwd,
server_uri,
args,
options,
)?;
let plugin_id = lsp.server_rpc.plugin_id;
let rpc = lsp.server_rpc.clone();
thread::spawn(move || {
rpc.mainloop(&mut lsp);
});
Ok(plugin_id)
}
fn initialize(&mut self) {
let root_uri = self
.workspace
.clone()
.map(|p| Url::from_directory_path(p).unwrap());
tracing::debug!("initialization_options {:?}", self.options);
#[allow(deprecated)]
let params = InitializeParams {
process_id: Some(process::id()),
root_uri: root_uri.clone(),
initialization_options: self.options.clone(),
capabilities: client_capabilities(),
trace: Some(TraceValue::Verbose),
workspace_folders: root_uri.map(|uri| {
vec![WorkspaceFolder {
name: uri.as_str().to_string(),
uri,
}]
}),
client_info: Some(ClientInfo {
name: meta::NAME.to_owned(),
version: Some(meta::VERSION.to_owned()),
}),
locale: None,
root_path: None,
work_done_progress_params: WorkDoneProgressParams::default(),
};
match self.server_rpc.server_request(
Initialize::METHOD,
params,
None,
None,
false,
) {
Ok(value) => {
let result: InitializeResult =
serde_json::from_value(value).unwrap();
self.host.server_capabilities = result.capabilities;
self.server_rpc.server_notification(
Initialized::METHOD,
InitializedParams {},
None,
None,
false,
);
if self
.plugin_rpc
.plugin_server_loaded(self.server_rpc.clone())
.is_err()
{
self.server_rpc.shutdown();
self.shutdown();
}
}
Err(err) => {
tracing::error!("{:?}", err);
}
}
// move |result| {
// if let Ok(value) = result {
// let result: InitializeResult =
// serde_json::from_value(value).unwrap();
// server_rpc.handle_rpc(PluginServerRpc::Handler(
// PluginHandlerNotification::InitializeDone(result),
// ));
// }
// },
// );
}
fn shutdown(&mut self) {
if let Err(err) = self.process.kill() {
tracing::error!("{:?}", err);
}
if let Err(err) = self.process.wait() {
tracing::error!("{:?}", err);
}
}
fn process(
workspace: Option<&PathBuf>,
server: &str,
args: &[String],
) -> Result<Child> {
let mut process = Command::new(server);
if let Some(workspace) = workspace {
process.current_dir(workspace);
}
process.args(args);
#[cfg(target_os = "windows")]
let process = process.creation_flags(0x08000000);
let child = process
.stdin(Stdio::piped())
.stdout(Stdio::piped())
.stderr(Stdio::piped())
.spawn()?;
Ok(child)
}
}
/// A filter describing which documents a dynamically registered capability
/// applies to.
pub struct DocumentFilter {
    /// The document must have this language id, if it exists
    pub language_id: Option<String>,
    /// The document's path must match this glob, if it exists
    pub pattern: Option<globset::GlobMatcher>,
    // TODO: URI Scheme from lsp-types document filter
}
impl DocumentFilter {
    /// Builds a [`DocumentFilter`] from the LSP variant, silently dropping
    /// any field that is badly constructed (e.g. an invalid glob pattern).
    pub(crate) fn from_lsp_filter_loose(
        filter: &lsp_types::DocumentFilter,
    ) -> DocumentFilter {
        let pattern = filter
            .pattern
            .as_deref()
            .and_then(|p| globset::Glob::new(p).ok())
            .map(|glob| glob.compile_matcher());
        DocumentFilter {
            language_id: filter.language.clone(),
            pattern,
        }
    }
}
/// A parsed LSP framing header line.
pub enum LspHeader {
    /// `Content-Type` — recognised but otherwise ignored.
    ContentType,
    /// `Content-Length` — the size of the message body in bytes.
    ContentLength(usize),
}
/// Parse one LSP framing header line (e.g. `Content-Length: 123`).
///
/// Both the header name and value are trimmed and lowercased before
/// matching; only `content-type` and `content-length` are recognised.
fn parse_header(s: &str) -> Result<LspHeader> {
    let mut parts = s.splitn(2, ": ").map(|part| part.trim().to_lowercase());
    let (name, value) = match (parts.next(), parts.next()) {
        (Some(name), Some(value)) => (name, value),
        _ => return Err(anyhow!("Malformed")),
    };
    match name.as_str() {
        HEADER_CONTENT_TYPE => Ok(LspHeader::ContentType),
        HEADER_CONTENT_LENGTH => {
            Ok(LspHeader::ContentLength(value.parse::<usize>()?))
        }
        _ => Err(anyhow!("Unknown parse error occurred")),
    }
}
/// Read one framed LSP message from `reader`.
///
/// Consumes header lines until the blank separator line, then reads exactly
/// `Content-Length` bytes and returns them as UTF-8 text.
pub fn read_message<T: BufRead>(reader: &mut T) -> Result<String> {
    let mut buffer = String::new();
    let mut content_length: Option<usize> = None;
    // Header section: one header per line, terminated by an empty line.
    loop {
        buffer.clear();
        let _ = reader.read_line(&mut buffer)?;
        if buffer.trim().is_empty() {
            break;
        }
        if let LspHeader::ContentLength(len) = parse_header(&buffer)? {
            content_length = Some(len);
        }
    }
    let content_length = content_length
        .ok_or_else(|| anyhow!("missing content-length header: {}", buffer))?;
    let mut body_buffer = vec![0; content_length];
    reader.read_exact(&mut body_buffer)?;
    Ok(String::from_utf8(body_buffer)?)
}
/// Translate a single incremental change into what the server's declared
/// sync kind expects: `None` for no sync, the whole document for full
/// sync, or the incremental change as-is.
pub fn get_change_for_sync_kind(
    sync_kind: TextDocumentSyncKind,
    buffer: &Buffer,
    content_change: &TextDocumentContentChangeEvent,
) -> Option<Vec<TextDocumentContentChangeEvent>> {
    if sync_kind == TextDocumentSyncKind::NONE {
        return None;
    }
    if sync_kind == TextDocumentSyncKind::FULL {
        // Full sync: send the entire document text with no range.
        return Some(vec![TextDocumentContentChangeEvent {
            range: None,
            range_length: None,
            text: buffer.get_document(),
        }]);
    }
    if sync_kind == TextDocumentSyncKind::INCREMENTAL {
        return Some(vec![content_change.clone()]);
    }
    // Any other (unknown) sync kind is treated like NONE.
    None
}
| rust | Apache-2.0 | 59ce6df700ce4efbe4498a719fe52195e083d2ee | 2026-01-04T15:32:17.267102Z | false |
pub mod catalog;
pub mod dap;
pub mod lsp;
pub mod psp;
pub mod wasi;
use std::{
borrow::Cow,
collections::HashMap,
fs,
path::{Path, PathBuf},
sync::{
Arc,
atomic::{AtomicBool, AtomicU64, AtomicUsize, Ordering},
},
time::Duration,
};
use anyhow::{Result, anyhow};
use crossbeam_channel::{Receiver, Sender};
use dyn_clone::DynClone;
use flate2::read::GzDecoder;
use lapce_core::directory::Directory;
use lapce_rpc::{
RequestId, RpcError,
core::CoreRpcHandler,
dap_types::{self, DapId, RunDebugConfig, SourceBreakpoint, ThreadId},
plugin::{PluginId, VoltInfo, VoltMetadata},
proxy::ProxyRpcHandler,
style::LineStyle,
terminal::TermId,
};
use lapce_xi_rope::{Rope, RopeDelta};
use lsp_types::{
CallHierarchyClientCapabilities, CallHierarchyIncomingCall,
CallHierarchyIncomingCallsParams, CallHierarchyItem, CallHierarchyPrepareParams,
ClientCapabilities, CodeAction, CodeActionCapabilityResolveSupport,
CodeActionClientCapabilities, CodeActionContext, CodeActionKind,
CodeActionKindLiteralSupport, CodeActionLiteralSupport, CodeActionParams,
CodeActionResponse, CodeLens, CodeLensParams, CompletionClientCapabilities,
CompletionItem, CompletionItemCapability,
CompletionItemCapabilityResolveSupport, CompletionParams, CompletionResponse,
Diagnostic, DocumentFormattingParams, DocumentSymbolClientCapabilities,
DocumentSymbolParams, DocumentSymbolResponse, FoldingRange,
FoldingRangeClientCapabilities, FoldingRangeParams, FormattingOptions,
GotoCapability, GotoDefinitionParams, GotoDefinitionResponse, Hover,
HoverClientCapabilities, HoverParams, InlayHint, InlayHintClientCapabilities,
InlayHintParams, InlineCompletionClientCapabilities, InlineCompletionParams,
InlineCompletionResponse, InlineCompletionTriggerKind, Location, MarkupKind,
MessageActionItemCapabilities, ParameterInformationSettings,
PartialResultParams, Position, PrepareRenameResponse,
PublishDiagnosticsClientCapabilities, Range, ReferenceContext, ReferenceParams,
RenameParams, SelectionRange, SelectionRangeParams, SemanticTokens,
SemanticTokensClientCapabilities, SemanticTokensParams,
ShowMessageRequestClientCapabilities, SignatureHelp,
SignatureHelpClientCapabilities, SignatureHelpParams,
SignatureInformationSettings, SymbolInformation, TextDocumentClientCapabilities,
TextDocumentIdentifier, TextDocumentItem, TextDocumentPositionParams,
TextDocumentSyncClientCapabilities, TextEdit, Url,
VersionedTextDocumentIdentifier, WindowClientCapabilities,
WorkDoneProgressParams, WorkspaceClientCapabilities, WorkspaceEdit,
WorkspaceSymbolClientCapabilities, WorkspaceSymbolParams,
request::{
CallHierarchyIncomingCalls, CallHierarchyPrepare, CodeActionRequest,
CodeActionResolveRequest, CodeLensRequest, CodeLensResolve, Completion,
DocumentSymbolRequest, FoldingRangeRequest, Formatting, GotoDefinition,
GotoImplementation, GotoImplementationResponse, GotoTypeDefinition,
GotoTypeDefinitionParams, GotoTypeDefinitionResponse, HoverRequest,
InlayHintRequest, InlineCompletionRequest, PrepareRenameRequest, References,
Rename, Request, ResolveCompletionItem, SelectionRangeRequest,
SemanticTokensFullRequest, SignatureHelpRequest, WorkspaceSymbolRequest,
},
};
use parking_lot::Mutex;
use serde::{Deserialize, Serialize, de::DeserializeOwned};
use serde_json::{Map, Value};
use tar::Archive;
use tracing::error;
use self::{
catalog::PluginCatalog,
dap::DapRpcHandler,
psp::{ClonableCallback, PluginServerRpcHandler, RpcCallback},
wasi::{load_volt, start_volt},
};
use crate::buffer::language_id_from_path;
/// Alias for a plugin's name.
pub type PluginName = String;
/// Messages processed by the plugin catalog's mainloop
/// (see `PluginCatalogRpcHandler::mainloop`).
#[allow(clippy::large_enum_variant)]
pub enum PluginCatalogRpc {
    /// Dispatch a request to one plugin (`plugin_id` set) or to every
    /// matching plugin (`plugin_id == None`); `f` receives each response.
    ServerRequest {
        plugin_id: Option<PluginId>,
        request_sent: Option<Arc<AtomicUsize>>,
        method: Cow<'static, str>,
        params: Value,
        language_id: Option<String>,
        path: Option<PathBuf>,
        check: bool,
        f: Box<dyn ClonableCallback<Value, RpcError>>,
    },
    /// Dispatch a one-way notification; no response expected.
    ServerNotification {
        plugin_id: Option<PluginId>,
        method: Cow<'static, str>,
        params: Value,
        language_id: Option<String>,
        path: Option<PathBuf>,
        check: bool,
    },
    /// Convert raw semantic tokens into line styles via the named plugin.
    FormatSemanticTokens {
        plugin_id: PluginId,
        tokens: SemanticTokens,
        text: Rope,
        f: Box<dyn RpcCallback<Vec<LineStyle>, RpcError>>,
    },
    /// Fetch DAP child variables for a variables reference.
    DapVariable {
        dap_id: DapId,
        reference: usize,
        f: Box<dyn RpcCallback<Vec<dap_types::Variable>, RpcError>>,
    },
    /// Fetch DAP scopes (with their variables) for a stack frame.
    DapGetScopes {
        dap_id: DapId,
        frame_id: usize,
        f: Box<
            dyn RpcCallback<
                Vec<(dap_types::Scope, Vec<dap_types::Variable>)>,
                RpcError,
            >,
        >,
    },
    /// Document lifecycle events fanned out to interested plugins.
    DidOpenTextDocument {
        document: TextDocumentItem,
    },
    DidChangeTextDocument {
        language_id: String,
        document: VersionedTextDocumentIdentifier,
        delta: RopeDelta,
        text: Rope,
        new_text: Rope,
    },
    DidSaveTextDocument {
        language_id: String,
        path: PathBuf,
        text_document: TextDocumentIdentifier,
        text: Rope,
    },
    /// Wrapper for catalog-level notifications.
    Handler(PluginCatalogNotification),
    /// Shut a volt down; `f` is called when it has stopped.
    RemoveVolt {
        volt: VoltInfo,
        f: Box<dyn ClonableCallback<Value, RpcError>>,
    },
    /// Terminate the catalog mainloop.
    Shutdown,
}
/// Catalog-level notifications: plugin lifecycle management and debug
/// adapter (DAP) control messages.
#[allow(clippy::large_enum_variant)]
pub enum PluginCatalogNotification {
    /// New per-plugin configuration values, keyed by plugin then setting.
    UpdatePluginConfigs(HashMap<String, HashMap<String, serde_json::Value>>),
    /// Volts that are installed but not yet activated.
    UnactivatedVolts(Vec<VoltMetadata>),
    /// A plugin server finished its handshake and is ready for traffic.
    PluginServerLoaded(PluginServerRpcHandler),
    InstallVolt(VoltInfo),
    StopVolt(VoltInfo),
    EnableVolt(VoltInfo),
    ReloadVolt(VoltMetadata),
    /// A debug adapter connection became available / went away.
    DapLoaded(DapRpcHandler),
    DapDisconnected(DapId),
    /// Launch a debug session with the given config and breakpoints.
    DapStart {
        config: RunDebugConfig,
        breakpoints: HashMap<PathBuf, Vec<SourceBreakpoint>>,
    },
    /// The debuggee's process id became known for a terminal session.
    DapProcessId {
        dap_id: DapId,
        process_id: Option<u32>,
        term_id: TermId,
    },
    // Per-thread execution control for a running debug session.
    DapContinue {
        dap_id: DapId,
        thread_id: ThreadId,
    },
    DapStepOver {
        dap_id: DapId,
        thread_id: ThreadId,
    },
    DapStepInto {
        dap_id: DapId,
        thread_id: ThreadId,
    },
    DapStepOut {
        dap_id: DapId,
        thread_id: ThreadId,
    },
    DapPause {
        dap_id: DapId,
        thread_id: ThreadId,
    },
    DapStop {
        dap_id: DapId,
    },
    DapDisconnect {
        dap_id: DapId,
    },
    /// Restart the session, re-applying the given breakpoints.
    DapRestart {
        dap_id: DapId,
        breakpoints: HashMap<PathBuf, Vec<SourceBreakpoint>>,
    },
    /// Replace the breakpoints for one source file.
    DapSetBreakpoints {
        dap_id: DapId,
        path: PathBuf,
        breakpoints: Vec<SourceBreakpoint>,
    },
    /// Associate a debugger binary (and default args) with a debugger type.
    RegisterDebuggerType {
        debugger_type: String,
        program: String,
        args: Option<Vec<String>>,
    },
    /// Shut down all plugins managed by the catalog.
    Shutdown,
}
/// Cloneable handle for submitting work to the plugin catalog thread.
#[derive(Clone)]
pub struct PluginCatalogRpcHandler {
    /// Handle to the core RPC side (used e.g. for logging).
    core_rpc: CoreRpcHandler,
    /// Handle to the proxy RPC side.
    proxy_rpc: ProxyRpcHandler,
    /// Sender feeding the catalog mainloop.
    plugin_tx: Sender<PluginCatalogRpc>,
    /// Receiver side, held here until `mainloop` takes it.
    plugin_rx: Arc<Mutex<Option<Receiver<PluginCatalogRpc>>>>,
    /// Request-id counter (currently unused).
    #[allow(dead_code)]
    id: Arc<AtomicU64>,
    /// In-flight request channels keyed by id (currently unused).
    #[allow(dead_code, clippy::type_complexity)]
    pending: Arc<Mutex<HashMap<u64, Sender<Result<Value, RpcError>>>>>,
}
impl PluginCatalogRpcHandler {
/// Create a handler wired to a fresh unbounded channel; the receiver is
/// stashed until `mainloop` takes it.
pub fn new(core_rpc: CoreRpcHandler, proxy_rpc: ProxyRpcHandler) -> Self {
    let (tx, rx) = crossbeam_channel::unbounded();
    Self {
        core_rpc,
        proxy_rpc,
        plugin_tx: tx,
        plugin_rx: Arc::new(Mutex::new(Some(rx))),
        id: Arc::new(AtomicU64::new(0)),
        pending: Arc::new(Mutex::new(HashMap::new())),
    }
}
/// Route a response to the channel waiting on `id`, if any; the lock is
/// released before sending.
#[allow(dead_code)]
fn handle_response(&self, id: RequestId, result: Result<Value, RpcError>) {
    let chan = self.pending.lock().remove(&id);
    if let Some(chan) = chan {
        if let Err(err) = chan.send(result) {
            tracing::error!("{:?}", err);
        }
    }
}
/// Drive the plugin catalog: take the stashed receiver and dispatch every
/// [`PluginCatalogRpc`] message to `plugin` until `Shutdown` is received
/// or all senders are dropped.
///
/// Must only be called once — the receiver is `take`n out of the shared
/// slot and a second call would panic on the `unwrap`.
pub fn mainloop(&self, plugin: &mut PluginCatalog) {
    let plugin_rx = self.plugin_rx.lock().take().unwrap();
    for msg in plugin_rx {
        match msg {
            PluginCatalogRpc::ServerRequest {
                plugin_id,
                request_sent,
                method,
                params,
                language_id,
                path,
                check,
                f,
            } => {
                plugin.handle_server_request(
                    plugin_id,
                    request_sent,
                    method,
                    params,
                    language_id,
                    path,
                    check,
                    f,
                );
            }
            PluginCatalogRpc::ServerNotification {
                plugin_id,
                method,
                params,
                language_id,
                path,
                check,
            } => {
                plugin.handle_server_notification(
                    plugin_id,
                    method,
                    params,
                    language_id,
                    path,
                    check,
                );
            }
            PluginCatalogRpc::Handler(notification) => {
                plugin.handle_notification(notification);
            }
            PluginCatalogRpc::FormatSemanticTokens {
                plugin_id,
                tokens,
                text,
                f,
            } => {
                plugin.format_semantic_tokens(plugin_id, tokens, text, f);
            }
            PluginCatalogRpc::DidOpenTextDocument { document } => {
                plugin.handle_did_open_text_document(document);
            }
            PluginCatalogRpc::DidSaveTextDocument {
                language_id,
                path,
                text_document,
                text,
            } => {
                plugin.handle_did_save_text_document(
                    language_id,
                    path,
                    text_document,
                    text,
                );
            }
            PluginCatalogRpc::DidChangeTextDocument {
                language_id,
                document,
                delta,
                text,
                new_text,
            } => {
                plugin.handle_did_change_text_document(
                    language_id,
                    document,
                    delta,
                    text,
                    new_text,
                );
            }
            PluginCatalogRpc::DapVariable {
                dap_id,
                reference,
                f,
            } => {
                plugin.dap_variable(dap_id, reference, f);
            }
            PluginCatalogRpc::DapGetScopes {
                dap_id,
                frame_id,
                f,
            } => {
                plugin.dap_get_scopes(dap_id, frame_id, f);
            }
            PluginCatalogRpc::Shutdown => {
                // Exit the loop; remaining queued messages are dropped.
                return;
            }
            PluginCatalogRpc::RemoveVolt { volt, f } => {
                plugin.shutdown_volt(volt, f);
            }
        }
    }
}
/// Shut down the catalog: first ask the handler to stop its plugins, then
/// stop the catalog mainloop itself. Send failures are only logged.
pub fn shutdown(&self) {
    let notify = self.catalog_notification(PluginCatalogNotification::Shutdown);
    if let Err(err) = notify {
        tracing::error!("{:?}", err);
    }
    let stop = self.plugin_tx.send(PluginCatalogRpc::Shutdown);
    if let Err(err) = stop {
        tracing::error!("{:?}", err);
    }
}
/// Forward a catalog notification to the mainloop, converting a channel
/// send failure into an `anyhow` error.
fn catalog_notification(
    &self,
    notification: PluginCatalogNotification,
) -> Result<()> {
    match self.plugin_tx.send(PluginCatalogRpc::Handler(notification)) {
        Ok(()) => Ok(()),
        Err(e) => Err(anyhow!(e.to_string())),
    }
}
/// Broadcast a request to every plugin that can handle `method` for this
/// document, invoking `cb` once overall: with the first successful,
/// deserializable response, or with the final error once every dispatched
/// request has come back as an error.
fn send_request_to_all_plugins<P, Resp>(
    &self,
    method: &'static str,
    params: P,
    language_id: Option<String>,
    path: Option<PathBuf>,
    cb: impl FnOnce(PluginId, Result<Resp, RpcError>) + Clone + Send + 'static,
) where
    P: Serialize,
    Resp: DeserializeOwned,
{
    // Set once the first plugin answers successfully; later responses
    // are then ignored.
    let got_success = Arc::new(AtomicBool::new(false));
    // Number of plugins the request was actually dispatched to (filled
    // in by the catalog while dispatching).
    let request_sent = Arc::new(AtomicUsize::new(0));
    // Number of error responses received so far.
    let err_received = Arc::new(AtomicUsize::new(0));
    self.send_request(
        None,
        Some(request_sent.clone()),
        method,
        params,
        language_id,
        path,
        true,
        move |plugin_id, result| {
            if got_success.load(Ordering::Acquire) {
                return;
            }
            let result = match result {
                Ok(value) => {
                    if let Ok(item) = serde_json::from_value::<Resp>(value) {
                        got_success.store(true, Ordering::Release);
                        Ok(item)
                    } else {
                        Err(RpcError {
                            code: 0,
                            message: "deserialize error".to_string(),
                        })
                    }
                }
                Err(e) => Err(e),
            };
            if result.is_ok() {
                cb(plugin_id, result)
            } else {
                // Only surface the error to `cb` when every dispatched
                // request has errored out.
                let rx = err_received.fetch_add(1, Ordering::Relaxed) + 1;
                if request_sent.load(Ordering::Acquire) == rx {
                    cb(plugin_id, result)
                }
            }
        },
    );
}
/// Queue a server request for the catalog thread to dispatch.
///
/// `plugin_id == None` broadcasts to all matching plugins; `request_sent`,
/// when provided, is updated by the catalog with the number of plugins the
/// request was actually dispatched to. `f` is invoked with each plugin's
/// response.
#[allow(clippy::too_many_arguments)]
pub(crate) fn send_request<P: Serialize>(
    &self,
    plugin_id: Option<PluginId>,
    request_sent: Option<Arc<AtomicUsize>>,
    method: impl Into<Cow<'static, str>>,
    params: P,
    language_id: Option<String>,
    path: Option<PathBuf>,
    check: bool,
    f: impl FnOnce(PluginId, Result<Value, RpcError>) + Send + DynClone + 'static,
) {
    // NOTE(review): panics if `params` fails to serialize — presumably
    // acceptable because callers pass serde-derived LSP types; confirm.
    let params = serde_json::to_value(params).unwrap();
    let rpc = PluginCatalogRpc::ServerRequest {
        plugin_id,
        request_sent,
        method: method.into(),
        params,
        language_id,
        path,
        check,
        f: Box::new(f),
    };
    if let Err(err) = self.plugin_tx.send(rpc) {
        tracing::error!("{:?}", err);
    }
}
/// Queue a server notification for the catalog thread to dispatch;
/// `plugin_id == None` broadcasts to all matching plugins.
#[allow(clippy::too_many_arguments)]
pub(crate) fn send_notification<P: Serialize>(
    &self,
    plugin_id: Option<PluginId>,
    method: impl Into<Cow<'static, str>>,
    params: P,
    language_id: Option<String>,
    path: Option<PathBuf>,
    check: bool,
) {
    // Serialization is assumed infallible, matching `send_request`.
    let params = serde_json::to_value(params).unwrap();
    let msg = PluginCatalogRpc::ServerNotification {
        plugin_id,
        method: method.into(),
        params,
        language_id,
        path,
        check,
    };
    if let Err(err) = self.plugin_tx.send(msg) {
        tracing::error!("{:?}", err);
    }
}
/// Ask the catalog to convert `tokens` to line styles via the plugin
/// identified by `plugin_id`; `f` receives the result.
pub fn format_semantic_tokens(
    &self,
    plugin_id: PluginId,
    tokens: SemanticTokens,
    text: Rope,
    f: Box<dyn RpcCallback<Vec<LineStyle>, RpcError>>,
) {
    let msg = PluginCatalogRpc::FormatSemanticTokens {
        plugin_id,
        tokens,
        text,
        f,
    };
    if let Err(err) = self.plugin_tx.send(msg) {
        tracing::error!("{:?}", err);
    }
}
/// Notify plugins that the document at `path` was saved with contents
/// `text`.
pub fn did_save_text_document(&self, path: &Path, text: Rope) {
    // NOTE(review): `from_file_path` fails (and this unwraps) for relative
    // paths — callers appear to always pass absolute paths; confirm.
    let text_document =
        TextDocumentIdentifier::new(Url::from_file_path(path).unwrap());
    let language_id = language_id_from_path(path).unwrap_or("").to_string();
    if let Err(err) =
        self.plugin_tx.send(PluginCatalogRpc::DidSaveTextDocument {
            language_id,
            text_document,
            path: path.into(),
            text,
        })
    {
        tracing::error!("{:?}", err);
    }
}
/// Notify plugins that the document at `path` changed at revision `rev`.
/// Both the delta and the before/after texts are forwarded so each plugin
/// can build whichever change representation its sync kind needs.
pub fn did_change_text_document(
    &self,
    path: &Path,
    rev: u64,
    delta: RopeDelta,
    text: Rope,
    new_text: Rope,
) {
    // NOTE(review): `rev as i32` truncates beyond i32::MAX and
    // `from_file_path` unwraps on relative paths — confirm both are safe
    // invariants for callers.
    let document = VersionedTextDocumentIdentifier::new(
        Url::from_file_path(path).unwrap(),
        rev as i32,
    );
    let language_id = language_id_from_path(path).unwrap_or("").to_string();
    if let Err(err) =
        self.plugin_tx
            .send(PluginCatalogRpc::DidChangeTextDocument {
                language_id,
                document,
                delta,
                text,
                new_text,
            })
    {
        tracing::error!("{:?}", err);
    }
}
/// Ask every capable plugin for the definition of the symbol at
/// `position` in `path`; `cb` receives the first successful response (or
/// the last error if none succeed).
pub fn get_definition(
    &self,
    path: &Path,
    position: Position,
    cb: impl FnOnce(PluginId, Result<GotoDefinitionResponse, RpcError>)
    + Clone
    + Send
    + 'static,
) {
    let text_document = TextDocumentIdentifier {
        uri: Url::from_file_path(path).unwrap(),
    };
    let params = GotoDefinitionParams {
        text_document_position_params: TextDocumentPositionParams {
            text_document,
            position,
        },
        work_done_progress_params: WorkDoneProgressParams::default(),
        partial_result_params: PartialResultParams::default(),
    };
    let language_id = language_id_from_path(path).unwrap_or("").to_string();
    self.send_request_to_all_plugins(
        GotoDefinition::METHOD,
        params,
        Some(language_id),
        Some(path.to_path_buf()),
        cb,
    );
}
/// Resolve the type definition of the symbol at `position` in `path`,
/// asking every capable plugin and reporting via `cb`.
pub fn get_type_definition(
    &self,
    path: &Path,
    position: Position,
    cb: impl FnOnce(PluginId, Result<GotoTypeDefinitionResponse, RpcError>)
    + Clone
    + Send
    + 'static,
) {
    let text_document = TextDocumentIdentifier {
        uri: Url::from_file_path(path).unwrap(),
    };
    let params = GotoTypeDefinitionParams {
        text_document_position_params: TextDocumentPositionParams {
            text_document,
            position,
        },
        work_done_progress_params: WorkDoneProgressParams::default(),
        partial_result_params: PartialResultParams::default(),
    };
    let language_id = language_id_from_path(path).unwrap_or("").to_string();
    self.send_request_to_all_plugins(
        GotoTypeDefinition::METHOD,
        params,
        Some(language_id),
        Some(path.to_path_buf()),
        cb,
    );
}
/// Fetch incoming calls for an already-prepared call-hierarchy `item`
/// (see `show_call_hierarchy`), reporting via `cb`.
pub fn call_hierarchy_incoming(
    &self,
    path: &Path,
    item: CallHierarchyItem,
    cb: impl FnOnce(
        PluginId,
        Result<Option<Vec<CallHierarchyIncomingCall>>, RpcError>,
    ) + Clone
    + Send
    + 'static,
) {
    let method = CallHierarchyIncomingCalls::METHOD;
    let params = CallHierarchyIncomingCallsParams {
        item,
        work_done_progress_params: WorkDoneProgressParams::default(),
        partial_result_params: Default::default(),
    };
    let language_id =
        Some(language_id_from_path(path).unwrap_or("").to_string());
    self.send_request_to_all_plugins(
        method,
        params,
        language_id,
        Some(path.to_path_buf()),
        cb,
    );
}
/// Prepare a call hierarchy for the symbol at `position` in `path`,
/// reporting the resulting items via `cb`.
pub fn show_call_hierarchy(
    &self,
    path: &Path,
    position: Position,
    cb: impl FnOnce(PluginId, Result<Option<Vec<CallHierarchyItem>>, RpcError>)
    + Clone
    + Send
    + 'static,
) {
    let uri = Url::from_file_path(path).unwrap();
    let method = CallHierarchyPrepare::METHOD;
    let params = CallHierarchyPrepareParams {
        text_document_position_params: TextDocumentPositionParams {
            text_document: TextDocumentIdentifier { uri },
            position,
        },
        work_done_progress_params: WorkDoneProgressParams::default(),
    };
    let language_id =
        Some(language_id_from_path(path).unwrap_or("").to_string());
    self.send_request_to_all_plugins(
        method,
        params,
        language_id,
        Some(path.to_path_buf()),
        cb,
    );
}
/// Collect all references to the symbol at `position` in `path` (the
/// declaration itself is excluded), reporting via `cb`.
pub fn get_references(
    &self,
    path: &Path,
    position: Position,
    cb: impl FnOnce(PluginId, Result<Vec<Location>, RpcError>)
    + Clone
    + Send
    + 'static,
) {
    let params = ReferenceParams {
        text_document_position: TextDocumentPositionParams {
            text_document: TextDocumentIdentifier {
                uri: Url::from_file_path(path).unwrap(),
            },
            position,
        },
        work_done_progress_params: WorkDoneProgressParams::default(),
        partial_result_params: PartialResultParams::default(),
        context: ReferenceContext {
            include_declaration: false,
        },
    };
    let language_id = language_id_from_path(path).unwrap_or("").to_string();
    self.send_request_to_all_plugins(
        References::METHOD,
        params,
        Some(language_id),
        Some(path.to_path_buf()),
        cb,
    );
}
pub fn get_lsp_folding_range(
&self,
path: &Path,
cb: impl FnOnce(
PluginId,
std::result::Result<Option<Vec<FoldingRange>>, RpcError>,
) + Clone
+ Send
+ 'static,
) {
let uri = Url::from_file_path(path).unwrap();
let method = FoldingRangeRequest::METHOD;
let params = FoldingRangeParams {
text_document: TextDocumentIdentifier { uri },
work_done_progress_params: WorkDoneProgressParams::default(),
partial_result_params: PartialResultParams::default(),
};
let language_id =
Some(language_id_from_path(path).unwrap_or("").to_string());
self.send_request_to_all_plugins(
method,
params,
language_id,
Some(path.to_path_buf()),
cb,
);
}
pub fn go_to_implementation(
&self,
path: &Path,
position: Position,
cb: impl FnOnce(PluginId, Result<Option<GotoImplementationResponse>, RpcError>)
+ Clone
+ Send
+ 'static,
) {
let uri = Url::from_file_path(path).unwrap();
let method = GotoImplementation::METHOD;
let params = GotoTypeDefinitionParams {
text_document_position_params: TextDocumentPositionParams {
text_document: TextDocumentIdentifier { uri },
position,
},
work_done_progress_params: WorkDoneProgressParams::default(),
partial_result_params: PartialResultParams::default(),
};
let language_id =
Some(language_id_from_path(path).unwrap_or("").to_string());
self.send_request_to_all_plugins(
method,
params,
language_id,
Some(path.to_path_buf()),
cb,
);
}
pub fn get_code_actions(
&self,
path: &Path,
position: Position,
diagnostics: Vec<Diagnostic>,
cb: impl FnOnce(PluginId, Result<CodeActionResponse, RpcError>)
+ Clone
+ Send
+ 'static,
) {
let uri = Url::from_file_path(path).unwrap();
let method = CodeActionRequest::METHOD;
let params = CodeActionParams {
text_document: TextDocumentIdentifier { uri },
range: Range {
start: position,
end: position,
},
context: CodeActionContext {
diagnostics,
only: None,
trigger_kind: None,
},
work_done_progress_params: WorkDoneProgressParams::default(),
partial_result_params: PartialResultParams::default(),
};
let language_id =
Some(language_id_from_path(path).unwrap_or("").to_string());
self.send_request_to_all_plugins(
method,
params,
language_id,
Some(path.to_path_buf()),
cb,
);
}
pub fn get_code_lens(
&self,
path: &Path,
cb: impl FnOnce(PluginId, Result<Option<Vec<CodeLens>>, RpcError>)
+ Clone
+ Send
+ 'static,
) {
let uri = Url::from_file_path(path).unwrap();
let method = CodeLensRequest::METHOD;
let params = CodeLensParams {
text_document: TextDocumentIdentifier { uri },
work_done_progress_params: Default::default(),
partial_result_params: Default::default(),
};
let language_id =
Some(language_id_from_path(path).unwrap_or("").to_string());
self.send_request_to_all_plugins(
method,
params,
language_id,
Some(path.to_path_buf()),
cb,
);
}
pub fn get_code_lens_resolve(
&self,
path: &Path,
code_lens: &CodeLens,
cb: impl FnOnce(PluginId, Result<CodeLens, RpcError>) + Clone + Send + 'static,
) {
let method = CodeLensResolve::METHOD;
let language_id =
Some(language_id_from_path(path).unwrap_or("").to_string());
self.send_request_to_all_plugins(
method,
code_lens,
language_id,
Some(path.to_path_buf()),
cb,
);
}
pub fn get_inlay_hints(
&self,
path: &Path,
range: Range,
cb: impl FnOnce(PluginId, Result<Vec<InlayHint>, RpcError>)
+ Clone
+ Send
+ 'static,
) {
let uri = Url::from_file_path(path).unwrap();
let method = InlayHintRequest::METHOD;
let params = InlayHintParams {
text_document: TextDocumentIdentifier { uri },
work_done_progress_params: WorkDoneProgressParams::default(),
range,
};
let language_id =
Some(language_id_from_path(path).unwrap_or("").to_string());
self.send_request_to_all_plugins(
method,
params,
language_id,
Some(path.to_path_buf()),
cb,
);
}
pub fn get_inline_completions(
&self,
path: &Path,
position: Position,
trigger_kind: InlineCompletionTriggerKind,
cb: impl FnOnce(PluginId, Result<InlineCompletionResponse, RpcError>)
+ Clone
+ Send
+ 'static,
) {
let uri = Url::from_file_path(path).unwrap();
let method = InlineCompletionRequest::METHOD;
let params = InlineCompletionParams {
text_document_position: TextDocumentPositionParams {
text_document: TextDocumentIdentifier { uri },
position,
},
context: lsp_types::InlineCompletionContext {
trigger_kind,
selected_completion_info: None,
},
work_done_progress_params: WorkDoneProgressParams::default(),
};
let language_id =
Some(language_id_from_path(path).unwrap_or("").to_string());
self.send_request_to_all_plugins(
method,
params,
language_id,
Some(path.to_path_buf()),
cb,
);
}
pub fn get_document_symbols(
&self,
path: &Path,
cb: impl FnOnce(PluginId, Result<DocumentSymbolResponse, RpcError>)
+ Clone
+ Send
+ 'static,
) {
let uri = Url::from_file_path(path).unwrap();
let method = DocumentSymbolRequest::METHOD;
let params = DocumentSymbolParams {
text_document: TextDocumentIdentifier { uri },
work_done_progress_params: WorkDoneProgressParams::default(),
partial_result_params: PartialResultParams::default(),
};
let language_id =
Some(language_id_from_path(path).unwrap_or("").to_string());
self.send_request_to_all_plugins(
method,
params,
language_id,
Some(path.to_path_buf()),
cb,
);
}
pub fn get_workspace_symbols(
&self,
query: String,
cb: impl FnOnce(PluginId, Result<Vec<SymbolInformation>, RpcError>)
+ Clone
+ Send
+ 'static,
) {
let method = WorkspaceSymbolRequest::METHOD;
let params = WorkspaceSymbolParams {
query,
work_done_progress_params: WorkDoneProgressParams::default(),
partial_result_params: PartialResultParams::default(),
};
self.send_request_to_all_plugins(method, params, None, None, cb);
}
pub fn get_document_formatting(
&self,
path: &Path,
cb: impl FnOnce(PluginId, Result<Vec<TextEdit>, RpcError>)
+ Clone
+ Send
+ 'static,
) {
let uri = Url::from_file_path(path).unwrap();
let method = Formatting::METHOD;
let params = DocumentFormattingParams {
text_document: TextDocumentIdentifier { uri },
options: FormattingOptions {
tab_size: 4,
insert_spaces: true,
..Default::default()
},
work_done_progress_params: WorkDoneProgressParams::default(),
};
let language_id =
Some(language_id_from_path(path).unwrap_or("").to_string());
self.send_request_to_all_plugins(
method,
params,
language_id,
Some(path.to_path_buf()),
cb,
);
}
pub fn prepare_rename(
&self,
path: &Path,
position: Position,
cb: impl FnOnce(PluginId, Result<PrepareRenameResponse, RpcError>)
+ Clone
+ Send
+ 'static,
) {
let uri = Url::from_file_path(path).unwrap();
let method = PrepareRenameRequest::METHOD;
let params = TextDocumentPositionParams {
text_document: TextDocumentIdentifier { uri },
position,
};
let language_id =
Some(language_id_from_path(path).unwrap_or("").to_string());
self.send_request_to_all_plugins(
method,
params,
language_id,
Some(path.to_path_buf()),
cb,
);
}
pub fn rename(
&self,
path: &Path,
position: Position,
new_name: String,
cb: impl FnOnce(PluginId, Result<WorkspaceEdit, RpcError>)
+ Clone
+ Send
+ 'static,
) {
let uri = Url::from_file_path(path).unwrap();
let method = Rename::METHOD;
let params = RenameParams {
text_document_position: TextDocumentPositionParams {
text_document: TextDocumentIdentifier { uri },
position,
},
new_name,
work_done_progress_params: WorkDoneProgressParams::default(),
};
let language_id =
Some(language_id_from_path(path).unwrap_or("").to_string());
self.send_request_to_all_plugins(
method,
params,
language_id,
Some(path.to_path_buf()),
cb,
);
}
pub fn get_semantic_tokens(
&self,
path: &Path,
cb: impl FnOnce(PluginId, Result<SemanticTokens, RpcError>)
+ Clone
+ Send
+ 'static,
) {
let uri = Url::from_file_path(path).unwrap();
| rust | Apache-2.0 | 59ce6df700ce4efbe4498a719fe52195e083d2ee | 2026-01-04T15:32:17.267102Z | true |
lapce/lapce | https://github.com/lapce/lapce/blob/59ce6df700ce4efbe4498a719fe52195e083d2ee/lapce-proxy/src/plugin/dap.rs | lapce-proxy/src/plugin/dap.rs | use std::{
collections::HashMap,
io::{BufReader, BufWriter, Write},
path::PathBuf,
process::{Child, Command, Stdio},
sync::{
Arc,
atomic::{AtomicU64, Ordering},
},
thread,
};
use anyhow::{Result, anyhow};
use crossbeam_channel::{Receiver, Sender};
use lapce_rpc::{
RpcError,
dap_types::{
self, ConfigurationDone, Continue, ContinueArguments, ContinueResponse,
DapEvent, DapId, DapPayload, DapRequest, DapResponse, DapServer,
DebuggerCapabilities, Disconnect, Initialize, Launch, Next, NextArguments,
Pause, PauseArguments, Request, RunDebugConfig, RunInTerminal,
RunInTerminalArguments, RunInTerminalResponse, Scope, Scopes,
ScopesArguments, ScopesResponse, SetBreakpoints, SetBreakpointsArguments,
SetBreakpointsResponse, Source, SourceBreakpoint, StackTrace,
StackTraceArguments, StackTraceResponse, StepIn, StepInArguments, StepOut,
StepOutArguments, Terminate, ThreadId, Threads, ThreadsResponse, Variable,
Variables, VariablesArguments, VariablesResponse,
},
terminal::TermId,
};
use parking_lot::Mutex;
use serde_json::Value;
use super::{
PluginCatalogRpcHandler,
psp::{ResponseHandler, RpcCallback},
};
/// One debugging session: owns the adapter-process wiring and the state
/// needed to stop or restart it. Mutated exclusively by the thread running
/// [`DapRpcHandler::mainloop`].
pub struct DapClient {
    plugin_rpc: PluginCatalogRpcHandler,
    pub(crate) dap_rpc: DapRpcHandler,
    /// How to spawn the adapter executable (program, args, cwd).
    dap_server: DapServer,
    /// The run/debug configuration this session was started with.
    config: RunDebugConfig,
    /// Breakpoints to (re)apply once the adapter reports `initialized`.
    breakpoints: HashMap<PathBuf, Vec<SourceBreakpoint>>,
    /// Terminal hosting the debuggee, once `runInTerminal` has been served.
    term_id: Option<TermId>,
    /// Capabilities reported by the adapter's `initialize` response.
    capabilities: Option<DebuggerCapabilities>,
    // The three flags below track shutdown/restart progress; see
    // `check_restart` for how they interact.
    terminated: bool,
    disconnected: bool,
    restarted: bool,
}
impl DapClient {
pub fn new(
dap_server: DapServer,
config: RunDebugConfig,
breakpoints: HashMap<PathBuf, Vec<SourceBreakpoint>>,
plugin_rpc: PluginCatalogRpcHandler,
) -> Result<Self> {
let dap_rpc = DapRpcHandler::new(config.dap_id);
Ok(Self {
plugin_rpc,
dap_server,
config,
dap_rpc,
breakpoints,
term_id: None,
capabilities: None,
terminated: false,
disconnected: false,
restarted: false,
})
}
    /// Spawn the adapter, run the DAP `initialize` handshake, then hand the
    /// client over to a background thread running its mainloop.
    ///
    /// The returned [`DapRpcHandler`] is the only way to talk to the session
    /// afterwards; the `DapClient` itself is moved into the mainloop thread.
    pub fn start(
        dap_server: DapServer,
        config: RunDebugConfig,
        breakpoints: HashMap<PathBuf, Vec<SourceBreakpoint>>,
        plugin_rpc: PluginCatalogRpcHandler,
    ) -> Result<DapRpcHandler> {
        let mut dap = Self::new(dap_server, config, breakpoints, plugin_rpc)?;
        // Order matters: the adapter process and its stdio threads must be
        // running before `initialize` issues the first (blocking) request.
        dap.start_process()?;
        let dap_rpc = dap.dap_rpc.clone();
        dap.initialize()?;
        {
            let dap_rpc = dap_rpc.clone();
            thread::spawn(move || {
                dap_rpc.mainloop(&mut dap);
            });
        }
        Ok(dap_rpc)
    }
    /// Launch the adapter executable and wire its stdio to this client's
    /// channels: one thread frames outgoing payloads onto stdin, another
    /// parses framed messages from stdout.
    fn start_process(&self) -> Result<()> {
        let program = self.dap_server.program.clone();
        let mut process = Self::process(
            &program,
            &self.dap_server.args,
            self.dap_server.cwd.as_ref(),
        )?;
        let stdin = process.stdin.take().unwrap();
        let stdout = process.stdout.take().unwrap();
        // NOTE(review): stderr is piped (see `Self::process`) but never
        // drained here — a chatty adapter could fill the pipe buffer and
        // stall; confirm whether it should be read or nulled.
        // let stderr = process.stderr.take().unwrap();
        let dap_rpc = self.dap_rpc.clone();
        let io_rx = self.dap_rpc.io_rx.clone();
        let io_tx = self.dap_rpc.io_tx.clone();
        // Writer thread: serialize every queued payload with DAP's
        // `Content-Length` framing; exits when the io channel closes or a
        // write to the adapter's stdin fails.
        let mut writer = Box::new(BufWriter::new(stdin));
        thread::spawn(move || -> Result<()> {
            for msg in io_rx {
                if let Ok(msg) = serde_json::to_string(&msg) {
                    tracing::debug!("write to dap server: {}", msg);
                    let msg =
                        format!("Content-Length: {}\r\n\r\n{}", msg.len(), msg);
                    writer.write_all(msg.as_bytes())?;
                    writer.flush()?;
                }
            }
            tracing::debug!("thread(write to dap) exited");
            Ok(())
        });
        {
            let plugin_rpc = self.plugin_rpc.clone();
            // Reader thread: DAP uses the same framing as LSP, so the LSP
            // message reader is reused. A read error is treated as the
            // adapter having died.
            thread::spawn(move || {
                let mut reader = Box::new(BufReader::new(stdout));
                loop {
                    match crate::plugin::lsp::read_message(&mut reader) {
                        Ok(message_str) => {
                            tracing::debug!("read from dap server: {}", message_str);
                            dap_rpc.handle_server_message(&message_str);
                        }
                        Err(_err) => {
                            // NOTE(review): a synthetic payload is queued on
                            // io_tx here — presumably so the writer thread
                            // attempts a write on the dead stdin and exits;
                            // confirm the intent.
                            if let Err(err) = io_tx
                                .send(DapPayload::Event(DapEvent::Initialized(None)))
                            {
                                tracing::error!("{:?}", err);
                            }
                            plugin_rpc.core_rpc.log(
                                lapce_rpc::core::LogLevel::Error,
                                format!("dap server {program} stopped!"),
                                None,
                            );
                            dap_rpc.disconnected();
                            tracing::debug!("thread(read from dap) exited");
                            return;
                        }
                    };
                }
            });
        }
        Ok(())
    }
    /// Spawn the adapter `server` with `args` (optionally in `cwd`), with
    /// all three stdio streams piped back to the caller.
    fn process(
        server: &str,
        args: &[String],
        cwd: Option<&PathBuf>,
    ) -> Result<Child> {
        let mut process = Command::new(server);
        if let Some(cwd) = cwd {
            process.current_dir(cwd);
        }
        process.args(args);
        // CREATE_NO_WINDOW
        // (https://learn.microsoft.com/en-us/windows/win32/procthread/process-creation-flags)
        // Set so spawning a console-subsystem adapter does not flash a
        // console window on Windows.
        #[cfg(target_os = "windows")]
        std::os::windows::process::CommandExt::creation_flags(
            &mut process,
            0x08000000,
        );
        // NOTE(review): stderr is piped but never consumed by the caller
        // (`start_process`); consider draining it or using `Stdio::null()`.
        let child = process
            .stdin(Stdio::piped())
            .stdout(Stdio::piped())
            .stderr(Stdio::piped())
            .spawn()?;
        Ok(child)
    }
    /// Answer a reverse request the adapter sent to the host (lapce).
    ///
    /// Only `runInTerminal` is supported: the terminal launch is forwarded
    /// to the UI and this call then blocks on `termain_process_rx` until the
    /// UI reports the spawned terminal and process id. Unknown commands get
    /// an error response.
    fn handle_host_request(&mut self, req: &DapRequest) -> Result<Value> {
        match req.command.as_str() {
            RunInTerminal::COMMAND => {
                let value = req
                    .arguments
                    .as_ref()
                    .ok_or_else(|| anyhow!("no arguments"))?;
                let args: RunInTerminalArguments =
                    serde_json::from_value(value.clone())?;
                // The debuggee command line from the adapter replaces the
                // configured one.
                let mut config = self.config.clone();
                config.debug_command = Some(args.args);
                self.plugin_rpc.core_rpc.run_in_terminal(config);
                // Blocks the mainloop thread until the UI answers.
                let (term_id, process_id) =
                    self.dap_rpc.termain_process_rx.recv()?;
                self.term_id = Some(term_id);
                let resp = RunInTerminalResponse {
                    process_id,
                    shell_process_id: None,
                };
                let resp = serde_json::to_value(resp)?;
                Ok(resp)
            }
            _ => Err(anyhow!("not implemented")),
        }
    }
    /// React to an event pushed by the adapter.
    ///
    /// `Initialized` applies the stored breakpoints and finishes the
    /// configuration phase; `Stopped` gathers threads/frames/variables and
    /// forwards them to the UI; `Terminated` may trigger a pending restart.
    /// The remaining events are currently ignored.
    fn handle_host_event(&mut self, event: &DapEvent) -> Result<()> {
        match event {
            DapEvent::Initialized(_) => {
                // Push every stored breakpoint set; failures are logged but
                // don't abort the remaining files.
                for (path, breakpoints) in self.breakpoints.clone().into_iter() {
                    match self.dap_rpc.set_breakpoints(path.clone(), breakpoints) {
                        Ok(breakpoints) => {
                            self.plugin_rpc.core_rpc.dap_breakpoints_resp(
                                self.config.dap_id,
                                path,
                                breakpoints.breakpoints.unwrap_or_default(),
                            );
                        }
                        Err(err) => {
                            tracing::error!("{:?}", err);
                        }
                    }
                }
                // Tell the adapter the configuration phase is finished.
                self.dap_rpc.request_async::<ConfigurationDone>((), |rs| {
                    if let Err(e) = rs {
                        tracing::error!("request ConfigurationDone: {:?}", e)
                    }
                });
            }
            DapEvent::Stopped(stopped) => {
                let all_threads_stopped =
                    stopped.all_threads_stopped.unwrap_or_default();
                // Stack traces can only be fetched while threads are paused,
                // so this is done eagerly here, per thread.
                let mut stack_frames = HashMap::new();
                if all_threads_stopped {
                    if let Ok(response) = self.dap_rpc.threads() {
                        for thread in response.threads {
                            if let Ok(frames) = self.dap_rpc.stack_trace(thread.id) {
                                stack_frames.insert(thread.id, frames.stack_frames);
                            }
                        }
                    }
                }
                let current_thread = if all_threads_stopped {
                    Some(stopped.thread_id.unwrap_or_default())
                } else {
                    stopped.thread_id
                };
                // Variables are resolved only for the top frame of the
                // thread that caused the stop.
                let active_frame = current_thread
                    .and_then(|thread_id| stack_frames.get(&thread_id))
                    .and_then(|stack_frames| stack_frames.first());
                let mut vars = Vec::new();
                if let Some(frame) = active_frame {
                    if let Ok(scopes) = self.dap_rpc.scopes(frame.id) {
                        for scope in scopes {
                            let result =
                                self.dap_rpc.variables(scope.variables_reference);
                            vars.push((scope, result.unwrap_or_default()));
                        }
                    }
                }
                self.plugin_rpc.core_rpc.dap_stopped(
                    self.config.dap_id,
                    stopped.clone(),
                    stack_frames,
                    vars,
                );
            }
            DapEvent::Continued(_) => {
                self.plugin_rpc.core_rpc.dap_continued(self.dap_rpc.dap_id);
            }
            DapEvent::Exited(_exited) => {}
            DapEvent::Terminated(_) => {
                self.terminated = true;
                if let Some(term_id) = self.term_id {
                    self.plugin_rpc.proxy_rpc.terminal_close(term_id);
                }
                // A pending restart may now proceed (see `check_restart`).
                if let Err(err) = self.check_restart() {
                    tracing::error!("{:?}", err);
                }
            }
            DapEvent::Thread { .. } => {}
            DapEvent::Output(_) => {}
            DapEvent::Breakpoint { .. } => {}
            DapEvent::Module { .. } => {}
            DapEvent::LoadedSource { .. } => {}
            DapEvent::Process(_) => {}
            DapEvent::Capabilities(_) => {}
            DapEvent::Memory(_) => {}
        }
        Ok(())
    }
pub(crate) fn initialize(&mut self) -> Result<()> {
let params = dap_types::InitializeParams {
client_id: Some("lapce".to_owned()),
client_name: Some("Lapce".to_owned()),
adapter_id: "".to_string(),
locale: Some("en-us".to_owned()),
lines_start_at_one: Some(true),
columns_start_at_one: Some(true),
path_format: Some("path".to_owned()),
supports_variable_type: Some(true),
supports_variable_paging: Some(false),
supports_run_in_terminal_request: Some(true),
supports_memory_references: Some(false),
supports_progress_reporting: Some(false),
supports_invalidated_event: Some(false),
};
let resp = self
.dap_rpc
.request::<Initialize>(params)
.map_err(|e| anyhow!(e.message))?;
self.capabilities = Some(resp);
Ok(())
}
fn stop(&self) {
let dap_rpc = self.dap_rpc.clone();
if self
.capabilities
.as_ref()
.and_then(|c| c.supports_terminate_request)
.unwrap_or(false)
{
thread::spawn(move || {
if let Err(err) = dap_rpc.terminate() {
tracing::error!("{:?}", err);
}
});
} else {
thread::spawn(move || {
if let Err(err) = dap_rpc.disconnect() {
tracing::error!("{:?}", err);
}
});
}
}
    // Check whether the DAP session was asked to restart when we received
    // `Terminated` or `Disconnected`. If the adapter doesn't support the
    // terminate request, we must additionally wait for the disconnect to
    // complete before relaunching.
    fn check_restart(&mut self) -> Result<()> {
        // No restart pending — nothing to do.
        if !self.restarted {
            return Ok(());
        }
        // Without terminate support, hold off until we are disconnected.
        if !self
            .capabilities
            .as_ref()
            .and_then(|c| c.supports_terminate_request)
            .unwrap_or(false)
            && !self.disconnected
        {
            return Ok(());
        }
        self.restarted = false;
        // A disconnect killed the adapter process; bring it back up and
        // redo the `initialize` handshake before launching.
        if self.disconnected {
            self.start_process()?;
            self.initialize()?;
        }
        self.terminated = false;
        self.disconnected = false;
        let dap_rpc = self.dap_rpc.clone();
        let config = self.config.clone();
        // `launch` blocks on the adapter's reply, so run it off-thread.
        thread::spawn(move || {
            if let Err(err) = dap_rpc.launch(&config) {
                tracing::error!("{:?}", err);
            }
        });
        Ok(())
    }
fn restart(&mut self, breakpoints: HashMap<PathBuf, Vec<SourceBreakpoint>>) {
self.restarted = true;
self.breakpoints = breakpoints;
if !self.terminated {
self.stop();
} else if let Err(err) = self.check_restart() {
tracing::error!("{:?}", err);
}
}
}
/// Messages consumed by [`DapRpcHandler::mainloop`]; everything that mutates
/// `DapClient` state funnels through this enum.
#[allow(clippy::large_enum_variant)]
pub enum DapRpc {
    /// Reverse request from the adapter (e.g. `runInTerminal`).
    HostRequest(DapRequest),
    /// Event pushed by the adapter.
    HostEvent(DapEvent),
    /// Gracefully stop the session (terminate or disconnect).
    Stop,
    /// Relaunch the session with a fresh breakpoint set.
    Restart(HashMap<PathBuf, Vec<SourceBreakpoint>>),
    /// Exit the mainloop.
    Shutdown,
    /// The adapter's stdio channel went away.
    Disconnected,
}
/// Descriptor of a configured debugger backend.
///
/// NOTE(review): not referenced anywhere in this file — presumably consumed
/// by other modules; confirm before removing.
#[derive(Clone)]
pub struct DebuggerData {
    pub debugger_type: String,
    pub program: String,
    pub args: Option<Vec<String>>,
}
/// Cloneable handle to a running DAP session; all clones share the same
/// channels, sequence counter and pending-response table.
#[derive(Clone)]
pub struct DapRpcHandler {
    pub dap_id: DapId,
    /// Feeds the mainloop (see [`DapRpc`]).
    rpc_tx: Sender<DapRpc>,
    rpc_rx: Receiver<DapRpc>,
    /// Feeds the stdin-writer thread created in `DapClient::start_process`.
    io_tx: Sender<DapPayload>,
    io_rx: Receiver<DapPayload>,
    // NOTE(review): "termain" looks like a typo for "terminal"; the field is
    // pub(crate), so renaming would touch other modules.
    pub(crate) termain_process_tx: Sender<(TermId, Option<u32>)>,
    termain_process_rx: Receiver<(TermId, Option<u32>)>,
    /// Source of DAP `seq` numbers, shared by requests and responses.
    seq_counter: Arc<AtomicU64>,
    /// Waiters keyed by the request `seq` whose answer they expect.
    server_pending: Arc<Mutex<HashMap<u64, ResponseHandler<DapResponse, RpcError>>>>,
}
impl DapRpcHandler {
fn new(dap_id: DapId) -> Self {
let (rpc_tx, rpc_rx) = crossbeam_channel::unbounded();
let (io_tx, io_rx) = crossbeam_channel::unbounded();
let (termain_process_tx, termain_process_rx) =
crossbeam_channel::unbounded();
Self {
dap_id,
io_tx,
io_rx,
rpc_rx,
rpc_tx,
termain_process_tx,
termain_process_rx,
seq_counter: Arc::new(AtomicU64::new(0)),
server_pending: Arc::new(Mutex::new(HashMap::new())),
}
}
    /// Drive the session: consume [`DapRpc`] messages until `Shutdown` (or a
    /// closed channel) ends the loop. This thread has exclusive ownership of
    /// the `DapClient`'s mutable state.
    pub fn mainloop(&self, dap_client: &mut DapClient) {
        for msg in &self.rpc_rx {
            match msg {
                DapRpc::HostRequest(req) => {
                    // Answer reverse requests inline; the response echoes the
                    // request's `seq` and carries a fresh one of its own.
                    let result = dap_client.handle_host_request(&req);
                    let seq = self.seq_counter.fetch_add(1, Ordering::Relaxed);
                    let resp = DapResponse {
                        seq,
                        request_seq: req.seq,
                        success: result.is_ok(),
                        command: req.command.clone(),
                        message: result.as_ref().err().map(|e| e.to_string()),
                        body: result.ok(),
                    };
                    if let Err(err) = self.io_tx.send(DapPayload::Response(resp)) {
                        tracing::error!("{:?}", err);
                    }
                }
                DapRpc::HostEvent(event) => {
                    if let Err(err) = dap_client.handle_host_event(&event) {
                        tracing::error!("{:?}", err);
                    }
                }
                DapRpc::Stop => {
                    dap_client.stop();
                }
                DapRpc::Restart(breakpoints) => {
                    dap_client.restart(breakpoints);
                }
                DapRpc::Shutdown => {
                    // Close the debuggee's terminal (if any) and end the loop.
                    if let Some(term_id) = dap_client.term_id {
                        dap_client.plugin_rpc.proxy_rpc.terminal_close(term_id);
                    }
                    return;
                }
                DapRpc::Disconnected => {
                    dap_client.disconnected = true;
                    if let Some(term_id) = dap_client.term_id {
                        dap_client.plugin_rpc.proxy_rpc.terminal_close(term_id);
                    }
                    // A pending restart may now be able to proceed.
                    if let Err(err) = dap_client.check_restart() {
                        tracing::error!("{:?}", err);
                    }
                }
            }
        }
    }
    /// Fire a request and deliver the decoded result to `f` from whichever
    /// thread handles the adapter's reply; never blocks the caller.
    fn request_async<R: Request>(
        &self,
        params: R::Arguments,
        f: impl RpcCallback<R::Result, RpcError> + 'static,
    ) {
        self.request_common::<R>(
            R::COMMAND,
            params,
            ResponseHandler::Callback(Box::new(
                |result: Result<DapResponse, RpcError>| {
                    let result = match result {
                        Ok(resp) => {
                            if resp.success {
                                // A missing body becomes `Value::Null`, which
                                // only decodes for results that permit null.
                                serde_json::from_value(resp.body.into()).map_err(
                                    |e| RpcError {
                                        code: 0,
                                        message: e.to_string(),
                                    },
                                )
                            } else {
                                Err(RpcError {
                                    code: 0,
                                    message: resp.message.unwrap_or_default(),
                                })
                            }
                        }
                        Err(e) => Err(e),
                    };
                    Box::new(f).call(result);
                },
            )),
        );
    }
fn request<R: Request>(
&self,
params: R::Arguments,
) -> Result<R::Result, RpcError> {
let (tx, rx) = crossbeam_channel::bounded(1);
self.request_common::<R>(R::COMMAND, params, ResponseHandler::Chan(tx));
let resp = rx
.recv_timeout(std::time::Duration::from_secs(30))
.map_err(|_| RpcError {
code: 0,
message: "io error".to_string(),
})??;
if resp.success {
let resp: R::Result =
serde_json::from_value(resp.body.into()).map_err(|e| RpcError {
code: 0,
message: e.to_string(),
})?;
Ok(resp)
} else {
Err(RpcError {
code: 0,
message: resp.message.unwrap_or_default(),
})
}
}
    /// Allocate a sequence number, register `rh` as the waiter for it, and
    /// queue the request onto the adapter's stdin writer.
    fn request_common<R: Request>(
        &self,
        command: &'static str,
        arguments: R::Arguments,
        rh: ResponseHandler<DapResponse, RpcError>,
    ) {
        let seq = self.seq_counter.fetch_add(1, Ordering::Relaxed);
        let arguments: Value = serde_json::to_value(arguments).unwrap();
        // The waiter must be registered before the request is sent,
        // otherwise a fast reply could race the insert and be dropped.
        {
            let mut pending = self.server_pending.lock();
            pending.insert(seq, rh);
        }
        if let Err(err) = self.io_tx.send(DapPayload::Request(DapRequest {
            seq,
            command: command.to_string(),
            arguments: Some(arguments),
        })) {
            tracing::error!("{:?}", err);
        }
    }
fn handle_server_response(&self, resp: DapResponse) {
if let Some(rh) = { self.server_pending.lock().remove(&resp.request_seq) } {
rh.invoke(Ok(resp));
}
}
pub fn handle_server_message(&self, message_str: &str) {
if let Ok(payload) = serde_json::from_str::<DapPayload>(message_str) {
match payload {
DapPayload::Request(req) => {
if let Err(err) = self.rpc_tx.send(DapRpc::HostRequest(req)) {
tracing::error!("{:?}", err);
}
}
DapPayload::Event(event) => {
if let Err(err) = self.rpc_tx.send(DapRpc::HostEvent(event)) {
tracing::error!("{:?}", err);
}
}
DapPayload::Response(resp) => {
self.handle_server_response(resp);
}
}
}
}
pub fn launch(&self, config: &RunDebugConfig) -> Result<()> {
let params = serde_json::json!({
"program": config.program,
"args": config.args,
"cwd": config.cwd,
"runInTerminal": true,
"env": config.env
});
let _resp = self
.request::<Launch>(params)
.map_err(|e| anyhow!(e.message))?;
Ok(())
}
pub fn stop(&self) {
if let Err(err) = self.rpc_tx.send(DapRpc::Stop) {
tracing::error!("{:?}", err);
}
}
pub fn restart(&self, breakpoints: HashMap<PathBuf, Vec<SourceBreakpoint>>) {
if let Err(err) = self.rpc_tx.send(DapRpc::Restart(breakpoints)) {
tracing::error!("{:?}", err);
}
}
fn disconnected(&self) {
if let Err(err) = self.rpc_tx.send(DapRpc::Disconnected) {
tracing::error!("{:?}", err);
}
}
pub fn disconnect(&self) -> Result<()> {
self.request::<Disconnect>(())
.map_err(|e| anyhow!(e.message))?;
Ok(())
}
fn terminate(&self) -> Result<()> {
self.request::<Terminate>(())
.map_err(|e| anyhow!(e.message))?;
Ok(())
}
pub fn set_breakpoints_async(
&self,
file: PathBuf,
breakpoints: Vec<SourceBreakpoint>,
f: impl RpcCallback<SetBreakpointsResponse, RpcError> + 'static,
) {
let params = SetBreakpointsArguments {
source: Source {
path: Some(file),
name: None,
source_reference: None,
presentation_hint: None,
origin: None,
sources: None,
adapter_data: None,
checksums: None,
},
breakpoints: Some(breakpoints),
source_modified: Some(false),
};
self.request_async::<SetBreakpoints>(params, f);
}
pub fn set_breakpoints(
&self,
file: PathBuf,
breakpoints: Vec<SourceBreakpoint>,
) -> Result<SetBreakpointsResponse> {
let params = SetBreakpointsArguments {
source: Source {
path: Some(file),
name: None,
source_reference: None,
presentation_hint: None,
origin: None,
sources: None,
adapter_data: None,
checksums: None,
},
breakpoints: Some(breakpoints),
source_modified: Some(false),
};
let resp = self
.request::<SetBreakpoints>(params)
.map_err(|e| anyhow!(e.message))?;
Ok(resp)
}
pub fn continue_thread(&self, thread_id: ThreadId) -> Result<ContinueResponse> {
let params = ContinueArguments { thread_id };
let resp = self
.request::<Continue>(params)
.map_err(|e| anyhow!(e.message))?;
Ok(resp)
}
pub fn pause_thread(&self, thread_id: ThreadId) -> Result<()> {
let params = PauseArguments { thread_id };
self.request::<Pause>(params)
.map_err(|e| anyhow!(e.message))?;
Ok(())
}
pub fn threads(&self) -> Result<ThreadsResponse> {
let resp = self
.request::<Threads>(())
.map_err(|e| anyhow!(e.message))?;
Ok(resp)
}
pub fn stack_trace(&self, thread_id: ThreadId) -> Result<StackTraceResponse> {
let params = StackTraceArguments {
thread_id,
..Default::default()
};
let resp = self
.request::<StackTrace>(params)
.map_err(|e| anyhow!(e.message))?;
Ok(resp)
}
pub fn scopes(&self, frame_id: usize) -> Result<Vec<Scope>> {
let args = ScopesArguments { frame_id };
let response = self
.request::<Scopes>(args)
.map_err(|e| anyhow!(e.message))?;
Ok(response.scopes)
}
pub fn scopes_async(
&self,
frame_id: usize,
f: impl RpcCallback<ScopesResponse, RpcError> + 'static,
) {
let args = ScopesArguments { frame_id };
self.request_async::<Scopes>(args, f);
}
pub fn variables(&self, variables_reference: usize) -> Result<Vec<Variable>> {
let args = VariablesArguments {
variables_reference,
filter: None,
start: None,
count: None,
format: None,
};
let response = self
.request::<Variables>(args)
.map_err(|e| anyhow!(e.message))?;
Ok(response.variables)
}
pub fn variables_async(
&self,
variables_reference: usize,
f: impl RpcCallback<VariablesResponse, RpcError> + 'static,
) {
let args = VariablesArguments {
variables_reference,
filter: None,
start: None,
count: None,
format: None,
};
self.request_async::<Variables>(args, f);
}
pub fn next(&self, thread_id: ThreadId) {
let args = NextArguments {
thread_id,
granularity: None,
};
self.request_async::<Next>(args, move |_| {});
}
pub fn step_in(&self, thread_id: ThreadId) {
let args = StepInArguments {
thread_id,
target_id: None,
granularity: None,
};
self.request_async::<StepIn>(args, move |_| {});
}
pub fn step_out(&self, thread_id: ThreadId) {
let args = StepOutArguments {
thread_id,
granularity: None,
};
self.request_async::<StepOut>(args, move |_| {});
}
}
| rust | Apache-2.0 | 59ce6df700ce4efbe4498a719fe52195e083d2ee | 2026-01-04T15:32:17.267102Z | false |
lapce/lapce | https://github.com/lapce/lapce/blob/59ce6df700ce4efbe4498a719fe52195e083d2ee/lapce-proxy/src/plugin/wasi/tests.rs | lapce-proxy/src/plugin/wasi/tests.rs | use std::collections::HashMap;
use lapce_rpc::plugin::VoltMetadata;
use serde_json::{Value, json};
use super::{load_volt, unflatten_map};
#[test]
fn test_unflatten_map() {
    // Dotted keys must expand into nested JSON objects; leaf values are
    // kept as-is.
    let flat: HashMap<String, Value> = serde_json::from_value(json!({
        "a.b.c": "d",
        "a.d": ["e"],
    }))
    .unwrap();
    let expected = json!({
        "a": {
            "b": {
                "c": "d",
            },
            "d": ["e"],
        }
    });
    assert_eq!(unflatten_map(&flat), expected);
}
#[test]
fn test_load_volt() {
    use std::path::{Path, PathBuf};
    // Canonicalize `path` and return it as an owned UTF-8 string; `None` if
    // the path cannot be canonicalized or is not valid UTF-8.
    fn canon_str(path: &Path) -> Option<String> {
        path.canonicalize()
            .ok()
            .as_ref()
            .and_then(|p| p.to_str())
            .map(ToOwned::to_owned)
    }
    // Canonicalized paths of the theme files `names` under `dir`, silently
    // skipping any that do not exist.
    fn theme_paths(dir: &Path, names: &[&str]) -> Vec<String> {
        names
            .iter()
            .filter_map(|name| canon_str(&dir.join(name)))
            .collect()
    }
    let lapce_proxy_dir: PathBuf = std::env::current_dir()
        .expect("Can't get \"lapce-proxy\" directory")
        .join("src")
        .join("plugin")
        .join("wasi")
        .join("plugins");
    // Invalid path (file does not exist).
    let path = lapce_proxy_dir.join("some-path");
    match path.canonicalize() {
        Ok(path) => panic!("{path:?} file must not exist, but it does"),
        Err(err) => assert_eq!(err.kind(), std::io::ErrorKind::NotFound),
    };
    // This should return Err: the plugins directory holds no volt manifest.
    if let Ok(volt_metadata) = load_volt(&lapce_proxy_dir) {
        panic!(
            "Unexpected result from `lapce_proxy::plugin::wasi::load_volt` function: {volt_metadata:?}"
        );
    }
    // Invalid file (not readable into a string). First make sure it exists.
    let path = lapce_proxy_dir.join("smiley.png");
    let path = match path.canonicalize() {
        Ok(path) => path,
        Err(err) => panic!("{path:?} file must exist, but: {err:?}"),
    };
    // Make sure the data in the file is invalid utf-8.
    match std::fs::read_to_string(path.clone()) {
        Ok(str) => panic!(
            "{path:?} file must be invalid utf-8, but it is valid utf-8: {str:?}",
        ),
        Err(err) => assert_eq!(err.kind(), std::io::ErrorKind::InvalidData),
    }
    // This should return Err since the `*.png` file cannot be read as a String.
    if let Ok(volt_metadata) = load_volt(&path) {
        panic!(
            "Unexpected result from `lapce_proxy::plugin::wasi::load_volt` function: {volt_metadata:?}",
        );
    }
    // Invalid data in file (cannot be parsed as VoltMetadata).
    let path = lapce_proxy_dir
        .join("some_author.test-plugin-one")
        .join("Light.svg");
    let path = match path.canonicalize() {
        Ok(path) => path,
        Err(err) => panic!("{path:?} file must exist, but: {err:?}"),
    };
    // An `*.svg` file must be valid utf-8, so it gets past the read step.
    if let Err(err) = std::fs::read_to_string(path.clone()) {
        panic!("{path:?} file must be valid utf-8, but {err:?}");
    }
    // This should return Err since the data is not a volt manifest.
    if let Ok(volt_metadata) = load_volt(&path) {
        panic!(
            "Unexpected result from `lapce_proxy::plugin::wasi::load_volt` function: {volt_metadata:?}",
        );
    }
    // A complete plugin: wasm module plus two color and two icon themes.
    let parent_path = lapce_proxy_dir.join("some_author.test-plugin-one");
    let volt_metadata = match load_volt(&parent_path) {
        Ok(volt_metadata) => volt_metadata,
        Err(error) => panic!("{}", error),
    };
    assert_eq!(
        volt_metadata,
        VoltMetadata {
            name: "some-useful-plugin".to_string(),
            version: "0.1.56".to_string(),
            display_name: "Some Useful Plugin Name".to_string(),
            author: "some_author".to_string(),
            description: "very useful plugin".to_string(),
            icon: Some("icon.svg".to_string()),
            repository: Some("https://github.com/lapce".to_string()),
            wasm: canon_str(&parent_path.join("lapce.wasm")),
            color_themes: Some(theme_paths(
                &parent_path,
                &["Dark.toml", "Light.toml"]
            )),
            icon_themes: Some(theme_paths(&parent_path, &["Dark.svg", "Light.svg"])),
            dir: parent_path.canonicalize().ok(),
            activation: None,
            config: None
        }
    );
    // Plugin two: author string carries a trailing `.` in its manifest and
    // only the light themes are shipped.
    let parent_path = lapce_proxy_dir.join("some_author.test-plugin-two");
    let volt_metadata = match load_volt(&parent_path) {
        Ok(volt_metadata) => volt_metadata,
        Err(error) => panic!("{}", error),
    };
    assert_eq!(
        volt_metadata,
        VoltMetadata {
            name: "some-useful-plugin".to_string(),
            version: "0.1.56".to_string(),
            display_name: "Some Useful Plugin Name".to_string(),
            author: "some_author.".to_string(),
            description: "very useful plugin".to_string(),
            icon: Some("icon.svg".to_string()),
            repository: Some("https://github.com/lapce".to_string()),
            wasm: canon_str(&parent_path.join("lapce.wasm")),
            color_themes: Some(theme_paths(&parent_path, &["Light.toml"])),
            icon_themes: Some(theme_paths(&parent_path, &["Light.svg"])),
            dir: parent_path.canonicalize().ok(),
            activation: None,
            config: None
        }
    );
    // Plugin three: manifest only — no wasm module and no themes at all.
    let parent_path = lapce_proxy_dir.join("some_author.test-plugin-three");
    let volt_metadata = match load_volt(&parent_path) {
        Ok(volt_metadata) => volt_metadata,
        Err(error) => panic!("{}", error),
    };
    assert_eq!(
        volt_metadata,
        VoltMetadata {
            name: "some-useful-plugin".to_string(),
            version: "0.1.56".to_string(),
            display_name: "Some Useful Plugin Name".to_string(),
            author: "some_author".to_string(),
            description: "very useful plugin".to_string(),
            icon: Some("icon.svg".to_string()),
            repository: Some("https://github.com/lapce".to_string()),
            wasm: None,
            color_themes: Some(Vec::new()),
            icon_themes: Some(Vec::new()),
            dir: parent_path.canonicalize().ok(),
            activation: None,
            config: None
        }
    );
}
| rust | Apache-2.0 | 59ce6df700ce4efbe4498a719fe52195e083d2ee | 2026-01-04T15:32:17.267102Z | false |
lapce/lapce | https://github.com/lapce/lapce/blob/59ce6df700ce4efbe4498a719fe52195e083d2ee/lapce-proxy/src/bin/lapce-proxy.rs | lapce-proxy/src/bin/lapce-proxy.rs | use lapce_proxy::mainloop;
fn main() {
mainloop();
}
| rust | Apache-2.0 | 59ce6df700ce4efbe4498a719fe52195e083d2ee | 2026-01-04T15:32:17.267102Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-core/src/lib.rs | yazi-core/src/lib.rs | yazi_macro::mod_pub!(cmp confirm help input mgr notify pick spot tab tasks which);
yazi_macro::mod_flat!(core);
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-core/src/core.rs | yazi-core/src/core.rs | use crossterm::cursor::SetCursorStyle;
use ratatui::layout::{Position, Rect};
use yazi_shared::Layer;
use crate::{cmp::Cmp, confirm::Confirm, help::Help, input::Input, mgr::Mgr, notify::Notify, pick::Pick, tab::{Folder, Tab}, tasks::Tasks, which::Which};
pub struct Core {
pub mgr: Mgr,
pub tasks: Tasks,
pub pick: Pick,
pub input: Input,
pub confirm: Confirm,
pub help: Help,
pub cmp: Cmp,
pub which: Which,
pub notify: Notify,
}
impl Core {
pub fn make() -> Self {
Self {
mgr: Mgr::make(),
tasks: Tasks::serve(),
pick: Default::default(),
input: Default::default(),
confirm: Default::default(),
help: Default::default(),
cmp: Default::default(),
which: Default::default(),
notify: Default::default(),
}
}
pub fn cursor(&self) -> Option<(Position, SetCursorStyle)> {
if self.input.visible {
let Rect { x, y, .. } = self.mgr.area(self.input.position);
return Some((
Position { x: x + 1 + self.input.cursor(), y: y + 1 },
self.input.cursor_shape(),
));
}
if let Some((x, y)) = self.help.cursor() {
return Some((Position { x, y }, self.help.cursor_shape()));
}
None
}
pub fn layer(&self) -> Layer {
if self.which.visible {
Layer::Which
} else if self.cmp.visible {
Layer::Cmp
} else if self.help.visible {
Layer::Help
} else if self.confirm.visible {
Layer::Confirm
} else if self.input.visible {
Layer::Input
} else if self.pick.visible {
Layer::Pick
} else if self.active().spot.visible() {
Layer::Spot
} else if self.tasks.visible {
Layer::Tasks
} else {
Layer::Mgr
}
}
}
impl Core {
#[inline]
pub fn active(&self) -> &Tab { self.mgr.active() }
#[inline]
pub fn active_mut(&mut self) -> &mut Tab { self.mgr.active_mut() }
#[inline]
pub fn current_mut(&mut self) -> &mut Folder { self.mgr.current_mut() }
#[inline]
pub fn parent_mut(&mut self) -> Option<&mut Folder> { self.mgr.parent_mut() }
}
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-core/src/tasks/tasks.rs | yazi-core/src/tasks/tasks.rs | use std::{sync::Arc, time::Duration};
use parking_lot::Mutex;
use tokio::{task::JoinHandle, time::sleep};
use yazi_adapter::Dimension;
use yazi_parser::app::TaskSummary;
use yazi_proxy::AppProxy;
use yazi_scheduler::{Ongoing, Scheduler, TaskSnap};
use super::{TASKS_BORDER, TASKS_PADDING, TASKS_PERCENT};
pub struct Tasks {
pub scheduler: Arc<Scheduler>,
handle: JoinHandle<()>,
pub visible: bool,
pub cursor: usize,
pub snaps: Vec<TaskSnap>,
pub summary: TaskSummary,
}
impl Tasks {
pub fn serve() -> Self {
let scheduler = Scheduler::serve();
let ongoing = scheduler.ongoing.clone();
let handle = tokio::spawn(async move {
let mut last = TaskSummary::default();
loop {
sleep(Duration::from_millis(500)).await;
let new = ongoing.lock().summary();
if last != new {
last = new;
AppProxy::update_progress(new);
}
}
});
Self {
scheduler: Arc::new(scheduler),
handle,
visible: false,
cursor: 0,
snaps: Default::default(),
summary: Default::default(),
}
}
pub fn shutdown(&self) {
self.scheduler.shutdown();
self.handle.abort();
}
pub fn limit() -> usize {
((Dimension::available().rows * TASKS_PERCENT / 100)
.saturating_sub(TASKS_BORDER + TASKS_PADDING) as usize)
/ 3
}
pub fn paginate(&self) -> Vec<TaskSnap> {
self.ongoing().lock().values().take(Self::limit()).map(Into::into).collect()
}
pub fn ongoing(&self) -> &Arc<Mutex<Ongoing>> { &self.scheduler.ongoing }
}
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-core/src/tasks/process.rs | yazi-core/src/tasks/process.rs | use std::mem;
use yazi_parser::tasks::ProcessOpenOpt;
use super::Tasks;
impl Tasks {
// TODO: remove
pub fn open_shell_compat(&self, mut opt: ProcessOpenOpt) {
if opt.spread {
self.scheduler.process_open(opt);
return;
}
if opt.args.is_empty() {
return;
}
if opt.args.len() == 2 {
self.scheduler.process_open(opt);
return;
}
let hovered = mem::take(&mut opt.args[0]);
for target in opt.args.into_iter().skip(1) {
self.scheduler.process_open(ProcessOpenOpt {
cwd: opt.cwd.clone(),
cmd: opt.cmd.clone(),
args: vec![hovered.clone(), target],
block: opt.block,
orphan: opt.orphan,
done: None,
spread: opt.spread,
});
}
}
}
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-core/src/tasks/file.rs | yazi-core/src/tasks/file.rs | use hashbrown::HashSet;
use tracing::debug;
use yazi_shared::url::{UrlBuf, UrlBufCov, UrlLike};
use super::Tasks;
use crate::mgr::Yanked;
impl Tasks {
pub fn file_cut(&self, src: &Yanked, dest: &UrlBuf, force: bool) {
for u in src.iter() {
let Some(Ok(to)) = u.name().map(|n| dest.try_join(n)) else {
debug!("file_cut: cannot join {u:?} with {dest:?}");
continue;
};
if force && *u == to {
debug!("file_cut: same file, skip {to:?}");
} else {
self.scheduler.file_cut(u.0.clone(), to, force);
}
}
}
pub fn file_copy(&self, src: &Yanked, dest: &UrlBuf, force: bool, follow: bool) {
for u in src.iter() {
let Some(Ok(to)) = u.name().map(|n| dest.try_join(n)) else {
debug!("file_copy: cannot join {u:?} with {dest:?}");
continue;
};
if force && *u == to {
debug!("file_copy: same file, skip {to:?}");
} else {
self.scheduler.file_copy(u.0.clone(), to, force, follow);
}
}
}
pub fn file_link(&self, src: &HashSet<UrlBufCov>, dest: &UrlBuf, relative: bool, force: bool) {
for u in src {
let Some(Ok(to)) = u.name().map(|n| dest.try_join(n)) else {
debug!("file_link: cannot join {u:?} with {dest:?}");
continue;
};
if force && *u == to {
debug!("file_link: same file, skip {to:?}");
} else {
self.scheduler.file_link(u.0.clone(), to, relative, force);
}
}
}
pub fn file_hardlink(&self, src: &HashSet<UrlBufCov>, dest: &UrlBuf, force: bool, follow: bool) {
for u in src {
let Some(Ok(to)) = u.name().map(|n| dest.try_join(n)) else {
debug!("file_hardlink: cannot join {u:?} with {dest:?}");
continue;
};
if force && *u == to {
debug!("file_hardlink: same file, skip {to:?}");
} else {
self.scheduler.file_hardlink(u.0.clone(), to, force, follow);
}
}
}
pub fn file_remove(&self, targets: Vec<UrlBuf>, permanently: bool) {
for u in targets {
if permanently {
self.scheduler.file_delete(u);
} else {
self.scheduler.file_trash(u);
}
}
}
}
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-core/src/tasks/prework.rs | yazi-core/src/tasks/prework.rs | use yazi_config::{YAZI, plugin::MAX_PREWORKERS};
use yazi_fs::{File, Files, FsHash64, SortBy};
use super::Tasks;
use crate::mgr::Mimetype;
impl Tasks {
pub fn fetch_paged(&self, paged: &[File], mimetype: &Mimetype) {
let mut loaded = self.scheduler.runner.prework.loaded.lock();
let mut tasks: [Vec<_>; MAX_PREWORKERS as usize] = Default::default();
for f in paged {
let hash = f.hash_u64();
for g in YAZI.plugin.fetchers(f, mimetype.get(&f.url).unwrap_or_default()) {
match loaded.get_mut(&hash) {
Some(n) if *n & (1 << g.idx) != 0 => continue,
Some(n) => *n |= 1 << g.idx,
None => _ = loaded.put(hash, 1 << g.idx),
}
tasks[g.idx as usize].push(f.clone());
}
}
drop(loaded);
for (i, tasks) in tasks.into_iter().enumerate() {
if !tasks.is_empty() {
self.scheduler.fetch_paged(&YAZI.plugin.fetchers[i], tasks);
}
}
}
pub fn preload_paged(&self, paged: &[File], mimetype: &Mimetype) {
let mut loaded = self.scheduler.runner.prework.loaded.lock();
for f in paged {
let hash = f.hash_u64();
for p in YAZI.plugin.preloaders(f, mimetype.get(&f.url).unwrap_or_default()) {
match loaded.get_mut(&hash) {
Some(n) if *n & (1 << p.idx) != 0 => continue,
Some(n) => *n |= 1 << p.idx,
None => _ = loaded.put(hash, 1 << p.idx),
}
self.scheduler.preload_paged(p, f);
}
}
}
pub fn prework_sorted(&self, targets: &Files) {
if targets.sorter().by != SortBy::Size {
return;
}
let targets: Vec<_> = {
let loading = self.scheduler.runner.prework.sizing.read();
targets
.iter()
.filter(|f| {
f.is_dir() && !targets.sizes.contains_key(&f.urn()) && !loading.contains(&f.url)
})
.map(|f| &f.url)
.collect()
};
if targets.is_empty() {
return;
}
let mut loading = self.scheduler.runner.prework.sizing.write();
for &target in &targets {
loading.insert(target.clone());
}
self.scheduler.prework_size(targets);
}
}
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-core/src/tasks/mod.rs | yazi-core/src/tasks/mod.rs | yazi_macro::mod_flat!(file plugin prework process tasks);
pub const TASKS_BORDER: u16 = 2;
pub const TASKS_PADDING: u16 = 2;
pub const TASKS_PERCENT: u16 = 80;
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-core/src/tasks/plugin.rs | yazi-core/src/tasks/plugin.rs | use yazi_parser::app::PluginOpt;
use super::Tasks;
impl Tasks {
pub fn plugin_entry(&self, opt: PluginOpt) { self.scheduler.plugin_entry(opt); }
}
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-core/src/help/help.rs | yazi-core/src/help/help.rs | use anyhow::Result;
use crossterm::{cursor::SetCursorStyle, event::KeyCode};
use unicode_width::UnicodeWidthStr;
use yazi_adapter::Dimension;
use yazi_config::{KEYMAP, YAZI, keymap::{Chord, Key}};
use yazi_macro::{act, render, render_and};
use yazi_shared::Layer;
use yazi_widgets::Scrollable;
use crate::help::HELP_MARGIN;
#[derive(Default)]
pub struct Help {
pub visible: bool,
pub layer: Layer,
pub(super) bindings: Vec<&'static Chord>,
// Filter
pub keyword: String,
pub in_filter: Option<yazi_widgets::input::Input>,
pub offset: usize,
pub cursor: usize,
}
impl Help {
pub fn r#type(&mut self, key: &Key) -> Result<bool> {
let Some(input) = &mut self.in_filter else { return Ok(false) };
match key {
Key { code: KeyCode::Esc, shift: false, ctrl: false, alt: false, super_: false } => {
self.in_filter = None;
render!();
}
Key { code: KeyCode::Enter, shift: false, ctrl: false, alt: false, super_: false } => {
self.in_filter = None;
return Ok(render_and!(true)); // Don't do the `filter_apply` below, since we already have the filtered results.
}
Key { code: KeyCode::Backspace, shift: false, ctrl: false, alt: false, super_: false } => {
act!(backspace, input)?;
}
_ => {
input.r#type(key)?;
}
}
self.filter_apply();
Ok(true)
}
pub fn filter_apply(&mut self) {
let kw = self.in_filter.as_ref().map_or("", |i| i.value());
if kw.is_empty() {
self.keyword = String::new();
self.bindings = KEYMAP.get(self.layer).iter().collect();
} else if self.keyword != kw {
self.keyword = kw.to_owned();
self.bindings = KEYMAP.get(self.layer).iter().filter(|&c| c.contains(kw)).collect();
}
render!(self.scroll(0));
}
}
impl Help {
// --- Keyword
pub fn keyword(&self) -> Option<String> {
self
.in_filter
.as_ref()
.map(|i| i.value())
.or(Some(self.keyword.as_str()).filter(|&s| !s.is_empty()))
.map(|s| format!("Filter: {s}"))
}
// --- Bindings
pub fn window(&self) -> &[&Chord] {
let end = (self.offset + self.limit()).min(self.bindings.len());
&self.bindings[self.offset..end]
}
// --- Cursor
pub fn cursor(&self) -> Option<(u16, u16)> {
if !self.visible || self.in_filter.is_none() {
return None;
}
if let Some(kw) = self.keyword() {
return Some((kw.width() as u16, Dimension::available().rows));
}
None
}
pub fn rel_cursor(&self) -> usize { self.cursor - self.offset }
pub fn cursor_shape(&self) -> SetCursorStyle {
if YAZI.input.cursor_blink {
SetCursorStyle::BlinkingBlock
} else {
SetCursorStyle::SteadyBlock
}
}
}
impl Scrollable for Help {
fn total(&self) -> usize { self.bindings.len() }
fn limit(&self) -> usize { Dimension::available().rows.saturating_sub(HELP_MARGIN) as usize }
fn cursor_mut(&mut self) -> &mut usize { &mut self.cursor }
fn offset_mut(&mut self) -> &mut usize { &mut self.offset }
}
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-core/src/help/mod.rs | yazi-core/src/help/mod.rs | yazi_macro::mod_flat!(help);
const HELP_MARGIN: u16 = 1;
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-core/src/mgr/yanked.rs | yazi-core/src/mgr/yanked.rs | use std::ops::Deref;
use hashbrown::HashSet;
use yazi_dds::Pubsub;
use yazi_fs::FilesOp;
use yazi_macro::err;
use yazi_shared::url::{Url, UrlBuf, UrlBufCov, UrlCov, UrlLike};
#[derive(Debug, Default)]
pub struct Yanked {
pub cut: bool,
urls: HashSet<UrlBufCov>,
version: u64,
revision: u64,
}
impl Deref for Yanked {
type Target = HashSet<UrlBufCov>;
fn deref(&self) -> &Self::Target { &self.urls }
}
impl Yanked {
pub fn new(cut: bool, urls: HashSet<UrlBufCov>) -> Self {
Self { cut, urls, ..Default::default() }
}
pub fn remove<'a>(&mut self, url: impl Into<Url<'a>>) {
if self.urls.remove(&UrlCov::new(url)) {
self.revision += 1;
}
}
pub fn clear(&mut self) {
if self.urls.is_empty() {
return;
}
self.urls.clear();
self.revision += 1;
}
pub fn contains<'a>(&self, url: impl Into<Url<'a>>) -> bool {
self.urls.contains(&UrlCov::new(url))
}
pub fn contains_in(&self, dir: &UrlBuf) -> bool {
self.urls.iter().any(|u| {
let mut it = u.components();
it.next_back().is_some()
&& it.covariant(&dir.components())
&& u.parent().is_some_and(|p| p == *dir)
})
}
pub fn apply_op(&mut self, op: &FilesOp) {
let (removal, addition) = op.diff_recoverable(|u| self.contains(u));
if !removal.is_empty() {
let old = self.urls.len();
self.urls.retain(|u| !removal.contains(u));
self.revision += (old != self.urls.len()) as u64;
}
if !addition.is_empty() {
let old = self.urls.len();
self.urls.extend(addition.into_iter().map(UrlBufCov));
self.revision += (old != self.urls.len()) as u64;
}
}
pub fn catchup_revision(&mut self, force: bool) -> bool {
if self.version == self.revision && !force {
return false;
}
self.version = self.revision;
err!(Pubsub::pub_after_yank(self.cut, &self.urls));
true
}
}
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-core/src/mgr/tabs.rs | yazi-core/src/mgr/tabs.rs | use std::ops::{Deref, DerefMut};
use yazi_dds::Pubsub;
use yazi_fs::File;
use yazi_macro::err;
use crate::tab::{Folder, Tab};
pub struct Tabs {
pub cursor: usize,
pub items: Vec<Tab>,
}
impl Default for Tabs {
fn default() -> Self { Self { cursor: 0, items: vec![Default::default()] } }
}
impl Tabs {
pub fn set_idx(&mut self, idx: usize) {
// Reset the preview of the last active tab
if let Some(active) = self.items.get_mut(self.cursor) {
active.preview.reset_image();
}
self.cursor = idx;
err!(Pubsub::pub_after_tab(self.active().id));
}
}
impl Tabs {
#[inline]
pub fn active(&self) -> &Tab { &self[self.cursor] }
#[inline]
pub(super) fn active_mut(&mut self) -> &mut Tab { &mut self.items[self.cursor] }
#[inline]
pub fn parent(&self) -> Option<&Folder> { self.active().parent.as_ref() }
#[inline]
pub fn current(&self) -> &Folder { &self.active().current }
#[inline]
pub fn hovered(&self) -> Option<&File> { self.current().hovered() }
}
impl Deref for Tabs {
type Target = Vec<Tab>;
fn deref(&self) -> &Self::Target { &self.items }
}
impl DerefMut for Tabs {
fn deref_mut(&mut self) -> &mut Self::Target { &mut self.items }
}
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-core/src/mgr/mgr.rs | yazi-core/src/mgr/mgr.rs | use std::iter;
use ratatui::layout::Rect;
use yazi_adapter::Dimension;
use yazi_config::popup::{Origin, Position};
use yazi_fs::Splatable;
use yazi_shared::url::{AsUrl, Url, UrlBuf};
use yazi_watcher::Watcher;
use super::{Mimetype, Tabs, Yanked};
use crate::tab::{Folder, Tab};
pub struct Mgr {
pub tabs: Tabs,
pub yanked: Yanked,
pub watcher: Watcher,
pub mimetype: Mimetype,
}
impl Mgr {
pub fn make() -> Self {
Self {
tabs: Default::default(),
yanked: Default::default(),
watcher: Watcher::serve(),
mimetype: Default::default(),
}
}
pub fn area(&self, pos: Position) -> Rect {
if pos.origin == Origin::Hovered {
self.active().hovered_rect_based(pos)
} else {
pos.rect(Dimension::available().into())
}
}
pub fn shutdown(&mut self) { self.tabs.iter_mut().for_each(|t| t.shutdown()); }
}
impl Mgr {
#[inline]
pub fn cwd(&self) -> &UrlBuf { self.active().cwd() }
#[inline]
pub fn active(&self) -> &Tab { self.tabs.active() }
#[inline]
pub fn active_mut(&mut self) -> &mut Tab { self.tabs.active_mut() }
#[inline]
pub fn current_mut(&mut self) -> &mut Folder { &mut self.active_mut().current }
#[inline]
pub fn parent_mut(&mut self) -> Option<&mut Folder> { self.active_mut().parent.as_mut() }
}
impl Splatable for Mgr {
fn tab(&self) -> usize { self.tabs.cursor }
fn selected(&self, tab: usize, mut idx: Option<usize>) -> impl Iterator<Item = Url<'_>> {
idx = idx.and_then(|i| i.checked_sub(1));
tab
.checked_sub(1)
.and_then(|tab| self.tabs.get(tab))
.map(|tab| tab.selected_or_hovered())
.unwrap_or_else(|| Box::new(iter::empty()))
.skip(idx.unwrap_or(0))
.take(if idx.is_some() { 1 } else { usize::MAX })
.map(|u| u.as_url())
}
fn hovered(&self, tab: usize) -> Option<Url<'_>> {
tab
.checked_sub(1)
.and_then(|tab| self.tabs.get(tab))
.and_then(|tab| tab.hovered())
.map(|h| h.url.as_url())
}
fn yanked(&self) -> impl Iterator<Item = Url<'_>> { self.yanked.iter().map(|u| u.as_url()) }
}
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-core/src/mgr/mimetype.rs | yazi-core/src/mgr/mimetype.rs | use hashbrown::HashMap;
use yazi_shared::{pool::Symbol, url::{Url, UrlBufCov, UrlCov}};
#[derive(Default)]
pub struct Mimetype(HashMap<UrlBufCov, Symbol<str>>);
impl Mimetype {
pub fn get<'a, 'b>(&'a self, url: impl Into<Url<'b>>) -> Option<&'a str> {
self.0.get(&UrlCov::new(url)).map(|s| s.as_ref())
}
pub fn owned<'a>(&self, url: impl Into<Url<'a>>) -> Option<Symbol<str>> {
self.0.get(&UrlCov::new(url)).cloned()
}
pub fn contains<'a>(&self, url: impl Into<Url<'a>>) -> bool {
self.0.contains_key(&UrlCov::new(url))
}
pub fn extend(&mut self, iter: impl IntoIterator<Item = (UrlBufCov, Symbol<str>)>) {
self.0.extend(iter);
}
}
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-core/src/mgr/mod.rs | yazi-core/src/mgr/mod.rs | yazi_macro::mod_flat!(mgr mimetype tabs yanked);
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-core/src/spot/mod.rs | yazi-core/src/spot/mod.rs | yazi_macro::mod_flat!(spot);
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-core/src/spot/spot.rs | yazi-core/src/spot/spot.rs | use tokio_util::sync::CancellationToken;
use yazi_config::YAZI;
use yazi_fs::File;
use yazi_macro::render;
use yazi_parser::mgr::SpotLock;
use yazi_plugin::isolate;
use yazi_shared::{pool::Symbol, url::UrlBuf};
#[derive(Default)]
pub struct Spot {
pub lock: Option<SpotLock>,
pub skip: usize,
pub(super) ct: Option<CancellationToken>,
}
impl Spot {
pub fn go(&mut self, file: File, mime: Symbol<str>) {
if mime.is_empty() {
return; // Wait till mimetype is resolved to avoid flickering
} else if self.same_lock(&file, &mime) {
return;
}
let Some(spotter) = YAZI.plugin.spotter(&file, &mime) else {
return self.reset();
};
self.abort();
self.ct = Some(isolate::spot(&spotter.run, file, mime, self.skip));
}
pub fn visible(&self) -> bool { self.lock.is_some() }
pub fn abort(&mut self) { self.ct.take().map(|ct| ct.cancel()); }
pub fn reset(&mut self) {
self.abort();
render!(self.lock.take().is_some());
}
pub fn same_url(&self, url: &UrlBuf) -> bool { self.lock.as_ref().is_some_and(|l| *url == l.url) }
pub fn same_file(&self, file: &File, mime: &str) -> bool {
self.same_url(&file.url)
&& self.lock.as_ref().is_some_and(|l| file.cha.hits(l.cha) && mime == l.mime)
}
pub fn same_lock(&self, file: &File, mime: &str) -> bool {
self.same_file(file, mime) && self.lock.as_ref().is_some_and(|l| self.skip == l.skip)
}
}
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-core/src/input/mod.rs | yazi-core/src/input/mod.rs | yazi_macro::mod_flat!(input);
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-core/src/input/input.rs | yazi-core/src/input/input.rs | use std::{ops::{Deref, DerefMut}, rc::Rc};
use tokio::sync::mpsc::UnboundedSender;
use yazi_config::popup::Position;
use yazi_shared::{Ids, errors::InputError};
#[derive(Default)]
pub struct Input {
pub(super) inner: yazi_widgets::input::Input,
pub visible: bool,
pub title: String,
pub position: Position,
// Typing
pub tx: Option<UnboundedSender<Result<String, InputError>>>,
pub ticket: Rc<Ids>,
}
impl Deref for Input {
type Target = yazi_widgets::input::Input;
fn deref(&self) -> &Self::Target { &self.inner }
}
impl DerefMut for Input {
fn deref_mut(&mut self) -> &mut Self::Target { &mut self.inner }
}
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-core/src/pick/pick.rs | yazi-core/src/pick/pick.rs | use anyhow::Result;
use tokio::sync::oneshot::Sender;
use yazi_config::{YAZI, popup::Position};
use yazi_widgets::Scrollable;
#[derive(Default)]
pub struct Pick {
pub title: String,
pub items: Vec<String>,
pub position: Position,
pub offset: usize,
pub cursor: usize,
pub callback: Option<Sender<Result<usize>>>,
pub visible: bool,
}
impl Pick {
pub fn title(&self) -> &str { &self.title }
pub fn window(&self) -> impl Iterator<Item = (usize, &str)> {
self.items.iter().map(AsRef::as_ref).enumerate().skip(self.offset).take(self.limit())
}
}
impl Scrollable for Pick {
fn total(&self) -> usize { self.items.len() }
fn limit(&self) -> usize {
self.position.offset.height.saturating_sub(YAZI.pick.border()) as usize
}
fn cursor_mut(&mut self) -> &mut usize { &mut self.cursor }
fn offset_mut(&mut self) -> &mut usize { &mut self.offset }
}
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-core/src/pick/mod.rs | yazi-core/src/pick/mod.rs | yazi_macro::mod_flat!(pick);
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-core/src/cmp/mod.rs | yazi-core/src/cmp/mod.rs | yazi_macro::mod_flat!(cmp);
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-core/src/cmp/cmp.rs | yazi-core/src/cmp/cmp.rs | use hashbrown::HashMap;
use yazi_parser::cmp::CmpItem;
use yazi_shared::{Id, url::UrlBuf};
use yazi_widgets::Scrollable;
#[derive(Default)]
pub struct Cmp {
pub caches: HashMap<UrlBuf, Vec<CmpItem>>,
pub cands: Vec<CmpItem>,
pub offset: usize,
pub cursor: usize,
pub ticket: Id,
pub visible: bool,
}
impl Cmp {
// --- Cands
pub fn window(&self) -> &[CmpItem] {
let end = (self.offset + self.limit()).min(self.cands.len());
&self.cands[self.offset..end]
}
pub fn selected(&self) -> Option<&CmpItem> { self.cands.get(self.cursor) }
// --- Cursor
pub fn rel_cursor(&self) -> usize { self.cursor - self.offset }
}
impl Scrollable for Cmp {
fn total(&self) -> usize { self.cands.len() }
fn limit(&self) -> usize { self.cands.len().min(10) }
fn cursor_mut(&mut self) -> &mut usize { &mut self.cursor }
fn offset_mut(&mut self) -> &mut usize { &mut self.offset }
}
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-core/src/confirm/mod.rs | yazi-core/src/confirm/mod.rs | yazi_macro::mod_flat!(confirm);
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-core/src/confirm/confirm.rs | yazi-core/src/confirm/confirm.rs | use ratatui::{text::Line, widgets::Paragraph};
use tokio::sync::oneshot::Sender;
use yazi_config::popup::Position;
#[derive(Default)]
pub struct Confirm {
pub title: Line<'static>,
pub body: Paragraph<'static>,
pub list: Paragraph<'static>,
pub position: Position,
pub offset: usize,
pub callback: Option<Sender<bool>>,
pub visible: bool,
}
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-core/src/notify/notify.rs | yazi-core/src/notify/notify.rs | use std::ops::ControlFlow;
use ratatui::layout::Rect;
use tokio::task::JoinHandle;
use super::{Message, NOTIFY_SPACING};
#[derive(Default)]
pub struct Notify {
pub(super) tick_handle: Option<JoinHandle<()>>,
pub messages: Vec<Message>,
}
impl Notify {
pub fn limit(&self, area: Rect) -> usize {
if self.messages.is_empty() {
return 0;
}
let mut height = area.height as usize;
let flow = (0..self.messages.len().min(3)).try_fold(0, |acc, i| {
match height.checked_sub(self.messages[i].height(area.width) + NOTIFY_SPACING as usize) {
Some(h) => {
height = h;
ControlFlow::Continue(acc + 1)
}
None => ControlFlow::Break(acc),
}
});
1.max(match flow {
ControlFlow::Continue(i) => i,
ControlFlow::Break(i) => i,
})
}
}
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-core/src/notify/mod.rs | yazi-core/src/notify/mod.rs | yazi_macro::mod_pub!(commands);
yazi_macro::mod_flat!(message notify);
pub const NOTIFY_BORDER: u16 = 2;
pub const NOTIFY_SPACING: u16 = 1;
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-core/src/notify/message.rs | yazi-core/src/notify/message.rs | use std::time::{Duration, Instant};
use unicode_width::UnicodeWidthStr;
use yazi_parser::app::{NotifyLevel, NotifyOpt};
use super::NOTIFY_BORDER;
pub struct Message {
pub title: String,
pub content: String,
pub level: NotifyLevel,
pub timeout: Duration,
pub instant: Instant,
pub percent: u8,
pub max_width: usize,
}
impl From<NotifyOpt> for Message {
fn from(opt: NotifyOpt) -> Self {
let title = opt.title.lines().next().unwrap_or_default();
let title_width = title.width() + (opt.level.icon().width() + /* Space */ 1);
let max_width = opt.content.lines().map(|s| s.width()).max().unwrap_or(0).max(title_width);
Self {
title: title.to_owned(),
content: opt.content,
level: opt.level,
timeout: opt.timeout,
instant: Instant::now(),
percent: 0,
max_width: max_width + NOTIFY_BORDER as usize,
}
}
}
impl Message {
pub fn height(&self, width: u16) -> usize {
let lines = ratatui::widgets::Paragraph::new(self.content.as_str())
.wrap(ratatui::widgets::Wrap { trim: false })
.line_count(width);
lines + NOTIFY_BORDER as usize
}
}
impl PartialEq for Message {
fn eq(&self, other: &Self) -> bool {
self.level == other.level && self.content == other.content && self.title == other.title
}
}
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-core/src/notify/commands/push.rs | yazi-core/src/notify/commands/push.rs | use std::time::{Duration, Instant};
use yazi_parser::app::NotifyOpt;
use yazi_proxy::AppProxy;
use crate::notify::{Message, Notify};
impl Notify {
pub fn push(&mut self, opt: NotifyOpt) {
let instant = Instant::now();
let mut msg = Message::from(opt);
msg.timeout += instant - self.messages.first().map_or(instant, |m| m.instant);
if self.messages.iter().all(|m| m != &msg) {
self.messages.push(msg);
AppProxy::update_notify(Duration::ZERO);
}
}
}
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-core/src/notify/commands/mod.rs | yazi-core/src/notify/commands/mod.rs | yazi_macro::mod_flat!(push tick);
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-core/src/notify/commands/tick.rs | yazi-core/src/notify/commands/tick.rs | use std::time::Duration;
use ratatui::layout::Rect;
use yazi_parser::notify::TickOpt;
use yazi_proxy::AppProxy;
use crate::notify::Notify;
impl Notify {
pub fn tick(&mut self, opt: TickOpt, area: Rect) {
self.tick_handle.take().map(|h| h.abort());
let limit = self.limit(area);
if limit == 0 {
return;
}
for m in &mut self.messages[..limit] {
if m.timeout.is_zero() {
m.percent = m.percent.saturating_sub(20);
} else if m.percent < 100 {
m.percent += 20;
} else {
m.timeout = m.timeout.saturating_sub(opt.interval);
}
}
self.messages.retain(|m| m.percent > 0 || !m.timeout.is_zero());
let limit = self.limit(area);
let timeouts: Vec<_> = self.messages[..limit]
.iter()
.filter(|&m| m.percent == 100 && !m.timeout.is_zero())
.map(|m| m.timeout)
.collect();
let interval = if timeouts.len() != limit {
Duration::from_millis(50)
} else if let Some(min) = timeouts.iter().min() {
*min
} else {
return;
};
self.tick_handle = Some(tokio::spawn(async move {
tokio::time::sleep(interval).await;
AppProxy::update_notify(interval);
}));
}
}
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-core/src/which/which.rs | yazi-core/src/which/which.rs | use yazi_config::{KEYMAP, keymap::{ChordCow, Key}};
use yazi_macro::{emit, render, render_and};
use yazi_shared::Layer;
use crate::which::WhichSorter;
#[derive(Default)]
pub struct Which {
pub times: usize,
pub cands: Vec<ChordCow>,
// Visibility
pub visible: bool,
pub silent: bool,
}
impl Which {
pub fn r#type(&mut self, key: Key) -> bool {
self.cands.retain(|c| c.on.len() > self.times && c.on[self.times] == key);
self.times += 1;
if self.cands.is_empty() {
self.reset();
} else if self.cands.len() == 1 {
emit!(Seq(self.cands.remove(0).into_seq()));
self.reset();
} else if let Some(i) = self.cands.iter().position(|c| c.on.len() == self.times) {
emit!(Seq(self.cands.remove(i).into_seq()));
self.reset();
}
render_and!(true)
}
fn reset(&mut self) {
self.times = 0;
self.cands.clear();
self.visible = false;
self.silent = false;
}
pub fn show_with(&mut self, key: Key, layer: Layer) {
self.times = 1;
self.cands = KEYMAP
.get(layer)
.iter()
.filter(|c| c.on.len() > 1 && c.on[0] == key)
.map(|c| c.into())
.collect();
WhichSorter::default().sort(&mut self.cands);
self.visible = true;
self.silent = false;
render!();
}
}
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-core/src/which/mod.rs | yazi-core/src/which/mod.rs | yazi_macro::mod_flat!(sorter which);
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-core/src/which/sorter.rs | yazi-core/src/which/sorter.rs | use std::{borrow::Cow, mem};
use yazi_config::{YAZI, keymap::ChordCow, which::SortBy};
use yazi_shared::{natsort, translit::Transliterator};
#[derive(Clone, Copy, PartialEq)]
pub struct WhichSorter {
pub by: SortBy,
pub sensitive: bool,
pub reverse: bool,
pub translit: bool,
}
impl Default for WhichSorter {
fn default() -> Self {
Self {
by: YAZI.which.sort_by,
sensitive: YAZI.which.sort_sensitive,
reverse: YAZI.which.sort_reverse,
translit: YAZI.which.sort_translit,
}
}
}
impl WhichSorter {
pub(super) fn sort(&self, items: &mut Vec<ChordCow>) {
if self.by == SortBy::None || items.is_empty() {
return;
}
let mut indices = Vec::with_capacity(items.len());
let mut entities = Vec::with_capacity(items.len());
for (i, ctrl) in items.iter().enumerate() {
indices.push(i);
entities.push(match self.by {
SortBy::None => unreachable!(),
SortBy::Key => Cow::Owned(ctrl.on()),
SortBy::Desc => ctrl.desc_or_run(),
});
}
indices.sort_unstable_by(|&a, &b| {
let ordering = if !self.translit {
natsort(entities[a].as_bytes(), entities[b].as_bytes(), !self.sensitive)
} else {
natsort(
entities[a].as_bytes().transliterate().as_bytes(),
entities[b].as_bytes().transliterate().as_bytes(),
!self.sensitive,
)
};
if self.reverse { ordering.reverse() } else { ordering }
});
*items = indices.into_iter().map(|i| mem::take(&mut items[i])).collect();
}
}
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-core/src/tab/backstack.rs | yazi-core/src/tab/backstack.rs | use yazi_shared::url::{Url, UrlBuf};
#[derive(Default)]
pub struct Backstack {
cursor: usize,
stack: Vec<UrlBuf>,
}
impl Backstack {
pub fn push(&mut self, url: Url) {
if self.stack.is_empty() {
self.stack.push(url.to_owned());
return;
}
if self.stack[self.cursor] == url {
return;
}
self.cursor += 1;
if self.cursor == self.stack.len() {
self.stack.push(url.to_owned());
} else {
self.stack[self.cursor] = url.to_owned();
self.stack.truncate(self.cursor + 1);
}
// Only keep 30 URLs before the cursor, the cleanup threshold is 60
if self.stack.len() > 60 {
let start = self.cursor.saturating_sub(30);
self.stack.drain(..start);
self.cursor -= start;
}
}
pub fn shift_backward(&mut self) -> Option<&UrlBuf> {
if self.cursor > 0 {
self.cursor -= 1;
Some(&self.stack[self.cursor])
} else {
None
}
}
pub fn shift_forward(&mut self) -> Option<&UrlBuf> {
if self.cursor + 1 >= self.stack.len() {
None
} else {
self.cursor += 1;
Some(&self.stack[self.cursor])
}
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_backstack() {
let mut bs: Backstack = Backstack::default();
assert_eq!(bs.shift_forward(), None);
bs.push(Url::regular("1"));
assert_eq!(bs.stack[bs.cursor], Url::regular("1"));
bs.push(Url::regular("2"));
bs.push(Url::regular("3"));
assert_eq!(bs.stack[bs.cursor], Url::regular("3"));
assert_eq!(bs.shift_backward().unwrap(), Url::regular("2"));
assert_eq!(bs.shift_backward().unwrap(), Url::regular("1"));
assert_eq!(bs.shift_backward(), None);
assert_eq!(bs.shift_backward(), None);
assert_eq!(bs.stack[bs.cursor], Url::regular("1"));
assert_eq!(bs.shift_forward().unwrap(), Url::regular("2"));
assert_eq!(bs.shift_forward().unwrap(), Url::regular("3"));
assert_eq!(bs.shift_forward(), None);
bs.shift_backward();
bs.push(Url::regular("4"));
assert_eq!(bs.stack[bs.cursor], Url::regular("4"));
assert_eq!(bs.shift_forward(), None);
assert_eq!(bs.shift_backward().unwrap(), Url::regular("2"));
}
}
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-core/src/tab/finder.rs | yazi-core/src/tab/finder.rs | use anyhow::Result;
use hashbrown::HashMap;
use yazi_fs::{Files, Filter, FilterCase};
use yazi_shared::{path::{AsPath, PathBufDyn}, url::UrlBuf};
use crate::tab::Folder;
pub struct Finder {
pub filter: Filter,
pub matched: HashMap<PathBufDyn, u8>,
lock: FinderLock,
}
#[derive(Default)]
struct FinderLock {
cwd: UrlBuf,
revision: u64,
}
impl Finder {
pub fn new(s: &str, case: FilterCase) -> Result<Self> {
Ok(Self {
filter: Filter::new(s, case)?,
matched: Default::default(),
lock: Default::default(),
})
}
pub fn prev(&self, files: &Files, cursor: usize, include: bool) -> Option<isize> {
for i in !include as usize..files.len() {
let idx = (cursor + files.len() - i) % files.len();
if let Some(s) = files[idx].name()
&& self.filter.matches(s)
{
return Some(idx as isize - cursor as isize);
}
}
None
}
pub fn next(&self, files: &Files, cursor: usize, include: bool) -> Option<isize> {
for i in !include as usize..files.len() {
let idx = (cursor + i) % files.len();
if let Some(s) = files[idx].name()
&& self.filter.matches(s)
{
return Some(idx as isize - cursor as isize);
}
}
None
}
pub fn catchup(&mut self, folder: &Folder) -> bool {
if self.lock == *folder {
return false;
}
self.matched.clear();
let mut i = 0u8;
for file in folder.files.iter() {
if file.name().is_none_or(|s| !self.filter.matches(s)) {
continue;
}
self.matched.insert(file.urn().into(), i);
if self.matched.len() > 99 {
break;
}
i += 1;
}
self.lock = folder.into();
true
}
}
impl Finder {
pub fn matched_idx<T>(&self, folder: &Folder, urn: T) -> Option<u8>
where
T: AsPath,
{
if self.lock == *folder { self.matched.get(&urn.as_path()).copied() } else { None }
}
}
// --- Lock
impl From<&Folder> for FinderLock {
fn from(value: &Folder) -> Self {
Self { cwd: value.url.clone(), revision: value.files.revision }
}
}
impl PartialEq<Folder> for FinderLock {
fn eq(&self, other: &Folder) -> bool {
self.revision == other.files.revision && self.cwd == other.url
}
}
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-core/src/tab/history.rs | yazi-core/src/tab/history.rs | use std::ops::{Deref, DerefMut};
use hashbrown::HashMap;
use yazi_shared::url::{AsUrl, UrlBuf};
use super::Folder;
#[derive(Default)]
pub struct History(HashMap<UrlBuf, Folder>);
impl Deref for History {
type Target = HashMap<UrlBuf, Folder>;
fn deref(&self) -> &Self::Target { &self.0 }
}
impl DerefMut for History {
fn deref_mut(&mut self) -> &mut Self::Target { &mut self.0 }
}
impl History {
pub fn remove_or(&mut self, url: impl AsUrl) -> Folder {
let url = url.as_url();
self.0.remove(&url).unwrap_or_else(|| Folder::from(url))
}
}
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-core/src/tab/preference.rs | yazi-core/src/tab/preference.rs | use yazi_config::YAZI;
use yazi_fs::{FilesSorter, SortBy};
#[derive(Clone, PartialEq)]
pub struct Preference {
// Sorting
pub sort_by: SortBy,
pub sort_sensitive: bool,
pub sort_reverse: bool,
pub sort_dir_first: bool,
pub sort_translit: bool,
// Display
pub linemode: String,
pub show_hidden: bool,
}
impl Default for Preference {
fn default() -> Self {
Self {
// Sorting
sort_by: YAZI.mgr.sort_by.get(),
sort_sensitive: YAZI.mgr.sort_sensitive.get(),
sort_reverse: YAZI.mgr.sort_reverse.get(),
sort_dir_first: YAZI.mgr.sort_dir_first.get(),
sort_translit: YAZI.mgr.sort_translit.get(),
// Display
linemode: YAZI.mgr.linemode.clone(),
show_hidden: YAZI.mgr.show_hidden.get(),
}
}
}
impl From<&Preference> for FilesSorter {
fn from(value: &Preference) -> Self {
Self {
by: value.sort_by,
sensitive: value.sort_sensitive,
reverse: value.sort_reverse,
dir_first: value.sort_dir_first,
translit: value.sort_translit,
}
}
}
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-core/src/tab/mod.rs | yazi-core/src/tab/mod.rs | yazi_macro::mod_flat!(backstack finder folder history mode preference preview selected tab);
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-core/src/tab/selected.rs | yazi-core/src/tab/selected.rs | use std::ops::Deref;
use hashbrown::HashMap;
use indexmap::IndexMap;
use yazi_fs::FilesOp;
use yazi_shared::{timestamp_us, url::{Url, UrlBuf, UrlBufCov, UrlCov}};
#[derive(Default)]
pub struct Selected {
inner: IndexMap<UrlBufCov, u64>,
parents: HashMap<UrlBufCov, usize>,
}
impl Selected {
pub fn len(&self) -> usize { self.inner.len() }
pub fn is_empty(&self) -> bool { self.inner.is_empty() }
pub fn values(&self) -> impl Iterator<Item = &UrlBuf> { self.inner.keys().map(Deref::deref) }
pub fn contains<'a>(&self, url: impl Into<Url<'a>>) -> bool {
self.inner.contains_key(&UrlCov::new(url))
}
pub fn add<'a>(&mut self, url: impl Into<Url<'a>>) -> bool { self.add_same([url]) == 1 }
pub fn add_many<'a, I, T>(&mut self, urls: I) -> usize
where
I: IntoIterator<Item = T>,
T: Into<Url<'a>>,
{
let mut grouped: HashMap<_, Vec<_>> = Default::default();
for url in urls.into_iter().map(Into::into) {
if let Some(p) = url.parent() {
grouped.entry(p).or_default().push(url);
}
}
grouped.into_values().map(|v| self.add_same(v)).sum()
}
fn add_same<'a, I, T>(&mut self, urls: I) -> usize
where
I: IntoIterator<Item = T>,
T: Into<Url<'a>>,
{
// If it has appeared as a parent
let urls: Vec<_> =
urls.into_iter().map(UrlCov::new).filter(|u| !self.parents.contains_key(u)).collect();
if urls.is_empty() {
return 0;
}
// If it has appeared as a child
let mut parent = urls[0].parent();
let mut parents = vec![];
while let Some(u) = parent {
if self.inner.contains_key(&UrlCov::new(u)) {
return 0;
}
parent = u.parent();
parents.push(u);
}
let (now, len) = (timestamp_us(), self.inner.len());
self.inner.extend(urls.iter().enumerate().map(|(i, u)| (u.into(), now + i as u64)));
for u in parents {
*self.parents.entry_ref(&UrlCov::new(u)).or_default() += self.inner.len() - len;
}
urls.len()
}
#[inline]
pub fn remove<'a>(&mut self, url: impl Into<Url<'a>> + Clone) -> bool {
self.remove_same([url]) == 1
}
pub fn remove_many<'a, I, T>(&mut self, urls: I) -> usize
where
I: IntoIterator<Item = T>,
T: Into<Url<'a>>,
{
let mut grouped: HashMap<_, Vec<_>> = Default::default();
for url in urls.into_iter().map(Into::into) {
if let Some(p) = url.parent() {
grouped.entry(p).or_default().push(url);
}
}
let affected = grouped.into_values().map(|v| self.remove_same(v)).sum();
if affected > 0 {
self.inner.sort_unstable_by(|_, a, _, b| a.cmp(b));
}
affected
}
fn remove_same<'a, I, T>(&mut self, urls: I) -> usize
where
I: IntoIterator<Item = T>,
T: Into<Url<'a>> + Clone,
{
let mut it = urls.into_iter().peekable();
let Some(first) = it.peek().cloned().map(UrlCov::new) else { return 0 };
let count = it.filter_map(|u| self.inner.swap_remove(&UrlCov::new(u))).count();
if count == 0 {
return 0;
}
// FIXME: use UrlCov::parent() instead
let mut parent = first.parent();
while let Some(u) = parent {
let n = self.parents.get_mut(&UrlCov::new(u)).unwrap();
*n -= count;
if *n == 0 {
self.parents.remove(&UrlCov::new(u));
}
parent = u.parent();
}
count
}
pub fn clear(&mut self) {
self.inner.clear();
self.parents.clear();
}
pub fn apply_op(&mut self, op: &FilesOp) {
let (removal, addition) = op.diff_recoverable(|u| self.contains(u));
if !removal.is_empty() {
self.remove_many(&removal);
}
if !addition.is_empty() {
self.add_many(&addition);
}
}
}
#[cfg(test)]
mod tests {
use std::path::Path;
use super::*;
#[test]
fn test_insert_non_conflicting() {
let mut s = Selected::default();
assert!(s.add(Path::new("/a/b")));
assert!(s.add(Path::new("/c/d")));
assert_eq!(s.inner.len(), 2);
}
#[test]
fn test_insert_conflicting_parent() {
let mut s = Selected::default();
assert!(s.add(Path::new("/a")));
assert!(!s.add(Path::new("/a/b")));
}
#[test]
fn test_insert_conflicting_child() {
let mut s = Selected::default();
assert!(s.add(Path::new("/a/b/c")));
assert!(!s.add(Path::new("/a/b")));
assert!(s.add(Path::new("/a/b/d")));
}
#[test]
fn test_remove() {
let mut s = Selected::default();
assert!(s.add(Path::new("/a/b")));
assert!(!s.remove(Path::new("/a/c")));
assert!(s.remove(Path::new("/a/b")));
assert!(!s.remove(Path::new("/a/b")));
assert!(s.inner.is_empty());
assert!(s.parents.is_empty());
}
#[test]
fn insert_many_success() {
let mut s = Selected::default();
assert_eq!(
3,
s.add_same([
Path::new("/parent/child1"),
Path::new("/parent/child2"),
Path::new("/parent/child3")
])
);
}
#[test]
fn insert_many_with_existing_parent_fails() {
let mut s = Selected::default();
s.add(Path::new("/parent"));
assert_eq!(0, s.add_same([Path::new("/parent/child1"), Path::new("/parent/child2")]));
}
#[test]
fn insert_many_with_existing_child_fails() {
let mut s = Selected::default();
s.add(Path::new("/parent/child1"));
assert_eq!(2, s.add_same([Path::new("/parent/child1"), Path::new("/parent/child2")]));
}
#[test]
fn insert_many_empty_urls_list() {
let mut s = Selected::default();
assert_eq!(0, s.add_same([] as [Url; 0]));
}
#[test]
fn insert_many_with_parent_as_child_of_another_url() {
let mut s = Selected::default();
s.add(Path::new("/parent/child"));
assert_eq!(
0,
s.add_same([Path::new("/parent/child/child1"), Path::new("/parent/child/child2")])
);
}
#[test]
fn insert_many_with_direct_parent_fails() {
let mut s = Selected::default();
s.add(Path::new("/a"));
assert_eq!(0, s.add_same([Path::new("/a/b")]));
}
#[test]
fn insert_many_with_nested_child_fails() {
let mut s = Selected::default();
s.add(Path::new("/a/b"));
assert_eq!(0, s.add_same([Path::new("/a")]));
assert_eq!(1, s.add_same([Path::new("/b"), Path::new("/a")]));
}
#[test]
fn insert_many_sibling_directories_success() {
let mut s = Selected::default();
assert_eq!(2, s.add_same([Path::new("/a/b"), Path::new("/a/c")]));
}
#[test]
fn insert_many_with_grandchild_fails() {
let mut s = Selected::default();
s.add(Path::new("/a/b"));
assert_eq!(0, s.add_same([Path::new("/a/b/c")]));
}
#[test]
fn test_insert_many_with_remove() {
let mut s = Selected::default();
let child1 = Path::new("/parent/child1");
let child2 = Path::new("/parent/child2");
let child3 = Path::new("/parent/child3");
assert_eq!(3, s.add_same([child1, child2, child3]));
assert!(s.remove(child1));
assert_eq!(s.inner.len(), 2);
assert!(!s.parents.is_empty());
assert!(s.remove(child2));
assert!(!s.parents.is_empty());
assert!(s.remove(child3));
assert!(s.inner.is_empty());
assert!(s.parents.is_empty());
}
}
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-core/src/tab/mode.rs | yazi-core/src/tab/mode.rs | use std::{collections::BTreeSet, fmt::Display, mem};
#[derive(Clone, Debug, Default, Eq, PartialEq)]
pub enum Mode {
#[default]
Normal,
Select(usize, BTreeSet<usize>),
Unset(usize, BTreeSet<usize>),
}
impl Mode {
pub fn visual_mut(&mut self) -> Option<(usize, &mut BTreeSet<usize>)> {
match self {
Self::Normal => None,
Self::Select(start, indices) => Some((*start, indices)),
Self::Unset(start, indices) => Some((*start, indices)),
}
}
pub fn take_visual(&mut self) -> Option<(usize, BTreeSet<usize>)> {
match mem::take(self) {
Self::Normal => None,
Self::Select(start, indices) => Some((start, indices)),
Self::Unset(start, indices) => Some((start, indices)),
}
}
}
impl Mode {
pub fn is_select(&self) -> bool { matches!(self, Self::Select(..)) }
pub fn is_unset(&self) -> bool { matches!(self, Self::Unset(..)) }
pub fn is_visual(&self) -> bool { matches!(self, Self::Select(..) | Self::Unset(..)) }
}
impl Display for Mode {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.write_str(match self {
Self::Normal => "normal",
Self::Select(..) => "select",
Self::Unset(..) => "unset",
})
}
}
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-core/src/tab/folder.rs | yazi-core/src/tab/folder.rs | use std::mem;
use yazi_config::{LAYOUT, YAZI};
use yazi_dds::Pubsub;
use yazi_fs::{File, Files, FilesOp, FolderStage, cha::Cha};
use yazi_macro::err;
use yazi_parser::Step;
use yazi_proxy::MgrProxy;
use yazi_shared::{Id, path::{AsPath, PathBufDyn, PathDyn}, url::UrlBuf};
use yazi_widgets::Scrollable;
pub struct Folder {
pub url: UrlBuf,
pub cha: Cha,
pub files: Files,
pub stage: FolderStage,
pub offset: usize,
pub cursor: usize,
pub page: usize,
pub trace: Option<PathBufDyn>,
}
impl Default for Folder {
fn default() -> Self {
Self {
url: Default::default(),
cha: Default::default(),
files: Files::new(YAZI.mgr.show_hidden.get()),
stage: Default::default(),
offset: Default::default(),
cursor: Default::default(),
page: Default::default(),
trace: Default::default(),
}
}
}
impl<T: Into<UrlBuf>> From<T> for Folder {
fn from(value: T) -> Self { Self { url: value.into(), ..Default::default() } }
}
impl Folder {
pub fn update(&mut self, op: FilesOp) -> bool {
let (stage, revision) = (self.stage.clone(), self.files.revision);
match op {
FilesOp::Full(_, _, cha) => {
(self.cha, self.stage) = (cha, FolderStage::Loaded);
}
FilesOp::Part(_, ref files, _) if files.is_empty() => {
(self.cha, self.stage) = (Cha::default(), FolderStage::Loading);
}
FilesOp::Part(_, _, ticket) if ticket == self.files.ticket() => {
self.stage = FolderStage::Loading;
}
FilesOp::Done(_, cha, ticket) if ticket == self.files.ticket() => {
(self.cha, self.stage) = (cha, FolderStage::Loaded);
}
FilesOp::IOErr(_, ref err) => {
(self.cha, self.stage) = (Cha::default(), FolderStage::Failed(err.clone()));
}
_ => {}
}
match op {
FilesOp::Full(_, files, _) => self.files.update_full(files),
FilesOp::Part(_, files, ticket) => self.files.update_part(files, ticket),
FilesOp::Done(..) => {}
FilesOp::Size(_, sizes) => self.files.update_size(sizes),
FilesOp::IOErr(..) => self.files.update_ioerr(),
FilesOp::Creating(_, files) => self.files.update_creating(files),
FilesOp::Deleting(_, urns) => {
let deleted = self.files.update_deleting(urns);
let delta = deleted.into_iter().filter(|&i| i < self.cursor).count() as isize;
self.arrow(-delta);
}
FilesOp::Updating(_, files) => _ = self.files.update_updating(files),
FilesOp::Upserting(_, files) => self.files.update_upserting(files),
}
self.trace = self.trace.take_if(|_| !self.files.is_empty() || self.stage.is_loading());
self.repos(None);
(&stage, revision) != (&self.stage, self.files.revision)
}
pub fn update_pub(&mut self, tab: Id, op: FilesOp) -> bool {
if self.update(op) {
err!(Pubsub::pub_after_load(tab, &self.url, &self.stage));
return true;
}
false
}
pub fn arrow(&mut self, step: impl Into<Step>) -> bool {
let mut b = if self.files.is_empty() {
(mem::take(&mut self.cursor), mem::take(&mut self.offset)) != (0, 0)
} else {
self.scroll(step)
};
self.trace = self.hovered().filter(|_| b).map(|h| h.urn().into()).or(self.trace.take());
b |= self.squeeze_offset();
self.sync_page(false);
b
}
pub fn hover(&mut self, urn: PathDyn) -> bool {
if self.hovered().map(|h| h.urn()) == Some(urn) {
return self.arrow(0);
}
let new = self.files.position(urn).unwrap_or(self.cursor) as isize;
self.arrow(new - self.cursor as isize)
}
pub fn repos(&mut self, urn: Option<PathDyn>) -> bool {
if let Some(u) = urn {
self.hover(u)
} else if let Some(u) = &self.trace {
self.hover(u.clone().as_path())
} else {
self.arrow(0)
}
}
pub fn sync_page(&mut self, force: bool) {
let limit = LAYOUT.get().folder_limit();
if limit == 0 {
return;
}
let new = self.cursor / limit;
if mem::replace(&mut self.page, new) != new || force {
MgrProxy::update_paged_by(new, &self.url);
}
}
fn squeeze_offset(&mut self) -> bool {
let old = self.offset;
let len = self.files.len();
let limit = LAYOUT.get().folder_limit();
let scrolloff = (limit / 2).min(YAZI.mgr.scrolloff.get() as usize);
self.offset = if self.cursor < (self.offset + limit).min(len).saturating_sub(scrolloff) {
len.saturating_sub(limit).min(self.offset)
} else {
len.saturating_sub(limit).min(self.cursor.saturating_sub(limit) + 1 + scrolloff)
};
old != self.offset
}
}
impl Folder {
#[inline]
pub fn hovered(&self) -> Option<&File> { self.files.get(self.cursor) }
#[inline]
pub fn hovered_mut(&mut self) -> Option<&mut File> { self.files.get_mut(self.cursor) }
pub fn paginate(&self, page: usize) -> &[File] {
let len = self.files.len();
let limit = LAYOUT.get().folder_limit();
let start = (page.saturating_sub(1) * limit).min(len.saturating_sub(1));
let end = ((page + 2) * limit).min(len);
&self.files[start..end]
}
}
impl Scrollable for Folder {
fn total(&self) -> usize { self.files.len() }
fn limit(&self) -> usize { LAYOUT.get().folder_limit() }
fn scrolloff(&self) -> usize { (self.limit() / 2).min(YAZI.mgr.scrolloff.get() as usize) }
fn cursor_mut(&mut self) -> &mut usize { &mut self.cursor }
fn offset_mut(&mut self) -> &mut usize { &mut self.offset }
}
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-core/src/tab/preview.rs | yazi-core/src/tab/preview.rs | use std::time::Duration;
use tokio::{pin, task::JoinHandle};
use tokio_stream::{StreamExt, wrappers::UnboundedReceiverStream};
use tokio_util::sync::CancellationToken;
use yazi_adapter::ADAPTOR;
use yazi_config::{LAYOUT, YAZI};
use yazi_fs::{File, Files, FilesOp, cha::Cha};
use yazi_macro::render;
use yazi_parser::mgr::PreviewLock;
use yazi_plugin::{external::Highlighter, isolate};
use yazi_shared::{pool::Symbol, url::{UrlBuf, UrlLike}};
use yazi_vfs::{VfsFiles, VfsFilesOp};
#[derive(Default)]
pub struct Preview {
pub lock: Option<PreviewLock>,
pub skip: usize,
previewer_ct: Option<CancellationToken>,
pub folder_lock: Option<UrlBuf>,
folder_loader: Option<JoinHandle<()>>,
}
impl Preview {
pub fn go(&mut self, file: File, mime: Symbol<str>, force: bool) {
if mime.is_empty() {
return; // Wait till mimetype is resolved to avoid flickering
} else if !force && self.same_lock(&file, &mime) {
return;
}
let Some(previewer) = YAZI.plugin.previewer(&file, &mime) else {
return self.reset();
};
self.abort();
self.previewer_ct = isolate::peek(&previewer.run, file, mime, self.skip);
}
pub fn go_folder(&mut self, file: File, dir: Option<Cha>, mime: Symbol<str>, force: bool) {
if !file.url.is_internal() {
return self.go(file, mime, force);
} else if self.folder_lock.as_ref() == Some(&file.url) {
return self.go(file, mime, force);
}
let wd = file.url_owned();
self.go(file, mime, force);
self.folder_lock = Some(wd.clone());
self.folder_loader.take().map(|h| h.abort());
self.folder_loader = Some(tokio::spawn(async move {
let Some(new) = Files::assert_stale(&wd, dir.unwrap_or_default()).await else { return };
let rx = match Files::from_dir(&wd).await {
Ok(rx) => rx,
Err(e) => return FilesOp::issue_error(&wd, e).await,
};
let stream =
UnboundedReceiverStream::new(rx).chunks_timeout(50000, Duration::from_millis(500));
pin!(stream);
let ticket = FilesOp::prepare(&wd);
while let Some(chunk) = stream.next().await {
FilesOp::Part(wd.clone(), chunk, ticket).emit();
}
FilesOp::Done(wd, new, ticket).emit();
}));
}
pub fn abort(&mut self) {
self.previewer_ct.take().map(|ct| ct.cancel());
Highlighter::abort();
}
pub fn reset(&mut self) {
self.abort();
ADAPTOR.get().image_hide().ok();
render!(self.lock.take().is_some())
}
pub fn reset_image(&mut self) {
self.abort();
ADAPTOR.get().image_hide().ok();
}
pub fn same_url(&self, url: &UrlBuf) -> bool { matches!(&self.lock, Some(l) if l.url == *url) }
pub fn same_file(&self, file: &File, mime: &str) -> bool {
self.same_url(&file.url)
&& matches!(&self.lock , Some(l) if l.cha.hits(file.cha) && l.mime == mime && *l.area == LAYOUT.get().preview)
}
pub fn same_lock(&self, file: &File, mime: &str) -> bool {
self.same_file(file, mime) && matches!(&self.lock, Some(l) if l.skip == self.skip)
}
pub fn same_folder(&self, url: &UrlBuf) -> bool { self.folder_lock.as_ref() == Some(url) }
}
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-core/src/tab/tab.rs | yazi-core/src/tab/tab.rs | use anyhow::Result;
use ratatui::layout::Rect;
use tokio::task::JoinHandle;
use yazi_adapter::Dimension;
use yazi_config::{LAYOUT, popup::{Origin, Position}};
use yazi_fs::File;
use yazi_shared::{Id, Ids, url::UrlBuf};
use super::{Backstack, Finder, Folder, History, Mode, Preference, Preview};
use crate::{spot::Spot, tab::Selected};
pub struct Tab {
pub id: Id,
pub mode: Mode,
pub pref: Preference,
pub current: Folder,
pub parent: Option<Folder>,
pub backstack: Backstack,
pub history: History,
pub selected: Selected,
pub spot: Spot,
pub preview: Preview,
pub finder: Option<Finder>,
pub search: Option<JoinHandle<Result<()>>>,
}
impl Default for Tab {
fn default() -> Self {
static IDS: Ids = Ids::new();
Self {
id: IDS.next(),
mode: Default::default(),
pref: Default::default(),
current: Default::default(),
parent: Default::default(),
backstack: Default::default(),
history: Default::default(),
selected: Default::default(),
spot: Default::default(),
preview: Default::default(),
finder: Default::default(),
search: Default::default(),
}
}
}
impl Tab {
pub fn shutdown(&mut self) {
self.search.take().map(|h| h.abort());
self.preview.reset();
}
}
impl Tab {
// --- Current
#[inline]
pub fn cwd(&self) -> &UrlBuf { &self.current.url }
#[inline]
pub fn hovered(&self) -> Option<&File> { self.current.hovered() }
#[inline]
pub fn hovered_mut(&mut self) -> Option<&mut File> { self.current.hovered_mut() }
pub fn hovered_rect(&self) -> Option<Rect> {
let y = self.current.files.position(self.hovered()?.urn())? - self.current.offset;
let mut rect = LAYOUT.get().current;
rect.y = rect.y.saturating_sub(1) + y as u16;
rect.height = 1;
Some(rect)
}
pub fn hovered_rect_based(&self, pos: Position) -> Rect {
let ws = Dimension::available().into();
if let Some(r) = self.hovered_rect() {
Position::sticky(ws, r, pos.offset)
} else {
Position::new(Origin::TopCenter, pos.offset).rect(ws)
}
}
pub fn selected_or_hovered(&self) -> Box<dyn Iterator<Item = &UrlBuf> + '_> {
if self.selected.is_empty() {
Box::new(self.hovered().map(|h| &h.url).into_iter())
} else {
Box::new(self.selected.values())
}
}
pub fn hovered_and_selected(&self) -> Box<dyn Iterator<Item = &UrlBuf> + '_> {
let Some(h) = self.hovered() else {
return Box::new([UrlBuf::new()].into_iter().chain(self.selected.values()));
};
if self.selected.is_empty() {
Box::new([&h.url, &h.url].into_iter())
} else {
Box::new([&h.url].into_iter().chain(self.selected.values()))
}
}
// --- History
#[inline]
pub fn hovered_folder(&self) -> Option<&Folder> {
self.hovered().filter(|&h| h.is_dir()).and_then(|h| self.history.get(&h.url))
}
#[inline]
pub fn hovered_folder_mut(&mut self) -> Option<&mut Folder> {
self.current.hovered_mut().filter(|h| h.is_dir()).and_then(|h| self.history.get_mut(&h.url))
}
}
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-plugin/src/lib.rs | yazi-plugin/src/lib.rs | yazi_macro::mod_pub!(bindings elements external fs isolate loader process pubsub runtime theme utils);
yazi_macro::mod_flat!(lua);
pub fn init() -> anyhow::Result<()> {
crate::loader::init();
crate::init_lua()?;
Ok(())
}
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-plugin/src/lua.rs | yazi-plugin/src/lua.rs | use anyhow::{Context, Result};
use futures::executor::block_on;
use mlua::Lua;
use yazi_binding::{Runtime, runtime_mut};
use yazi_boot::BOOT;
use yazi_macro::plugin_preset as preset;
use yazi_shared::RoCell;
pub static LUA: RoCell<Lua> = RoCell::new();
pub(super) fn init_lua() -> Result<()> {
LUA.init(Lua::new());
stage_1(&LUA).context("Lua setup failed")?;
stage_2(&LUA).context("Lua runtime failed")?;
Ok(())
}
fn stage_1(lua: &'static Lua) -> Result<()> {
lua.set_app_data(Runtime::new());
// Base
let globals = lua.globals();
globals.raw_set("ui", crate::elements::compose())?;
globals.raw_set("ya", crate::utils::compose(false))?;
globals.raw_set("fs", crate::fs::compose())?;
globals.raw_set("ps", crate::pubsub::compose())?;
globals.raw_set("rt", crate::runtime::compose())?;
globals.raw_set("th", crate::theme::compose())?;
yazi_binding::Error::install(lua)?;
yazi_binding::Cha::install(lua)?;
crate::loader::install(lua)?;
crate::process::install(lua)?;
yazi_binding::File::install(lua)?;
yazi_binding::Url::install(lua)?;
// Addons
lua.load(preset!("ya")).set_name("ya.lua").exec()?;
// Components
lua.load(preset!("components/current")).set_name("current.lua").exec()?;
lua.load(preset!("components/entity")).set_name("entity.lua").exec()?;
lua.load(preset!("components/header")).set_name("header.lua").exec()?;
lua.load(preset!("components/linemode")).set_name("linemode.lua").exec()?;
lua.load(preset!("components/marker")).set_name("marker.lua").exec()?;
lua.load(preset!("components/modal")).set_name("modal.lua").exec()?;
lua.load(preset!("components/parent")).set_name("parent.lua").exec()?;
lua.load(preset!("components/preview")).set_name("preview.lua").exec()?;
lua.load(preset!("components/progress")).set_name("progress.lua").exec()?;
lua.load(preset!("components/rail")).set_name("rail.lua").exec()?;
lua.load(preset!("components/root")).set_name("root.lua").exec()?;
lua.load(preset!("components/status")).set_name("status.lua").exec()?;
lua.load(preset!("components/tab")).set_name("tab.lua").exec()?;
lua.load(preset!("components/tabs")).set_name("tabs.lua").exec()?;
lua.load(preset!("components/tasks")).set_name("tasks.lua").exec()?;
Ok(())
}
fn stage_2(lua: &'static Lua) -> mlua::Result<()> {
lua.load(preset!("setup")).set_name("setup.lua").exec()?;
lua.load(preset!("compat")).set_name("compat.lua").exec()?;
if let Ok(b) = std::fs::read(BOOT.config_dir.join("init.lua")) {
block_on(lua.load(b).set_name("init.lua").exec_async())?;
}
runtime_mut!(lua)?.initing = false;
Ok(())
}
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-plugin/src/pubsub/pubsub.rs | yazi-plugin/src/pubsub/pubsub.rs | use mlua::{ExternalResult, Function, Lua, Value};
use yazi_binding::{Id, runtime};
use yazi_dds::ember::Ember;
/// Lua-facing facade over the DDS pub/sub system (`yazi_dds::Pubsub`).
pub struct Pubsub;

impl Pubsub {
/// `ps.pub(kind, value)`: broadcasts an ember of `kind` built from `value`.
pub(super) fn r#pub(lua: &Lua) -> mlua::Result<Function> {
lua.create_function(|lua, (kind, value): (mlua::String, Value)| {
yazi_dds::Pubsub::r#pub(Ember::from_lua(lua, &kind.to_str()?, value)?).into_lua_err()
})
}

/// `ps.pub_to(receiver, kind, value)`: sends an ember to a single peer id.
pub(super) fn pub_to(lua: &Lua) -> mlua::Result<Function> {
lua.create_function(|lua, (receiver, kind, value): (Id, mlua::String, Value)| {
yazi_dds::Pubsub::pub_to(*receiver, Ember::from_lua(lua, &kind.to_str()?, value)?)
.into_lua_err()
})
}

/// `ps.sub(kind, f)`: subscribes `f` to local events of `kind`.
/// Must run inside a sync plugin, and at most once per (plugin, kind).
pub(super) fn sub(lua: &Lua) -> mlua::Result<Function> {
lua.create_function(|lua, (kind, f): (mlua::String, Function)| {
let rt = runtime!(lua)?;
// `current()` is only Some while a sync plugin is executing.
let Some(cur) = rt.current() else {
return Err("`sub()` must be called in a sync plugin").into_lua_err();
};

// `sub` returns false when this (plugin, kind) pair is already registered.
if !yazi_dds::Pubsub::sub(cur, &kind.to_str()?, f) {
return Err("`sub()` called twice").into_lua_err();
}
Ok(())
})
}

/// `ps.sub_remote(kind, f)`: like `sub`, but for events from remote peers.
pub(super) fn sub_remote(lua: &Lua) -> mlua::Result<Function> {
lua.create_function(|lua, (kind, f): (mlua::String, Function)| {
let rt = runtime!(lua)?;
let Some(cur) = rt.current() else {
return Err("`sub_remote()` must be called in a sync plugin").into_lua_err();
};

if !yazi_dds::Pubsub::sub_remote(cur, &kind.to_str()?, f) {
return Err("`sub_remote()` called twice").into_lua_err();
}
Ok(())
})
}

/// `ps.unsub(kind)`: removes this plugin's local subscription for `kind`;
/// returns whether a subscription was actually removed.
pub(super) fn unsub(lua: &Lua) -> mlua::Result<Function> {
lua.create_function(|lua, kind: mlua::String| {
if let Some(cur) = runtime!(lua)?.current() {
Ok(yazi_dds::Pubsub::unsub(cur, &kind.to_str()?))
} else {
Err("`unsub()` must be called in a sync plugin").into_lua_err()
}
})
}

/// `ps.unsub_remote(kind)`: removes this plugin's remote subscription for `kind`.
pub(super) fn unsub_remote(lua: &Lua) -> mlua::Result<Function> {
lua.create_function(|lua, kind: mlua::String| {
if let Some(cur) = runtime!(lua)?.current() {
Ok(yazi_dds::Pubsub::unsub_remote(cur, &kind.to_str()?))
} else {
Err("`unsub_remote()` must be called in a sync plugin").into_lua_err()
}
})
}
}
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-plugin/src/pubsub/mod.rs | yazi-plugin/src/pubsub/mod.rs | use mlua::{IntoLua, Lua, Value};
use yazi_binding::{Composer, ComposerGet, ComposerSet};
yazi_macro::mod_flat!(pubsub);
/// Builds the lazy `ps` composer: each pub/sub function is created on first
/// access by key; unknown keys resolve to `nil`.
pub(super) fn compose() -> Composer<ComposerGet, ComposerSet> {
	fn get(lua: &Lua, key: &[u8]) -> mlua::Result<Value> {
		// Pick the factory first, then convert the resulting function to a Value.
		let factory = match key {
			b"pub" => Pubsub::r#pub(lua),
			b"pub_to" => Pubsub::pub_to(lua),
			b"sub" => Pubsub::sub(lua),
			b"sub_remote" => Pubsub::sub_remote(lua),
			b"unsub" => Pubsub::unsub(lua),
			b"unsub_remote" => Pubsub::unsub_remote(lua),
			_ => return Ok(Value::Nil),
		};
		factory?.into_lua(lua)
	}

	// Writes pass through unchanged.
	fn set(_: &Lua, _: &[u8], value: Value) -> mlua::Result<Value> { Ok(value) }

	Composer::new(get, set)
}
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-plugin/src/process/command.rs | yazi-plugin/src/process/command.rs | use std::{any::TypeId, ffi::OsStr, io, process::Stdio};
use mlua::{AnyUserData, ExternalError, IntoLua, IntoLuaMulti, Lua, MetaMethod, Table, UserData, Value};
use tokio::process::{ChildStderr, ChildStdin, ChildStdout};
use yazi_binding::Error;
use yazi_shared::wtf8::FromWtf8;
use super::{Child, output::Output};
use crate::process::Status;
/// Lua wrapper around `tokio::process::Command` with an optional memory cap.
pub struct Command {
// Builder for the child process; stdio defaults to null (see `install`).
inner: tokio::process::Command,
// Optional memory limit in bytes, enforced at spawn time (platform-specific).
memory: Option<usize>,
}

// Stdio modes exposed to Lua as `Command.NULL` / `Command.PIPED` / `Command.INHERIT`.
const NULL: u8 = 0;
const PIPED: u8 = 1;
const INHERIT: u8 = 2;
impl Command {
	/// Registers the `Command` constructor and its stdio constants in the Lua
	/// globals. Newly constructed commands kill the child on drop and start
	/// with all three stdio streams set to null.
	pub fn install(lua: &Lua) -> mlua::Result<()> {
		let new = lua.create_function(|_, (_, program): (Table, String)| {
			let mut inner = tokio::process::Command::new(program);
			inner.kill_on_drop(true).stdin(Stdio::null()).stdout(Stdio::null()).stderr(Stdio::null());
			Ok(Self { inner, memory: None })
		})?;

		let command = lua.create_table_from([
			// Stdio
			("NULL", NULL),
			("PIPED", PIPED),
			("INHERIT", INHERIT),
		])?;

		command.set_metatable(Some(lua.create_table_from([(MetaMethod::Call.name(), new)])?))?;
		lua.globals().raw_set("Command", command)
	}

	/// Spawns the child. If a memory cap is set, it's applied via `setrlimit`
	/// (address-space limit) in the pre-exec hook — best effort: a failing
	/// `setrlimit` doesn't abort the spawn.
	#[cfg(unix)]
	fn spawn(&mut self) -> io::Result<Child> {
		if let Some(max) = self.memory {
			// SAFETY: the pre-exec closure only performs the async-signal-safe
			// `setrlimit` syscall before `exec`.
			unsafe {
				self.inner.pre_exec(move || {
					let rlp = libc::rlimit { rlim_cur: max as _, rlim_max: max as _ };
					libc::setrlimit(libc::RLIMIT_AS, &rlp);
					Ok(())
				});
			}
		}
		self.inner.spawn().map(Child::new)
	}

	/// Spawns the child. If a memory cap is set, it's enforced through a
	/// Windows job object; the job handle is handed to `Child`, which closes
	/// it in its `Drop` impl.
	#[cfg(windows)]
	fn spawn(&mut self) -> io::Result<Child> {
		use std::os::windows::io::RawHandle;

		use windows_sys::Win32::Foundation::CloseHandle;
		use windows_sys::Win32::System::JobObjects::{AssignProcessToJobObject, CreateJobObjectW, JOB_OBJECT_LIMIT_PROCESS_MEMORY, JOBOBJECT_EXTENDED_LIMIT_INFORMATION, JobObjectExtendedLimitInformation, SetInformationJobObject};

		// Creates a job object capping process memory at `max` bytes and assigns
		// the child's `handle` to it. On success the caller owns the returned
		// job handle and must eventually close it.
		fn assign_job(max: usize, handle: RawHandle) -> io::Result<RawHandle> {
			unsafe {
				let job = CreateJobObjectW(std::ptr::null_mut(), std::ptr::null());
				if job.is_null() {
					return Err(io::Error::last_os_error());
				}

				let mut info: JOBOBJECT_EXTENDED_LIMIT_INFORMATION = std::mem::zeroed();
				info.BasicLimitInformation.LimitFlags = JOB_OBJECT_LIMIT_PROCESS_MEMORY;
				info.ProcessMemoryLimit = max;

				let result = SetInformationJobObject(
					job,
					JobObjectExtendedLimitInformation,
					&mut info as *mut _ as *mut _,
					std::mem::size_of_val(&info) as u32,
				);

				if result == 0 {
					// Capture the error before CloseHandle, which may clobber it.
					// FIX: previously the job handle leaked on this path.
					let e = io::Error::last_os_error();
					CloseHandle(job);
					Err(e)
				} else if AssignProcessToJobObject(job, handle) == 0 {
					// FIX: close the job handle on failure instead of leaking it.
					let e = io::Error::last_os_error();
					CloseHandle(job);
					Err(e)
				} else {
					Ok(job)
				}
			}
		}

		let child = self.inner.spawn()?;
		if let (Some(max), Some(handle)) = (self.memory, child.raw_handle()) {
			if let Ok(job) = assign_job(max, handle) {
				return Ok(Child::new(child, Some(job)));
			}
		}
		// No cap requested, or job assignment failed — run uncapped (best effort).
		Ok(Child::new(child, None))
	}

	/// Runs the command to completion, capturing stdout and stderr.
	async fn output(&mut self) -> io::Result<std::process::Output> {
		self.inner.stdout(Stdio::piped());
		self.inner.stderr(Stdio::piped());
		self.spawn()?.wait_with_output().await
	}

	/// Runs the command to completion, keeping the configured stdio modes and
	/// returning only the exit status.
	async fn status(&mut self) -> io::Result<std::process::ExitStatus> {
		self.spawn()?.status().await
	}
}
impl UserData for Command {
fn add_methods<M: mlua::UserDataMethods<Self>>(methods: &mut M) {
// Coerces a Lua value into a `Stdio`: either one of the integer constants
// (NULL/PIPED/INHERIT; anything else falls back to null), or a previously
// taken child stream (ChildStdin/ChildStdout/ChildStderr) for piping
// processes into each other.
#[inline]
fn make_stdio(v: Value) -> mlua::Result<Stdio> {
match v {
Value::Integer(n) => {
return Ok(match n as u8 {
PIPED => Stdio::piped(),
INHERIT => Stdio::inherit(),
_ => Stdio::null(),
});
}
// `take` moves the stream out of the userdata, so it can only be used once.
Value::UserData(ud) => match ud.type_id() {
Some(t) if t == TypeId::of::<ChildStdin>() => {
return Ok(ud.take::<ChildStdin>()?.try_into()?);
}
Some(t) if t == TypeId::of::<ChildStdout>() => {
return Ok(ud.take::<ChildStdout>()?.try_into()?);
}
Some(t) if t == TypeId::of::<ChildStderr>() => {
return Ok(ud.take::<ChildStderr>()?.try_into()?);
}
_ => {}
},
_ => {}
}
Err(
"must be one of Command.NULL, Command.PIPED, Command.INHERIT, or a ChildStdin, ChildStdout, or ChildStderr".into_lua_err(),
)
}

// `cmd:arg(...)`: with nil, returns the current argument list; with a string
// or a table of strings, appends them. Returns the command for chaining.
methods.add_function_mut("arg", |lua, (ud, arg): (AnyUserData, Value)| {
let mut me = ud.borrow_mut::<Self>()?;
match arg {
Value::Nil => return lua.create_sequence_from(me.inner.as_std().get_args())?.into_lua(lua),
Value::String(s) => {
me.inner.arg(OsStr::from_wtf8(&s.as_bytes())?);
}
Value::Table(t) => {
for s in t.sequence_values::<mlua::String>() {
me.inner.arg(OsStr::from_wtf8(&s?.as_bytes())?);
}
}
_ => Err("arg must be a string or table of strings".into_lua_err())?,
}
ud.into_lua(lua)
});
// `cmd:cwd(dir)`: sets the working directory of the child.
methods.add_function_mut("cwd", |_, (ud, dir): (AnyUserData, mlua::String)| {
ud.borrow_mut::<Self>()?.inner.current_dir(dir.to_str()?.as_ref());
Ok(ud)
});
// `cmd:env(key, value)`: sets one environment variable for the child.
methods.add_function_mut(
"env",
|_, (ud, key, value): (AnyUserData, mlua::String, mlua::String)| {
ud.borrow_mut::<Self>()?
.inner
.env(OsStr::from_wtf8(&key.as_bytes())?, OsStr::from_wtf8(&value.as_bytes())?);
Ok(ud)
},
);
// `cmd:stdin/stdout/stderr(mode)`: configures the respective stream.
methods.add_function_mut("stdin", |_, (ud, stdio): (AnyUserData, Value)| {
ud.borrow_mut::<Self>()?.inner.stdin(make_stdio(stdio)?);
Ok(ud)
});
methods.add_function_mut("stdout", |_, (ud, stdio): (AnyUserData, Value)| {
ud.borrow_mut::<Self>()?.inner.stdout(make_stdio(stdio)?);
Ok(ud)
});
methods.add_function_mut("stderr", |_, (ud, stdio): (AnyUserData, Value)| {
ud.borrow_mut::<Self>()?.inner.stderr(make_stdio(stdio)?);
Ok(ud)
});
// `cmd:memory(max)`: caps the child's memory usage at `max` bytes.
methods.add_function_mut("memory", |_, (ud, max): (AnyUserData, usize)| {
ud.borrow_mut::<Self>()?.memory = Some(max);
Ok(ud)
});
// The three run modes all return `(result, nil)` or `(nil, error)` pairs.
methods.add_method_mut("spawn", |lua, me, ()| match me.spawn() {
Ok(child) => child.into_lua_multi(lua),
Err(e) => (Value::Nil, Error::Io(e)).into_lua_multi(lua),
});
methods.add_async_method_mut("output", |lua, mut me, ()| async move {
match me.output().await {
Ok(output) => Output::new(output).into_lua_multi(&lua),
Err(e) => (Value::Nil, Error::Io(e)).into_lua_multi(&lua),
}
});
methods.add_async_method_mut("status", |lua, mut me, ()| async move {
match me.status().await {
Ok(status) => Status::new(status).into_lua_multi(&lua),
Err(e) => (Value::Nil, Error::Io(e)).into_lua_multi(&lua),
}
});
}
}
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-plugin/src/process/process.rs | yazi-plugin/src/process/process.rs | use mlua::Lua;
/// Installs all process-related bindings (currently just `Command`) into the
/// given Lua state.
pub fn install(lua: &Lua) -> mlua::Result<()> { super::Command::install(lua) }
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-plugin/src/process/status.rs | yazi-plugin/src/process/status.rs | use mlua::UserData;
/// Lua wrapper around a finished process's exit status.
pub struct Status {
inner: std::process::ExitStatus,
}

impl Status {
/// Wraps a raw `ExitStatus` for exposure to Lua.
pub fn new(inner: std::process::ExitStatus) -> Self { Self { inner } }
}

impl UserData for Status {
fn add_fields<F: mlua::UserDataFields<Self>>(fields: &mut F) {
// `status.success`: true when the process exited with code zero.
fields.add_field_method_get("success", |_, me| Ok(me.inner.success()));
// `status.code`: the exit code, or nil (e.g. terminated by a signal on Unix).
fields.add_field_method_get("code", |_, me| Ok(me.inner.code()));
}
}
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-plugin/src/process/mod.rs | yazi-plugin/src/process/mod.rs | yazi_macro::mod_flat!(child command output process status);
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-plugin/src/process/child.rs | yazi-plugin/src/process/child.rs | use std::{ops::DerefMut, process::ExitStatus, time::Duration};
use futures::future::try_join3;
use mlua::{AnyUserData, ExternalError, IntoLua, IntoLuaMulti, Table, UserData, UserDataMethods, Value};
use tokio::{io::{self, AsyncBufReadExt, AsyncReadExt, AsyncWriteExt, BufReader, BufWriter}, process::{ChildStderr, ChildStdin, ChildStdout}, select};
use yazi_binding::Error;
use super::Status;
use crate::process::Output;
/// A spawned child process together with its (optionally piped) stdio streams,
/// each detached from the inner handle so they can be taken independently.
pub struct Child {
inner: tokio::process::Child,
// Buffered handles over the piped streams; None when the stream isn't piped
// or has already been taken/closed.
stdin: Option<BufWriter<ChildStdin>>,
stdout: Option<BufReader<ChildStdout>>,
stderr: Option<BufReader<ChildStderr>>,
// Job object enforcing the memory cap (see Command::spawn); closed on drop.
#[cfg(windows)]
job_handle: Option<std::os::windows::io::RawHandle>,
}

#[cfg(windows)]
impl Drop for Child {
fn drop(&mut self) {
// Release the job object so the kernel object can be reclaimed.
if let Some(h) = self.job_handle.take() {
unsafe { windows_sys::Win32::Foundation::CloseHandle(h) };
}
}
}
impl Child {
/// Wraps a spawned child, moving its piped stdio streams (if any) into
/// buffered handles owned by this struct.
pub fn new(
mut inner: tokio::process::Child,
#[cfg(windows)] job_handle: Option<std::os::windows::io::RawHandle>,
) -> Self {
let stdin = inner.stdin.take().map(BufWriter::new);
let stdout = inner.stdout.take().map(BufReader::new);
let stderr = inner.stderr.take().map(BufReader::new);
Self {
inner,
stdin,
stdout,
stderr,
#[cfg(windows)]
job_handle,
}
}

/// Waits for the child to exit. Stdin is closed first so the child isn't
/// left blocked reading input; stdout/stderr stay open for later reads.
pub(super) async fn wait(&mut self) -> io::Result<ExitStatus> {
drop(self.stdin.take());
self.inner.wait().await
}

/// Waits for the child to exit, closing all three stdio streams first.
pub(super) async fn status(&mut self) -> io::Result<ExitStatus> {
drop(self.stdin.take());
drop(self.stdout.take());
drop(self.stderr.take());
self.inner.wait().await
}

/// Reads one line from whichever of stdout/stderr yields first.
/// Returns (bytes, 0) for stdout, (bytes, 1) for stderr, or (None, 2)
/// when both streams are exhausted or absent.
async fn read_line(&mut self) -> (Option<Vec<u8>>, u8) {
async fn read(r: Option<impl AsyncBufReadExt + Unpin>) -> Option<Vec<u8>> {
let mut buf = Vec::new();
match r?.read_until(b'\n', &mut buf).await {
Ok(0) | Err(_) => None,
Ok(_) => Some(buf),
}
}

// The `Some(_)` guards make an exhausted stream fall through to `else`
// instead of resolving the select with a None.
select! {
r @ Some(_) = read(self.stdout.as_mut()) => (r, 0u8),
r @ Some(_) = read(self.stderr.as_mut()) => (r, 1u8),
else => (None, 2u8),
}
}

/// Consumes the child, waiting for exit while draining stdout and stderr
/// to completion, and returns the collected `Output`.
pub(super) async fn wait_with_output(mut self) -> io::Result<std::process::Output> {
async fn read(r: &mut Option<impl AsyncBufReadExt + Unpin>) -> io::Result<Vec<u8>> {
let mut vec = Vec::new();
if let Some(r) = r.as_mut() {
r.read_to_end(&mut vec).await?;
}
Ok(vec)
}

// Ensure stdin is closed so the child isn't stuck waiting on input while the
// parent is waiting for it to exit.
drop(self.stdin.take());

// Drop happens after `try_join` due to <https://github.com/tokio-rs/tokio/issues/4309>
let mut stdout = self.stdout.take();
let mut stderr = self.stderr.take();

let result = try_join3(self.inner.wait(), read(&mut stdout), read(&mut stderr)).await?;
Ok(std::process::Output { status: result.0, stdout: result.1, stderr: result.2 })
}
}
impl UserData for Child {
fn add_methods<M: UserDataMethods<Self>>(methods: &mut M) {
// `child:id()`: OS process id, or nil once the child has been reaped.
methods.add_method("id", |_, me, ()| Ok(me.inner.id()));
// `child:read(len)`: reads up to `len` bytes from whichever of stdout/stderr
// is ready first. Returns (bytes, 0) for stdout, (bytes, 1) for stderr, or
// (empty, 2) when both are exhausted.
methods.add_async_method_mut("read", |_, mut me, len: usize| async move {
async fn read(r: Option<impl AsyncBufReadExt + Unpin>, len: usize) -> Option<Vec<u8>> {
let mut r = r?;
let mut buf = vec![0; len];
match r.read(&mut buf).await {
Ok(0) | Err(_) => return None,
Ok(n) => buf.truncate(n),
}
Some(buf)
}

let me = me.deref_mut();
Ok(select! {
Some(r) = read(me.stdout.as_mut(), len) => (r, 0u8),
Some(r) = read(me.stderr.as_mut(), len) => (r, 1u8),
else => (vec![], 2u8)
})
});
// `child:read_line()`: line-oriented variant of `read` with the same events.
methods.add_async_method_mut("read_line", |lua, mut me, ()| async move {
match me.read_line().await {
(Some(b), event) => (lua.create_string(b)?, event).into_lua_multi(&lua),
(None, event) => (Value::Nil, event).into_lua_multi(&lua),
}
});
// `child:read_line_with{timeout=ms}`: like `read_line`, but returns
// (nil, 3) if no line arrives within the timeout.
// TODO: deprecate this method
methods.add_async_method_mut("read_line_with", |lua, mut me, options: Table| async move {
let timeout = Duration::from_millis(options.raw_get("timeout")?);
let Ok(result) = tokio::time::timeout(timeout, me.read_line()).await else {
return (Value::Nil, 3u8).into_lua_multi(&lua);
};

match result {
(Some(b), event) => (lua.create_string(b)?, event).into_lua_multi(&lua),
(None, event) => (Value::Nil, event).into_lua_multi(&lua),
}
});
// `child:write_all(src)`: writes all bytes to the child's stdin buffer;
// errors if stdin was never piped or has been taken.
methods.add_async_method_mut("write_all", |lua, mut me, src: mlua::String| async move {
let Some(stdin) = &mut me.stdin else {
return Err("stdin is not piped".into_lua_err());
};
match stdin.write_all(&src.as_bytes()).await {
Ok(()) => true.into_lua_multi(&lua),
Err(e) => (false, Error::Io(e)).into_lua_multi(&lua),
}
});
// `child:flush()`: flushes buffered stdin data through to the child.
methods.add_async_method_mut("flush", |lua, mut me, ()| async move {
let Some(stdin) = &mut me.stdin else {
return Err("stdin is not piped".into_lua_err());
};
match stdin.flush().await {
Ok(()) => true.into_lua_multi(&lua),
Err(e) => (false, Error::Io(e)).into_lua_multi(&lua),
}
});
// `child:wait()`: closes stdin and waits for exit, returning a Status.
methods.add_async_method_mut("wait", |lua, mut me, ()| async move {
match me.wait().await {
Ok(status) => Status::new(status).into_lua_multi(&lua),
Err(e) => (Value::Nil, Error::Io(e)).into_lua_multi(&lua),
}
});
// `child:wait_with_output()`: consumes the child (note `ud.take`), draining
// stdout/stderr and returning a full Output.
methods.add_async_function("wait_with_output", |lua, ud: AnyUserData| async move {
match ud.take::<Self>()?.wait_with_output().await {
Ok(output) => Output::new(output).into_lua_multi(&lua),
Err(e) => (Value::Nil, Error::Io(e)).into_lua_multi(&lua),
}
});
// `child:try_wait()`: non-blocking poll; nil when the child is still running.
methods.add_async_method_mut("try_wait", |lua, mut me, ()| async move {
match me.inner.try_wait() {
Ok(Some(status)) => Status::new(status).into_lua_multi(&lua),
Ok(None) => Value::Nil.into_lua_multi(&lua),
Err(e) => (Value::Nil, Error::Io(e)).into_lua_multi(&lua),
}
});
// `child:start_kill()`: requests termination without waiting for it.
methods.add_method_mut("start_kill", |lua, me, ()| match me.inner.start_kill() {
Ok(_) => true.into_lua_multi(lua),
Err(e) => (false, Error::Io(e)).into_lua_multi(lua),
});
// `child:take_stdin/stdout/stderr()`: moves the raw stream out (unbuffered)
// so it can be fed into another Command; nil if already taken or not piped.
methods.add_method_mut("take_stdin", |lua, me, ()| match me.stdin.take() {
Some(stdin) => lua.create_any_userdata(stdin.into_inner())?.into_lua(lua),
None => Ok(Value::Nil),
});
methods.add_method_mut("take_stdout", |lua, me, ()| match me.stdout.take() {
Some(stdout) => lua.create_any_userdata(stdout.into_inner())?.into_lua(lua),
None => Ok(Value::Nil),
});
methods.add_method_mut("take_stderr", |lua, me, ()| match me.stderr.take() {
Some(stderr) => lua.create_any_userdata(stderr.into_inner())?.into_lua(lua),
None => Ok(Value::Nil),
});
}
}
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-plugin/src/process/output.rs | yazi-plugin/src/process/output.rs | use mlua::{UserData, Value};
use yazi_binding::cached_field;
use super::Status;
/// Lua wrapper around `std::process::Output`, with each field converted to a
/// Lua value lazily and cached after first access (see `cached_field!`).
pub struct Output {
inner: std::process::Output,
// Cached Lua values for the three fields; populated on first access.
v_status: Option<Value>,
v_stdout: Option<Value>,
v_stderr: Option<Value>,
}

impl Output {
/// Wraps a raw `Output` with empty field caches.
pub fn new(inner: std::process::Output) -> Self {
Self { inner, v_status: None, v_stdout: None, v_stderr: None }
}
}

impl UserData for Output {
fn add_fields<F: mlua::UserDataFields<Self>>(fields: &mut F) {
fields.add_field_method_get("status", |_, me| Ok(Status::new(me.inner.status)));
fields.add_field_method_get("stdout", |lua, me| lua.create_string(&me.inner.stdout));
fields.add_field_method_get("stderr", |lua, me| lua.create_string(&me.inner.stderr));
}
}
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-plugin/src/loader/mod.rs | yazi-plugin/src/loader/mod.rs | yazi_macro::mod_flat!(chunk loader require);
// Eagerly materializes the global plugin-loader cache with its preset entries.
pub(super) fn init() { LOADER.with(<_>::default); }
// Replaces Lua's `require` with the plugin-aware implementation.
pub(super) fn install(lua: &mlua::Lua) -> mlua::Result<()> { Require::install(lua) }
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-plugin/src/loader/require.rs | yazi-plugin/src/loader/require.rs | use std::{borrow::Cow, sync::Arc};
use mlua::{ExternalResult, Function, IntoLua, Lua, MetaMethod, MultiValue, ObjectLike, Table, Value};
use yazi_binding::{runtime, runtime_mut};
use super::LOADER;
/// The plugin-aware `require()` implementation installed into Lua globals.
pub(super) struct Require;

impl Require {
/// Overrides the global `require` with an async loader that resolves
/// relative ids, loads the plugin through LOADER, and wraps the returned
/// module table so calls are attributed to the owning plugin.
pub(super) fn install(lua: &Lua) -> mlua::Result<()> {
lua.globals().raw_set(
"require",
lua.create_async_function(|lua, id: mlua::String| async move {
let id = id.to_str()?;
let id = Self::absolute_id(&lua, &id)?;
LOADER.ensure(&id, |_| ()).await.into_lua_err()?;

// Push/pop the plugin id around loading so code running inside the
// module sees itself as the "current" plugin.
runtime_mut!(lua)?.push(&id);
let mod_ = LOADER.load(&lua, &id);
runtime_mut!(lua)?.pop();

Self::create_mt(&lua, id.into_owned(), mod_?)
})?,
)
}

/// Wraps a loaded module table in a proxy: reads of function members return
/// wrappers that track the calling plugin; writes go straight through to
/// the underlying `__mod` table.
fn create_mt(lua: &Lua, id: String, r#mod: Table) -> mlua::Result<Table> {
let id: Arc<str> = Arc::from(id);
let mt = lua.create_table_from([
(
MetaMethod::Index.name(),
lua.create_function(move |lua, (ts, key): (Table, mlua::String)| {
match ts.raw_get::<Table>("__mod")?.raw_get::<Value>(&key)? {
Value::Function(_) => {
Self::create_wrapper(lua, id.clone(), &key.to_str()?)?.into_lua(lua)
}
v => Ok(v),
}
})?,
),
(
MetaMethod::NewIndex.name(),
lua.create_function(move |_, (ts, key, value): (Table, mlua::String, Value)| {
ts.raw_get::<Table>("__mod")?.raw_set(key, value)
})?,
),
])?;

let ts = lua.create_table_from([("__mod", r#mod)])?;
ts.set_metatable(Some(mt))?;
Ok(ts)
}

/// Builds an async wrapper that calls method `f` on plugin `id`'s module,
/// with the runtime's current-plugin stack pushed for the duration.
fn create_wrapper(lua: &Lua, id: Arc<str>, f: &str) -> mlua::Result<Function> {
let f: Arc<str> = Arc::from(f);

lua.create_async_function(move |lua, args: MultiValue| {
let (id, f) = (id.clone(), f.clone());
async move {
let (r#mod, args) = Self::split_mod_and_args(&lua, &id, args)?;
runtime_mut!(lua)?.push(&id);
let result = r#mod.call_async_function::<MultiValue>(&f, args).await;
runtime_mut!(lua)?.pop();
result
}
})
}

/// Resolves the module table for a wrapped call. If the first argument is
/// the proxy table (a `module:method()` call), the real `__mod` table is
/// substituted for `self`; otherwise the module is looked up by id.
fn split_mod_and_args(
lua: &Lua,
id: &str,
mut args: MultiValue,
) -> mlua::Result<(Table, MultiValue)> {
let Some(front) = args.pop_front() else {
return Ok((LOADER.try_load(lua, id)?, args));
};
let Value::Table(tbl) = front else {
args.push_front(front);
return Ok((LOADER.try_load(lua, id)?, args));
};
Ok(if let Ok(r#mod) = tbl.raw_get::<Table>("__mod") {
args.push_front(Value::Table(r#mod.clone()));
(r#mod, args)
} else {
args.push_front(Value::Table(tbl));
(LOADER.try_load(lua, id)?, args)
})
}

/// Expands a leading-dot relative id (`.child`) against the current
/// plugin's root name; absolute ids pass through unchanged.
fn absolute_id<'a>(lua: &Lua, id: &'a str) -> mlua::Result<Cow<'a, str>> {
let Some(stripped) = id.strip_prefix('.') else { return Ok(id.into()) };

Ok(if let Some(cur) = runtime!(lua)?.current() {
format!("{}.{stripped}", cur.split('.').next().unwrap_or(cur)).into()
} else {
stripped.into()
})
}
}
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-plugin/src/loader/loader.rs | yazi-plugin/src/loader/loader.rs | use std::{borrow::Cow, ops::Deref};
use anyhow::{Context, Result, bail, ensure};
use hashbrown::HashMap;
use mlua::{ChunkMode, ExternalError, Lua, Table};
use parking_lot::RwLock;
use yazi_boot::BOOT;
use yazi_fs::provider::local::Local;
use yazi_macro::plugin_preset as preset;
use yazi_shared::{BytesExt, LOG_LEVEL, RoCell};
use super::Chunk;
// Global, lazily-initialized plugin loader (see loader/mod.rs `init`).
pub static LOADER: RoCell<Loader> = RoCell::new();

/// Cache of plugin chunks keyed by plugin id, preloaded with presets.
pub struct Loader {
cache: RwLock<HashMap<String, Chunk>>,
}

// Deref to the inner cache so callers can lock it directly.
impl Deref for Loader {
type Target = RwLock<HashMap<String, Chunk>>;

fn deref(&self) -> &Self::Target { &self.cache }
}
impl Default for Loader {
// Seeds the cache with every built-in preset plugin, compiled into the
// binary via `plugin_preset!`. User plugins are loaded on demand from disk.
fn default() -> Self {
let cache = HashMap::from_iter([
("archive".to_owned(), preset!("plugins/archive").into()),
("code".to_owned(), preset!("plugins/code").into()),
("dds".to_owned(), preset!("plugins/dds").into()),
("empty".to_owned(), preset!("plugins/empty").into()),
("extract".to_owned(), preset!("plugins/extract").into()),
("file".to_owned(), preset!("plugins/file").into()),
("folder".to_owned(), preset!("plugins/folder").into()),
("font".to_owned(), preset!("plugins/font").into()),
("fzf".to_owned(), preset!("plugins/fzf").into()),
("image".to_owned(), preset!("plugins/image").into()),
("json".to_owned(), preset!("plugins/json").into()),
("magick".to_owned(), preset!("plugins/magick").into()),
("mime".to_owned(), preset!("plugins/mime").into()), // TODO: remove this
("mime.dir".to_owned(), preset!("plugins/mime-dir").into()),
("mime.local".to_owned(), preset!("plugins/mime-local").into()),
("mime.remote".to_owned(), preset!("plugins/mime-remote").into()),
("noop".to_owned(), preset!("plugins/noop").into()),
("null".to_owned(), preset!("plugins/null").into()),
("pdf".to_owned(), preset!("plugins/pdf").into()),
("session".to_owned(), preset!("plugins/session").into()),
("svg".to_owned(), preset!("plugins/svg").into()),
("vfs".to_owned(), preset!("plugins/vfs").into()),
("video".to_owned(), preset!("plugins/video").into()),
("zoxide".to_owned(), preset!("plugins/zoxide").into()),
]);
Self { cache: RwLock::new(cache) }
}
}
impl Loader {
/// Ensures the chunk for `id` is in the cache (reading it from the plugin
/// directory if necessary), checks version compatibility, and applies `f`
/// to the chunk. `f` runs even when the chunk is incompatible; the error
/// takes precedence in the return value.
pub async fn ensure<F, T>(&self, id: &str, f: F) -> Result<T>
where
F: FnOnce(&Chunk) -> T,
{
let (id, plugin, entry) = Self::normalize_id(id)?;
if let Some(c) = self.cache.read().get(id) {
return Self::compatible_or_error(id, c).map(|_| f(c));
}

// Not cached: read `<plugin>.yazi/<entry>.lua` from the plugin dir.
let p = BOOT.plugin_dir.join(format!("{plugin}.yazi/{entry}.lua"));
let chunk = Local::regular(&p)
.read()
.await
.with_context(|| format!("Failed to load plugin from {p:?}"))?
.into();

// Cache the chunk even if incompatible, so the check isn't repeated.
let result = Self::compatible_or_error(id, &chunk);
let inspect = f(&chunk);
self.cache.write().insert(id.to_owned(), chunk);
result.map(|_| inspect)
}

/// Loads (or returns the already-loaded) module table for `id`, registering
/// it in `package.loaded` with its id stored under `_id`.
pub fn load(&self, lua: &Lua, id: &str) -> mlua::Result<Table> {
let (id, ..) = Self::normalize_id(id)?;
let loaded: Table = lua.globals().raw_get::<Table>("package")?.raw_get("loaded")?;
if let Ok(t) = loaded.raw_get(id) {
return Ok(t);
}

let t = self.load_once(lua, id)?;
t.raw_set("_id", lua.create_string(id)?)?;

loaded.raw_set(id, t.clone())?;
Ok(t)
}

/// Executes the cached chunk for `id`, bypassing `package.loaded`. Text
/// chunks are precompiled to bytecode in the cache on first execution so
/// subsequent loads skip parsing.
pub fn load_once(&self, lua: &Lua, id: &str) -> mlua::Result<Table> {
let (id, ..) = Self::normalize_id(id)?;
let mut mode = ChunkMode::Text;
// Read lock is released at the end of this match, before the write below.
let f = match self.cache.read().get(id) {
Some(c) => {
mode = c.mode;
lua.load(c).set_name(id).into_function()
}
None => Err(format!("Plugin `{id}` not found").into_lua_err()),
}?;

if mode != ChunkMode::Binary {
// Strip debug info unless logging is enabled.
let b = f.dump(LOG_LEVEL.get().is_none());
if let Some(c) = self.cache.write().get_mut(id) {
c.mode = ChunkMode::Binary;
c.bytes = Cow::Owned(b);
}
}

f.call(())
}

/// Returns the module table for `id` only if it's already in `package.loaded`.
pub fn try_load(&self, lua: &Lua, id: &str) -> mlua::Result<Table> {
let (id, ..) = Self::normalize_id(id)?;
lua.globals().raw_get::<Table>("package")?.raw_get::<Table>("loaded")?.raw_get(id)
}

/// Like `load`, but executes the caller-supplied `chunk` instead of the
/// cached one (still consulting/populating `package.loaded`).
pub fn load_with(&self, lua: &Lua, id: &str, chunk: &Chunk) -> mlua::Result<Table> {
let (id, ..) = Self::normalize_id(id)?;
let loaded: Table = lua.globals().raw_get::<Table>("package")?.raw_get("loaded")?;
if let Ok(t) = loaded.raw_get(id) {
return Ok(t);
}

let t: Table = lua.load(chunk).set_name(id).call(())?;
t.raw_set("_id", lua.create_string(id)?)?;

loaded.raw_set(id, t.clone())?;
Ok(t)
}

/// Errors when the chunk's `@since` version is newer than this Yazi build.
pub fn compatible_or_error(id: &str, chunk: &Chunk) -> Result<()> {
if chunk.compatible() {
return Ok(());
}

bail!(
"Plugin `{id}` requires at least Yazi {}, but your current version is Yazi {}.",
chunk.since,
yazi_boot::actions::Actions::version()
);
}

/// Splits `id` into (full id, plugin name, entry name). A trailing `.main`
/// is stripped; a bare name maps to entry "main". Both parts must be
/// kebab-case.
pub fn normalize_id(id: &str) -> anyhow::Result<(&str, &str, &str)> {
let id = id.trim_end_matches(".main");
let (plugin, entry) = if let Some((a, b)) = id.split_once(".") { (a, b) } else { (id, "main") };

ensure!(plugin.as_bytes().kebab_cased(), "Plugin name `{plugin}` must be in kebab-case");
ensure!(entry.as_bytes().kebab_cased(), "Entry name `{entry}` must be in kebab-case");

Ok((id, plugin, entry))
}
}
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-plugin/src/loader/chunk.rs | yazi-plugin/src/loader/chunk.rs | use std::borrow::Cow;
use mlua::{AsChunk, ChunkMode};
use yazi_shared::natsort;
/// A plugin's Lua source (or compiled bytecode) plus metadata parsed from
/// its leading `--- @…` annotation comments.
pub struct Chunk {
// Text initially; flipped to Binary after first compilation (see Loader).
pub mode: ChunkMode,
pub bytes: Cow<'static, [u8]>,
// Minimum Yazi version from `--- @since`, empty when unspecified.
pub since: String,
// Set by `--- @sync peek` / `--- @sync entry` annotations.
pub sync_peek: bool,
pub sync_entry: bool,
}
impl Chunk {
/// True when this build's version is >= the chunk's `@since` requirement
/// (natural-order comparison; an empty `since` always passes).
#[inline]
pub fn compatible(&self) -> bool {
let s = yazi_boot::actions::Actions::version();
natsort(s.as_bytes(), self.since.as_bytes(), false) != std::cmp::Ordering::Less
}

// Scans the leading `--- @key value` comment lines and records recognized
// annotations. Parsing stops at the first line that is neither blank nor a
// well-formed annotation comment.
fn analyze(&mut self) {
for line in self.bytes.split(|&b| b == b'\n') {
// Blank lines between annotations are allowed.
if line.trim_ascii().is_empty() {
continue;
};
// Annotations must start with the doc-comment marker `---`.
let Some(rest) = line.strip_prefix(b"---") else { break };

let rest = rest.trim_ascii();
// Split into `@key` and the remainder at the first space/tab.
let Some(i) = rest.iter().position(|&b| b == b' ' || b == b'\t') else { break };
match (rest[..i].trim_ascii(), rest[i..].trim_ascii()) {
(b"@sync", b"peek") => self.sync_peek = true,
(b"@sync", b"entry") => self.sync_entry = true,
// `@since` with no value is tolerated and skipped.
(b"@since", b"") => continue,
(b"@since", b) => self.since = String::from_utf8_lossy(b).to_string(),
// A key with no value ends the annotation block.
(_, []) => break,
// A bare `@` (empty key name) also ends the block.
(b, _) if b.strip_prefix(b"@").unwrap_or(b"").is_empty() => break,
// Unrecognized annotations are ignored.
_ => continue,
}
}
}
}
impl From<Cow<'static, [u8]>> for Chunk {
fn from(b: Cow<'static, [u8]>) -> Self {
let mut chunk = Self {
mode: ChunkMode::Text,
bytes: b,
since: String::new(),
sync_entry: false,
sync_peek: false,
};
chunk.analyze();
chunk
}
}
impl From<&'static [u8]> for Chunk {
fn from(b: &'static [u8]) -> Self { Self::from(Cow::Borrowed(b)) }
}
impl From<Vec<u8>> for Chunk {
fn from(b: Vec<u8>) -> Self { Self::from(Cow::Owned(b)) }
}
// Lets `lua.load(&chunk)` consume the cached bytes directly, preserving the
// text/binary mode so precompiled bytecode isn't re-parsed as source.
impl AsChunk for &Chunk {
fn mode(&self) -> Option<ChunkMode> { Some(self.mode) }

fn source<'a>(&self) -> std::io::Result<Cow<'a, [u8]>>
where
Self: 'a,
{
Ok(Cow::Borrowed(&self.bytes))
}
}
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-plugin/src/runtime/runtime.rs | yazi-plugin/src/runtime/runtime.rs | use mlua::{IntoLua, Lua, LuaSerdeExt, Value};
use yazi_binding::{Composer, ComposerGet, ComposerSet, SER_OPT, Url};
use yazi_boot::ARGS;
use yazi_config::YAZI;
/// Builds the lazy `rt` composer: each sub-table is created on first access
/// by key; unknown keys resolve to `nil`.
pub fn compose() -> Composer<ComposerGet, ComposerSet> {
	fn get(lua: &Lua, key: &[u8]) -> mlua::Result<Value> {
		// Each arm already yields a `Value`, so it's returned directly — the
		// previous trailing `.into_lua(lua)` was a redundant identity conversion.
		match key {
			b"args" => args().into_lua(lua),
			b"term" => super::term().into_lua(lua),
			b"mgr" => mgr().into_lua(lua),
			b"plugin" => super::plugin().into_lua(lua),
			b"preview" => preview().into_lua(lua),
			b"tasks" => tasks().into_lua(lua),
			_ => Ok(Value::Nil),
		}
	}

	// Writes pass through unchanged.
	fn set(_: &Lua, _: &[u8], value: Value) -> mlua::Result<Value> { Ok(value) }

	Composer::new(get, set)
}
/// `rt.args`: read-only view of the CLI arguments Yazi was started with.
fn args() -> Composer<ComposerGet, ComposerSet> {
fn get(lua: &Lua, key: &[u8]) -> mlua::Result<Value> {
match key {
b"entries" => lua.create_sequence_from(ARGS.entries.iter().map(Url::new))?.into_lua(lua),
b"cwd_file" => ARGS.cwd_file.as_ref().map(Url::new).into_lua(lua),
b"chooser_file" => ARGS.chooser_file.as_ref().map(Url::new).into_lua(lua),
_ => Ok(Value::Nil),
}
}

fn set(_: &Lua, _: &[u8], value: Value) -> mlua::Result<Value> { Ok(value) }

Composer::new(get, set)
}
/// `rt.mgr`: manager settings from the user's config. Mostly read-only;
/// `ratio` is the one writable key.
fn mgr() -> Composer<ComposerGet, ComposerSet> {
	fn get(lua: &Lua, key: &[u8]) -> mlua::Result<Value> {
		let m = &YAZI.mgr;
		// `to_value_with` already yields a `Value`; the former trailing
		// `.into_lua(lua)` was a redundant identity conversion and is removed.
		match key {
			b"ratio" => lua.to_value_with(&m.ratio, SER_OPT),
			b"sort_by" => lua.to_value_with(&m.sort_by, SER_OPT),
			b"sort_sensitive" => lua.to_value_with(&m.sort_sensitive, SER_OPT),
			b"sort_reverse" => lua.to_value_with(&m.sort_reverse, SER_OPT),
			b"sort_dir_first" => lua.to_value_with(&m.sort_dir_first, SER_OPT),
			b"sort_translit" => lua.to_value_with(&m.sort_translit, SER_OPT),
			b"linemode" => lua.to_value_with(&m.linemode, SER_OPT),
			b"show_hidden" => lua.to_value_with(&m.show_hidden, SER_OPT),
			b"show_symlink" => lua.to_value_with(&m.show_symlink, SER_OPT),
			b"scrolloff" => lua.to_value_with(&m.scrolloff, SER_OPT),
			b"mouse_events" => lua.to_value_with(&m.mouse_events, SER_OPT),
			b"title_format" => lua.to_value_with(&m.title_format, SER_OPT),
			_ => Ok(Value::Nil),
		}
	}

	fn set(lua: &Lua, key: &[u8], value: Value) -> mlua::Result<Value> {
		let m = &YAZI.mgr;
		Ok(match key {
			// Only `ratio` is writable; the stored Nil marks the key as consumed.
			b"ratio" => {
				m.ratio.set(lua.from_value(value)?);
				Value::Nil
			}
			_ => value,
		})
	}

	Composer::new(get, set)
}
/// `rt.preview`: read-only view of the preview settings from the user config.
fn preview() -> Composer<ComposerGet, ComposerSet> {
	fn get(lua: &Lua, key: &[u8]) -> mlua::Result<Value> {
		let p = &YAZI.preview;
		// `to_value_with` already yields a `Value`; the former trailing
		// `.into_lua(lua)` was a redundant identity conversion and is removed.
		match key {
			b"wrap" => lua.to_value_with(&p.wrap, SER_OPT),
			b"tab_size" => lua.to_value_with(&p.tab_size, SER_OPT),
			b"max_width" => lua.to_value_with(&p.max_width, SER_OPT),
			b"max_height" => lua.to_value_with(&p.max_height, SER_OPT),
			b"cache_dir" => lua.to_value_with(&p.cache_dir, SER_OPT),
			b"image_delay" => lua.to_value_with(&p.image_delay, SER_OPT),
			b"image_filter" => lua.to_value_with(&p.image_filter, SER_OPT),
			b"image_quality" => lua.to_value_with(&p.image_quality, SER_OPT),
			b"ueberzug_scale" => lua.to_value_with(&p.ueberzug_scale, SER_OPT),
			b"ueberzug_offset" => lua.to_value_with(&p.ueberzug_offset, SER_OPT),
			_ => Ok(Value::Nil),
		}
	}

	fn set(_: &Lua, _: &[u8], value: Value) -> mlua::Result<Value> { Ok(value) }

	Composer::new(get, set)
}
/// `rt.tasks`: read-only view of the task-scheduler settings from the config.
fn tasks() -> Composer<ComposerGet, ComposerSet> {
	fn get(lua: &Lua, key: &[u8]) -> mlua::Result<Value> {
		let t = &YAZI.tasks;
		// `to_value_with` already yields a `Value`; the former trailing
		// `.into_lua(lua)` was a redundant identity conversion and is removed.
		match key {
			b"micro_workers" => lua.to_value_with(&t.micro_workers, SER_OPT),
			b"macro_workers" => lua.to_value_with(&t.macro_workers, SER_OPT),
			b"bizarre_retry" => lua.to_value_with(&t.bizarre_retry, SER_OPT),
			b"image_alloc" => lua.to_value_with(&t.image_alloc, SER_OPT),
			b"image_bound" => lua.to_value_with(&t.image_bound, SER_OPT),
			b"suppress_preload" => lua.to_value_with(&t.suppress_preload, SER_OPT),
			_ => Ok(Value::Nil),
		}
	}

	fn set(_: &Lua, _: &[u8], value: Value) -> mlua::Result<Value> { Ok(value) }

	Composer::new(get, set)
}
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-plugin/src/runtime/mod.rs | yazi-plugin/src/runtime/mod.rs | yazi_macro::mod_flat!(plugin runtime term);
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-plugin/src/runtime/term.rs | yazi-plugin/src/runtime/term.rs | use mlua::{Function, IntoLua, IntoLuaMulti, Lua, Value};
use yazi_adapter::{Dimension, EMULATOR};
use yazi_binding::{Composer, ComposerGet, ComposerSet};
/// Builds the `rt.term` composer, exposing terminal-emulator facts
/// (background lightness, cell size) to Lua.
pub(super) fn term() -> Composer<ComposerGet, ComposerSet> {
	fn get(lua: &Lua, key: &[u8]) -> mlua::Result<Value> {
		if key == b"light" {
			EMULATOR.light.into_lua(lua)
		} else if key == b"cell_size" {
			cell_size(lua)?.into_lua(lua)
		} else {
			// Unknown keys resolve to nil.
			Ok(Value::Nil)
		}
	}

	// Writes are accepted but discarded: the emulator state is not settable.
	fn set(_: &Lua, _: &[u8], value: Value) -> mlua::Result<Value> { Ok(value) }

	Composer::new(get, set)
}
/// Returns a Lua function that yields the terminal cell size, or returns
/// nothing at all when the emulator does not report one.
fn cell_size(lua: &Lua) -> mlua::Result<Function> {
	lua.create_function(|lua, ()| {
		match Dimension::cell_size() {
			Some(size) => size.into_lua_multi(lua),
			// Empty multi-value, so the Lua caller sees zero return values.
			None => ().into_lua_multi(lua),
		}
	})
}
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-plugin/src/runtime/plugin.rs | yazi-plugin/src/runtime/plugin.rs | use mlua::{Function, IntoLua, Lua, UserData, Value};
use yazi_binding::{Composer, ComposerGet, ComposerSet, FileRef, cached_field};
use yazi_config::YAZI;
/// Builds the `rt.plugin` composer: lets Lua query which fetchers, spotter,
/// preloaders and previewer are configured for a given file and MIME type.
pub(super) fn plugin() -> Composer<ComposerGet, ComposerSet> {
	fn get(lua: &Lua, key: &[u8]) -> mlua::Result<Value> {
		let field = match key {
			b"fetchers" => fetchers(lua)?,
			b"spotter" => spotter(lua)?,
			b"preloaders" => preloaders(lua)?,
			b"previewer" => previewer(lua)?,
			// Unknown keys resolve to nil.
			_ => return Ok(Value::Nil),
		};
		field.into_lua(lua)
	}

	// Writes are accepted but discarded: plugin config is static.
	fn set(_: &Lua, _: &[u8], value: Value) -> mlua::Result<Value> { Ok(value) }

	Composer::new(get, set)
}
/// Lua function `(file, mime) -> {Fetcher, ...}`: the fetchers configured
/// for this file/MIME pair, as a Lua sequence.
fn fetchers(lua: &Lua) -> mlua::Result<Function> {
	lua.create_function(|lua, (file, mime): (FileRef, mlua::String)| {
		let mime = mime.to_str()?;
		let matched = YAZI.plugin.fetchers(&file, &mime).map(Fetcher::new);
		lua.create_sequence_from(matched)
	})
}
/// Lua function `(file, mime) -> Spotter?`: the spotter configured for this
/// file/MIME pair, or nil when none matches.
fn spotter(lua: &Lua) -> mlua::Result<Function> {
	lua.create_function(|_, (file, mime): (FileRef, mlua::String)| {
		let mime = mime.to_str()?;
		Ok(YAZI.plugin.spotter(&file, &mime).map(Spotter::new))
	})
}
/// Lua function `(file, mime) -> {Preloader, ...}`: the preloaders
/// configured for this file/MIME pair, as a Lua sequence.
fn preloaders(lua: &Lua) -> mlua::Result<Function> {
	lua.create_function(|lua, (file, mime): (FileRef, mlua::String)| {
		let mime = mime.to_str()?;
		let matched = YAZI.plugin.preloaders(&file, &mime).map(Preloader::new);
		lua.create_sequence_from(matched)
	})
}
/// Lua function `(file, mime) -> Previewer?`: the previewer configured for
/// this file/MIME pair, or nil when none matches.
fn previewer(lua: &Lua) -> mlua::Result<Function> {
	lua.create_function(|_, (file, mime): (FileRef, mlua::String)| {
		let mime = mime.to_str()?;
		Ok(YAZI.plugin.previewer(&file, &mime).map(Previewer::new))
	})
}
// --- Fetcher
// Lua userdata wrappers around the four statically-configured plugin kinds.
// Each wrapper exposes a single cached `cmd` field (the plugin command name).
// The `cached_field!` macro stores the lazily-built Lua value in the `v_cmd`
// field — the `v_<name>` field name is part of the macro's contract, so these
// structs must keep that exact field.
struct Fetcher {
	// Borrowed from the static YAZI config, hence `'static`.
	inner: &'static yazi_config::plugin::Fetcher,
	// Cache slot used by `cached_field!` for the `cmd` field.
	v_cmd: Option<Value>,
}
impl Fetcher {
	pub fn new(inner: &'static yazi_config::plugin::Fetcher) -> Self { Self { inner, v_cmd: None } }
}
impl UserData for Fetcher {
	fn add_fields<F: mlua::UserDataFields<Self>>(fields: &mut F) {
		// `cmd`: the fetcher's command name, built once then served from cache.
		cached_field!(fields, cmd, |lua, me| lua.create_string(&*me.inner.run.name));
	}
}
// --- Spotter
struct Spotter {
	inner: &'static yazi_config::plugin::Spotter,
	// Cache slot used by `cached_field!` for the `cmd` field.
	v_cmd: Option<Value>,
}
impl Spotter {
	pub fn new(inner: &'static yazi_config::plugin::Spotter) -> Self { Self { inner, v_cmd: None } }
}
impl UserData for Spotter {
	fn add_fields<F: mlua::UserDataFields<Self>>(fields: &mut F) {
		cached_field!(fields, cmd, |lua, me| lua.create_string(&*me.inner.run.name));
	}
}
// --- Preloader
struct Preloader {
	inner: &'static yazi_config::plugin::Preloader,
	// Cache slot used by `cached_field!` for the `cmd` field.
	v_cmd: Option<Value>,
}
impl Preloader {
	pub fn new(inner: &'static yazi_config::plugin::Preloader) -> Self { Self { inner, v_cmd: None } }
}
impl UserData for Preloader {
	fn add_fields<F: mlua::UserDataFields<Self>>(fields: &mut F) {
		cached_field!(fields, cmd, |lua, me| lua.create_string(&*me.inner.run.name));
	}
}
// --- Previewer
struct Previewer {
	inner: &'static yazi_config::plugin::Previewer,
	// Cache slot used by `cached_field!` for the `cmd` field.
	v_cmd: Option<Value>,
}
impl Previewer {
	pub fn new(inner: &'static yazi_config::plugin::Previewer) -> Self { Self { inner, v_cmd: None } }
}
impl UserData for Previewer {
	fn add_fields<F: mlua::UserDataFields<Self>>(fields: &mut F) {
		cached_field!(fields, cmd, |lua, me| lua.create_string(&*me.inner.run.name));
	}
}
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-plugin/src/external/fd.rs | yazi-plugin/src/external/fd.rs | use std::process::Stdio;
use anyhow::Result;
use tokio::{io::{AsyncBufReadExt, BufReader}, process::{Child, Command}, sync::mpsc::{self, UnboundedReceiver}};
use yazi_fs::{File, FsUrl};
use yazi_shared::url::{AsUrl, UrlBuf, UrlLike};
use yazi_vfs::VfsFile;
/// Options for an `fd` file-name search.
pub struct FdOpt {
	/// Directory the search is rooted at (`fd --base-directory`).
	pub cwd: UrlBuf,
	/// Whether hidden files are included in the results.
	pub hidden: bool,
	/// The pattern to search for; passed to `fd` as a regex.
	pub subject: String,
	/// Extra command-line arguments forwarded verbatim to `fd`.
	pub args: Vec<String>,
}
/// Spawns `fd` rooted at `opt.cwd` and streams every matching path back as a
/// `File` through an unbounded channel.
///
/// Returns an error only if neither the `fd` nor the `fdfind` binary could be
/// launched; per-line failures are silently skipped.
pub fn fd(opt: FdOpt) -> Result<UnboundedReceiver<File>> {
	// Debian/Ubuntu package the binary as `fdfind`, so fall back to that name.
	let mut child = spawn("fd", &opt).or_else(|_| spawn("fdfind", &opt))?;
	let stdout = child.stdout.take().unwrap();
	let (tx, rx) = mpsc::unbounded_channel();

	tokio::spawn(async move {
		let mut lines = BufReader::new(stdout).lines();
		while let Ok(Some(line)) = lines.next_line().await {
			// Output is relative to `--base-directory`; join it back onto the cwd.
			let url = match opt.cwd.try_join(line) {
				Ok(url) => url,
				Err(_) => continue,
			};
			if let Ok(file) = File::new(url).await {
				// A closed receiver just means the caller lost interest.
				tx.send(file).ok();
			}
		}
		// Reap the child so it doesn't linger as a zombie.
		child.wait().await.ok();
	});

	Ok(rx)
}
/// Builds and launches the `fd` process for the given search options.
/// Only stdout is captured; stderr is discarded.
fn spawn(program: &str, opt: &FdOpt) -> std::io::Result<Child> {
	let mut cmd = Command::new(program);
	cmd.arg("--base-directory").arg(&*opt.cwd.as_url().unified_path());
	cmd.arg("--regex");
	cmd.arg(if opt.hidden { "--hidden" } else { "--no-hidden" });
	// User-supplied flags go before the pattern so they can't be mistaken for it.
	cmd.args(&opt.args).arg(&opt.subject);
	// Kill the search when the handle is dropped (i.e. the caller gave up).
	cmd.kill_on_drop(true).stdout(Stdio::piped()).stderr(Stdio::null());
	cmd.spawn()
}
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-plugin/src/external/rg.rs | yazi-plugin/src/external/rg.rs | use std::process::Stdio;
use anyhow::Result;
use tokio::{io::{AsyncBufReadExt, BufReader}, process::Command, sync::mpsc::{self, UnboundedReceiver}};
use yazi_fs::{File, FsUrl};
use yazi_shared::url::{AsUrl, UrlBuf, UrlLike};
use yazi_vfs::VfsFile;
/// Options for a `rg` (ripgrep) content search.
pub struct RgOpt {
	/// Directory the search is rooted at.
	pub cwd: UrlBuf,
	/// Whether hidden files are included in the search.
	pub hidden: bool,
	/// The pattern to search file contents for.
	pub subject: String,
	/// Extra command-line arguments forwarded verbatim to `rg`.
	pub args: Vec<String>,
}
/// Runs `rg --files-with-matches` in `opt.cwd` and streams each file that
/// contains the pattern back as a `File` through an unbounded channel.
///
/// Returns an error only if `rg` could not be launched; per-line failures
/// are silently skipped.
pub fn rg(opt: RgOpt) -> Result<UnboundedReceiver<File>> {
	let mut cmd = Command::new("rg");
	cmd.args(["--color=never", "--files-with-matches", "--smart-case"]);
	cmd.arg(if opt.hidden { "--hidden" } else { "--no-hidden" });
	// User-supplied flags go before the pattern so they can't be mistaken for it.
	cmd.args(opt.args);
	cmd.arg(opt.subject);
	cmd.arg(&*opt.cwd.as_url().unified_path());
	// Kill the search when the handle is dropped; only stdout is consumed.
	cmd.kill_on_drop(true).stdout(Stdio::piped()).stderr(Stdio::null());

	let mut child = cmd.spawn()?;
	let stdout = child.stdout.take().unwrap();
	let (tx, rx) = mpsc::unbounded_channel();

	tokio::spawn(async move {
		let mut lines = BufReader::new(stdout).lines();
		while let Ok(Some(line)) = lines.next_line().await {
			// rg prints paths relative to the search root; join them back on.
			let url = match opt.cwd.try_join(line) {
				Ok(url) => url,
				Err(_) => continue,
			};
			if let Ok(file) = File::new(url).await {
				tx.send(file).ok();
			}
		}
		// Reap the child so it doesn't linger as a zombie.
		child.wait().await.ok();
	});

	Ok(rx)
}
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-plugin/src/external/highlighter.rs | yazi-plugin/src/external/highlighter.rs | use std::{borrow::Cow, io::Cursor, mem, path::{Path, PathBuf}, sync::OnceLock};
use anyhow::{Result, anyhow};
use ratatui::{layout::Size, text::{Line, Span, Text}};
use syntect::{LoadingError, dumps, easy::HighlightLines, highlighting::{self, Theme, ThemeSet}, parsing::{SyntaxReference, SyntaxSet}};
use tokio::io::{AsyncBufReadExt, AsyncSeekExt, BufReader};
use yazi_config::{THEME, YAZI, preview::PreviewWrap};
use yazi_fs::provider::{Provider, local::Local};
use yazi_shared::{Ids, errors::PeekError, push_printable_char};
static INCR: Ids = Ids::new();
static SYNTECT: OnceLock<(Theme, SyntaxSet)> = OnceLock::new();
/// Syntax-highlights a local text file for the preview pane, with support
/// for scrolling (`skip`), wrapping, and cancellation via `Highlighter::abort`.
pub struct Highlighter {
	// Local filesystem path of the file to highlight.
	path: PathBuf,
}
impl Highlighter {
	/// Creates a highlighter for the given local path.
	#[inline]
	pub fn new<P>(path: P) -> Self
	where
		P: Into<PathBuf>,
	{
		Self { path: path.into() }
	}

	/// Lazily initializes the shared syntect theme and syntax set (once per
	/// process, via `SYNTECT: OnceLock`).
	///
	/// The theme is loaded from the user's `THEME.mgr.syntect_theme` file,
	/// falling back to the bundled ANSI theme when that fails; syntaxes come
	/// from the prebuilt uncompressed dump.
	///
	/// # Panics
	/// Panics if the bundled fallback theme or syntax dump cannot be parsed
	/// (the two `unwrap()`s) — i.e. only on a corrupt build.
	pub fn init() -> (&'static Theme, &'static SyntaxSet) {
		let f = || {
			let theme = std::fs::File::open(&THEME.mgr.syntect_theme)
				.map_err(LoadingError::Io)
				.and_then(|f| ThemeSet::load_from_reader(&mut std::io::BufReader::new(f)))
				.or_else(|_| ThemeSet::load_from_reader(&mut Cursor::new(yazi_prebuilt::ansi_theme())));
			let syntaxes = dumps::from_uncompressed_data(yazi_prebuilt::syntaxes());
			(theme.unwrap(), syntaxes.unwrap())
		};
		let (theme, syntaxes) = SYNTECT.get_or_init(f);
		(theme, syntaxes)
	}

	/// Cancels any in-flight `highlight_with` job: bumping `INCR` invalidates
	/// the ticket those jobs captured, so they bail out at the next line.
	#[inline]
	pub fn abort() { INCR.next(); }

	/// Reads the file and produces up to `size.height` display lines starting
	/// at line `skip`, syntax-highlighted when a syntax can be determined and
	/// falling back to plain text otherwise.
	///
	/// Errors with "Binary file" if a NUL byte is seen in the first KiB, and
	/// with `PeekError::Exceed` when `skip` points past the end of the file
	/// (carrying the largest still-valid skip value).
	pub async fn highlight(&self, skip: usize, size: Size) -> Result<Text<'static>, PeekError> {
		let mut reader = BufReader::new(Local::regular(&self.path).open().await?);
		let syntax = Self::find_syntax(&self.path, &mut reader).await;
		// No syntax ⇒ render as plain text from the start.
		let mut plain = syntax.is_err();
		// `before`: lines above the viewport, replayed later to warm up the
		// highlighter state. `after`: the lines actually rendered.
		let mut before = Vec::with_capacity(if plain { 0 } else { skip });
		let mut after = Vec::with_capacity(size.height as _);
		// `i` counts display rows (wrap-aware), not raw file lines.
		let mut i = 0;
		let mut buf = vec![];
		let mut inspected = 0u16;
		while reader.read_until(b'\n', &mut buf).await.is_ok_and(|n| n > 0) {
			if Self::is_binary(&buf, &mut inspected) {
				return Err("Binary file".into());
			}
			// Very long or control-character lines force a downgrade to plain
			// text; the warm-up buffer is no longer needed then.
			if !plain && (buf.len() > 5000 || Self::contains_control_chars(&buf)) {
				plain = true;
				drop(mem::take(&mut before));
			}
			// Normalize CRLF endings to a bare LF.
			if buf.ends_with(b"\r\n") {
				buf.pop();
				buf.pop();
				buf.push(b'\n');
			}
			i += if i >= skip {
				// Inside the viewport: keep the line for rendering. Stray CRs
				// are mapped to LFs so they can't garble the terminal.
				buf.iter_mut().for_each(Self::carriage_return_to_line_feed);
				after.push(String::from_utf8_lossy(&buf).into_owned());
				Self::line_height(&after[after.len() - 1], size.width)
			} else if !plain {
				// Above the viewport with highlighting on: keep for warm-up.
				before.push(String::from_utf8_lossy(&buf).into_owned());
				Self::line_height(&before[before.len() - 1], size.width)
			} else if YAZI.preview.wrap == PreviewWrap::Yes {
				// Plain + wrapping: still need the real wrapped height.
				Self::line_height(&String::from_utf8_lossy(&buf), size.width)
			} else {
				1
			};
			buf.clear();
			// One viewport's worth collected — stop reading.
			if i > skip + size.height as usize {
				break;
			}
		}
		// Scrolled past EOF: report how far back the caller must clamp.
		if skip > 0 && i < skip + size.height as usize {
			return Err(PeekError::Exceed(i.saturating_sub(size.height as _)));
		}
		Ok(if plain {
			Text::from(Self::merge_highlight_lines(&after, YAZI.preview.tab_size))
		} else {
			Self::highlight_with(before, after, syntax.unwrap()).await?
		})
	}

	/// Highlights `after` on a blocking thread, first replaying `before` so the
	/// highlighter's internal state (multi-line strings, comments, …) is
	/// correct at the viewport's first line. Checks the `INCR` ticket between
	/// lines so `abort()` can cancel long jobs promptly.
	async fn highlight_with(
		before: Vec<String>,
		after: Vec<String>,
		syntax: &'static SyntaxReference,
	) -> Result<Text<'static>, PeekError> {
		let ticket = INCR.current();
		tokio::task::spawn_blocking(move || {
			let (theme, syntaxes) = Self::init();
			let mut h = HighlightLines::new(syntax, theme);
			for line in before {
				if ticket != INCR.current() {
					return Err("Highlighting cancelled".into());
				}
				// Result discarded: only the highlighter state matters here.
				h.highlight_line(&line, syntaxes).map_err(|e| anyhow!(e))?;
			}
			let indent = YAZI.preview.indent();
			let mut lines = Vec::with_capacity(after.len());
			for line in after {
				if ticket != INCR.current() {
					return Err("Highlighting cancelled".into());
				}
				let regions = h.highlight_line(&line, syntaxes).map_err(|e| anyhow!(e))?;
				lines.push(Self::to_line_widget(regions, &indent));
			}
			Ok(Text::from(lines))
		})
		.await?
	}

	/// Resolves a syntax definition for `path`: first the full file name
	/// treated as an extension (handles names like `Makefile`), then the real
	/// extension, and finally the file's first line (shebangs etc.). The
	/// reader is rewound after peeking at the first line.
	async fn find_syntax(
		path: &Path,
		reader: &mut BufReader<tokio::fs::File>,
	) -> Result<&'static SyntaxReference> {
		let (_, syntaxes) = Self::init();
		let name = path.file_name().map(|n| n.to_string_lossy()).unwrap_or_default();
		if let Some(s) = syntaxes.find_syntax_by_extension(&name) {
			return Ok(s);
		}
		let ext = path.extension().map(|e| e.to_string_lossy()).unwrap_or_default();
		if let Some(s) = syntaxes.find_syntax_by_extension(&ext) {
			return Ok(s);
		}
		let mut line = String::new();
		reader.read_line(&mut line).await?;
		reader.rewind().await?;
		syntaxes.find_syntax_by_first_line(&line).ok_or_else(|| anyhow!("No syntax found"))
	}

	/// Treats the file as binary if a NUL byte appears within the first 1024
	/// bytes read; `inspected` tracks the running total across calls.
	// NOTE(review): `buf.len() as u16` truncates for buffers >64 KiB, which
	// would under-count `inspected` — harmless in practice since only the
	// first 1024 bytes are ever examined, but worth confirming.
	#[inline(always)]
	fn is_binary(buf: &[u8], inspected: &mut u16) -> bool {
		if let Some(n) = 1024u16.checked_sub(*inspected) {
			*inspected += n.min(buf.len() as u16);
			buf.iter().take(n as usize).any(|&b| b == 0)
		} else {
			false
		}
	}

	/// Number of display rows `s` occupies at `width`: 1 when wrapping is off,
	/// otherwise the wrapped line count as computed by ratatui's `Paragraph`,
	/// with a pad span standing in for the extra width tabs expand to.
	fn line_height(s: &str, width: u16) -> usize {
		if YAZI.preview.wrap != PreviewWrap::Yes {
			return 1;
		}
		// Each tab renders as `tab_size` cells but is 1 byte; pre-pad by the
		// difference so the wrap calculation sees the rendered width.
		let pad = YAZI
			.preview
			.tab_size
			.checked_sub(1)
			.map(|n| s.bytes().filter(|&b| b == b'\t').count() * n as usize)
			.map(|n| yazi_config::preview::Preview::indent_with(n))
			.unwrap_or_default();
		let line = Line {
			spans: vec![Span { content: pad, style: Default::default() }, Span {
				content: Cow::Borrowed(s),
				..Default::default()
			}],
			..Default::default()
		};
		ratatui::widgets::Paragraph::new(line)
			.wrap(ratatui::widgets::Wrap { trim: false })
			.line_count(width)
	}

	/// True if the buffer contains ASCII control characters other than
	/// tab, LF and CR.
	#[inline(always)]
	fn contains_control_chars(buf: &[u8]) -> bool {
		buf.iter().any(|&b| b.is_ascii_control() && !matches!(b, b'\t' | b'\n' | b'\r'))
	}

	/// In-place mapping of a stray `\r` byte to `\n` (CRLF pairs were already
	/// collapsed earlier, so any remaining CR is a bare one).
	#[inline(always)]
	fn carriage_return_to_line_feed(b: &mut u8) {
		if *b == b'\r' {
			*b = b'\n';
		}
	}

	/// Concatenates plain-text lines into one string, expanding tabs and
	/// filtering unprintable bytes via `push_printable_char`.
	// NOTE(review): `| 15` (not `+ 15`) bumps the reserved capacity by at most
	// 15 bytes — presumably a cheap round-up for the tab expansion; confirm
	// intent. The `from_utf8_unchecked` relies on `push_printable_char` only
	// ever emitting valid UTF-8 sequences.
	fn merge_highlight_lines(s: &[String], tab_size: u8) -> String {
		let mut buf = Vec::new();
		buf.reserve_exact(s.iter().map(|s| s.len()).sum::<usize>() | 15);
		for &b in s.iter().flat_map(|s| s.as_bytes()) {
			push_printable_char(&mut buf, b, true, tab_size, false);
		}
		unsafe { String::from_utf8_unchecked(buf) }
	}
}
impl Highlighter {
	/// Converts one line of syntect highlight regions into a ratatui `Line`,
	/// expanding tabs with `indent` and translating font styles/colors.
	pub fn to_line_widget(regions: Vec<(highlighting::Style, &str)>, indent: &str) -> Line<'static> {
		let mut spans = Vec::with_capacity(regions.len());
		for (style, s) in regions {
			let font = style.font_style;
			let mut modifier = ratatui::style::Modifier::empty();
			if font.contains(highlighting::FontStyle::BOLD) {
				modifier |= ratatui::style::Modifier::BOLD;
			}
			if font.contains(highlighting::FontStyle::ITALIC) {
				modifier |= ratatui::style::Modifier::ITALIC;
			}
			if font.contains(highlighting::FontStyle::UNDERLINE) {
				modifier |= ratatui::style::Modifier::UNDERLINED;
			}
			spans.push(Span {
				content: s.replace('\t', indent).into(),
				style: ratatui::style::Style {
					fg: Self::to_ansi_color(style.foreground),
					// The region's background color is intentionally not applied.
					add_modifier: modifier,
					..Default::default()
				},
			});
		}
		Line::from(spans)
	}

	// Copy from https://github.com/sharkdp/bat/blob/master/src/terminal.rs
	//
	// Themes may encode user-configurable terminal colors as #RRGGBBAA:
	// - AA == 0x00: RR is an 8-bit palette index (built-in ansi/base16 themes);
	// - AA == 0x01: use the terminal's default color (no escape sequence);
	// - otherwise: a true 24-bit RGB color.
	fn to_ansi_color(color: highlighting::Color) -> Option<ratatui::style::Color> {
		use ratatui::style::Color;
		match color.a {
			0 => Some(match color.r {
				// Palette entries 0-7 map to the named ANSI colors (codes
				// 30-37/40-47), which work even without 256-color support.
				0x00 => Color::Black,
				0x01 => Color::Red,
				0x02 => Color::Green,
				0x03 => Color::Yellow,
				0x04 => Color::Blue,
				0x05 => Color::Magenta,
				0x06 => Color::Cyan,
				0x07 => Color::White,
				// Everything else goes through the 256-color palette
				// (escape codes 38;5 / 48;5); requires 256-color support.
				n => Color::Indexed(n),
			}),
			1 => None,
			_ => Some(Color::Rgb(color.r, color.g, color.b)),
		}
	}
}
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-plugin/src/external/mod.rs | yazi-plugin/src/external/mod.rs | yazi_macro::mod_flat!(fd highlighter rg rga);
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-plugin/src/external/rga.rs | yazi-plugin/src/external/rga.rs | use std::process::Stdio;
use anyhow::Result;
use tokio::{io::{AsyncBufReadExt, BufReader}, process::Command, sync::mpsc::{self, UnboundedReceiver}};
use yazi_fs::{File, FsUrl};
use yazi_shared::url::{AsUrl, UrlBuf, UrlLike};
use yazi_vfs::VfsFile;
/// Options for an `rga` (ripgrep-all) content search.
pub struct RgaOpt {
	/// Directory the search is rooted at.
	pub cwd: UrlBuf,
	/// Whether hidden files are included in the search.
	pub hidden: bool,
	/// The pattern to search file contents for.
	pub subject: String,
	/// Extra command-line arguments forwarded verbatim to `rga`.
	pub args: Vec<String>,
}
/// Runs `rga --files-with-matches` (ripgrep-all, which also searches inside
/// PDFs, archives, etc.) in `opt.cwd` and streams each matching file back as
/// a `File` through an unbounded channel.
///
/// Returns an error only if `rga` could not be launched; per-line failures
/// are silently skipped.
pub fn rga(opt: RgaOpt) -> Result<UnboundedReceiver<File>> {
	let mut cmd = Command::new("rga");
	cmd.args(["--color=never", "--files-with-matches", "--smart-case"]);
	cmd.arg(if opt.hidden { "--hidden" } else { "--no-hidden" });
	// User-supplied flags go before the pattern so they can't be mistaken for it.
	cmd.args(opt.args);
	cmd.arg(opt.subject);
	cmd.arg(&*opt.cwd.as_url().unified_path());
	// Kill the search when the handle is dropped; only stdout is consumed.
	cmd.kill_on_drop(true).stdout(Stdio::piped()).stderr(Stdio::null());

	let mut child = cmd.spawn()?;
	let stdout = child.stdout.take().unwrap();
	let (tx, rx) = mpsc::unbounded_channel();

	tokio::spawn(async move {
		let mut lines = BufReader::new(stdout).lines();
		while let Ok(Some(line)) = lines.next_line().await {
			// rga prints paths relative to the search root; join them back on.
			let url = match opt.cwd.try_join(line) {
				Ok(url) => url,
				Err(_) => continue,
			};
			if let Ok(file) = File::new(url).await {
				tx.send(file).ok();
			}
		}
		// Reap the child so it doesn't linger as a zombie.
		child.wait().await.ok();
	});

	Ok(rx)
}
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-plugin/src/utils/app.rs | yazi-plugin/src/utils/app.rs | use std::any::TypeId;
use mlua::{AnyUserData, ExternalError, Function, Lua};
use tokio::process::{ChildStderr, ChildStdin, ChildStdout};
use yazi_binding::{Id, Permit, PermitRef, deprecate};
use yazi_proxy::{AppProxy, HIDER};
use super::Utils;
impl Utils {
	/// `ya.id(type)`: returns a process-scoped identifier — the DDS client id
	/// for `"app"`, or the next file-list ticket for `"ft"`. Any other type
	/// raises a Lua error.
	pub(super) fn id(lua: &Lua) -> mlua::Result<Function> {
		lua.create_function(|_, r#type: mlua::String| {
			Ok(Id(match &*r#type.as_bytes() {
				b"app" => *yazi_dds::ID,
				b"ft" => yazi_fs::FILES_TICKET.next(),
				_ => Err("Invalid id type".into_lua_err())?,
			}))
		})
	}

	/// `ya.drop(ud)`: explicitly destroys a userdata value. Only the child
	/// process stdio handles are allowed — anything else errors, so Lua code
	/// can't invalidate userdata that Rust still relies on.
	pub(super) fn drop(lua: &Lua) -> mlua::Result<Function> {
		lua.create_function(|_, ud: AnyUserData| {
			match ud.type_id() {
				Some(t) if t == TypeId::of::<ChildStdin>() => {}
				Some(t) if t == TypeId::of::<ChildStdout>() => {}
				Some(t) if t == TypeId::of::<ChildStderr>() => {}
				Some(t) => Err(format!("Cannot drop userdata of type {t:?}").into_lua_err())?,
				// Scoped userdata has no stable TypeId, so it can't be checked.
				None => Err("Cannot drop scoped userdata".into_lua_err())?,
			};
			ud.destroy()
		})
	}

	/// `ya.hide()` (deprecated in favor of `ui.hide()`): suspends the TUI so an
	/// external program can take over the terminal. Acquires the global HIDER
	/// semaphore and stashes the permit in the Lua registry under
	/// "HIDE_PERMIT"; dropping the returned userdata resumes the app.
	/// Errors if a hide is already in progress.
	pub(super) fn hide(lua: &Lua) -> mlua::Result<Function> {
		lua.create_async_function(|lua, ()| async move {
			deprecate!(lua, "`ya.hide()` is deprecated, use `ui.hide()` instead, in your {}\nSee #2939 for more details: https://github.com/sxyazi/yazi/pull/2939");
			if lua.named_registry_value::<PermitRef>("HIDE_PERMIT").is_ok_and(|h| h.is_some()) {
				return Err("Cannot hide while already hidden".into_lua_err());
			}
			let permit = HIDER.acquire().await.unwrap();
			AppProxy::stop().await;
			lua.set_named_registry_value("HIDE_PERMIT", Permit::new(permit, AppProxy::resume()))?;
			// Hand the registry-held permit back to the Lua caller.
			lua.named_registry_value::<AnyUserData>("HIDE_PERMIT")
		})
	}
}
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-plugin/src/utils/user.rs | yazi-plugin/src/utils/user.rs | #[cfg(unix)]
use mlua::{Function, Lua};
use super::Utils;
impl Utils {
	/// `ya.uid()`: the current user's uid. Unix only.
	#[cfg(unix)]
	pub(super) fn uid(lua: &Lua) -> mlua::Result<Function> {
		// Trait import brings `get_current_uid` into scope for USERS_CACHE.
		use uzers::Users;
		lua.create_function(|_, ()| Ok(yazi_shared::USERS_CACHE.get_current_uid()))
	}

	/// `ya.gid()`: the current user's primary gid. Unix only.
	#[cfg(unix)]
	pub(super) fn gid(lua: &Lua) -> mlua::Result<Function> {
		use uzers::Groups;
		lua.create_function(|_, ()| Ok(yazi_shared::USERS_CACHE.get_current_gid()))
	}

	/// `ya.user_name(uid?)`: the name of the given uid (current user when
	/// omitted), or nil when the uid is unknown. Unix only.
	#[cfg(unix)]
	pub(super) fn user_name(lua: &Lua) -> mlua::Result<Function> {
		use uzers::Users;
		use yazi_shared::USERS_CACHE;
		lua.create_function(|lua, uid: Option<u32>| {
			USERS_CACHE
				.get_user_by_uid(uid.unwrap_or_else(|| USERS_CACHE.get_current_uid()))
				// Names need not be valid UTF-8, so pass the raw encoded bytes.
				.map(|s| lua.create_string(s.name().as_encoded_bytes()))
				.transpose()
		})
	}

	/// `ya.group_name(gid?)`: the name of the given gid (current group when
	/// omitted), or nil when the gid is unknown. Unix only.
	#[cfg(unix)]
	pub(super) fn group_name(lua: &Lua) -> mlua::Result<Function> {
		use uzers::Groups;
		use yazi_shared::USERS_CACHE;
		lua.create_function(|lua, gid: Option<u32>| {
			USERS_CACHE
				.get_group_by_gid(gid.unwrap_or_else(|| USERS_CACHE.get_current_gid()))
				.map(|s| lua.create_string(s.name().as_encoded_bytes()))
				.transpose()
		})
	}

	/// `ya.host_name()`: the machine's hostname, or nil when unavailable.
	/// Unix only.
	#[cfg(unix)]
	pub(super) fn host_name(lua: &Lua) -> mlua::Result<Function> {
		lua.create_function(|lua, ()| yazi_shared::hostname().map(|s| lua.create_string(s)).transpose())
	}
}
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-plugin/src/utils/process.rs | yazi-plugin/src/utils/process.rs | use mlua::{Function, Lua};
use super::Utils;
impl Utils {
	/// `ya.proc_info(pid)` (macOS): queries the kernel for task info of `pid`
	/// and returns a table with `mem_resident` (resident memory in bytes).
	#[cfg(target_os = "macos")]
	pub(super) fn proc_info(lua: &Lua) -> mlua::Result<Function> {
		lua.create_function(|lua, pid: usize| {
			// SAFETY: `proc_taskinfo` is a plain-old-data C struct, so a
			// zeroed value is a valid initial state, and we pass its real
			// size to `proc_pidinfo`.
			// NOTE(review): the `proc_pidinfo` return value is ignored — on
			// failure `info` stays zeroed and `mem_resident` reads as 0
			// rather than raising an error; confirm that's intended.
			let info = unsafe {
				let mut info: libc::proc_taskinfo = std::mem::zeroed();
				libc::proc_pidinfo(
					pid as _,
					libc::PROC_PIDTASKINFO,
					0,
					&mut info as *mut _ as *mut _,
					std::mem::size_of_val(&info) as _,
				);
				info
			};
			lua.create_table_from([("mem_resident", info.pti_resident_size)])
		})
	}

	/// `ya.proc_info(..)` on non-macOS platforms: always an empty table.
	#[cfg(not(target_os = "macos"))]
	pub(super) fn proc_info(lua: &Lua) -> mlua::Result<Function> {
		lua.create_function(|lua, ()| lua.create_table())
	}
}
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-plugin/src/utils/call.rs | yazi-plugin/src/utils/call.rs | use mlua::{Function, Lua, Table};
use yazi_binding::deprecate;
use yazi_dds::Sendable;
use yazi_macro::{emit, render};
use yazi_shared::{Layer, Source, event::Cmd};
use super::Utils;
impl Utils {
	/// `ya.render()` (deprecated in favor of `ui.render()`): requests a UI
	/// redraw via the `render!` macro.
	pub(super) fn render(lua: &Lua) -> mlua::Result<Function> {
		lua.create_function(|lua, ()| {
			deprecate!(lua, "`ya.render()` is deprecated, use `ui.render()` instead, in your {}\nSee #2939 for more details: https://github.com/sxyazi/yazi/pull/2939");
			render!();
			Ok(())
		})
	}

	/// `ya.emit(name, args)`: dispatches a command by name into the app's
	/// event loop. `Cmd::new` resolves the target layer, defaulting to the
	/// manager layer; errors if the command name is invalid.
	pub(super) fn emit(lua: &Lua) -> mlua::Result<Function> {
		lua.create_function(|lua, (name, args): (String, Table)| {
			let mut cmd = Cmd::new(name, Source::Emit, Some(Layer::Mgr))?;
			cmd.args = Sendable::table_to_args(lua, args)?;
			Ok(emit!(Call(cmd)))
		})
	}

	/// `ya.mgr_emit(name, args)`: like `emit`, but constructs the command
	/// directly, always targeting the manager layer without name validation.
	pub(super) fn mgr_emit(lua: &Lua) -> mlua::Result<Function> {
		lua.create_function(|lua, (name, args): (String, Table)| {
			emit!(Call(Cmd {
				name: name.into(),
				args: Sendable::table_to_args(lua, args)?,
				layer: Layer::Mgr,
				source: Source::Emit,
			}));
			Ok(())
		})
	}
}
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-plugin/src/utils/image.rs | yazi-plugin/src/utils/image.rs | use mlua::{Function, IntoLuaMulti, Lua, Value};
use yazi_adapter::{ADAPTOR, Image};
use yazi_binding::{Error, UrlRef, elements::Rect};
use yazi_fs::FsUrl;
use yazi_shared::url::{AsUrl, UrlLike};
use super::Utils;
use crate::bindings::ImageInfo;
impl Utils {
	/// `ya.image_info(url)`: probes an image file and returns its info table,
	/// or `(nil, Error)` on failure.
	pub(super) fn image_info(lua: &Lua) -> mlua::Result<Function> {
		lua.create_async_function(|lua, url: UrlRef| async move {
			let path = url.as_url().unified_path().into_owned();
			match yazi_adapter::ImageInfo::new(path).await {
				Ok(info) => ImageInfo::from(info).into_lua_multi(&lua),
				Err(e) => (Value::Nil, Error::custom(e.to_string())).into_lua_multi(&lua),
			}
		})
	}

	/// `ya.image_show(url, rect)`: renders the image inside `rect` using the
	/// active terminal graphics adaptor. Returns the area actually drawn,
	/// or `(nil, Error)` on failure.
	pub(super) fn image_show(lua: &Lua) -> mlua::Result<Function> {
		lua.create_async_function(|lua, (url, rect): (UrlRef, Rect)| async move {
			let path = url.as_url().unified_path();
			match ADAPTOR.get().image_show(path, *rect).await {
				Ok(area) => Rect::from(area).into_lua_multi(&lua),
				Err(e) => (Value::Nil, Error::custom(e.to_string())).into_lua_multi(&lua),
			}
		})
	}

	/// `ya.image_precache(src, dist)`: downscales/encodes `src` into the cache
	/// file `dist`. The destination must be a local path. Returns `true` on
	/// success or `(false, Error)` on failure.
	pub(super) fn image_precache(lua: &Lua) -> mlua::Result<Function> {
		lua.create_async_function(|lua, (src, dist): (UrlRef, UrlRef)| async move {
			let Some(dist) = dist.as_local() else {
				return (Value::Nil, Error::custom("Destination must be a local path"))
					.into_lua_multi(&lua);
			};
			let src = src.as_url().unified_path().into_owned();
			match Image::precache(src, dist).await {
				Ok(()) => true.into_lua_multi(&lua),
				Err(e) => (false, Error::custom(e.to_string())).into_lua_multi(&lua),
			}
		})
	}
}
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
sxyazi/yazi | https://github.com/sxyazi/yazi/blob/3c39a326abaacb37d9ff15af7668756d60624dfa/yazi-plugin/src/utils/sync.rs | yazi-plugin/src/utils/sync.rs | use anyhow::Context;
use futures::future::join_all;
use mlua::{ExternalError, ExternalResult, Function, IntoLuaMulti, Lua, MultiValue, Value, Variadic};
use tokio::sync::oneshot;
use yazi_binding::{Handle, runtime, runtime_mut};
use yazi_dds::Sendable;
use yazi_parser::app::{PluginCallback, PluginOpt};
use yazi_proxy::AppProxy;
use yazi_shared::{LOCAL_SET, data::Data};
use super::Utils;
use crate::{bindings::{MpscRx, MpscTx, MpscUnboundedRx, MpscUnboundedTx, OneshotRx, OneshotTx}, loader::LOADER};
impl Utils {
	/// `ya.sync(fn)`: registers a block of code to run on the main (sync) Lua
	/// runtime.
	///
	/// In the isolated (async plugin) runtime, the wrapped function forwards
	/// its arguments to the main runtime via `retrieve` and awaits the result;
	/// blocks are matched positionally, so `ya.sync()` calls must occur in the
	/// same order in both runtimes. In the main runtime, the block is stored
	/// and the wrapper simply calls it with the plugin table prepended.
	pub(super) fn sync(lua: &Lua, isolate: bool) -> mlua::Result<Function> {
		if isolate {
			lua.create_function(|lua, ()| {
				// Reserve the next positional block index for this plugin.
				let Some(block) = runtime_mut!(lua)?.next_block() else {
					return Err("`ya.sync()` must be called in a plugin").into_lua_err();
				};
				lua.create_async_function(move |lua, args: MultiValue| async move {
					let Some(cur) = runtime!(lua)?.current_owned() else {
						return Err("`ya.sync()` block must be used within a plugin").into_lua_err();
					};
					// Ship the args to the sync runtime, run block #`block`
					// of plugin `cur` there, and bring the results back.
					Self::retrieve(&lua, &cur, block, args)
						.await
						.and_then(|data| Sendable::list_to_values(&lua, data))
						.with_context(|| format!("Failed to execute sync block-{block} in `{cur}` plugin"))
						.into_lua_err()
				})
			})
		} else {
			lua.create_function(|lua, f: Function| {
				let mut rt = runtime_mut!(lua)?;
				// Store the block under the current plugin; fails outside one.
				if !rt.put_block(f.clone()) {
					return Err("`ya.sync()` must be called in a plugin").into_lua_err();
				}
				let cur = rt.current_owned().unwrap();
				lua.create_function(move |lua, mut args: MultiValue| {
					// The block always receives its plugin table as `self`.
					args.push_front(Value::Table(LOADER.try_load(lua, &cur)?));
					f.call::<MultiValue>(args)
				})
			})
		}
	}

	/// `ya.async(fn, ...)`: spawns `fn` on the local task set and returns a
	/// handle. Only available from the sync context; errors are logged with
	/// the owning plugin's name (or `init.lua` when there is none).
	pub(super) fn r#async(lua: &Lua, isolate: bool) -> mlua::Result<Function> {
		if isolate {
			lua.create_function(|_, _: Function| {
				Err::<(), _>("`ya.async()` can only be used in sync context at the moment".into_lua_err())
			})
		} else {
			lua.create_function(|lua, (f, args): (Function, MultiValue)| {
				let name = runtime!(lua)?.current_owned();
				Ok(Handle::AsyncFn(LOCAL_SET.spawn_local(async move {
					let result = f.call_async::<MultiValue>(args).await;
					if let Err(ref e) = result {
						match name {
							Some(s) => tracing::error!("Failed to execute async block in `{s}` plugin: {e}"),
							None => tracing::error!("Failed to execute async block in `init.lua`: {e}"),
						}
					}
					result
				})))
			})
		}
	}

	/// `ya.chan(type, buffer?)`: creates a tokio channel pair for Lua.
	/// `"mpsc"` with a positive buffer is bounded, without one unbounded;
	/// `"oneshot"` ignores the buffer. Invalid types or a zero buffer error.
	pub(super) fn chan(lua: &Lua) -> mlua::Result<Function> {
		lua.create_function(|lua, (r#type, buffer): (mlua::String, Option<usize>)| {
			match (&*r#type.as_bytes(), buffer) {
				(b"mpsc", Some(buffer)) if buffer < 1 => {
					Err("Buffer size must be greater than 0".into_lua_err())
				}
				(b"mpsc", Some(buffer)) => {
					let (tx, rx) = tokio::sync::mpsc::channel::<Value>(buffer);
					(MpscTx(tx), MpscRx(rx)).into_lua_multi(lua)
				}
				(b"mpsc", None) => {
					let (tx, rx) = tokio::sync::mpsc::unbounded_channel::<Value>();
					(MpscUnboundedTx(tx), MpscUnboundedRx(rx)).into_lua_multi(lua)
				}
				(b"oneshot", _) => {
					let (tx, rx) = tokio::sync::oneshot::channel::<Value>();
					// Wrapped in Option so sending/receiving can consume them.
					(OneshotTx(Some(tx)), OneshotRx(Some(rx))).into_lua_multi(lua)
				}
				_ => Err("Channel type must be `mpsc` or `oneshot`".into_lua_err()),
			}
		})
	}

	/// `ya.join(fn, ...)`: awaits all given async functions concurrently and
	/// returns their results concatenated in argument order. The first error
	/// (in that order) aborts the call.
	pub(super) fn join(lua: &Lua) -> mlua::Result<Function> {
		lua.create_async_function(|_, fns: Variadic<Function>| async move {
			let mut results = MultiValue::with_capacity(fns.len());
			for r in join_all(fns.into_iter().map(|f| f.call_async::<MultiValue>(()))).await {
				results.extend(r?);
			}
			Ok(results)
		})
	}

	// TODO
	/// `ya.select(...)`: placeholder — currently a no-op stub.
	pub(super) fn select(lua: &Lua) -> mlua::Result<Function> {
		lua.create_async_function(|_lua, _futs: MultiValue| async move { Ok(()) })
	}

	/// Executes sync block number `calls` of plugin `id` on the main runtime
	/// and returns its results.
	///
	/// Arguments are converted to sendable `Data`, shipped through a plugin
	/// callback scheduled via `AppProxy::plugin`, invoked there with the
	/// plugin table prepended, and the results are sent back over a oneshot
	/// channel.
	async fn retrieve(
		lua: &Lua,
		id: &str,
		calls: usize,
		args: MultiValue,
	) -> mlua::Result<Vec<Data>> {
		let args = Sendable::values_to_list(lua, args)?;
		let (tx, rx) = oneshot::channel::<Vec<Data>>();
		let callback: PluginCallback = {
			let id = id.to_owned();
			Box::new(move |lua, plugin| {
				// Runs on the main runtime: look up the same positional block.
				let Some(block) = runtime!(lua)?.get_block(&id, calls) else {
					return Err("sync block not found".into_lua_err());
				};
				let args = [Ok(Value::Table(plugin))]
					.into_iter()
					.chain(args.into_iter().map(|d| Sendable::data_to_value(lua, d)))
					.collect::<mlua::Result<MultiValue>>()?;
				let values = Sendable::values_to_list(lua, block.call(args)?)?;
				tx.send(values).map_err(|_| "send failed".into_lua_err())
			})
		};
		AppProxy::plugin(PluginOpt::new_callback(id.to_owned(), callback));
		rx.await.into_lua_err()
	}
}
| rust | MIT | 3c39a326abaacb37d9ff15af7668756d60624dfa | 2026-01-04T15:33:17.426354Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.