file_path stringlengths 3 280 | file_language stringclasses 66 values | content stringlengths 1 1.04M | repo_name stringlengths 5 92 | repo_stars int64 0 154k | repo_description stringlengths 0 402 | repo_primary_language stringclasses 108 values | developer_username stringlengths 1 25 | developer_name stringlengths 0 30 | developer_company stringlengths 0 82 |
|---|---|---|---|---|---|---|---|---|---|
src/kbs2/agent.rs | Rust | use std::collections::HashMap;
use std::fs;
use std::io::{BufRead, BufReader, BufWriter, Read, Write};
use std::os::unix::net::{UnixListener, UnixStream};
use std::path::PathBuf;
use std::process::{Command, Stdio};
use std::thread;
use std::time::Duration;
use age::secrecy::{ExposeSecret as _, SecretString};
use anyhow::{anyhow, Context, Result};
use nix::unistd::Uid;
use serde::de::DeserializeOwned;
use serde::{Deserialize, Serialize};
use crate::kbs2::backend::{Backend, RageLib};
/// The version of the agent protocol.
const PROTOCOL_VERSION: u32 = 1;
/// Represents the entire request message, including the protocol field.
#[derive(Debug, Deserialize, PartialEq, Serialize)]
struct Request {
    /// The protocol version the client claims to speak; checked against
    /// `PROTOCOL_VERSION` before the body is dispatched.
    protocol: u32,
    /// The request payload proper.
    body: RequestBody,
}
/// Represents the kinds of requests understood by the `kbs2` authentication agent.
///
/// Serialized as `{"type": ..., "body": ...}` per the serde tag/content
/// attributes below.
#[derive(Debug, Deserialize, PartialEq, Serialize)]
#[serde(tag = "type", content = "body")]
enum RequestBody {
    /// Unwrap a particular keyfile (second element) with a password (third element), identifying
    /// it in the agent with a particular public key (first element).
    UnwrapKey(String, String, String),
    /// Check whether a particular public key has an unwrapped keyfile in the agent.
    QueryUnwrappedKey(String),
    /// Get the actual unwrapped key, by public key.
    GetUnwrappedKey(String),
    /// Flush all keys from the agent.
    FlushKeys,
    /// Ask the agent to exit.
    Quit,
}
/// Represents the kinds of responses sent by the `kbs2` authentication agent.
///
/// Uses the same `{"type": ..., "body": ...}` JSON encoding as `RequestBody`.
#[derive(Debug, Deserialize, PartialEq, Serialize)]
#[serde(tag = "type", content = "body")]
enum Response {
    /// A successful request, with some request-specific response data.
    Success(String),
    /// A failed request, of `FailureKind`.
    Failure(FailureKind),
}
/// Represents the kinds of failures encoded by a `kbs2` `Response`.
///
/// Failures travel back to the client in-band, as the `Failure` variant
/// of `Response`.
#[derive(Debug, Deserialize, PartialEq, Serialize)]
#[serde(tag = "type", content = "body")]
enum FailureKind {
    /// The request failed because the client couldn't be authenticated.
    Auth,
    /// The request failed because one or more I/O operations failed.
    Io(String),
    /// The request failed because it was malformed.
    Malformed(String),
    /// The request failed because key unwrapping failed.
    Unwrap(String),
    /// The request failed because the agent and client don't speak the same protocol version.
    /// Carries the responder's own protocol version.
    VersionMismatch(u32),
    /// The request failed because the requested query failed.
    Query,
}
/// A convenience trait for marshaling and unmarshaling `RequestBody`s and `Response`s
/// through Rust's `Read` and `Write` traits.
///
/// The wire format is one JSON document per message, terminated by `\n`.
trait Message {
    /// Reads bytes from `reader` up to (but not including) the first newline
    /// and deserializes them as JSON into `Self`.
    fn read<R: Read>(reader: R) -> Result<Self>
    where
        Self: DeserializeOwned,
    {
        // NOTE(ww): This would be cleaner with a BufReader, but unsound: a BufReader
        // can buffer more than one line at once, causing us to silently drop client requests.
        // I don't think that would actually happen in this case (since each client sends exactly
        // one line before expecting a response), but it's one less thing to think about.
        // NOTE(ww): Safe unwrap: we only perform after checking `is_ok`, and we capture
        // the error by using `Result<Vec<_>, _>` with `collect`.
        #[allow(clippy::unwrap_used, clippy::unbuffered_bytes)]
        let data: Result<Vec<_>, _> = reader
            .bytes()
            .take_while(|b| b.is_ok() && *b.as_ref().unwrap() != b'\n')
            .collect();
        let data = data?;
        let res = serde_json::from_slice(&data)?;
        Ok(res)
    }

    /// Serializes `self` as JSON to `writer`, appends the newline terminator,
    /// and flushes the stream.
    fn write<W: Write>(&self, mut writer: W) -> Result<()>
    where
        Self: Serialize,
    {
        serde_json::to_writer(&mut writer, &self)?;
        writer.write_all(b"\n")?;
        writer.flush()?;
        Ok(())
    }
}
// Both message directions share the same line-delimited JSON framing.
impl Message for Request {}
impl Message for Response {}
/// Represents the state in a running `kbs2` authentication agent.
pub struct Agent {
    /// The local path to the Unix domain socket.
    agent_path: PathBuf,
    /// A map of public key => (keyfile path, unwrapped key material).
    ///
    /// Key material is held as a `SecretString` to limit accidental exposure.
    unwrapped_keys: HashMap<String, (String, SecretString)>,
    /// Whether or not the agent intends to quit momentarily.
    /// Set by a `Quit` request; checked by the accept loop in `run`.
    quitting: bool,
}
impl Agent {
/// Returns a unique, user-specific socket path that the authentication agent listens on.
fn path() -> PathBuf {
let mut agent_path = PathBuf::from("/tmp");
agent_path.push(format!("kbs2-agent-{}", whoami::username()));
agent_path
}
    /// Spawns a new agent as a daemon process, returning once the daemon
    /// is ready to begin serving clients.
    ///
    /// Readiness is detected by polling for the agent's socket path; if the
    /// socket already exists, an agent is assumed to be running and nothing
    /// is spawned.
    pub fn spawn() -> Result<()> {
        let agent_path = Self::path();
        // If an agent appears to be running already, do nothing.
        if agent_path.exists() {
            log::debug!("agent seems to be running; not trying to spawn another");
            return Ok(());
        }
        log::debug!("agent isn't already running, attempting spawn");
        // Sanity check: `kbs2` should never be run as root, and any difference between our
        // UID and EUID indicates some SUID-bit weirdness that we didn't expect and don't want.
        let (uid, euid) = (Uid::current(), Uid::effective());
        if uid.is_root() || uid != euid {
            return Err(anyhow!(
                "unusual UID or UID/EUID pair found, refusing to spawn"
            ));
        }
        // NOTE(ww): Given the above, it *should* be safe to spawn based on the path returned by
        // `current_exe`: we know we aren't being tricked with any hardlink + SUID shenanigans.
        let kbs2 = std::env::current_exe().with_context(|| "failed to locate the kbs2 binary")?;
        // NOTE(ww): We could spawn the agent by forking and daemonizing, but that would require
        // at least one direct use of unsafe{} (for the fork itself), and potentially others.
        // This is a little simpler and requires less unsafety.
        let _ = Command::new(kbs2)
            .arg("agent")
            .stdin(Stdio::null())
            .stdout(Stdio::null())
            .stderr(Stdio::null())
            .spawn()?;
        // Poll (up to 10 x 10ms) for the daemon to bind its socket.
        for attempt in 0..10 {
            log::debug!("waiting for agent, loop {attempt}...");
            thread::sleep(Duration::from_millis(10));
            if agent_path.exists() {
                return Ok(());
            }
        }
        Err(anyhow!("agent spawn timeout exhausted"))
    }
/// Initializes a new agent without accepting connections.
pub fn new() -> Result<Self> {
let agent_path = Self::path();
if agent_path.exists() {
return Err(anyhow!(
"an agent is already running or didn't exit cleanly"
));
}
#[allow(clippy::redundant_field_names)]
Ok(Self {
agent_path: agent_path,
unwrapped_keys: HashMap::new(),
quitting: false,
})
}
    // TODO(ww): These can be replaced with the UnixStream.peer_cred API once it stabilizes:
    // https://doc.rust-lang.org/std/os/unix/net/struct.UnixStream.html#method.peer_cred
    /// Returns whether the connecting peer should be trusted, by comparing the
    /// peer's UID (read via the `SO_PEERCRED` socket option) against this
    /// process's effective UID.
    #[cfg(any(target_os = "linux", target_os = "android",))]
    fn auth_client(&self, stream: &UnixStream) -> bool {
        use nix::sys::socket::getsockopt;
        use nix::sys::socket::sockopt::PeerCredentials;
        if let Ok(cred) = getsockopt(stream, PeerCredentials) {
            cred.uid() == Uid::effective().as_raw()
        } else {
            // Fail closed: if we can't read the peer's credentials, deny access.
            log::error!("getsockopt failed; treating as auth failure");
            false
        }
    }
#[cfg(any(
target_os = "macos",
target_os = "ios",
target_os = "freebsd",
target_os = "openbsd",
target_os = "netbsd",
target_os = "dragonfly",
))]
fn auth_client(&self, stream: &UnixStream) -> bool {
use nix::unistd;
if let Ok((peer_uid, _)) = unistd::getpeereid(stream) {
peer_uid == Uid::effective()
} else {
log::error!("getpeereid failed; treating as auth failure");
false
}
}
    /// Handles an inner request payload, i.e. one of potentially several
    /// requests made during a client's connection.
    ///
    /// Always produces a `Response`; failures are reported in-band as
    /// `Response::Failure` values rather than bubbling up as `Err`s.
    fn handle_request_body(&mut self, body: RequestBody) -> Response {
        match body {
            RequestBody::UnwrapKey(pubkey, keyfile, password) => {
                let password = SecretString::from(password);
                // If the running agent is already tracking an unwrapped key for this
                // pubkey, return early with a success.
                #[allow(clippy::map_entry)]
                if self.unwrapped_keys.contains_key(&pubkey) {
                    log::debug!("client requested unwrap for already unwrapped keyfile: {keyfile}");
                    Response::Success("OK; agent already has unwrapped key".into())
                } else {
                    match RageLib::unwrap_keyfile(&keyfile, password) {
                        Ok(unwrapped_key) => {
                            // Index by pubkey, remembering the keyfile path alongside
                            // the unwrapped material.
                            self.unwrapped_keys.insert(pubkey, (keyfile, unwrapped_key));
                            Response::Success("OK; unwrapped key ready".into())
                        }
                        Err(e) => {
                            log::error!("keyfile unwrap failed: {e:?}");
                            Response::Failure(FailureKind::Unwrap(e.to_string()))
                        }
                    }
                }
            }
            RequestBody::QueryUnwrappedKey(pubkey) => {
                if self.unwrapped_keys.contains_key(&pubkey) {
                    Response::Success("OK".into())
                } else {
                    Response::Failure(FailureKind::Query)
                }
            }
            RequestBody::GetUnwrappedKey(pubkey) => {
                if let Some((_, unwrapped_key)) = self.unwrapped_keys.get(&pubkey) {
                    log::debug!("successful key request for pubkey: {pubkey}");
                    // The secret is exposed here by design: returning it to an
                    // authenticated client is the agent's entire purpose.
                    Response::Success(unwrapped_key.expose_secret().into())
                } else {
                    log::error!("unknown pubkey requested: {}", &pubkey);
                    Response::Failure(FailureKind::Query)
                }
            }
            RequestBody::FlushKeys => {
                self.unwrapped_keys.clear();
                log::debug!("successfully flushed all unwrapped keys");
                Response::Success("OK".into())
            }
            RequestBody::Quit => {
                // Signals the accept loop in `run` to break after this client.
                self.quitting = true;
                log::debug!("agent exit requested");
                Response::Success("OK".into())
            }
        }
    }
    /// Handles a single client connection.
    /// Individual clients may issue multiple requests in a single session.
    ///
    /// All failures are reported to the client in-band; this function never
    /// propagates errors to the accept loop.
    fn handle_client(&mut self, stream: UnixStream) {
        let reader = BufReader::new(&stream);
        let mut writer = BufWriter::new(&stream);
        // Reject peers whose UID doesn't match ours before reading anything.
        if !self.auth_client(&stream) {
            log::warn!("client failed auth check");
            // This can fail, but we don't care.
            let _ = Response::Failure(FailureKind::Auth).write(&mut writer);
            return;
        }
        // Each line is one JSON-encoded `Request`.
        for line in reader.lines() {
            let line = match line {
                Ok(line) => line,
                Err(e) => {
                    log::error!("i/o error: {e:?}");
                    // This can fail, but we don't care.
                    let _ = Response::Failure(FailureKind::Io(e.to_string())).write(&mut writer);
                    return;
                }
            };
            let req: Request = match serde_json::from_str(&line) {
                Ok(req) => req,
                Err(e) => {
                    log::error!("malformed req: {e:?}");
                    // This can fail, but we don't care.
                    let _ =
                        Response::Failure(FailureKind::Malformed(e.to_string())).write(&mut writer);
                    return;
                }
            };
            // Version-check before dispatching; report our own version back.
            if req.protocol != PROTOCOL_VERSION {
                let _ = Response::Failure(FailureKind::VersionMismatch(PROTOCOL_VERSION))
                    .write(&mut writer);
                return;
            }
            let resp = self.handle_request_body(req.body);
            // This can fail, but we don't care.
            let _ = resp.write(&mut writer);
        }
    }
    /// Run the `kbs2` authentication agent.
    ///
    /// The function does not return *unless* either an error occurs on agent startup *or*
    /// a client asks the agent to quit.
    ///
    /// Clients are served one at a time; failures on individual connections
    /// are logged and skipped, never fatal.
    pub fn run(&mut self) -> Result<()> {
        log::debug!("agent run requested");
        // Binding creates the socket file; failure here (e.g. a stale socket)
        // propagates to the caller.
        let listener = UnixListener::bind(&self.agent_path)?;
        // NOTE(ww): This could spawn a separate thread for each incoming connection, but I see
        // no reason to do so:
        //
        // 1. The incoming queue already provides a synchronization mechanism, and we don't
        //    expect a number of simultaneous clients that would come close to exceeding the
        //    default queue length. Even if that were to happen, rejecting pending clients
        //    is an acceptable error mode.
        // 2. Using separate threads here makes the rest of the code unnecessarily complicated:
        //    each `Agent` becomes an `Arc<Mutex<Agent>>` to protect the underlying `HashMap`,
        //    and makes actually quitting the agent with a `Quit` request more difficult than it
        //    needs to be.
        for stream in listener.incoming() {
            match stream {
                Ok(stream) => {
                    self.handle_client(stream);
                    // A `Quit` request sets `quitting`; exit the accept loop.
                    if self.quitting {
                        break;
                    }
                }
                Err(e) => {
                    log::error!("connect error: {e:?}");
                    continue;
                }
            }
        }
        Ok(())
    }
}
impl Drop for Agent {
    /// Best-effort cleanup of the agent's socket file.
    fn drop(&mut self) {
        log::debug!("agent teardown");
        // Don't panic here: a panic in `drop` aborts the process if we're
        // already unwinding, and the socket can legitimately be missing
        // (e.g. if `run` never got as far as binding it). Log and move on.
        if let Err(e) = fs::remove_file(Agent::path()) {
            log::warn!("failed to remove agent socket (already gone?): {e:?}");
        }
    }
}
/// Represents a client to the `kbs2` authentication agent.
///
/// Clients may send multiple requests and receive multiple responses while active.
pub struct Client {
    /// The connected Unix domain socket to the agent.
    stream: UnixStream,
}
impl Client {
/// Create and return a new client, failing if connection to the agent fails.
pub fn new() -> Result<Self> {
log::debug!("creating a new agent client");
let stream = UnixStream::connect(Agent::path())
.with_context(|| "failed to connect to agent; is it running?")?;
Ok(Self { stream })
}
/// Issue the given request to the agent, returning the agent's `Response`.
fn request(&self, body: RequestBody) -> Result<Response> {
#[allow(clippy::redundant_field_names)]
let req = Request {
protocol: PROTOCOL_VERSION,
body: body,
};
req.write(&self.stream)?;
let resp = Response::read(&self.stream)?;
Ok(resp)
}
/// Instruct the agent to unwrap the given keyfile, using the given password.
/// The keyfile path and its unwrapped contents are associated with the given pubkey.
pub fn add_key(&self, pubkey: &str, keyfile: &str, password: SecretString) -> Result<()> {
log::debug!("add_key: requesting that agent unwrap {keyfile}");
let body = RequestBody::UnwrapKey(
pubkey.into(),
keyfile.into(),
password.expose_secret().into(),
);
let resp = self.request(body)?;
match resp {
Response::Success(msg) => {
log::debug!("agent reports success: {msg}");
Ok(())
}
Response::Failure(kind) => Err(anyhow!("adding key to agent failed: {:?}", kind)),
}
}
/// Ask the agent whether it has an unwrapped key for the given pubkey.
pub fn query_key(&self, pubkey: &str) -> Result<bool> {
log::debug!("query_key: asking whether agent has key for {pubkey}");
let body = RequestBody::QueryUnwrappedKey(pubkey.into());
let resp = self.request(body)?;
match resp {
Response::Success(_) => Ok(true),
Response::Failure(FailureKind::Query) => Ok(false),
Response::Failure(kind) => Err(anyhow!("querying key from agent failed: {:?}", kind)),
}
}
/// Ask the agent for the unwrapped key material for the given pubkey.
pub fn get_key(&self, pubkey: &str) -> Result<String> {
log::debug!("get_key: requesting unwrapped key for {pubkey}");
let body = RequestBody::GetUnwrappedKey(pubkey.into());
let resp = self.request(body)?;
match resp {
Response::Success(unwrapped_key) => Ok(unwrapped_key),
Response::Failure(kind) => Err(anyhow!(
"retrieving unwrapped key from agent failed: {:?}",
kind
)),
}
}
/// Ask the agent to flush all of its unwrapped keys.
pub fn flush_keys(&self) -> Result<()> {
log::debug!("flush_keys: asking agent to forget all keys");
self.request(RequestBody::FlushKeys)?;
Ok(())
}
/// Ask the agent to quit gracefully.
pub fn quit_agent(self) -> Result<()> {
log::debug!("quit_agent: asking agent to exit gracefully");
self.request(RequestBody::Quit)?;
Ok(())
}
}
| woodruffw/kbs2 | 126 | A secret manager backed by age | Rust | woodruffw | William Woodruff | astral-sh |
src/kbs2/backend.rs | Rust | use std::fs;
use std::io::{Read, Write};
use std::path::Path;
use std::str::FromStr;
use age::armor::{ArmoredReader, ArmoredWriter, Format};
use age::secrecy::{ExposeSecret as _, SecretString};
use age::Decryptor;
use anyhow::{anyhow, Context, Result};
use crate::kbs2::agent;
use crate::kbs2::config;
use crate::kbs2::record::Record;
use crate::kbs2::util;
/// The maximum size of a wrapped key file, on disk.
///
/// This is an **extremely** conservative maximum: actual plain-text formatted
/// wrapped keys should never be more than a few hundred bytes. But we need some
number to harden the I/O that the agent does, and a single page/4K seems reasonable.
pub const MAX_WRAPPED_KEY_FILESIZE: u64 = 4096;
/// Represents the operations that all age backends are capable of.
pub trait Backend {
    /// Creates an age keypair, saving the private component to the given path.
    /// Returns the public component as a string.
    ///
    /// NOTE: The private component is written in an ASCII-armored format.
    fn create_keypair<P: AsRef<Path>>(path: P) -> Result<String>;
    /// Creates a wrapped age keypair, saving the encrypted private component to the
    /// given path. Returns the public component as a string.
    ///
    /// NOTE: Like `create_keypair`, this writes an ASCII-armored private component.
    fn create_wrapped_keypair<P: AsRef<Path>>(path: P, password: SecretString) -> Result<String>;
    /// Unwraps the given `keyfile` using `password`, returning the unwrapped contents.
    fn unwrap_keyfile<P: AsRef<Path>>(keyfile: P, password: SecretString) -> Result<SecretString>;
    /// Wraps the given `key` using the given `password`, returning the wrapped result.
    fn wrap_key(key: SecretString, password: SecretString) -> Result<Vec<u8>>;
    /// Rewraps the given keyfile in place, decrypting it with the `old` password
    /// and re-encrypting it with the `new` password.
    ///
    /// NOTE: This function does *not* make a backup of the original keyfile.
    fn rewrap_keyfile<P: AsRef<Path>>(path: P, old: SecretString, new: SecretString) -> Result<()>;
    /// Encrypts the given record, returning it as an ASCII-armored string.
    fn encrypt(&self, record: &Record) -> Result<String>;
    /// Decrypts the given ASCII-armored string, returning it as a Record.
    fn decrypt(&self, encrypted: &str) -> Result<Record>;
}
/// Encapsulates the age crate (i.e., the `rage` CLI's backing library).
pub struct RageLib {
    /// The recipient (public) half of the keypair, used for encryption.
    pub pubkey: age::x25519::Recipient,
    /// The identity (private) half of the keypair, used for decryption.
    pub identity: age::x25519::Identity,
}
impl RageLib {
    /// Creates a `RageLib` from the given config, sourcing the private key
    /// either from the `kbs2` agent (for wrapped keys) or directly from the
    /// keyfile on disk (for bare keys).
    pub fn new(config: &config::Config) -> Result<RageLib> {
        let pubkey = config
            .public_key
            .parse::<age::x25519::Recipient>()
            .map_err(|e| anyhow!("unable to parse public key (backend reports: {:?})", e))?;
        let identity = if config.wrapped {
            log::debug!("config specifies a wrapped key");
            let client = agent::Client::new().with_context(|| "failed to connect to kbs2 agent")?;
            // If the agent doesn't have our key yet, prompt for the master
            // password and have the agent unwrap and cache it.
            if !client.query_key(&config.public_key)? {
                client.add_key(
                    &config.public_key,
                    &config.keyfile,
                    util::get_password(None, &config.pinentry)?,
                )?;
            }
            let unwrapped_key = client
                .get_key(&config.public_key)
                .with_context(|| format!("agent has no unwrapped key for {}", config.keyfile))?;
            log::debug!("parsing unwrapped key");
            age::x25519::Identity::from_str(&unwrapped_key)
                .map_err(|e| anyhow!("failed to parse unwrapped key ({e:?})",))?
        } else {
            // Bare key: read the plaintext identity straight from disk.
            let unwrapped_key = fs::read_to_string(&config.keyfile)?;
            log::debug!("parsing unwrapped key from file");
            age::x25519::Identity::from_str(&unwrapped_key)
                .map_err(|e| anyhow!("failed to parse unwrapped key ({e:?})",))?
        };
        log::debug!("successfully parsed a private key!");
        Ok(RageLib { pubkey, identity })
    }
}
impl Backend for RageLib {
fn create_keypair<P: AsRef<Path>>(path: P) -> Result<String> {
let keypair = age::x25519::Identity::generate();
std::fs::write(path, keypair.to_string().expose_secret())?;
Ok(keypair.to_public().to_string())
}
fn create_wrapped_keypair<P: AsRef<Path>>(path: P, password: SecretString) -> Result<String> {
let keypair = age::x25519::Identity::generate();
let wrapped_key = Self::wrap_key(keypair.to_string(), password)?;
std::fs::write(path, wrapped_key)?;
Ok(keypair.to_public().to_string())
}
    /// Unwraps `keyfile` with `password`, returning the plaintext contents.
    ///
    /// Reads at most `MAX_WRAPPED_KEY_FILESIZE` bytes from disk and rejects
    /// keyfiles that aren't scrypt (i.e., password) wrapped.
    fn unwrap_keyfile<P: AsRef<Path>>(keyfile: P, password: SecretString) -> Result<SecretString> {
        let wrapped_key = util::read_guarded(&keyfile, MAX_WRAPPED_KEY_FILESIZE)?;
        // NOTE(ww): A work factor of 22 is an educated guess here; rage has generated messages
        // that have needed 17 and 18 before, so this should (hopefully) give us some
        // breathing room.
        let mut identity = age::scrypt::Identity::new(password);
        identity.set_max_work_factor(22);
        // Create a new decryptor for the wrapped key.
        let decryptor = Decryptor::new(ArmoredReader::new(wrapped_key.as_slice()))
            .map_err(|e| anyhow!("unable to load private key (backend reports: {:?})", e))?;
        if !decryptor.is_scrypt() {
            return Err(anyhow!(
                "key unwrap failed: not a password-wrapped keyfile?"
            ));
        }
        // ...and decrypt (i.e., unwrap) using the master password.
        log::debug!("beginning key unwrap...");
        let mut unwrapped_key = String::new();
        decryptor
            .decrypt([&identity as &dyn age::Identity].into_iter())
            .map_err(|e| anyhow!("unable to decrypt (backend reports: {:?})", e))
            .and_then(|mut r| {
                r.read_to_string(&mut unwrapped_key)
                    .map_err(|_| anyhow!("i/o error while decrypting"))
            })?;
        log::debug!("finished key unwrap!");
        Ok(SecretString::from(unwrapped_key))
    }
    /// Wraps `key` with `password` via age's passphrase encryption, returning
    /// the ASCII-armored ciphertext bytes.
    fn wrap_key(key: SecretString, password: SecretString) -> Result<Vec<u8>> {
        let encryptor = age::Encryptor::with_user_passphrase(password);
        let mut wrapped_key = vec![];
        let mut writer = encryptor.wrap_output(ArmoredWriter::wrap_output(
            &mut wrapped_key,
            Format::AsciiArmor,
        )?)?;
        writer.write_all(key.expose_secret().as_bytes())?;
        // Finish both layers (encryption stream, then armor) so everything flushes.
        writer.finish().and_then(|armor| armor.finish())?;
        Ok(wrapped_key)
    }
fn rewrap_keyfile<P: AsRef<Path>>(
keyfile: P,
old: SecretString,
new: SecretString,
) -> Result<()> {
let unwrapped_key = Self::unwrap_keyfile(&keyfile, old)?;
let rewrapped_key = Self::wrap_key(unwrapped_key, new)?;
std::fs::write(&keyfile, rewrapped_key)?;
Ok(())
}
fn encrypt(&self, record: &Record) -> Result<String> {
#[allow(clippy::unwrap_used)]
let encryptor =
age::Encryptor::with_recipients([&self.pubkey as &dyn age::Recipient].into_iter())
.unwrap();
let mut encrypted = vec![];
let mut writer = encryptor
.wrap_output(ArmoredWriter::wrap_output(
&mut encrypted,
Format::AsciiArmor,
)?)
.map_err(|e| anyhow!("wrap_output failed (backend report: {:?})", e))?;
writer.write_all(serde_json::to_string(record)?.as_bytes())?;
writer.finish().and_then(|armor| armor.finish())?;
Ok(String::from_utf8(encrypted)?)
}
fn decrypt(&self, encrypted: &str) -> Result<Record> {
let decryptor = age::Decryptor::new(ArmoredReader::new(encrypted.as_bytes()))
.map_err(|e| anyhow!("unable to load private key (backend reports: {:?})", e))?;
let mut decrypted = String::new();
decryptor
.decrypt([&self.identity as &dyn age::Identity].into_iter())
.map_err(|e| anyhow!("unable to decrypt (backend reports: {:?})", e))
.and_then(|mut r| {
r.read_to_string(&mut decrypted)
.map_err(|e| anyhow!("i/o error while decrypting: {:?}", e))
})?;
Ok(serde_json::from_str(&decrypted)?)
}
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::kbs2::record::{LoginFields, RecordBody};

    /// Returns a throwaway login record for round-trip tests.
    fn dummy_login() -> Record {
        Record::new(
            "dummy",
            RecordBody::Login(LoginFields {
                username: "foobar".into(),
                password: "bazqux".into(),
            }),
        )
    }

    /// Returns a backend whose pubkey and identity form a matched keypair.
    fn ragelib_backend() -> RageLib {
        let key = age::x25519::Identity::generate();
        RageLib {
            pubkey: key.to_public(),
            identity: key,
        }
    }

    /// Returns a backend whose pubkey and identity intentionally do NOT
    /// match, for exercising decryption failure.
    fn ragelib_backend_bad_keypair() -> RageLib {
        let key1 = age::x25519::Identity::generate();
        let key2 = age::x25519::Identity::generate();
        RageLib {
            pubkey: key1.to_public(),
            identity: key2,
        }
    }

    #[test]
    fn test_ragelib_create_keypair() {
        let keyfile = tempfile::NamedTempFile::new().unwrap();
        assert!(RageLib::create_keypair(&keyfile).is_ok());
    }

    #[test]
    fn test_ragelib_create_wrapped_keypair() {
        let keyfile = tempfile::NamedTempFile::new().unwrap();
        // Creating a wrapped keypair with a particular password should succeed.
        assert!(RageLib::create_wrapped_keypair(
            &keyfile,
            SecretString::new("weakpassword".into())
        )
        .is_ok());
        // Unwrapping the keyfile using the same password should succeed.
        assert!(
            RageLib::unwrap_keyfile(&keyfile, SecretString::new("weakpassword".into())).is_ok()
        );
    }

    #[test]
    fn test_ragelib_rewrap_keyfile() {
        let keyfile = tempfile::NamedTempFile::new().unwrap();
        RageLib::create_wrapped_keypair(&keyfile, SecretString::new("weakpassword".into()))
            .unwrap();
        let wrapped_key_a = std::fs::read(&keyfile).unwrap();
        let unwrapped_key_a =
            RageLib::unwrap_keyfile(&keyfile, SecretString::new("weakpassword".into())).unwrap();
        // Changing the password on a wrapped keyfile should succeed.
        assert!(RageLib::rewrap_keyfile(
            &keyfile,
            SecretString::new("weakpassword".into()),
            SecretString::new("stillweak".into()),
        )
        .is_ok());
        let wrapped_key_b = std::fs::read(&keyfile).unwrap();
        let unwrapped_key_b =
            RageLib::unwrap_keyfile(&keyfile, SecretString::new("stillweak".into())).unwrap();
        // The wrapped envelopes should not be equal, since the password has changed.
        assert_ne!(wrapped_key_a, wrapped_key_b);
        // However, the wrapped key itself should be preserved.
        assert_eq!(
            unwrapped_key_a.expose_secret(),
            unwrapped_key_b.expose_secret()
        );
    }

    #[test]
    fn test_ragelib_encrypt() {
        {
            let backend = ragelib_backend();
            let record = dummy_login();
            assert!(backend.encrypt(&record).is_ok());
        }
        // TODO: Test RageLib::encrypt failure modes.
    }

    #[test]
    fn test_ragelib_decrypt() {
        {
            // Round-trip: decrypt(encrypt(record)) should reproduce the record.
            let backend = ragelib_backend();
            let record = dummy_login();
            let encrypted = backend.encrypt(&record).unwrap();
            let decrypted = backend.decrypt(&encrypted).unwrap();
            assert_eq!(record, decrypted);
        }
        {
            // A mismatched keypair must fail to decrypt.
            let backend = ragelib_backend_bad_keypair();
            let record = dummy_login();
            let encrypted = backend.encrypt(&record).unwrap();
            let err = backend.decrypt(&encrypted).unwrap_err();
            assert_eq!(
                err.to_string(),
                "unable to decrypt (backend reports: NoMatchingKeys)"
            );
        }
    }
}
| woodruffw/kbs2 | 126 | A secret manager backed by age | Rust | woodruffw | William Woodruff | astral-sh |
src/kbs2/command.rs | Rust | use std::convert::TryInto;
use std::env;
use std::fmt::Write as _;
use std::io::{self, stdin, IsTerminal, Read, Seek, Write};
use std::path::{Path, PathBuf};
use std::process;
use age::secrecy::{ExposeSecret as _, SecretBox};
use anyhow::{anyhow, Result};
use arboard::Clipboard;
use clap::ArgMatches;
use daemonize::Daemonize;
use inquire::Confirm;
use nix::unistd::{fork, ForkResult};
use crate::kbs2::agent;
use crate::kbs2::backend::{self, Backend};
use crate::kbs2::config::{self, Pinentry};
use crate::kbs2::generator::Generator;
use crate::kbs2::input::Input;
use crate::kbs2::record::{
self, EnvironmentFields, LoginFields, Record, RecordBody, UnstructuredFields,
};
use crate::kbs2::session::Session;
use crate::kbs2::util;
/// Implements the `kbs2 init` command.
///
/// Creates a new config under `config_dir`, collecting a master password
/// unless the user opted out with `--insecure-not-wrapped`.
pub fn init(matches: &ArgMatches, config_dir: &Path) -> Result<()> {
    log::debug!("initializing a new config");
    // Refuse to clobber an existing config unless `--force` was passed.
    #[allow(clippy::unwrap_used)]
    if config_dir.join(config::CONFIG_BASENAME).exists()
        && !*matches.get_one::<bool>("force").unwrap()
    {
        return Err(anyhow!(
            "refusing to overwrite your current config without --force"
        ));
    }
    #[allow(clippy::unwrap_used)]
    let store_dir = matches.get_one::<PathBuf>("store-dir").unwrap().as_path();
    // Warn, but don't fail, if the store directory is already present.
    if store_dir.exists() {
        util::warn("Requested store directory already exists");
    }
    // Collect a master password for key wrapping unless the user opted out.
    #[allow(clippy::unwrap_used)]
    let password = if !*matches.get_one::<bool>("insecure-not-wrapped").unwrap() {
        Some(util::get_password(None, Pinentry::default())?)
    } else {
        None
    };
    config::initialize(&config_dir, &store_dir, password)
}
/// Implements the `kbs2 agent` command (and subcommands).
///
/// With no subcommand, runs the agent itself (daemonized unless
/// `--foreground` is set); otherwise dispatches to the subcommand handlers.
pub fn agent(matches: &ArgMatches, config: &config::Config) -> Result<()> {
    log::debug!("agent subcommand dispatch");
    // No subcommand: run the agent itself
    if matches.subcommand().is_none() {
        let mut agent = agent::Agent::new()?;
        #[allow(clippy::unwrap_used)]
        if !matches.get_one::<bool>("foreground").unwrap() {
            Daemonize::new().start()?;
        }
        agent.run()?;
        return Ok(());
    }
    match matches.subcommand() {
        Some(("flush", matches)) => agent_flush(matches),
        Some(("query", matches)) => agent_query(matches, config),
        Some(("unwrap", matches)) => agent_unwrap(matches, config),
        // unreachable: presumably the CLI parser rejects any other subcommand.
        _ => unreachable!(),
    }
}
/// Implements the `kbs2 agent flush` subcommand.
///
/// Asks the agent to drop all unwrapped keys, and optionally (`--quit`)
/// to exit entirely afterwards.
fn agent_flush(matches: &ArgMatches) -> Result<()> {
    log::debug!("asking the agent to flush all keys");
    let client = agent::Client::new()?;
    client.flush_keys()?;
    #[allow(clippy::unwrap_used)]
    if *matches.get_one::<bool>("quit").unwrap() {
        client.quit_agent()?;
    }
    Ok(())
}
/// Implements the `kbs2 agent query` subcommand.
///
/// Communicates its result via the process exit code:
/// 0 = key present, 1 = key absent, 2 = config uses an unwrapped key,
/// 3 = couldn't reach the agent.
fn agent_query(_matches: &ArgMatches, config: &config::Config) -> Result<()> {
    log::debug!("querying the agent for a key's existence");
    // It doesn't make sense to query the agent for keypairs that the agent
    // doesn't manage. Use a specific code to signal this case.
    if !config.wrapped {
        std::process::exit(2);
    }
    // Don't allow client creation to fail the normal way: if we can't create
    // a client for whatever reason (e.g., the agent isn't running), exit
    // with a specific code to signal our state to the user.
    let client = agent::Client::new().unwrap_or_else(|_| std::process::exit(3));
    if !client.query_key(&config.public_key)? {
        std::process::exit(1);
    }
    Ok(())
}
/// Implements the `kbs2 agent unwrap` subcommand.
///
/// Prompts for the master password and asks the agent to unwrap and cache
/// the configured keyfile; a no-op if the agent already holds the key.
fn agent_unwrap(_matches: &ArgMatches, config: &config::Config) -> Result<()> {
    log::debug!("asking the agent to unwrap a key");
    // Bare keys are loaded directly from their `keyfile`.
    if !config.wrapped {
        return Err(anyhow!("config specifies a bare key; nothing to do"));
    }
    let client = agent::Client::new()?;
    if client.query_key(&config.public_key)? {
        println!("kbs2 agent already has this key; ignoring.");
        return Ok(());
    }
    let password = util::get_password(None, &config.pinentry)?;
    client.add_key(&config.public_key, &config.keyfile, password)?;
    Ok(())
}
/// Implements the `kbs2 new` command.
///
/// Creates a record of the requested kind, running the configured
/// pre-/post-hooks around the creation.
pub fn new(matches: &ArgMatches, config: &config::Config) -> Result<()> {
    log::debug!("creating a new record");
    let session: Session = config.try_into()?;
    if let Some(pre_hook) = &session.config.commands.new.pre_hook {
        log::debug!("pre-hook: {pre_hook}");
        session.config.call_hook(pre_hook, &[])?;
    }
    #[allow(clippy::unwrap_used)]
    let label = matches.get_one::<String>("label").unwrap();
    // Refuse to clobber an existing record unless `--force` was passed.
    #[allow(clippy::unwrap_used)]
    if session.has_record(label) && !matches.get_one::<bool>("force").unwrap() {
        return Err(anyhow!("refusing to overwrite a record without --force"));
    }
    let config = session.config.with_matches(matches);
    // Collect the record's fields interactively, dispatching on `--kind`.
    #[allow(clippy::unwrap_used)]
    let record = match matches
        .get_one::<String>("kind")
        .map(AsRef::as_ref)
        .unwrap()
    {
        "login" => Record::new(label, LoginFields::input(&config)?),
        "environment" => Record::new(label, EnvironmentFields::input(&config)?),
        "unstructured" => Record::new(label, UnstructuredFields::input(&config)?),
        // unreachable: presumably the CLI parser restricts `--kind` to the set above.
        _ => unreachable!(),
    };
    session.add_record(&record)?;
    if let Some(post_hook) = &session.config.commands.new.post_hook {
        log::debug!("post-hook: {post_hook}");
        session.config.call_hook(post_hook, &[label])?;
    }
    Ok(())
}
/// Implements the `kbs2 list` command.
///
/// Prints one record label per line, optionally with kind/timestamp details
/// (`--details`) or filtered by record kind (`--kind`).
pub fn list(matches: &ArgMatches, config: &config::Config) -> Result<()> {
    log::debug!("listing records");
    let session: Session = config.try_into()?;
    #[allow(clippy::unwrap_used)]
    let (details, filter_kind) = (
        *matches.get_one::<bool>("details").unwrap(),
        matches.contains_id("kind"),
    );
    for label in session.record_labels()? {
        let mut display = String::new();
        // Only fetch the full record when its contents are actually needed
        // (for details or kind filtering).
        if details || filter_kind {
            let record = session.get_record(&label)?;
            if filter_kind {
                #[allow(clippy::unwrap_used)]
                let kind = matches.get_one::<String>("kind").unwrap();
                if &record.body.to_string() != kind {
                    continue;
                }
            }
            display.push_str(&label);
            if details {
                write!(display, " {} {}", record.body, record.timestamp)?;
            }
        } else {
            display.push_str(&label);
        }
        println!("{display}");
    }
    Ok(())
}
/// Implements the `kbs2 rm` command.
///
/// Deletes each requested record, then runs the configured post-hook with
/// all deleted labels.
pub fn rm(matches: &ArgMatches, config: &config::Config) -> Result<()> {
    log::debug!("removing a record");
    let session: Session = config.try_into()?;
    #[allow(clippy::unwrap_used)]
    let labels: Vec<_> = matches
        .get_many::<String>("label")
        .unwrap()
        .map(AsRef::as_ref)
        .collect();
    for label in &labels {
        session.delete_record(label)?;
    }
    if let Some(post_hook) = &session.config.commands.rm.post_hook {
        log::debug!("post-hook: {post_hook}");
        session.config.call_hook(post_hook, &labels)?;
    }
    Ok(())
}
/// Implements the `kbs2 rename` command.
///
/// Renames a record, refusing to overwrite an existing destination label
/// without `--force`; runs the configured post-hook with both labels.
pub fn rename(matches: &ArgMatches, config: &config::Config) -> Result<()> {
    log::debug!("renaming a record");
    let session: Session = config.try_into()?;
    #[allow(clippy::unwrap_used)]
    let old_label: &str = matches.get_one::<String>("old-label").unwrap();
    #[allow(clippy::unwrap_used)]
    let new_label: &str = matches.get_one::<String>("new-label").unwrap();
    #[allow(clippy::unwrap_used)]
    if session.has_record(new_label) && !matches.get_one::<bool>("force").unwrap() {
        return Err(anyhow!("refusing to overwrite a record without --force"));
    }
    session.rename_record(old_label, new_label)?;
    if let Some(post_hook) = &session.config.commands.rename.post_hook {
        log::debug!("post-hook: {post_hook}");
        session
            .config
            .call_hook(post_hook, &[old_label, new_label])?;
    }
    Ok(())
}
/// Implements the `kbs2 dump` command.
///
/// Prints each requested record either as JSON (with `--json`) or as a
/// human-readable field listing.
pub fn dump(matches: &ArgMatches, config: &config::Config) -> Result<()> {
    log::debug!("dumping a record");

    let session: Session = config.try_into()?;

    // Unwrap safety: "json" is a flag and "label" is a required argument.
    #[allow(clippy::unwrap_used)]
    let as_json = *matches.get_one::<bool>("json").unwrap();
    #[allow(clippy::unwrap_used)]
    let labels = matches.get_many::<String>("label").unwrap();

    for label in labels {
        let record = session.get_record(label)?;
        if as_json {
            println!("{}", serde_json::to_string(&record)?);
        } else {
            // Header lines common to every record kind...
            println!("Label {}\nKind {}", label, record.body);
            // ...followed by the kind-specific fields.
            match record.body {
                RecordBody::Login(l) => {
                    println!("Username {}\nPassword {}", l.username, l.password)
                }
                RecordBody::Environment(e) => {
                    println!("Variable {}\nValue {}", e.variable, e.value)
                }
                RecordBody::Unstructured(u) => println!("Contents {}", u.contents),
            }
        }
    }

    Ok(())
}
/// Implements the `kbs2 pass` command.
///
/// Looks up a login record and emits its password, either to the clipboard
/// (`--clipboard`, via a forked child process) or to stdout. The `pass`
/// pre- and post-hooks run around the lookup, with no arguments.
pub fn pass(matches: &ArgMatches, config: &config::Config) -> Result<()> {
    log::debug!("getting a login's password");
    let session: Session = config.try_into()?;
    if let Some(pre_hook) = &session.config.commands.pass.pre_hook {
        log::debug!("pre-hook: {pre_hook}");
        session.config.call_hook(pre_hook, &[])?;
    }
    // Unwrap safety: "label" is a required argument.
    #[allow(clippy::unwrap_used)]
    let label = matches.get_one::<String>("label").unwrap();
    let record = session.get_record(label)?;
    // Only login records carry a password; reject any other kind.
    let login = match record.body {
        RecordBody::Login(l) => l,
        _ => return Err(anyhow!("not a login record: {}", label)),
    };
    let password = login.password;
    #[allow(clippy::unwrap_used)]
    if *matches.get_one::<bool>("clipboard").unwrap() {
        // NOTE(ww): fork() is unsafe in multithreaded programs where the child calls
        // non async-signal-safe functions. kbs2 is single threaded, so this usage is fine.
        unsafe {
            match fork() {
                Ok(ForkResult::Child) => {
                    // Child: hold the password on the clipboard (and clear it,
                    // if configured) while the parent returns promptly.
                    // NOTE(review): after `clip` returns, the child falls
                    // through and also runs the post-hook below — confirm
                    // whether the child is expected to exit before that point.
                    clip(password, &session)?;
                }
                Err(_) => return Err(anyhow!("clipboard fork failed")),
                // Parent: nothing to do; continue immediately.
                _ => {}
            }
        }
    } else if !stdin().is_terminal() {
        // Piped/captured output: omit the trailing newline.
        print!("{password}");
    } else {
        println!("{password}");
    }
    if let Some(post_hook) = &session.config.commands.pass.post_hook {
        log::debug!("post-hook: {post_hook}");
        session.config.call_hook(post_hook, &[])?;
    }
    Ok(())
}
/// Places `password` on the system clipboard for the configured duration,
/// then optionally clears it and runs the configured clear-hook.
#[doc(hidden)]
fn clip(password: String, session: &Session) -> Result<()> {
    let pass_config = &session.config.commands.pass;

    let mut clipboard = Clipboard::new()?;
    clipboard.set_text(&password)?;

    // Hold the secret on the clipboard for the configured number of seconds...
    std::thread::sleep(std::time::Duration::from_secs(
        pass_config.clipboard_duration,
    ));

    // ...then wipe it (if configured) and notify the user via the clear-hook.
    if pass_config.clear_after {
        clipboard.clear()?;
        if let Some(clear_hook) = &pass_config.clear_hook {
            log::debug!("clear-hook: {clear_hook}");
            session.config.call_hook(clear_hook, &[])?;
        }
    }

    Ok(())
}
/// Implements the `kbs2 env` command.
///
/// Looks up an environment record and prints it in one of three forms:
/// just the value (`--value-only`), `VAR=value` (`--no-export`), or a
/// shell-ready `export VAR=value` line (the default).
pub fn env(matches: &ArgMatches, config: &config::Config) -> Result<()> {
    // Fix: log message grammar ("a environment" -> "an environment").
    log::debug!("getting an environment variable");
    let session: Session = config.try_into()?;
    // Unwrap safety: "label" is a required argument.
    #[allow(clippy::unwrap_used)]
    let label = matches.get_one::<String>("label").unwrap();
    let record = session.get_record(label)?;
    // Only environment records make sense here; reject any other kind.
    let environment = match record.body {
        RecordBody::Environment(e) => e,
        _ => return Err(anyhow!("not an environment record: {}", label)),
    };
    // Unwrap safety: both output-shape arguments are flags.
    #[allow(clippy::unwrap_used)]
    if *matches.get_one::<bool>("value-only").unwrap() {
        println!("{}", environment.value);
    } else if *matches.get_one::<bool>("no-export").unwrap() {
        println!("{}={}", environment.variable, environment.value);
    } else {
        println!("export {}={}", environment.variable, environment.value);
    }
    Ok(())
}
/// Implements the `kbs2 edit` command.
///
/// Round-trips a record through the user's editor: the record is serialized
/// to a temporary JSON file, edited, then deserialized and re-saved. The
/// `label` and `timestamp` fields are forced back to their canonical values
/// afterwards, so the editor cannot change them.
pub fn edit(matches: &ArgMatches, config: &config::Config) -> Result<()> {
    log::debug!("editing a record");
    let session: Session = config.try_into()?;
    // Editor resolution: explicit config wins, then $EDITOR, else error.
    let editor = match session
        .config
        .commands
        .edit
        .editor
        .as_ref()
        .cloned()
        .or_else(|| env::var("EDITOR").ok())
    {
        Some(editor) => editor,
        None => return Err(anyhow!("no editor configured to edit with")),
    };
    // The configured editor may contain arguments (e.g. "code -w").
    let (editor, editor_args) = util::parse_and_split_args(&editor)?;
    log::debug!("editor: {editor}, args: {editor_args:?}");
    // Unwrap safety: "label" is a required argument.
    #[allow(clippy::unwrap_used)]
    let label = matches.get_one::<String>("label").unwrap();
    let record = session.get_record(label)?;
    // Stage the record as pretty-printed JSON in a temporary file.
    let mut file = tempfile::NamedTempFile::new()?;
    file.write_all(&serde_json::to_vec_pretty(&record)?)?;
    // A failed spawn or a nonzero exit both abort the edit.
    if !process::Command::new(&editor)
        .args(&editor_args)
        .arg(file.path())
        .status()
        .is_ok_and(|o| o.success())
    {
        return Err(anyhow!("failed to run the editor"));
    }
    // Rewind, pull the changed contents, deserialize back into a record.
    file.rewind()?;
    let mut record_contents = vec![];
    file.read_to_end(&mut record_contents)?;
    let mut record = serde_json::from_slice::<record::Record>(&record_contents)?;
    // Users can't modify these fields, at least not with `kbs2 edit`.
    record.label = label.into();
    record.timestamp = util::current_timestamp();
    session.add_record(&record)?;
    if let Some(post_hook) = &session.config.commands.edit.post_hook {
        log::debug!("post-hook: {post_hook}");
        session.config.call_hook(post_hook, &[])?;
    }
    Ok(())
}
/// Implements the `kbs2 generate` command.
///
/// Produces a single secret from the named generator and prints it.
pub fn generate(matches: &ArgMatches, config: &config::Config) -> Result<()> {
    // Unwrap safety: "generator" always has a value (clap supplies a default).
    #[allow(clippy::unwrap_used)]
    let generator_name = matches.get_one::<String>("generator").unwrap();

    let generator = config
        .generator(generator_name)
        .ok_or_else(|| anyhow!("couldn't find a generator named {}", generator_name))?;

    println!("{}", generator.secret()?);
    Ok(())
}
/// Implements the `kbs2 rewrap` command.
///
/// Re-encrypts ("rewraps") the wrapped keyfile under a new master password,
/// optionally saving a backup of the old keyfile first.
pub fn rewrap(matches: &ArgMatches, config: &config::Config) -> Result<()> {
    log::debug!("attempting key rewrap");

    // Rewrapping only makes sense for password-wrapped keys.
    if !config.wrapped {
        return Err(anyhow!("config specifies a bare key; nothing to rewrap"));
    }

    // Unwrap safety: "no-backup" and "force" are flags.
    #[allow(clippy::unwrap_used)]
    let no_backup = *matches.get_one::<bool>("no-backup").unwrap();
    if !no_backup {
        let keyfile_backup = PathBuf::from(format!("{}.old", &config.keyfile));
        #[allow(clippy::unwrap_used)]
        let force = *matches.get_one::<bool>("force").unwrap();
        if keyfile_backup.exists() && !force {
            return Err(anyhow!(
                "refusing to overwrite a previous key backup without --force"
            ));
        }
        std::fs::copy(&config.keyfile, &keyfile_backup)?;
        println!("Backup of the OLD wrapped keyfile saved to: {keyfile_backup:?}");
    }

    // Prompt for both passwords, then delegate the actual rewrap to the backend.
    let old = util::get_password(Some("OLD master password: "), &config.pinentry)?;
    let new = util::get_password(Some("NEW master password: "), &config.pinentry)?;
    backend::RageLib::rewrap_keyfile(&config.keyfile, old, new)
}
/// Implements the `kbs2 rekey` command.
///
/// Generates a brand-new wrapped keypair under a new master password and
/// re-encrypts every record in the store with it, rewriting the config's
/// public key in the process. Unless `--no-backup` was given, the keyfile,
/// config, and every record are backed up (as `*.old`) first.
pub fn rekey(matches: &ArgMatches, config: &config::Config) -> Result<()> {
    log::debug!("attempting to rekey the store");
    // This is an artificial limitation; bare keys should never be used outside of testing,
    // so support for them is unnecessary here.
    if !config.wrapped {
        return Err(anyhow!("rekeying is only supported on wrapped keys"));
    }
    let session: Session = config.try_into()?;
    // Rekeying is destructive, so require explicit confirmation (default: no).
    println!(
        "This subcommand REKEYS your entire store ({}) and REWRITES your config",
        session.config.store
    );
    if !Confirm::new("Are you SURE you want to continue?")
        .with_default(false)
        .with_help_message("Be certain! If you are not certain, press [enter] to do nothing.")
        .prompt()?
    {
        return Ok(());
    }
    // Unwrap safety: "no-backup" is a flag.
    #[allow(clippy::unwrap_used)]
    if !*matches.get_one::<bool>("no-backup").unwrap() {
        // First, back up the keyfile.
        let keyfile_backup: PathBuf = format!("{}.old", &config.keyfile).into();
        if keyfile_backup.exists() {
            return Err(anyhow!(
                "refusing to overwrite a previous key backup during rekeying; resolve manually"
            ));
        }
        std::fs::copy(&config.keyfile, &keyfile_backup)?;
        println!("Backup of the OLD wrapped keyfile saved to: {keyfile_backup:?}");
        // Next, the config itself.
        let config_backup: PathBuf =
            Path::new(&config.config_dir).join(format!("{}.old", config::CONFIG_BASENAME));
        if config_backup.exists() {
            return Err(anyhow!(
                "refusing to overwrite a previous config backup during rekeying; resolve manually"
            ));
        }
        std::fs::copy(
            Path::new(&config.config_dir).join(config::CONFIG_BASENAME),
            &config_backup,
        )?;
        println!("Backup of the OLD config saved to: {config_backup:?}");
        // Finally, every record in the store.
        let store_backup: PathBuf = format!("{}.old", &config.store).into();
        if store_backup.exists() {
            return Err(anyhow!(
                "refusing to overwrite a previous store backup during rekeying; resolve manually"
            ));
        }
        std::fs::create_dir_all(&store_backup)?;
        for label in session.record_labels()? {
            std::fs::copy(
                Path::new(&config.store).join(&label),
                store_backup.join(&label),
            )?;
        }
        println!("Backup of the OLD store saved to: {:?}", &store_backup);
    }
    // Decrypt and collect all records (in memory, wrapped in SecretBox so the
    // plaintexts are zeroized on drop) before the old key is destroyed.
    let records: Vec<SecretBox<record::Record>> = {
        let records: Result<Vec<record::Record>> = session
            .record_labels()?
            .iter()
            .map(|l| session.get_record(l))
            .collect();
        records?
            .into_iter()
            .map(|r| SecretBox::new(Box::new(r)))
            .collect()
    };
    // Get a new master password.
    let new_password = util::get_password(Some("NEW master password: "), &config.pinentry)?;
    // Use it to generate a new wrapped keypair, overwriting the previous keypair.
    let public_key =
        backend::RageLib::create_wrapped_keypair(&config.keyfile, new_password.clone())?;
    // Dupe the current config, update only the public key field, and write it back.
    let config = config::Config {
        public_key,
        ..config.clone()
    };
    std::fs::write(
        Path::new(&config.config_dir).join(config::CONFIG_BASENAME),
        toml::to_string(&config)?,
    )?;
    // Flush the stale key from the active agent, and add the new key to the agent.
    // NOTE(ww): This scope is essential: we need to drop this client before we
    // create the new session below. Why? Because the session contains its
    // own agent client, and the current agent implementation only allows a
    // single client at a time. Clients yield their access by closing their
    // underlying socket, so we need to drop here to prevent a deadlock.
    {
        let client = agent::Client::new()?;
        client.flush_keys()?;
        client.add_key(&config.public_key, &config.keyfile, new_password)?;
    }
    // Create a new session from the new config and use it to re-encrypt each record.
    println!("Re-encrypting all records, be patient...");
    let session: Session = (&config).try_into()?;
    for record in records {
        log::debug!("re-encrypting {}", record.expose_secret().label);
        session.add_record(record.expose_secret())?;
    }
    println!("All done.");
    Ok(())
}
/// Implements the `kbs2 config` command.
///
/// Dispatches to the requested `config` subcommand. Only `dump` exists
/// today; clap guarantees that a known subcommand is always present.
pub fn config(matches: &ArgMatches, config: &config::Config) -> Result<()> {
    log::debug!("config subcommand dispatch");

    let Some(("dump", matches)) = matches.subcommand() else {
        // clap rejects unknown or missing subcommands before we get here.
        unreachable!()
    };

    // Unwrap safety: "pretty" is a flag.
    #[allow(clippy::unwrap_used)]
    if *matches.get_one::<bool>("pretty").unwrap() {
        serde_json::to_writer_pretty(io::stdout(), &config)?;
    } else {
        serde_json::to_writer(io::stdout(), &config)?;
    }

    Ok(())
}
| woodruffw/kbs2 | 126 | A secret manager backed by age | Rust | woodruffw | William Woodruff | astral-sh |
src/kbs2/config.rs | Rust | use std::collections::HashMap;
use std::env;
use std::ffi::OsStr;
use std::fs;
use std::io::{stdin, IsTerminal};
use std::path::{Path, PathBuf};
use std::process::{Command, Stdio};
use age::secrecy::SecretString;
use anyhow::{anyhow, Result};
use clap::ArgMatches;
use lazy_static::lazy_static;
use serde::{de, Deserialize, Serialize};
use xdg::BaseDirectories;
use crate::kbs2::backend::{Backend, RageLib};
use crate::kbs2::generator::Generator;
use crate::kbs2::util;
/// The default basename for the main config file, relative to the configuration
/// directory.
pub static CONFIG_BASENAME: &str = "config.toml";

/// The default generated age key is placed in this file, relative to
/// the configuration directory.
pub static DEFAULT_KEY_BASENAME: &str = "key";
lazy_static! {
    // We're completely hosed if we can't find a reasonable set of base directories,
    // so there isn't much point in trying to avoid this `expect`.
    static ref XDG_DIRS: BaseDirectories = {
        #[allow(clippy::expect_used)]
        BaseDirectories::with_prefix(env!("CARGO_PKG_NAME"))
            .expect("Fatal: XDG: couldn't determine reasonable base directories")
    };

    /// The default configuration directory: the XDG config home under
    /// the crate-name prefix.
    pub static ref DEFAULT_CONFIG_DIR: PathBuf = XDG_DIRS.get_config_home();

    /// The default record store directory: the XDG data home under
    /// the crate-name prefix.
    pub static ref DEFAULT_STORE_DIR: PathBuf = XDG_DIRS.get_data_home();
}
/// The main kbs2 configuration structure.
/// The fields of this structure correspond directly to the fields
/// loaded from the configuration file.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct Config {
    /// The path to the directory that this configuration was loaded from.
    ///
    /// **NOTE**: This field is never loaded from the configuration file itself.
    #[serde(skip)]
    pub config_dir: String,
    /// The public component of the keypair.
    #[serde(rename = "public-key")]
    pub public_key: String,
    /// The path to a file containing the private component of the keypair,
    /// which may be wrapped with a passphrase.
    ///
    /// A leading `~` is expanded to the user's home directory on load.
    #[serde(deserialize_with = "deserialize_with_tilde")]
    pub keyfile: String,
    /// Whether or not to auto-start the kbs2 authentication agent when
    /// creating a session. Defaults to `true`.
    #[serde(rename = "agent-autostart")]
    #[serde(default = "default_as_true")]
    pub agent_autostart: bool,
    /// Whether or not the private component of the keypair is wrapped with
    /// a passphrase. Defaults to `true`.
    #[serde(default = "default_as_true")]
    pub wrapped: bool,
    /// The path to the directory where encrypted records are stored.
    /// A leading `~` is expanded on load.
    #[serde(deserialize_with = "deserialize_with_tilde")]
    pub store: String,
    /// The pinentry binary to use for password prompts.
    #[serde(default)]
    pub pinentry: Pinentry,
    /// An optional command to run before each `kbs2` subcommand.
    #[serde(deserialize_with = "deserialize_optional_with_tilde")]
    #[serde(rename = "pre-hook")]
    #[serde(default)]
    pub pre_hook: Option<String>,
    /// An optional command to run after each `kbs2` subcommand, on success.
    #[serde(deserialize_with = "deserialize_optional_with_tilde")]
    #[serde(rename = "post-hook")]
    #[serde(default)]
    pub post_hook: Option<String>,
    /// An optional command to run after each `kbs2` subcommand, on error.
    #[serde(deserialize_with = "deserialize_optional_with_tilde")]
    #[serde(rename = "error-hook")]
    #[serde(default)]
    pub error_hook: Option<String>,
    /// Whether or not any hooks are called when a hook itself invokes `kbs2`.
    /// See `Config::call_hook` for the exact semantics.
    #[serde(default)]
    #[serde(rename = "reentrant-hooks")]
    pub reentrant_hooks: bool,
    /// Any secret generators configured by the user.
    #[serde(default)]
    pub generators: Vec<GeneratorConfig>,
    /// Per-command configuration.
    #[serde(default)]
    pub commands: CommandConfigs,
}
impl Config {
    /// Calls a command as a hook, meaning:
    /// * The command is run with the `kbs2` store as its working directory
    /// * The command is run with `KBS2_HOOK=1` in its environment
    ///
    /// Hooks have the following behavior:
    /// 1. If `reentrant-hooks` is `true` *or* `KBS2_HOOK` is *not* present in the environment,
    ///    the hook is run.
    /// 2. If `reentrant-hooks` is `false` (the default) *and* `KBS2_HOOK` is already present
    ///    (indicating that we're already in a hook), nothing is run.
    pub fn call_hook(&self, cmd: &str, args: &[&str]) -> Result<()> {
        // Refuse to nest hooks unless the user opted in via `reentrant-hooks`.
        if !self.reentrant_hooks && env::var("KBS2_HOOK").is_ok() {
            util::warn("nested hook requested without reentrant-hooks; skipping");
            return Ok(());
        }

        let status = Command::new(cmd)
            .args(args)
            .current_dir(Path::new(&self.store))
            .env("KBS2_HOOK", "1")
            .env("KBS2_CONFIG_DIR", &self.config_dir)
            .stdin(Stdio::null())
            .stdout(Stdio::null())
            .status()
            .map_err(|_| anyhow!("failed to run hook: {}", cmd))?;

        if status.success() {
            Ok(())
        } else {
            Err(anyhow!("hook exited with an error code: {}", cmd))
        }
    }

    /// Given the `name` of a configured generator, return that generator
    /// if it exists.
    pub fn generator(&self, name: &str) -> Option<&GeneratorConfig> {
        self.generators.iter().find(|g| g.name() == name)
    }

    /// Create a `RuntimeConfig` from this config and the given `matches`.
    pub fn with_matches<'a>(&'a self, matches: &'a ArgMatches) -> RuntimeConfig<'a> {
        RuntimeConfig {
            config: self,
            matches,
        }
    }
}
/// A newtype wrapper around a `String`, used to provide a sensible default for `Config.pinentry`.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct Pinentry(String);
impl Default for Pinentry {
fn default() -> Self {
Self("pinentry".into())
}
}
impl AsRef<OsStr> for Pinentry {
fn as_ref(&self) -> &OsStr {
self.0.as_ref()
}
}
/// The configuration for a single secret generator: a set of alphabets to
/// sample from and the total length of the generated secret.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct GeneratorConfig {
    /// The name of the generator.
    pub name: String,
    /// The alphabets used by the generator.
    pub alphabets: Vec<String>,
    /// The length of the secrets generated.
    pub length: usize,
}

impl Default for GeneratorConfig {
    fn default() -> Self {
        // Lowercase, uppercase, digits, and a small symbol set.
        let alphabets = [
            "abcdefghijklmnopqrstuvwxyz",
            "ABCDEFGHIJKLMNOPQRSTUVWXYZ",
            "0123456789",
            "(){}[]-_+=",
        ]
        .into_iter()
        .map(String::from)
        .collect();

        GeneratorConfig {
            name: String::from("default"),
            alphabets,
            length: 16,
        }
    }
}
/// The per-command configuration settings known to `kbs2`.
#[derive(Clone, Default, Debug, Deserialize, Serialize)]
#[serde(default)]
pub struct CommandConfigs {
    /// Settings for `kbs2 new`.
    pub new: NewConfig,
    /// Settings for `kbs2 pass`.
    pub pass: PassConfig,
    /// Settings for `kbs2 edit`.
    pub edit: EditConfig,
    /// Settings for `kbs2 rm`.
    pub rm: RmConfig,
    /// Settings for `kbs2 rename`.
    pub rename: RenameConfig,
    /// External command settings.
    ///
    /// Arbitrary TOML tables keyed by command name; not interpreted in
    /// this module.
    // NOTE(review): presumably consumed by external `kbs2-*` subcommands —
    // confirm against their callers.
    pub ext: HashMap<String, HashMap<String, toml::Value>>,
}
/// Configuration settings for `kbs2 new`.
#[derive(Clone, Debug, Default, Deserialize, Serialize)]
#[serde(default)]
pub struct NewConfig {
    /// A default username to pre-fill when prompting for login records.
    #[serde(rename = "default-username")]
    pub default_username: Option<String>,
    // TODO(ww): This deserialize_with is ugly. There's probably a better way to do this.
    /// An optional command to run before `kbs2 new` (tilde-expanded on load).
    #[serde(deserialize_with = "deserialize_optional_with_tilde")]
    #[serde(rename = "pre-hook")]
    pub pre_hook: Option<String>,
    /// An optional command to run after `kbs2 new` (tilde-expanded on load).
    #[serde(deserialize_with = "deserialize_optional_with_tilde")]
    #[serde(rename = "post-hook")]
    pub post_hook: Option<String>,
}
/// Configuration settings for `kbs2 pass`.
#[derive(Clone, Debug, Deserialize, Serialize)]
#[serde(default)]
pub struct PassConfig {
    /// How long, in seconds, a password stays on the clipboard.
    #[serde(rename = "clipboard-duration")]
    pub clipboard_duration: u64,
    /// Whether the clipboard is cleared once `clipboard-duration` elapses.
    #[serde(rename = "clear-after")]
    pub clear_after: bool,
    /// An optional command to run before `kbs2 pass` (tilde-expanded on load).
    #[serde(deserialize_with = "deserialize_optional_with_tilde")]
    #[serde(rename = "pre-hook")]
    pub pre_hook: Option<String>,
    /// An optional command to run after `kbs2 pass` (tilde-expanded on load).
    #[serde(deserialize_with = "deserialize_optional_with_tilde")]
    #[serde(rename = "post-hook")]
    pub post_hook: Option<String>,
    /// An optional command to run after the clipboard is cleared.
    #[serde(deserialize_with = "deserialize_optional_with_tilde")]
    #[serde(rename = "clear-hook")]
    pub clear_hook: Option<String>,
}
/// The X11 clipboard selections that can be targeted.
// NOTE(review): this enum is not referenced elsewhere in this file;
// presumably consumed by clipboard handling elsewhere — confirm before
// removing.
#[derive(Copy, Clone, Debug, Deserialize, PartialEq, Eq, Serialize)]
pub enum X11Clipboard {
    Clipboard,
    Primary,
}
impl Default for PassConfig {
fn default() -> Self {
PassConfig {
clipboard_duration: 10,
clear_after: true,
pre_hook: None,
post_hook: None,
clear_hook: None,
}
}
}
/// Configuration settings for `kbs2 edit`.
#[derive(Clone, Debug, Default, Deserialize, Serialize)]
#[serde(default)]
pub struct EditConfig {
    /// The editor command to use; falls back to `$EDITOR` when unset.
    pub editor: Option<String>,
    /// An optional command to run after `kbs2 edit` (tilde-expanded on load).
    #[serde(deserialize_with = "deserialize_optional_with_tilde")]
    #[serde(rename = "post-hook")]
    pub post_hook: Option<String>,
}
/// Configuration settings for `kbs2 rm`.
#[derive(Clone, Debug, Default, Deserialize, Serialize)]
#[serde(default)]
pub struct RmConfig {
    /// An optional command to run after `kbs2 rm` (tilde-expanded on load).
    #[serde(deserialize_with = "deserialize_optional_with_tilde")]
    #[serde(rename = "post-hook")]
    pub post_hook: Option<String>,
}
/// Configuration settings for `kbs2 rename`.
#[derive(Clone, Debug, Default, Deserialize, Serialize)]
#[serde(default)]
pub struct RenameConfig {
    /// An optional command to run after `kbs2 rename` (tilde-expanded on load).
    #[serde(deserialize_with = "deserialize_optional_with_tilde")]
    #[serde(rename = "post-hook")]
    pub post_hook: Option<String>,
}
/// A "view" for an active configuration, composed with some set of argument matches
/// from the command line.
pub struct RuntimeConfig<'a> {
    pub config: &'a Config,
    pub matches: &'a ArgMatches,
}

impl RuntimeConfig<'_> {
    /// Resolves the generator to use: the one named by `--generator` when
    /// given, otherwise the always-present `"default"` generator.
    pub fn generator(&self) -> Result<&GeneratorConfig> {
        match self.matches.get_one::<String>("generator") {
            Some(generator) => self
                .config
                .generator(generator)
                .ok_or_else(|| anyhow!("no generator named {generator}")),
            // Failure here indicates a bug, since we should always have a default.
            None => self
                .config
                .generator("default")
                .ok_or_else(|| anyhow!("missing default generator?")),
        }
    }

    /// Whether input should be collected tersely: either stdin is not a
    /// terminal, or the user passed `--terse`.
    pub fn terse(&self) -> bool {
        if !stdin().is_terminal() {
            return true;
        }
        self.matches
            .get_one::<bool>("terse")
            .copied()
            .unwrap_or(false)
    }
}
/// Deserializes a string field, expanding a leading `~` into the user's
/// home directory.
#[doc(hidden)]
#[inline]
fn deserialize_with_tilde<'de, D>(deserializer: D) -> std::result::Result<String, D::Error>
where
    D: de::Deserializer<'de>,
{
    String::deserialize(deserializer).map(|raw| shellexpand::tilde(&raw).into_owned())
}
/// Like `deserialize_with_tilde`, but for optional fields: `None` passes
/// through untouched.
#[doc(hidden)]
#[inline]
fn deserialize_optional_with_tilde<'de, D>(
    deserializer: D,
) -> std::result::Result<Option<String>, D::Error>
where
    D: de::Deserializer<'de>,
{
    let raw: Option<String> = Deserialize::deserialize(deserializer)?;
    Ok(raw.map(|s| shellexpand::tilde(&s).into_owned()))
}
/// Serde `default = "..."` helper for boolean fields that default to `true`.
#[doc(hidden)]
#[inline]
fn default_as_true() -> bool {
    // https://github.com/serde-rs/serde/issues/1030
    true
}
/// Given a path to a `kbs2` configuration directory, initializes a configuration
/// file and keypair within it.
///
/// # Arguments
///
/// * `config_dir` - The configuration directory to initialize within
/// * `store_dir` - The record store directory to use
/// * `password` - An optional master password for wrapping the secret
pub fn initialize<P: AsRef<Path>>(
    config_dir: P,
    store_dir: P,
    password: Option<SecretString>,
) -> Result<()> {
    fs::create_dir_all(&config_dir)?;

    let keyfile = config_dir.as_ref().join(DEFAULT_KEY_BASENAME);

    // A password implies a wrapped (passphrase-protected) keypair.
    let wrapped = password.is_some();
    let public_key = match password {
        Some(password) => RageLib::create_wrapped_keypair(&keyfile, password)?,
        None => RageLib::create_keypair(&keyfile)?,
    };
    log::debug!("public key: {public_key}");

    // All three paths must be valid UTF-8 to be representable in TOML.
    let config_dir_str: String = config_dir
        .as_ref()
        .to_str()
        .ok_or_else(|| anyhow!("unencodable config dir"))?
        .into();
    let store: String = store_dir
        .as_ref()
        .to_str()
        .ok_or_else(|| anyhow!("unencodable store dir"))?
        .into();
    let keyfile_str: String = keyfile
        .to_str()
        .ok_or_else(|| anyhow!("unrepresentable keyfile path: {:?}", keyfile))?
        .into();

    let serialized = toml::to_string(&Config {
        // NOTE(ww): Not actually serialized; just here to make the compiler happy.
        config_dir: config_dir_str,
        public_key,
        keyfile: keyfile_str,
        agent_autostart: true,
        wrapped,
        store,
        pinentry: Default::default(),
        pre_hook: None,
        post_hook: None,
        error_hook: None,
        reentrant_hooks: false,
        generators: vec![Default::default()],
        commands: Default::default(),
    })?;

    fs::write(config_dir.as_ref().join(CONFIG_BASENAME), serialized)?;

    Ok(())
}
/// Given a path to a `kbs2` configuration directory, loads the configuration
/// file within and returns the resulting `Config`.
pub fn load<P: AsRef<Path>>(config_dir: P) -> Result<Config> {
let config_dir = config_dir.as_ref();
let config_path = config_dir.join(CONFIG_BASENAME);
let contents = fs::read_to_string(config_path)?;
let mut config = Config {
config_dir: config_dir
.to_str()
.ok_or_else(|| anyhow!("unrepresentable config dir path: {:?}", config_dir))?
.into(),
..toml::from_str(&contents).map_err(|e| anyhow!("config loading error: {}", e))?
};
// Always put a default generator in the generator list.
if config.generators.is_empty() {
config.generators.push(Default::default());
}
Ok(config)
}
#[cfg(test)]
mod tests {
    use tempfile::tempdir;

    use super::*;

    // Builds a throwaway config pointing at nonexistent key material; only
    // suitable for tests that never touch the crypto backend. The hooks are
    // chosen for their exit codes: `true` succeeds, `false` fails, and the
    // rm post-hook names a binary that cannot be spawned at all.
    fn dummy_config_unwrapped_key() -> Config {
        Config {
            config_dir: "/not/a/real/dir".into(),
            public_key: "not a real public key".into(),
            keyfile: "not a real private key file".into(),
            agent_autostart: false,
            wrapped: false,
            store: "/tmp".into(),
            pinentry: Default::default(),
            pre_hook: Some("true".into()),
            post_hook: Some("false".into()),
            error_hook: Some("true".into()),
            reentrant_hooks: false,
            generators: vec![Default::default()],
            commands: CommandConfigs {
                rm: RmConfig {
                    post_hook: Some("this-command-does-not-exist".into()),
                },
                ..Default::default()
            },
        }
    }

    #[test]
    fn test_find_default_config_dir() {
        // NOTE: We can't check whether the main config dir exists since we create it if it
        // doesn't; instead, we just check that it isn't something weird like a regular file.
        assert!(!DEFAULT_CONFIG_DIR.is_file());

        // The default config dir's parents aren't guaranteed to exist; we create them
        // if they don't.
    }

    #[test]
    fn test_find_default_store_dir() {
        // NOTE: Like above: just make sure it isn't something weird like a regular file.
        assert!(!DEFAULT_STORE_DIR.is_file());

        // The default store dir's parents aren't guaranteed to exist; we create them
        // if they don't.
    }

    #[test]
    fn test_initialize_unwrapped() {
        {
            let config_dir = tempdir().unwrap();
            let store_dir = tempdir().unwrap();
            assert!(initialize(&config_dir, &store_dir, None).is_ok());

            let config_dir = config_dir.path();
            // Initialization must create both the config file and the keyfile.
            assert!(config_dir.exists());
            assert!(config_dir.is_dir());
            assert!(config_dir.join(CONFIG_BASENAME).exists());
            assert!(config_dir.join(CONFIG_BASENAME).is_file());
            assert!(config_dir.join(DEFAULT_KEY_BASENAME).exists());
            assert!(config_dir.join(DEFAULT_KEY_BASENAME).is_file());

            // No password was given, so the key must be recorded as bare.
            let config = load(config_dir).unwrap();
            assert!(!config.wrapped);
        }
    }

    #[test]
    fn test_initialize_wrapped() {
        {
            let config_dir = tempdir().unwrap();
            let store_dir = tempdir().unwrap();
            assert!(initialize(
                &config_dir,
                &store_dir,
                Some(SecretString::new("badpassword".into()))
            )
            .is_ok());

            let config_dir = config_dir.path();
            assert!(config_dir.exists());
            assert!(config_dir.is_dir());
            assert!(config_dir.join(CONFIG_BASENAME).exists());
            assert!(config_dir.join(CONFIG_BASENAME).is_file());
            assert!(config_dir.join(DEFAULT_KEY_BASENAME).exists());
            assert!(config_dir.join(DEFAULT_KEY_BASENAME).is_file());

            // A password was given, so the key must be recorded as wrapped.
            let config = load(config_dir).unwrap();
            assert!(config.wrapped);
        }
    }

    #[test]
    fn test_load() {
        {
            let config_dir = tempdir().unwrap();
            let store_dir = tempdir().unwrap();
            initialize(&config_dir, &store_dir, None).unwrap();

            assert!(load(&config_dir).is_ok());
        }

        {
            let config_dir = tempdir().unwrap();
            let store_dir = tempdir().unwrap();
            initialize(&config_dir, &store_dir, None).unwrap();

            // The loaded config must round-trip the directories it was
            // initialized with.
            let config = load(&config_dir).unwrap();
            assert_eq!(config_dir.path().to_str().unwrap(), config.config_dir);
            assert_eq!(store_dir.path().to_str().unwrap(), config.store);
        }
    }

    #[test]
    fn test_call_hook() {
        let config = dummy_config_unwrapped_key();

        // A hook that exits 0 succeeds.
        {
            assert!(config
                .call_hook(config.pre_hook.as_ref().unwrap(), &[])
                .is_ok());
        }

        // A hook that can't be spawned surfaces a "failed to run" error.
        {
            let err = config
                .call_hook(config.commands.rm.post_hook.as_ref().unwrap(), &[])
                .unwrap_err();
            assert_eq!(
                err.to_string(),
                "failed to run hook: this-command-does-not-exist"
            );
        }

        // A hook that exits nonzero surfaces an "error code" error.
        {
            let err = config
                .call_hook(config.post_hook.as_ref().unwrap(), &[])
                .unwrap_err();
            assert_eq!(err.to_string(), "hook exited with an error code: false");
        }

        {
            assert!(config
                .call_hook(config.error_hook.as_ref().unwrap(), &[])
                .is_ok());
        }
    }

    #[test]
    fn test_get_generator() {
        let config = dummy_config_unwrapped_key();

        assert!(config.generator("default").is_some());
        assert!(config.generator("nonexistent-generator").is_none());
    }
}
| woodruffw/kbs2 | 126 | A secret manager backed by age | Rust | woodruffw | William Woodruff | astral-sh |
src/kbs2/generator.rs | Rust | use anyhow::{anyhow, Result};
use rand::seq::{IteratorRandom, SliceRandom};
use crate::kbs2::config;
/// Represents the operations that all generators are capable of.
pub trait Generator {
    /// Returns the name of the generator, e.g. `"default"`.
    fn name(&self) -> &str;

    /// Returns a secret produced by the generator.
    ///
    /// # Errors
    ///
    /// Returns an error if the generator is misconfigured (e.g. no
    /// alphabets, an empty or non-ASCII alphabet, or an invalid length).
    fn secret(&self) -> Result<String>;
}
impl Generator for config::GeneratorConfig {
    fn name(&self) -> &str {
        &self.name
    }

    /// Generates a secret of exactly `self.length` ASCII characters,
    /// guaranteeing at least one character from each configured alphabet.
    fn secret(&self) -> Result<String> {
        // Invariants: we need at least one alphabet, and our length has to be nonzero.
        if self.alphabets.is_empty() {
            return Err(anyhow!("generator must have at least one alphabet"));
        }

        if self.length == 0 {
            return Err(anyhow!("generator length is invalid (must be nonzero)"));
        }

        // Our secret generation strategy:
        // 1. Sample each alphabet once (so every alphabet is represented)
        // 2. Pad the secret out to the remaining length, sampling from all alphabets
        // 3. Shuffle the result
        let mut rng = rand::thread_rng();
        let mut secret = Vec::with_capacity(self.length);
        for alphabet in self.alphabets.iter() {
            if alphabet.is_empty() {
                return Err(anyhow!("generator alphabet(s) must not be empty"));
            }

            // NOTE(ww): Disallow non-ASCII, to prevent gibberish indexing below.
            if !alphabet.is_ascii() {
                return Err(anyhow!(
                    "generator alphabet(s) contain non-ascii characters"
                ));
            }

            // Safe unwrap: alphabet.chars() is always nonempty.
            #[allow(clippy::unwrap_used)]
            secret.push(alphabet.chars().choose(&mut rng).unwrap());
        }

        // If step 1 generated a longer password than "length" allows, fail.
        // Fix: strict comparison, so that `length == alphabets.len()` is
        // accepted — in that case step 1 has already produced exactly
        // `length` characters and no padding is needed.
        if secret.len() > self.length {
            return Err(anyhow!(
                "generator invariant failure (too many separate alphabets for length?)"
            ));
        }

        // Pad out with the combined alphabet (a no-op when already full).
        let combined_alphabet = self.alphabets.iter().flat_map(|a| a.chars());
        let remainder = combined_alphabet.choose_multiple(&mut rng, self.length - secret.len());
        secret.extend(remainder);

        // Shuffle and return.
        secret.shuffle(&mut rng);
        Ok(secret.into_iter().collect())
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // Convenience constructor for a length-5 generator with the given alphabets.
    fn dummy_internal_generator(alphabets: &[&str]) -> Box<dyn Generator> {
        Box::new(config::GeneratorConfig {
            name: "dummy-internal".into(),
            alphabets: alphabets.iter().map(|a| (*a).into()).collect(),
            length: 5,
        })
    }

    #[test]
    fn test_internal_generator_invariants() {
        // Fails with no alphabets.
        {
            let gen = config::GeneratorConfig {
                name: "dummy-internal".into(),
                alphabets: vec![],
                length: 10,
            };
            assert_eq!(
                gen.secret().unwrap_err().to_string(),
                "generator must have at least one alphabet"
            );
        }

        // Fails with a length of 0.
        {
            let gen = config::GeneratorConfig {
                name: "dummy-internal".into(),
                alphabets: vec!["abcd".into()],
                length: 0,
            };
            assert_eq!(
                gen.secret().unwrap_err().to_string(),
                "generator length is invalid (must be nonzero)"
            );
        }

        // Fails if an alphabet is non-ASCII.
        {
            let gen = dummy_internal_generator(&["ⓓⓔⓕⓘⓝⓘⓣⓔⓛⓨ ⓝⓞⓣ ⓐⓢⓒⓘⓘ"]);
            let err = gen.secret().unwrap_err();
            assert_eq!(
                err.to_string(),
                "generator alphabet(s) contain non-ascii characters"
            );
        }

        // Fails if any individual alphabet is empty.
        {
            let gen = dummy_internal_generator(&[""]);
            let err = gen.secret().unwrap_err();
            assert_eq!(err.to_string(), "generator alphabet(s) must not be empty");
        }

        // Fails if there are more alphabets than available length.
        {
            let gen = config::GeneratorConfig {
                name: "dummy-internal".into(),
                alphabets: vec!["abc", "def", "ghi"]
                    .into_iter()
                    .map(Into::into)
                    .collect(),
                length: 2,
            };
            assert_eq!(
                gen.secret().unwrap_err().to_string(),
                "generator invariant failure (too many separate alphabets for length?)"
            );
        }

        // Succeeds and upholds length and inclusion invariants: every secret
        // has the configured length and draws from every alphabet.
        {
            let alphabets = ["abcd", "1234", "!@#$"];
            let gen = config::GeneratorConfig {
                name: "dummy-internal".into(),
                alphabets: alphabets.into_iter().map(Into::into).collect(),
                length: 10,
            };

            for secret in (0..100).map(|_| gen.secret()) {
                assert!(secret.is_ok());

                let secret = secret.unwrap();
                assert_eq!(secret.len(), 10);
                assert!(alphabets
                    .iter()
                    .all(|a| a.chars().any(|c| secret.contains(c))));
            }
        }
    }
}
| woodruffw/kbs2 | 126 | A secret manager backed by age | Rust | woodruffw | William Woodruff | astral-sh |
src/kbs2/input.rs | Rust | use std::io::{self, Read};
use anyhow::{anyhow, Result};
use inquire::{Password as Pass, Text};
use super::record::{EnvironmentFields, LoginFields, RecordBody, UnstructuredFields};
use crate::kbs2::config::RuntimeConfig;
use crate::kbs2::generator::Generator;
/// The input separator used when input is gathered in "terse" mode.
/// (`\x01` is the ASCII SOH control character.)
pub static TERSE_IFS: &str = "\x01";
/// Collection of record fields, either interactively (prompts) or tersely
/// (separator-delimited stdin).
pub trait Input {
    /// The number of fields this record kind expects.
    const FIELD_COUNT: usize;

    /// Collects fields interactively, via prompts.
    fn from_prompt(config: &RuntimeConfig) -> Result<RecordBody>;

    /// Collects fields from stdin, separated by `TERSE_IFS`.
    fn from_terse(config: &RuntimeConfig) -> Result<RecordBody>;

    /// Reads all of stdin and splits it into exactly `FIELD_COUNT` fields
    /// on `TERSE_IFS`, stripping at most one trailing newline first.
    fn take_terse_fields() -> Result<Vec<String>> {
        let mut input = String::new();
        io::stdin().read_to_string(&mut input)?;

        // Drop a single trailing newline, if present.
        if let Some(stripped) = input.strip_suffix('\n') {
            input.truncate(stripped.len());
        }

        let fields: Vec<String> = input
            .splitn(Self::FIELD_COUNT, TERSE_IFS)
            .map(Into::into)
            .collect();

        if fields.len() != Self::FIELD_COUNT {
            return Err(anyhow!(
                "field count mismatch: expected {}, got {}",
                Self::FIELD_COUNT,
                fields.len()
            ));
        }

        Ok(fields)
    }

    /// Dispatches to terse or interactive collection based on the runtime config.
    fn input(config: &RuntimeConfig) -> Result<RecordBody> {
        match config.terse() {
            true => Self::from_terse(config),
            false => Self::from_prompt(config),
        }
    }
}
impl Input for LoginFields {
    const FIELD_COUNT: usize = 2;

    /// Prompts for a username (with an optional configured default) and a
    /// password, auto-generating the password on empty input.
    fn from_prompt(config: &RuntimeConfig) -> Result<RecordBody> {
        let username = match &config.config.commands.new.default_username {
            Some(default_username) => Text::new("Username?")
                .with_default(default_username)
                .prompt()?,
            None => Text::new("Username?").prompt()?,
        };

        let password = {
            let entered = Pass::new("Password?")
                .with_help_message("Press [enter] to auto-generate")
                .without_confirmation()
                .prompt()?;
            if entered.is_empty() {
                config.generator()?.secret()?
            } else {
                entered
            }
        };

        Ok(RecordBody::Login(LoginFields { username, password }))
    }

    /// Reads username and password tersely; an empty password is replaced
    /// with a generated secret.
    fn from_terse(config: &RuntimeConfig) -> Result<RecordBody> {
        let mut fields = Self::take_terse_fields()?;

        // Popped in reverse order: the password was supplied last.
        // Unwrap safety: take_terse_fields checks FIELD_COUNT to ensure sufficient elements.
        #[allow(clippy::unwrap_used)]
        let mut password = fields.pop().unwrap();
        #[allow(clippy::unwrap_used)]
        let username = fields.pop().unwrap();

        if password.is_empty() {
            password = config.generator()?.secret()?;
        }

        Ok(RecordBody::Login(LoginFields { username, password }))
    }
}
impl Input for EnvironmentFields {
    const FIELD_COUNT: usize = 2;

    /// Prompts for a variable name and value, auto-generating the value on
    /// empty input.
    fn from_prompt(config: &RuntimeConfig) -> Result<RecordBody> {
        let variable = Text::new("Variable?").prompt()?;

        // NOTE(review): unlike the login prompt, this value prompt does not
        // call `without_confirmation()` — confirm that's intentional.
        let value = {
            let entered = Pass::new("Value?")
                .with_help_message("Press [enter] to auto-generate")
                .prompt()?;
            if entered.is_empty() {
                config.generator()?.secret()?
            } else {
                entered
            }
        };

        Ok(RecordBody::Environment(EnvironmentFields {
            variable,
            value,
        }))
    }

    /// Reads variable and value tersely; an empty value is replaced with a
    /// generated secret.
    fn from_terse(config: &RuntimeConfig) -> Result<RecordBody> {
        let mut fields = Self::take_terse_fields()?;

        // Popped in reverse order: the value was supplied last.
        // Unwrap safety: take_terse_fields checks FIELD_COUNT to ensure sufficient elements.
        #[allow(clippy::unwrap_used)]
        let mut value = fields.pop().unwrap();
        #[allow(clippy::unwrap_used)]
        let variable = fields.pop().unwrap();

        if value.is_empty() {
            value = config.generator()?.secret()?;
        }

        Ok(RecordBody::Environment(EnvironmentFields {
            variable,
            value,
        }))
    }
}
impl Input for UnstructuredFields {
    const FIELD_COUNT: usize = 1;

    /// Prompts for the record's free-form contents.
    fn from_prompt(_config: &RuntimeConfig) -> Result<RecordBody> {
        let contents = Text::new("Contents?").prompt()?;
        Ok(RecordBody::Unstructured(UnstructuredFields { contents }))
    }

    /// Reads the record's contents tersely from stdin.
    fn from_terse(_config: &RuntimeConfig) -> Result<RecordBody> {
        let mut fields = Self::take_terse_fields()?;
        // Unwrap safety: take_terse_fields checks FIELD_COUNT to ensure sufficient elements.
        #[allow(clippy::unwrap_used)]
        let contents = fields.pop().unwrap();
        Ok(RecordBody::Unstructured(UnstructuredFields { contents }))
    }
}
// /// Given an array of field names and a potential generator, grabs the values for
// /// those fields in a terse manner (each separated by `TERSE_IFS`).
// ///
// /// Fields that are marked as sensitive are subsequently overwritten by the
// /// generator, if one is provided.
// fn terse_fields(names: &[FieldKind], generator: Option<&dyn Generator>) -> Result<Vec<String>> {
// let mut input = String::new();
// io::stdin().read_to_string(&mut input)?;
// if input.ends_with('\n') {
// input.pop();
// }
// // NOTE(ww): Handling generated inputs in terse mode is a bit of a mess.
// // First, we collect all inputs, expecting blank slots where we'll fill
// // in the generated values.
// let mut fields = input
// .split(TERSE_IFS)
// .map(|s| s.to_string())
// .collect::<Vec<String>>();
// if fields.len() != names.len() {
// return Err(anyhow!(
// "field count mismatch: expected {}, found {}",
// names.len(),
// fields.len()
// ));
// }
// // Then, if we have a generator configured, we iterate over the
// // fields and insert them as appropriate.
// if let Some(generator) = generator {
// for (i, name) in names.iter().enumerate() {
// if let Sensitive(_) = name {
// let field = fields.get_mut(i).unwrap();
// field.clear();
// field.push_str(&generator.secret()?);
// }
// }
// }
// Ok(fields)
// }
// /// Given an array of field names and a potential generator, grabs the values for those
// /// fields by prompting the user for each.
// ///
// /// If a field is marked as sensitive **and** a generator is provided, the generator
// /// is used to provide that field and the user is **not** prompted.
// fn interactive_fields(
// names: &[FieldKind],
// config: &Config,
// generator: Option<&dyn Generator>,
// ) -> Result<Vec<String>> {
// let mut fields = vec![];
// for name in names {
// let field = match name {
// Sensitive(name) => {
// if let Some(generator) = generator {
// generator.secret()?
// } else {
// let field = Password::new()
// .with_prompt(*name)
// .allow_empty_password(config.commands.new.generate_on_empty)
// .interact()?;
// if field.is_empty() && config.commands.new.generate_on_empty {
// log::debug!("generate-on-empty with an empty field, generating a secret");
// let generator = config.get_generator("default").ok_or_else(|| {
// anyhow!("generate-on-empty configured but no default generator")
// })?;
// generator.secret()?
// } else {
// field
// }
// }
// }
// Insensitive(name) => Input::<String>::new().with_prompt(*name).interact()?,
// };
// fields.push(field);
// }
// Ok(fields)
// }
// /// Grabs the values for a set of field names from user input.
// ///
// /// # Arguments
// ///
// /// * `names` - the set of field names to grab
// /// * `terse` - whether or not to get fields tersely, i.e. by splitting on
// /// `TERSE_IFS` instead of prompting for each
// /// * `config` - the active `Config`
// /// * `generator` - the generator, if any, to use for sensitive fields
// pub fn fields(
// names: &[FieldKind],
// terse: bool,
// config: &Config,
// generator: Option<&dyn Generator>,
// ) -> Result<Vec<String>> {
// if terse {
// terse_fields(names, generator)
// } else {
// interactive_fields(names, config, generator)
// }
// }
| woodruffw/kbs2 | 126 | A secret manager backed by age | Rust | woodruffw | William Woodruff | astral-sh |
src/kbs2/mod.rs | Rust | /// Structures and routines for the `kbs2` authentication agent.
pub mod agent;
/// Structures and routines for interacting with age backends.
pub mod backend;
/// Routines for the various `kbs2` subcommands.
pub mod command;
/// Structures and routines for `kbs2`'s configuration.
pub mod config;
/// Structures and routines for secret generators.
pub mod generator;
/// Routines for handling user input.
pub mod input;
/// Structures and routines for creating and managing individual `kbs2` records.
pub mod record;
/// Structures and routines for creating and managing an active `kbs2` session.
pub mod session;
/// Reusable utility code for `kbs2`.
pub mod util;
| woodruffw/kbs2 | 126 | A secret manager backed by age | Rust | woodruffw | William Woodruff | astral-sh |
src/kbs2/record.rs | Rust | use age::secrecy::zeroize::Zeroize;
use serde::{Deserialize, Serialize};
use crate::kbs2::util;
// TODO(ww): Figure out how to generate this from the RecordBody enum below.
/// The stringified names of record kinds known to `kbs2`.
///
/// These must stay in sync with `RecordBody`'s variants and the strings
/// produced by its `Display` impl.
pub static RECORD_KINDS: &[&str] = &["login", "environment", "unstructured"];
/// Represents the envelope of a `kbs2` record.
#[derive(Debug, Deserialize, PartialEq, Eq, Serialize)]
pub struct Record {
    /// When the record was created, as seconds since the Unix epoch.
    pub timestamp: u64,

    /// The identifying label of the record.
    pub label: String,

    /// The type contents of the record.
    pub body: RecordBody,
}

impl Zeroize for Record {
    /// Zeroizes every field of the record in place.
    fn zeroize(&mut self) {
        // Destructure exhaustively so a newly added field can't be missed here.
        let Record {
            timestamp,
            label,
            body,
        } = self;
        timestamp.zeroize();
        label.zeroize();
        body.zeroize();
    }
}
/// Represents the core contents of a `kbs2` record.
#[derive(Debug, Deserialize, PartialEq, Eq, Serialize)]
#[serde(tag = "kind", content = "fields")]
pub enum RecordBody {
    /// A login: a username and password pair.
    Login(LoginFields),
    /// An environment variable: a variable name and value pair.
    Environment(EnvironmentFields),
    /// Free-form, unstructured contents.
    Unstructured(UnstructuredFields),
}

impl Zeroize for RecordBody {
    /// Zeroizes the wrapped fields of whichever variant is active.
    fn zeroize(&mut self) {
        match self {
            RecordBody::Login(fields) => fields.zeroize(),
            RecordBody::Environment(fields) => fields.zeroize(),
            RecordBody::Unstructured(fields) => fields.zeroize(),
        }
    }
}
impl std::fmt::Display for RecordBody {
    /// Formats the record body as its lowercase kind name, matching the
    /// entries in `RECORD_KINDS`.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        let kind = match self {
            RecordBody::Login(_) => "login",
            RecordBody::Environment(_) => "environment",
            RecordBody::Unstructured(_) => "unstructured",
        };
        f.write_str(kind)
    }
}
/// Represents the fields of a login record.
#[derive(Debug, Deserialize, PartialEq, Eq, Serialize)]
pub struct LoginFields {
    /// The username associated with the login.
    pub username: String,

    /// The password associated with the login.
    pub password: String,
}

impl Zeroize for LoginFields {
    /// Zeroizes both login fields in place.
    fn zeroize(&mut self) {
        let LoginFields { username, password } = self;
        username.zeroize();
        password.zeroize();
    }
}
/// Represents the fields of an environment record.
#[derive(Debug, Deserialize, PartialEq, Eq, Serialize)]
pub struct EnvironmentFields {
    /// The variable associated with the environment.
    pub variable: String,

    /// The value associated with the environment.
    pub value: String,
}

impl Zeroize for EnvironmentFields {
    /// Zeroizes both environment fields in place.
    fn zeroize(&mut self) {
        let EnvironmentFields { variable, value } = self;
        variable.zeroize();
        value.zeroize();
    }
}
/// Represents the fields of an unstructured record.
#[derive(Debug, Deserialize, PartialEq, Eq, Serialize)]
pub struct UnstructuredFields {
    /// The contents associated with the record.
    pub contents: String,
}

impl Zeroize for UnstructuredFields {
    /// Zeroizes the record's contents in place.
    fn zeroize(&mut self) {
        let UnstructuredFields { contents } = self;
        contents.zeroize();
    }
}
impl Record {
    /// Constructs a new `Record` with the given label and body, stamped with
    /// the current time.
    pub fn new(label: &str, body: RecordBody) -> Record {
        let timestamp = util::current_timestamp();
        Record {
            timestamp,
            label: label.to_string(),
            body,
        }
    }
}
| woodruffw/kbs2 | 126 | A secret manager backed by age | Rust | woodruffw | William Woodruff | astral-sh |
src/kbs2/session.rs | Rust | use std::convert::TryFrom;
use std::fs;
use std::io;
use std::path::Path;
use anyhow::{anyhow, Result};
use crate::kbs2::agent::Agent;
use crate::kbs2::backend::{Backend, RageLib};
use crate::kbs2::config;
use crate::kbs2::record;
/// Encapsulates the context needed by `kbs2` to interact with records.
///
/// A `Session` borrows its `Config` for its entire lifetime (`'a`).
pub struct Session<'a> {
    /// The `RageLib` backend used to encrypt and decrypt records.
    pub backend: RageLib,

    /// The configuration that `kbs2` was invoked with.
    pub config: &'a config::Config,
}
impl<'a> Session<'a> {
    /// Creates a new session, given a `Config`.
    ///
    /// As side effects, this autostarts the agent (when the config asks for
    /// it) and creates the store directory if it doesn't already exist.
    fn new(config: &'a config::Config) -> Result<Session<'a>> {
        // NOTE(ww): I don't like that we do this here, but I'm not sure where else to put it.
        if config.wrapped && config.agent_autostart {
            Agent::spawn()?;
        }

        fs::create_dir_all(&config.store)?;

        Ok(Session {
            backend: RageLib::new(config)?,
            // Field shorthand instead of `config: config` removes the need
            // for the old `clippy::redundant_field_names` allow.
            config,
        })
    }

    /// Returns the label of every record available in the store.
    pub fn record_labels(&self) -> Result<Vec<String>> {
        let store = Path::new(&self.config.store);

        if !store.is_dir() {
            return Err(anyhow!("secret store is not a directory"));
        }

        let mut labels = vec![];
        for entry in fs::read_dir(store)? {
            let path = entry?.path();
            if !path.is_file() {
                log::debug!("skipping non-file in store: {path:?}");
                continue;
            }

            // NOTE(ww): This unwrap is safe, since file_name always returns Some
            // for non-directories.
            #[allow(clippy::expect_used)]
            let label = path
                .file_name()
                .expect("impossible: is_file=true for path but file_name=None");

            // NOTE(ww): This one isn't safe, but we don't care. Non-UTF-8 labels aren't supported.
            labels.push(
                label
                    .to_str()
                    .ok_or_else(|| anyhow!("unrepresentable record label: {:?}", label))?
                    .into(),
            );
        }

        Ok(labels)
    }

    /// Returns whether or not the store contains a given record.
    pub fn has_record(&self, label: &str) -> bool {
        let record_path = Path::new(&self.config.store).join(label);
        record_path.is_file()
    }

    /// Retrieves a record from the store by its label.
    pub fn get_record(&self, label: &str) -> Result<record::Record> {
        let record_path = Path::new(&self.config.store).join(label);

        // NOTE: No up-front `has_record` check here: the `NotFound` mapping
        // below already yields the same "no such record" error, and skipping
        // the pre-check avoids a check-then-read race.
        let record_contents = fs::read_to_string(record_path).map_err(|e| match e.kind() {
            io::ErrorKind::NotFound => anyhow!("no such record: {}", label),
            _ => e.into(),
        })?;

        // The old `match` here just re-wrapped `Ok`/`Err` unchanged.
        self.backend.decrypt(&record_contents)
    }

    /// Adds the given record to the store, overwriting any previous record
    /// with the same label.
    pub fn add_record(&self, record: &record::Record) -> anyhow::Result<()> {
        let record_path = Path::new(&self.config.store).join(&record.label);

        let record_contents = self.backend.encrypt(record)?;
        fs::write(record_path, record_contents)?;
        Ok(())
    }

    /// Deletes a record from the store by label.
    pub fn delete_record(&self, label: &str) -> Result<()> {
        let record_path = Path::new(&self.config.store).join(label);

        fs::remove_file(record_path).map_err(|e| match e.kind() {
            io::ErrorKind::NotFound => anyhow!("no such record: {}", label),
            _ => e.into(),
        })
    }

    /// Renames a record.
    ///
    /// The new record is written before the old one is removed, so a failure
    /// partway through leaves at least one copy on disk.
    pub fn rename_record(&self, old_label: &str, new_label: &str) -> Result<()> {
        let mut record = self.get_record(old_label)?;
        record.label = new_label.into();

        self.add_record(&record)?;
        self.delete_record(old_label)?;

        Ok(())
    }
}
impl<'a> TryFrom<&'a config::Config> for Session<'a> {
type Error = anyhow::Error;
fn try_from(config: &'a config::Config) -> Result<Self> {
Self::new(config)
}
}
#[cfg(test)]
mod tests {
    use tempfile::{tempdir, TempDir};

    use super::*;
    use crate::kbs2::record::{LoginFields, Record, RecordBody};

    /// Builds a login record with the given label, username, and password.
    fn dummy_login(label: &str, username: &str, password: &str) -> Record {
        Record::new(
            label,
            RecordBody::Login(LoginFields {
                username: username.into(),
                password: password.into(),
            }),
        )
    }

    // NOTE: We pass store in here instead of creating it for lifetime reasons:
    // the temp dir is unlinked when its TempDir object is destructed, so we need
    // to keep it alive long enough for each unit test.
    fn dummy_config(store: &TempDir) -> config::Config {
        config::Config {
            config_dir: "/not/a/real/dir".into(),
            // NOTE: We create the backend above manually, so the public_key and keyfile
            // here are dummy values that shouldn't need to be interacted with.
            public_key: "not a real public key".into(),
            keyfile: "not a real private key file".into(),
            agent_autostart: false,
            wrapped: false,
            store: store.path().to_str().unwrap().into(),
            pinentry: Default::default(),
            pre_hook: None,
            post_hook: None,
            error_hook: None,
            reentrant_hooks: false,
            generators: vec![Default::default()],
            commands: Default::default(),
        }
    }

    /// Builds a session over `config` with a freshly generated x25519 keypair,
    /// bypassing `Session::new` (and thus agent autostart and store creation).
    fn dummy_session(config: &config::Config) -> Session {
        let backend = {
            let key = age::x25519::Identity::generate();
            RageLib {
                pubkey: key.to_public(),
                identity: key,
            }
        };

        Session { backend, config }
    }

    // TODO: Figure out how to test Session::new. Doing so will require an interface for
    // creating + initializing a config that doesn't unconditionally put the store directory
    // within the user's data directory.

    #[test]
    fn test_record_labels() {
        // An empty store has no labels.
        {
            let store = tempdir().unwrap();
            let config = dummy_config(&store);
            let session = dummy_session(&config);

            assert_eq!(session.record_labels().unwrap(), Vec::<String>::new());
        }

        // A store with one record yields exactly that label.
        {
            let store = tempdir().unwrap();
            let config = dummy_config(&store);
            let session = dummy_session(&config);

            let record = dummy_login("foo", "bar", "baz");
            session.add_record(&record).unwrap();

            assert_eq!(session.record_labels().unwrap(), vec!["foo"]);
        }
    }

    #[test]
    fn test_has_record() {
        {
            let store = tempdir().unwrap();
            let config = dummy_config(&store);
            let session = dummy_session(&config);

            let record = dummy_login("foo", "bar", "baz");
            session.add_record(&record).unwrap();

            assert!(session.has_record("foo"));
        }

        {
            let store = tempdir().unwrap();
            let config = dummy_config(&store);
            let session = dummy_session(&config);

            assert!(!session.has_record("does-not-exist"));
        }
    }

    #[test]
    fn test_get_record() {
        // A stored record round-trips through encryption unchanged.
        {
            let store = tempdir().unwrap();
            let config = dummy_config(&store);
            let session = dummy_session(&config);

            let record = dummy_login("foo", "bar", "baz");
            session.add_record(&record).unwrap();

            let retrieved_record = session.get_record("foo").unwrap();
            assert_eq!(record, retrieved_record);
        }

        // A missing record yields a "no such record" error.
        {
            let store = tempdir().unwrap();
            let config = dummy_config(&store);
            let session = dummy_session(&config);

            let err = session.get_record("foo").unwrap_err();
            assert_eq!(err.to_string(), "no such record: foo");
        }
    }

    #[test]
    fn test_add_record() {
        {
            let store = tempdir().unwrap();
            let config = dummy_config(&store);
            let session = dummy_session(&config);

            let record1 = dummy_login("foo", "bar", "baz");
            session.add_record(&record1).unwrap();

            let record2 = dummy_login("a", "b", "c");
            session.add_record(&record2).unwrap();

            // NOTE: record_labels() returns labels in a platform dependent order,
            // which is why we don't compared against a fixed-order vec here or below.
            assert_eq!(session.record_labels().unwrap().len(), 2);
            assert!(session.record_labels().unwrap().contains(&"foo".into()));
            assert!(session.record_labels().unwrap().contains(&"a".into()));

            // Overwrite foo; still only two records.
            let record3 = dummy_login("foo", "quux", "zap");
            session.add_record(&record3).unwrap();

            assert_eq!(session.record_labels().unwrap().len(), 2);
            assert!(session.record_labels().unwrap().contains(&"foo".into()));
            assert!(session.record_labels().unwrap().contains(&"a".into()));
        }
    }

    #[test]
    fn test_delete_record() {
        // Deleting the only record empties the store.
        {
            let store = tempdir().unwrap();
            let config = dummy_config(&store);
            let session = dummy_session(&config);

            let record = dummy_login("foo", "bar", "baz");
            session.add_record(&record).unwrap();

            assert!(session.delete_record("foo").is_ok());
            assert!(!session.has_record("foo"));
            assert_eq!(session.record_labels().unwrap(), Vec::<String>::new());
        }

        // Deleting one of two records leaves the other intact.
        {
            let store = tempdir().unwrap();
            let config = dummy_config(&store);
            let session = dummy_session(&config);

            let record1 = dummy_login("foo", "bar", "baz");
            session.add_record(&record1).unwrap();

            let record2 = dummy_login("a", "b", "c");
            session.add_record(&record2).unwrap();

            assert!(session.delete_record("foo").is_ok());
            assert_eq!(session.record_labels().unwrap(), vec!["a"]);
        }

        // Deleting a missing record yields a "no such record" error.
        {
            let store = tempdir().unwrap();
            let config = dummy_config(&store);
            let session = dummy_session(&config);

            let err = session.delete_record("does-not-exist").unwrap_err();
            assert_eq!(err.to_string(), "no such record: does-not-exist");
        }
    }

    #[test]
    fn test_rename_record() {
        {
            let store = tempdir().unwrap();
            let config = dummy_config(&store);
            let session = dummy_session(&config);

            let record = dummy_login("foo", "bar", "baz");
            session.add_record(&record).unwrap();
            assert!(session.has_record("foo"));

            session.rename_record("foo", "bar").unwrap();
            assert!(!session.has_record("foo"));
            assert!(session.has_record("bar"));
        }
    }
}
| woodruffw/kbs2 | 126 | A secret manager backed by age | Rust | woodruffw | William Woodruff | astral-sh |
src/kbs2/util.rs | Rust | use std::ffi::OsStr;
use std::fs::File;
use std::io::Read;
use std::path::Path;
use std::time::{SystemTime, UNIX_EPOCH};
use age::secrecy::SecretString;
use anyhow::{anyhow, Result};
use pinentry::PassphraseInput;
/// Given an input string formatted according to shell quoting rules,
/// split it into its command and argument parts and return each.
pub fn parse_and_split_args(argv: &str) -> Result<(String, Vec<String>)> {
    let args = shell_words::split(argv)
        .map_err(|_| anyhow!("failed to split command-line arguments: {}", argv))?;

    match args.split_first() {
        Some((command, rest)) => Ok((command.clone(), rest.to_vec())),
        None => Err(anyhow!("missing one or more arguments in command")),
    }
}
/// Securely retrieve a password from the user.
///
/// NOTE: This function currently uses pinentry internally, which
/// will delegate to the appropriate pinentry binary on the user's
/// system. If no pinentry binary is available, it falls back to a
/// plain terminal prompt via `rpassword`.
pub fn get_password<S: AsRef<OsStr>>(
    prompt: Option<&'static str>,
    pinentry: S,
) -> Result<SecretString> {
    let prompt = prompt.unwrap_or("Password: ");

    match PassphraseInput::with_binary(pinentry) {
        Some(mut input) => input
            .with_description("Enter your master kbs2 password")
            .with_prompt(prompt)
            .required("A non-empty password is required")
            .interact()
            .map_err(|e| anyhow!("pinentry failed: {}", e.to_string())),
        None => {
            log::debug!("no pinentry binary, falling back on rpassword");
            rpassword::prompt_password(prompt)
                .map(SecretString::from)
                .map_err(|e| anyhow!("password prompt failed: {}", e.to_string()))
        }
    }
}
/// Return the current timestamp as seconds since the UNIX epoch.
pub fn current_timestamp() -> u64 {
    // NOTE(ww): This expect should be safe, since every time should be
    // greater than or equal to the epoch.
    #[allow(clippy::expect_used)]
    let since_epoch = SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .expect("impossible: system time is before the UNIX epoch");

    since_epoch.as_secs()
}
/// Print the given message on `stderr` with a warning prefix.
pub fn warn(msg: &str) {
    eprintln!("Warn: {}", msg);
}
/// Read the entire given file into a `Vec<u8>`, or fail if its on-disk size exceeds
/// some limit.
pub fn read_guarded<P: AsRef<Path>>(path: P, limit: u64) -> Result<Vec<u8>> {
    let mut file = File::open(&path)?;
    let size = file.metadata()?.len();

    // Refuse to read anything whose reported size is over the caller's limit.
    if size > limit {
        return Err(anyhow!("requested file is suspiciously large, refusing"));
    }

    let mut contents = Vec::with_capacity(size as usize);
    file.read_to_end(&mut contents)?;
    Ok(contents)
}
#[cfg(test)]
mod tests {
    use std::io::Write;

    use tempfile::NamedTempFile;

    use super::*;

    #[test]
    fn test_parse_and_split_args() {
        // A bare command has no arguments.
        {
            let (cmd, args) = parse_and_split_args("just-a-command").unwrap();
            assert_eq!(cmd, "just-a-command");
            assert_eq!(args, Vec::<String>::new());
        }

        // Flags and positional arguments are preserved verbatim and in order.
        {
            let (cmd, args) =
                parse_and_split_args("foo -a -ab --c -d=e --f=g bar baz quux").unwrap();
            assert_eq!(cmd, "foo");
            assert_eq!(
                args,
                vec!["-a", "-ab", "--c", "-d=e", "--f=g", "bar", "baz", "quux"]
            );
        }

        // Shell quoting groups words into single arguments, including empties.
        {
            let (cmd, args) = parse_and_split_args("foo 'one arg' \"another arg\" ''").unwrap();
            assert_eq!(cmd, "foo");
            assert_eq!(args, vec!["one arg", "another arg", ""]);
        }

        // Unbalanced quoting is a split failure.
        {
            let err = parse_and_split_args("some 'bad {syntax").unwrap_err();
            assert_eq!(
                err.to_string(),
                "failed to split command-line arguments: some 'bad {syntax"
            );
        }

        // An empty input has no command at all.
        {
            let err = parse_and_split_args("").unwrap_err();
            assert_eq!(err.to_string(), "missing one or more arguments in command");
        }
    }

    // TODO: Figure out a good way to test util::get_password.

    #[test]
    fn test_current_timestamp() {
        {
            let ts = current_timestamp();
            assert!(ts != 0);
        }

        // Time is monotonic enough at second granularity for this check.
        {
            let ts1 = current_timestamp();
            let ts2 = current_timestamp();
            assert!(ts2 >= ts1);
        }
    }

    // TODO: Figure out a good way to test util::warn.

    #[test]
    fn test_read_guarded() {
        // A file under the limit is read in full.
        {
            let mut small = NamedTempFile::new().unwrap();
            small.write_all(b"test").unwrap();
            small.flush().unwrap();

            let contents = read_guarded(small.path(), 1024);
            assert!(contents.is_ok());
            assert_eq!(contents.unwrap().as_slice(), b"test");
        }

        // A file over the limit is refused.
        {
            let mut toobig = NamedTempFile::new().unwrap();
            toobig.write_all(b"slightlytoobig").unwrap();
            toobig.flush().unwrap();

            assert!(read_guarded(toobig.path(), 10).is_err());
        }
    }
}
| woodruffw/kbs2 | 126 | A secret manager backed by age | Rust | woodruffw | William Woodruff | astral-sh |
src/main.rs | Rust | //! The entrypoint for the `kbs2` CLI.
#![deny(rustdoc::broken_intra_doc_links)]
#![deny(missing_docs)]
#![deny(clippy::unwrap_used)]
#![deny(clippy::expect_used)]
#![deny(clippy::panic)]
use std::ffi::{OsStr, OsString};
use std::process;
use std::{io, path::PathBuf};
use anyhow::{anyhow, Context, Result};
use clap::builder::{EnumValueParser, PossibleValuesParser, ValueParser};
use clap::{Arg, ArgAction, ArgMatches, Command, ValueHint};
use clap_complete::{generate, Shell};
mod kbs2;
/// Builds the top-level `clap` command for the `kbs2` CLI, including all
/// built-in subcommands and their flags.
///
/// External subcommands (`kbs2 foo` -> `kbs2-foo`) are permitted via
/// `allow_external_subcommands`; they're dispatched in `run`.
fn app() -> Command {
    // TODO(ww): Put this in a separate file, or switch to YAML.
    // The latter probably won't work with env!, though.
    Command::new(env!("CARGO_PKG_NAME"))
        .allow_external_subcommands(true)
        .version(env!("KBS2_BUILD_VERSION"))
        .about(env!("CARGO_PKG_DESCRIPTION"))
        .arg(
            Arg::new("config-dir")
                .help("use the specified config directory")
                .short('c')
                .long("config-dir")
                .value_name("DIR")
                .value_parser(ValueParser::path_buf())
                .env("KBS2_CONFIG_DIR")
                .default_value(<PathBuf as AsRef<OsStr>>::as_ref(
                    &kbs2::config::DEFAULT_CONFIG_DIR,
                ))
                .value_hint(ValueHint::DirPath),
        )
        .arg(
            Arg::new("completions")
                .help("emit shell tab completions")
                .long("completions")
                .value_name("SHELL")
                .value_parser(EnumValueParser::<Shell>::new()),
        )
        .subcommand(
            Command::new("agent")
                .about("run the kbs2 authentication agent")
                .arg(
                    Arg::new("foreground")
                        .help("run the agent in the foreground")
                        .short('F')
                        .long("foreground")
                        .action(ArgAction::SetTrue),
                )
                .subcommand(
                    Command::new("flush")
                        .about("remove all unwrapped keys from the running agent")
                        .arg(
                            Arg::new("quit")
                                .help("quit the agent after flushing")
                                .short('q')
                                .long("quit")
                                .action(ArgAction::SetTrue),
                        ),
                )
                .subcommand(
                    Command::new("query")
                        .about("ask the current agent whether it has the current config's key"),
                )
                .subcommand(
                    Command::new("unwrap")
                        .about("unwrap the current config's key in the running agent"),
                ),
        )
        .subcommand(
            Command::new("init")
                .about("initialize kbs2 with a new config and keypair")
                .arg(
                    Arg::new("force")
                        .help("overwrite the config and keyfile, if already present")
                        .short('f')
                        .long("force")
                        .action(ArgAction::SetTrue),
                )
                .arg(
                    Arg::new("store-dir")
                        .help("the directory to store encrypted kbs2 records in")
                        .short('s')
                        .long("store-dir")
                        .value_name("DIR")
                        .value_parser(ValueParser::path_buf())
                        .default_value(<PathBuf as AsRef<OsStr>>::as_ref(
                            &kbs2::config::DEFAULT_STORE_DIR,
                        ))
                        .value_hint(ValueHint::DirPath),
                )
                .arg(
                    Arg::new("insecure-not-wrapped")
                        .help("don't wrap the keypair with a master password")
                        .long("insecure-not-wrapped")
                        .action(ArgAction::SetTrue),
                ),
        )
        .subcommand(
            Command::new("new")
                .about("create a new record")
                .arg(
                    Arg::new("label")
                        .help("the record's label")
                        .index(1)
                        .required(true),
                )
                .arg(
                    Arg::new("kind")
                        .help("the kind of record to create")
                        .short('k')
                        .long("kind")
                        .value_parser(PossibleValuesParser::new(kbs2::record::RECORD_KINDS))
                        .default_value("login"),
                )
                .arg(
                    Arg::new("force")
                        .help("overwrite, if already present")
                        .short('f')
                        .long("force")
                        .action(ArgAction::SetTrue),
                )
                .arg(
                    Arg::new("terse")
                        .help("read fields in a terse format, even when connected to a tty")
                        .short('t')
                        .long("terse")
                        .action(ArgAction::SetTrue),
                )
                .arg(
                    Arg::new("generator")
                        .help("use the given generator to generate sensitive fields")
                        .short('G')
                        .long("generator")
                        .default_value("default"),
                ),
        )
        .subcommand(
            Command::new("list")
                .about("list records")
                .arg(
                    Arg::new("details")
                        .help("print (non-field) details for each record")
                        .short('d')
                        .long("details")
                        .action(ArgAction::SetTrue),
                )
                .arg(
                    Arg::new("kind")
                        .help("list only records of this kind")
                        .short('k')
                        .long("kind")
                        .value_parser(PossibleValuesParser::new(kbs2::record::RECORD_KINDS)),
                ),
        )
        .subcommand(
            Command::new("rm").about("remove one or more records").arg(
                Arg::new("label")
                    .help("the labels of the records to remove")
                    .index(1)
                    .required(true)
                    .num_args(1..),
            ),
        )
        .subcommand(
            Command::new("rename")
                .about("rename a record")
                .arg(
                    Arg::new("old-label")
                        .help("the record's current label")
                        .index(1)
                        .required(true),
                )
                .arg(
                    Arg::new("new-label")
                        .help("the new record label")
                        .index(2)
                        .required(true),
                )
                .arg(
                    Arg::new("force")
                        .help("overwrite, if already present")
                        .short('f')
                        .long("force")
                        .action(ArgAction::SetTrue),
                ),
        )
        .subcommand(
            Command::new("dump")
                .about("dump one or more records")
                .arg(
                    Arg::new("label")
                        .help("the labels of the records to dump")
                        .index(1)
                        .required(true)
                        .num_args(1..),
                )
                .arg(
                    Arg::new("json")
                        .help("dump in JSON format (JSONL when multiple)")
                        .short('j')
                        .long("json")
                        .action(ArgAction::SetTrue),
                ),
        )
        .subcommand(
            Command::new("pass")
                .about("get the password in a login record")
                .arg(
                    Arg::new("label")
                        .help("the record's label")
                        .index(1)
                        .required(true),
                )
                .arg(
                    Arg::new("clipboard")
                        .help("copy the password to the clipboard")
                        .short('c')
                        .long("clipboard")
                        .action(ArgAction::SetTrue),
                ),
        )
        .subcommand(
            Command::new("env")
                .about("get an environment record")
                .arg(
                    Arg::new("label")
                        .help("the record's label")
                        .index(1)
                        .required(true),
                )
                .arg(
                    Arg::new("value-only")
                        .help("print only the environment variable value, not the variable name")
                        .short('v')
                        .long("value-only")
                        .action(ArgAction::SetTrue),
                )
                .arg(
                    Arg::new("no-export")
                        .help("print only VAR=val without `export`")
                        .short('n')
                        .long("no-export")
                        .action(ArgAction::SetTrue),
                ),
        )
        .subcommand(
            Command::new("edit")
                .about("modify a record with a text editor")
                .arg(
                    Arg::new("label")
                        .help("the record's label")
                        .index(1)
                        .required(true),
                )
                .arg(
                    Arg::new("preserve-timestamp")
                        .help("don't update the record's timestamp")
                        .short('p')
                        .long("preserve-timestamp")
                        // FIX: this is a boolean flag like every other flag in this
                        // CLI; without an explicit `SetTrue` action, clap 4 treats it
                        // as a value-taking option (`-p <preserve-timestamp>`), which
                        // breaks `get_flag("preserve-timestamp")` lookups downstream.
                        .action(ArgAction::SetTrue),
                ),
        )
        .subcommand(
            Command::new("generate")
                .about("generate secret values using a generator")
                .arg(
                    Arg::new("generator")
                        .help("the generator to use")
                        .index(1)
                        .default_value("default"),
                ),
        )
        .subcommand(
            Command::new("rewrap")
                .about("change the master password on a wrapped key")
                .arg(
                    Arg::new("no-backup")
                        .help("don't make a backup of the old wrapped key")
                        .short('n')
                        .long("no-backup")
                        .action(ArgAction::SetTrue),
                )
                .arg(
                    Arg::new("force")
                        .help("overwrite a previous backup, if one exists")
                        .short('f')
                        .long("force")
                        .action(ArgAction::SetTrue),
                ),
        )
        .subcommand(
            // NOTE: The absence of a --force option here is intentional.
            Command::new("rekey")
                .about("re-encrypt the entire store with a new keypair and master password")
                .arg(
                    Arg::new("no-backup")
                        .help("don't make a backup of the old wrapped key, config, or store")
                        .short('n')
                        .long("no-backup")
                        .action(ArgAction::SetTrue),
                ),
        )
        .subcommand(
            Command::new("config")
                .subcommand_required(true)
                .about("interact with kbs2's configuration file")
                .subcommand(
                    Command::new("dump")
                        .about("dump the active configuration file as JSON")
                        .arg(
                            Arg::new("pretty")
                                .help("pretty-print the JSON")
                                .short('p')
                                .long("pretty")
                                .action(ArgAction::SetTrue),
                        ),
                ),
        )
}
/// Dispatches a parsed CLI invocation to the appropriate subcommand handler,
/// wrapping it with the user's configured pre- and post-hooks.
fn run(matches: &ArgMatches, config: &kbs2::config::Config) -> Result<()> {
    // Subcommand dispatch happens here. All subcommands handled here take a `Config`.
    //
    // Internally, most (but not all) subcommands load a `Session` from their borrowed
    // `Config` argument. This `Session` is in turn used to perform record and encryption
    // operations.

    // Special case: `kbs2 agent` does not receive pre- or post-hooks.
    if let Some(("agent", matches)) = matches.subcommand() {
        return kbs2::command::agent(matches, config);
    }

    // Run the user's pre-hook (if any) before the actual subcommand.
    if let Some(pre_hook) = &config.pre_hook {
        log::debug!("pre-hook: {pre_hook}");
        config.call_hook(pre_hook, &[])?;
    }

    match matches.subcommand() {
        Some(("new", matches)) => kbs2::command::new(matches, config)?,
        Some(("list", matches)) => kbs2::command::list(matches, config)?,
        Some(("rm", matches)) => kbs2::command::rm(matches, config)?,
        Some(("rename", matches)) => kbs2::command::rename(matches, config)?,
        Some(("dump", matches)) => kbs2::command::dump(matches, config)?,
        Some(("pass", matches)) => kbs2::command::pass(matches, config)?,
        Some(("env", matches)) => kbs2::command::env(matches, config)?,
        Some(("edit", matches)) => kbs2::command::edit(matches, config)?,
        Some(("generate", matches)) => kbs2::command::generate(matches, config)?,
        Some(("rewrap", matches)) => kbs2::command::rewrap(matches, config)?,
        Some(("rekey", matches)) => kbs2::command::rekey(matches, config)?,
        Some(("config", matches)) => kbs2::command::config(matches, config)?,
        // Any other subcommand name is delegated to an external `kbs2-<cmd>`
        // binary, with kbs2's context passed via environment variables.
        Some((cmd, matches)) => {
            let cmd = format!("kbs2-{cmd}");

            let ext_args: Vec<_> = match matches.get_many::<OsString>("") {
                Some(values) => values.collect(),
                None => vec![],
            };

            log::debug!("external command requested: {cmd} (args: {ext_args:?})");

            let status = process::Command::new(&cmd)
                .args(&ext_args)
                .env("KBS2_CONFIG_DIR", &config.config_dir)
                .env("KBS2_STORE", &config.store)
                .env("KBS2_SUBCOMMAND", "1")
                .env("KBS2_MAJOR_VERSION", env!("CARGO_PKG_VERSION_MAJOR"))
                .env("KBS2_MINOR_VERSION", env!("CARGO_PKG_VERSION_MINOR"))
                .env("KBS2_PATCH_VERSION", env!("CARGO_PKG_VERSION_PATCH"))
                .status()
                .with_context(|| format!("no such command: {cmd}"))?;

            if !status.success() {
                return Err(match status.code() {
                    Some(code) => anyhow!("{} failed: exited with {}", cmd, code),
                    None => anyhow!("{} failed: terminated by signal", cmd),
                });
            }
        }
        // `main` only calls `run` when a subcommand is present, so `None` is
        // unreachable here — NOTE(review): confirm this invariant for all callers.
        _ => unreachable!(),
    }

    // Run the user's post-hook (if any) after a successful subcommand.
    if let Some(post_hook) = &config.post_hook {
        log::debug!("post-hook: {post_hook}");
        config.call_hook(post_hook, &[])?;
    }

    Ok(())
}
/// Entry point: parses the command line, handles the special cases
/// (`--completions`, bare `kbs2`, `kbs2 init`), loads the config, and
/// delegates everything else to `run`, invoking the error-hook on failure.
fn main() -> Result<()> {
    env_logger::init();

    let mut app = app();
    let matches = app.clone().get_matches();

    // Shell completion generation is completely independent, so perform it before
    // any config or subcommand operations.
    if let Some(shell) = matches.get_one::<Shell>("completions") {
        generate(*shell, &mut app, env!("CARGO_PKG_NAME"), &mut io::stdout());
        return Ok(());
    }

    // NOTE(review): the unwrap assumes "config-dir" always has a value
    // (presumably a clap default) — confirm against `app()`.
    #[allow(clippy::unwrap_used)]
    let config_dir = matches.get_one::<PathBuf>("config-dir").unwrap();
    log::debug!("config dir: {config_dir:?}");
    std::fs::create_dir_all(config_dir)?;

    // There are two special cases that are not handled in `run`:
    //
    // * `kbs2` (no subcommand): Act as if a long --help message was requested and exit.
    // * `kbs2 init`: We're initializing a config instead of loading one.
    if matches.subcommand().is_none() {
        return app
            .clone()
            .print_long_help()
            .with_context(|| "failed to print help".to_string());
    } else if let Some(("init", matches)) = matches.subcommand() {
        return kbs2::command::init(matches, config_dir);
    }

    // Everything else (i.e., all other subcommands) go through here.
    let config = kbs2::config::load(config_dir)?;
    match run(&matches, &config) {
        Ok(()) => Ok(()),
        Err(e) => {
            // Give the user's error-hook a chance to observe the failure,
            // then propagate the original error unchanged.
            if let Some(error_hook) = &config.error_hook {
                log::debug!("error-hook: {error_hook}");
                config.call_hook(error_hook, &[&e.to_string()])?;
            }
            Err(e)
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Sanity-checks the clap CLI definition: `debug_assert` panics if the
    /// argument/subcommand configuration is internally inconsistent.
    #[test]
    fn test_app() {
        app().debug_assert();
    }
}
| woodruffw/kbs2 | 126 | A secret manager backed by age | Rust | woodruffw | William Woodruff | astral-sh |
tests/common/mod.rs | Rust | // NOTE(ww): Dead code allowed because of this `cargo test` bug:
// https://github.com/rust-lang/rust/issues/46379
#![allow(dead_code)]
use std::process::Output;
use assert_cmd::Command;
use serde_json::Value;
use tempfile::TempDir;
#[derive(Debug)]
pub struct CliSession {
pub config_dir: TempDir,
pub store_dir: TempDir,
}
impl CliSession {
    /// Creates a fresh CLI session: temporary config and store directories,
    /// initialized via `kbs2 init` with an unwrapped (passphrase-less) key.
    pub fn new() -> Self {
        let config_dir = TempDir::new().unwrap();
        let store_dir = TempDir::new().unwrap();

        // Point `kbs2 init` at the new config/store directories.
        let mut init = kbs2();
        init.arg("--config-dir")
            .arg(config_dir.path())
            .arg("init")
            .arg("--insecure-not-wrapped")
            .arg("--store-dir")
            .arg(store_dir.path());
        init.assert().success();

        Self {
            config_dir,
            store_dir,
        }
    }

    /// Returns a `kbs2` command pre-pointed at this session's config directory.
    pub fn command(&self) -> Command {
        let mut cmd = kbs2();
        cmd.arg("--config-dir").arg(self.config_dir.path());
        cmd
    }
}
/// Returns a `Command` targeting the `kbs2` binary under test.
pub fn kbs2() -> Command {
    let cmd = Command::cargo_bin(env!("CARGO_PKG_NAME"));
    cmd.unwrap()
}
/// A convenience trait for interpreting captured process output as JSON.
pub trait ToJson {
    /// Parses the implementor's output as a JSON value.
    fn json(&self) -> Value;
}

impl ToJson for Output {
    /// Parses the captured stdout as JSON, panicking on malformed output.
    fn json(&self) -> Value {
        let stdout = &self.stdout;
        serde_json::from_slice(stdout).unwrap()
    }
}
| woodruffw/kbs2 | 126 | A secret manager backed by age | Rust | woodruffw | William Woodruff | astral-sh |
tests/test_kbs2.rs | Rust | mod common;
use clap::ValueEnum;
use clap_complete::Shell;
use common::kbs2;
/// `kbs2 help`, `kbs2 --help`, and `kbs2 -h` must all succeed and produce
/// byte-identical output.
#[test]
fn test_kbs2_help() {
    // `help`, `--help`, and `-h` all produce the same output
    let reference_output = kbs2().arg("help").output().unwrap();
    assert!(reference_output.status.success());

    for help in &["--help", "-h"] {
        let output = kbs2().arg(help).output().unwrap();
        assert!(output.status.success());
        assert_eq!(reference_output.stdout, output.stdout);
    }
}
/// `--completions` emits a non-empty completion script for every shell that
/// `clap_complete` supports.
#[test]
fn test_kbs2_completions() {
    // Tab completion generation works
    for shell in Shell::value_variants() {
        let output = kbs2()
            .args(["--completions", &shell.to_string()])
            .output()
            .unwrap();
        assert!(output.status.success());
        assert!(!output.stdout.is_empty());
    }
}
/// `--version` prints a string beginning with `kbs2 <CARGO_PKG_VERSION>`.
#[test]
fn test_kbs2_version() {
    // kbs2 --version works and outputs a string starting with `kbs2 X.Y.Z`
    let version = format!("kbs2 {}", env!("CARGO_PKG_VERSION"));

    let output = kbs2().arg("--version").output().unwrap();
    assert!(output.status.success());
    assert!(String::from_utf8(output.stdout)
        .unwrap()
        .starts_with(&version));
}
| woodruffw/kbs2 | 126 | A secret manager backed by age | Rust | woodruffw | William Woodruff | astral-sh |
tests/test_kbs2_init.rs | Rust | mod common;
use common::CliSession;
/// `kbs2 init` (run by `CliSession::new`) creates the config file and leaves
/// the store directory present but empty.
#[test]
fn test_kbs2_init() {
    let session = CliSession::new();
    let config_dir = session.config_dir.path();
    let store_dir = session.store_dir.path();

    // Our config dir, etc. all exist; the store dir is empty.
    assert!(config_dir.is_dir());
    assert!(store_dir.is_dir());
    assert!(config_dir.join("config.toml").is_file());
    assert!(store_dir.read_dir().unwrap().next().is_none());
}
| woodruffw/kbs2 | 126 | A secret manager backed by age | Rust | woodruffw | William Woodruff | astral-sh |
tests/test_kbs2_new.rs | Rust | mod common;
use common::{CliSession, ToJson};
use serde_json::json;
// TODO: Figure out how to test prompts instead of terse inputs.
#[test]
fn test_kbs2_new_login() {
let session = CliSession::new();
session
.command()
.args(["new", "-k", "login", "test-record"])
.write_stdin("fakeuser\x01fakepass")
.assert()
.success();
let dump = session
.command()
.args(["dump", "--json", "test-record"])
.output()
.unwrap()
.json();
let fields = dump.get("body").unwrap().get("fields").unwrap();
assert_eq!(
fields,
// https://github.com/serde-rs/json/issues/867
&json!({ "username": "fakeuser", "password": "fakepass" }),
);
}
#[test]
fn test_kbs2_new_environment() {
let session = CliSession::new();
session
.command()
.args(["new", "-k", "environment", "test-record"])
.write_stdin("fakevariable\x01fakevalue")
.assert()
.success();
let dump = session
.command()
.args(["dump", "--json", "test-record"])
.output()
.unwrap()
.json();
let fields = dump.get("body").unwrap().get("fields").unwrap();
assert_eq!(
fields,
// https://github.com/serde-rs/json/issues/867
&json!({ "variable": "fakevariable", "value": "fakevalue" }),
);
}
#[test]
fn test_kbs2_new_unstructured() {
let session = CliSession::new();
session
.command()
.args(["new", "-k", "unstructured", "test-record"])
.write_stdin("fakevalue")
.assert()
.success();
let dump = session
.command()
.args(["dump", "--json", "test-record"])
.output()
.unwrap()
.json();
let fields = dump.get("body").unwrap().get("fields").unwrap();
assert_eq!(
fields,
// https://github.com/serde-rs/json/issues/867
&json!({ "contents": "fakevalue" }),
);
}
| woodruffw/kbs2 | 126 | A secret manager backed by age | Rust | woodruffw | William Woodruff | astral-sh |
tests/test_kbs2_rename.rs | Rust | mod common;
use common::CliSession;
/// `kbs2 rename` moves a record to a new name and deletes the old one.
#[test]
fn test_kbs2_rename() {
    let session = CliSession::new();

    // `rename` deletes the old record.
    session
        .command()
        .args(["new", "-k", "login", "test-record"])
        .write_stdin("fakeuser\x01fakepass")
        .assert()
        .success();

    session
        .command()
        .args(["rename", "test-record", "test-record-1"])
        .assert()
        .success();

    // The old name no longer resolves...
    session
        .command()
        .args(["dump", "test-record"])
        .assert()
        .failure();

    // ...and the new name does.
    session
        .command()
        .args(["dump", "test-record-1"])
        .assert()
        .success();
}
// TODO: `kbs2 rename --force`
// TODO: `kbs2 rename` with the same record twice
| woodruffw/kbs2 | 126 | A secret manager backed by age | Rust | woodruffw | William Woodruff | astral-sh |
tests/test_kbs2_rm.rs | Rust | mod common;
use common::CliSession;
/// `kbs2 rm` fails on missing records and removes one or many existing ones.
#[test]
fn test_kbs2_rm() {
    let session = CliSession::new();

    // `kbs2 rm` with a nonexistent record fails.
    {
        session
            .command()
            .args(["rm", "does-not-exist"])
            .assert()
            .failure();
    }

    // `kbs2 rm` works as expected with a record that exists.
    {
        session
            .command()
            .args(["new", "-k", "login", "test-record"])
            .write_stdin("fakeuser\x01fakepass")
            .assert()
            .success();

        session
            .command()
            .args(["rm", "test-record"])
            .assert()
            .success();

        // The removed record can no longer be dumped.
        session
            .command()
            .args(["dump", "test-record"])
            .assert()
            .failure();
    }

    // `kbs2 rm` works as expected with multiple records.
    {
        session
            .command()
            .args(["new", "-k", "login", "test-record-1"])
            .write_stdin("fakeuser\x01fakepass")
            .assert()
            .success();

        session
            .command()
            .args(["new", "-k", "login", "test-record-2"])
            .write_stdin("fakeuser\x01fakepass")
            .assert()
            .success();

        session
            .command()
            .args(["rm", "test-record-1", "test-record-2"])
            .assert()
            .success();

        // Both records are gone.
        session
            .command()
            .args(["dump", "test-record-1", "test-record-2"])
            .assert()
            .failure();
    }
}
| woodruffw/kbs2 | 126 | A secret manager backed by age | Rust | woodruffw | William Woodruff | astral-sh |
src/lib.rs | Rust | use std::path::Path;
use libc::pid_t;
use pyo3::exceptions::{PyException, PyIOError};
use pyo3::prelude::*;
use pyo3::{create_exception, wrap_pyfunction};
use rsprocmaps::error::Error;
create_exception!(procmaps, ParseError, PyException);
/// Represents a memory map in the maps file.
#[pyclass]
struct Map {
    // The underlying parsed map from the `rsprocmaps` crate.
    inner: rsprocmaps::Map,
}

#[pymethods]
impl Map {
    /// Returns the beginning address for the map.
    #[getter]
    fn begin_address(&self) -> u64 {
        self.inner.address_range.begin
    }

    /// Returns the end address for the map.
    #[getter]
    fn end_address(&self) -> u64 {
        self.inner.address_range.end
    }

    /// Returns whether this map has the 'read' bit set
    #[getter]
    fn is_readable(&self) -> bool {
        self.inner.permissions.readable
    }

    /// Returns whether this map has the 'write' bit set
    #[getter]
    fn is_writable(&self) -> bool {
        self.inner.permissions.writable
    }

    /// Returns whether this map has 'executable' bit set
    #[getter]
    fn is_executable(&self) -> bool {
        self.inner.permissions.executable
    }

    /// Returns whether this map is shared with other processes
    #[getter]
    fn is_shared(&self) -> bool {
        self.inner.permissions.shared
    }

    /// Returns whether this map is private (i.e., copy-on-write)
    #[getter]
    fn is_private(&self) -> bool {
        self.inner.permissions.private
    }

    /// Returns the offset of the source that this map begins at.
    #[getter]
    fn offset(&self) -> u64 {
        self.inner.offset
    }

    /// Returns the (major, minor) tuple for the associated device.
    #[getter]
    fn device(&self) -> (u64, u64) {
        (self.inner.device.major, self.inner.device.minor)
    }

    /// Returns the inode associated with the source and device, or 0 if
    /// no inode is associated.
    #[getter]
    fn inode(&self) -> u64 {
        self.inner.inode
    }

    /// Returns the pathname (or pseudo-path) associated with the map,
    /// or None if the map is an anonymous map.
    #[getter]
    fn pathname(&self) -> Option<String> {
        // Known pseudo-paths are re-rendered in their bracketed procfs form;
        // `Mmap` (anonymous) maps become Python `None`.
        match &self.inner.pathname {
            rsprocmaps::Pathname::Stack => Some("[stack]".into()),
            rsprocmaps::Pathname::Vdso => Some("[vdso]".into()),
            rsprocmaps::Pathname::Vvar => Some("[vvar]".into()),
            rsprocmaps::Pathname::Vsyscall => Some("[vsyscall]".into()),
            rsprocmaps::Pathname::Heap => Some("[heap]".into()),
            rsprocmaps::Pathname::OtherPseudo(p) => Some(p.into()),
            rsprocmaps::Pathname::Path(p) => Some(p.into()),
            rsprocmaps::Pathname::Mmap => None,
        }
    }

    /// Implements Python's `in` operator: `addr in map` is true when `addr`
    /// falls within the half-open range [begin_address, end_address).
    fn __contains__(&self, addr: u64) -> PyResult<bool> {
        Ok(addr >= self.inner.address_range.begin && addr < self.inner.address_range.end)
    }
}
// NOTE(ww): Trait impl stupidity.
// (Both `rsprocmaps::error::Error` and `PyErr` are foreign types, so this
// newtype is needed to bridge the two with a `From` impl.)
struct ProcmapsError(Error);

impl std::convert::From<ProcmapsError> for PyErr {
    /// Maps each `rsprocmaps` error variant onto a Python exception:
    /// I/O errors become `IOError`; parse and width errors become the
    /// module-level `ParseError`.
    fn from(err: ProcmapsError) -> PyErr {
        match err.0 {
            Error::Io(e) => PyIOError::new_err(e),
            Error::ParseError(e) => ParseError::new_err(e.to_string()),
            Error::WidthError(e) => ParseError::new_err(e),
        }
    }
}
/// Returns the maps for the given process.
#[pyfunction]
fn from_pid(pid: pid_t) -> PyResult<Vec<Map>> {
let mut maps = Vec::new();
let inner_maps = rsprocmaps::from_pid(pid).map_err(ProcmapsError)?;
for map in inner_maps {
maps.push(Map {
inner: map.map_err(ProcmapsError)?,
})
}
Ok(maps)
}
/// Returns the maps in the given file.
#[pyfunction]
fn from_path(path: &str) -> PyResult<Vec<Map>> {
let mut maps = Vec::new();
let inner_maps = rsprocmaps::from_path(Path::new(path)).map_err(ProcmapsError)?;
for map in inner_maps {
maps.push(Map {
inner: map.map_err(ProcmapsError)?,
})
}
Ok(maps)
}
/// Returns the maps in the given string.
#[pyfunction]
fn from_str(maps_data: &str) -> PyResult<Vec<Map>> {
let mut maps = Vec::new();
let inner_maps = rsprocmaps::from_str(maps_data);
for map in inner_maps {
maps.push(Map {
inner: map.map_err(ProcmapsError)?,
})
}
Ok(maps)
}
/// The `procmaps` Python module definition: registers the `Map` class and
/// the three top-level constructor functions.
#[pymodule]
fn procmaps(_py: Python, m: &Bound<'_, PyModule>) -> PyResult<()> {
    m.add_class::<Map>()?;
    m.add_wrapped(wrap_pyfunction!(from_pid))?;
    m.add_wrapped(wrap_pyfunction!(from_path))?;
    m.add_wrapped(wrap_pyfunction!(from_str))?;
    Ok(())
}
| woodruffw/procmaps.py | 35 | Python bindings for procmaps.rs | Rust | woodruffw | William Woodruff | astral-sh |
test/test_procmaps.py | Python | import os
import unittest
import procmaps
class TestProcmaps(unittest.TestCase):
    """Exercises the procmaps bindings against live and canned maps data."""

    def check_map_properties(self, map_):
        """Asserts the type- and consistency-invariants every Map must satisfy."""
        self.assertIsInstance(map_.begin_address, int)
        self.assertIsInstance(map_.end_address, int)
        # Membership is half-open: the begin address is inside, the end is not.
        self.assertTrue(map_.begin_address in map_)
        self.assertFalse(map_.end_address in map_)

        self.assertIsInstance(map_.is_readable, bool)
        self.assertIsInstance(map_.is_writable, bool)
        self.assertIsInstance(map_.is_executable, bool)
        self.assertIsInstance(map_.is_shared, bool)
        self.assertIsInstance(map_.is_private, bool)

        self.assertIsInstance(map_.offset, int)
        self.assertIsInstance(map_.device, tuple)
        self.assertIsInstance(map_.device[0], int)
        self.assertIsInstance(map_.device[1], int)
        self.assertIsInstance(map_.inode, int)

        # shared and private are mutually exclusive.
        if map_.is_shared:
            self.assertFalse(map_.is_private)
        if map_.is_private:
            self.assertFalse(map_.is_shared)

        # pathname is None only for anonymous (mmap'd) regions.
        self.assertTrue(isinstance(map_.pathname, str) or map_.pathname is None)

    def test_from_pid(self):
        """from_pid works against this very process."""
        maps = procmaps.from_pid(os.getpid())
        for map_ in maps:
            self.check_map_properties(map_)

    def test_from_path(self):
        """from_path works against the procfs maps pseudo-file."""
        maps = procmaps.from_path("/proc/self/maps")
        for map_ in maps:
            self.check_map_properties(map_)

    def test_from_str(self):
        """from_str parses a single canned maps line."""
        maps = procmaps.from_str("55d5564b4000-55d5564b6000 r--p 00000000 08:11 6553896 /bin/cat")
        self.assertEqual(len(maps), 1)
        self.check_map_properties(maps[0])


if __name__ == "__main__":
    unittest.main()
| woodruffw/procmaps.py | 35 | Python bindings for procmaps.rs | Rust | woodruffw | William Woodruff | astral-sh |
src/error.rs | Rust | //! Error types for `rsprocmaps`.
use std::error;
use std::fmt;
use std::io;
use std::num;
use pest::error::Error as PestError;
use crate::Rule;
/// An enumeration of possible error states for `rsprocmaps`.
#[derive(Debug)]
pub enum Error {
/// An I/O error.
Io(io::Error),
// NOTE(ww): PestError<Rule> is pretty big, so we box it to keep
// the surrounding error type small.
/// A general parsing error.
ParseError(Box<PestError<Rule>>),
// NOTE(ww): ParseIntError is more general than just numbers that don't
// fit into a particular width, but we handle all of its other parsing issues
// at the pest/actual parsing level.
/// An integer-width parsing error.
WidthError(num::ParseIntError),
}
impl From<io::Error> for Error {
    fn from(err: io::Error) -> Error {
        Error::Io(err)
    }
}

// Boxed at conversion time to keep `Error` itself small (see the note on
// `Error::ParseError`).
impl From<PestError<Rule>> for Error {
    fn from(err: PestError<Rule>) -> Error {
        Error::ParseError(Box::new(err))
    }
}

impl From<num::ParseIntError> for Error {
    fn from(err: num::ParseIntError) -> Error {
        Error::WidthError(err)
    }
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
Error::Io(ref e) => e.fmt(f),
Error::ParseError(ref e) => e.fmt(f),
Error::WidthError(ref e) => e.fmt(f),
}
}
}
impl error::Error for Error {
fn source(&self) -> Option<&(dyn error::Error + 'static)> {
match *self {
Error::Io(ref e) => Some(e),
Error::ParseError(ref e) => Some(e),
Error::WidthError(ref e) => Some(e),
}
}
}
| woodruffw/procmaps.rs | 15 | A small Rust library for reading process maps from procfs | Rust | woodruffw | William Woodruff | astral-sh |
src/lib.rs | Rust | //! A small Rust library for parsing `/proc/<pid>/maps`.
#![deny(rustdoc::broken_intra_doc_links)]
#![allow(clippy::redundant_field_names)]
#![forbid(unsafe_code)]
use std::fmt;
use std::fs::File;
use std::io::Lines;
use std::io::{BufRead, BufReader};
use std::path::Path;
use libc::pid_t;
use pest::Parser as ParserTrait;
use pest_derive::Parser;
use phf::phf_map;
use serde::{Deserialize, Serialize};
pub mod error;
use error::Error;
// Known procfs pseudo-path strings mapped to their `Pathname` variants;
// consulted by `Map::parse` before falling back to `OtherPseudo`/`Path`.
// NOTE(review): the name is misspelled ("PSUEDO" vs. "PSEUDO"); renaming it
// would also touch `Map::parse`, so it's deliberately left as-is here.
static PSUEDO_PATH_MAP: phf::Map<&'static str, Pathname> = phf_map! {
    "[stack]" => Pathname::Stack,
    "[vdso]" => Pathname::Vdso,
    "[vvar]" => Pathname::Vvar,
    "[vsyscall]" => Pathname::Vsyscall,
    "[heap]" => Pathname::Heap,
};
#[derive(Parser)]
#[grammar = "map.pest"]
struct MapParser;
/// Represents the variants of the "pathname" field in a map.
#[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize)]
pub enum Pathname {
/// This map is the initial process's (i.e., main thread's) stack.
Stack,
/// This map contains the vDSO. See `man 7 vdso`.
Vdso,
/// This map contains the VVAR page.
Vvar,
/// This map contains the VSYSCALL page(s).
Vsyscall,
/// This map is the process's heap.
Heap,
/// This map was created by a call to `mmap`.
Mmap,
/// This map looks like another, unparsed, pseudo-path. See `man 5 proc`.
OtherPseudo(String),
// NOTE(ww): This should really be a PathBuf, but pest uses UTF-8 strings.
// Better hope your paths are valid UTF-8!
/// This map appears to correspond to a filesystem path.
Path(String),
}
/// Represents the address range of a map.
#[derive(Debug, Deserialize, Serialize, PartialEq, Eq)]
pub struct AddressRange {
    /// The (inclusive) start of the address range.
    pub begin: u64,
    /// The (exclusive) end of the address range.
    pub end: u64,
}

impl fmt::Display for AddressRange {
    /// Renders the range as lowercase hex endpoints joined by `-`,
    /// mirroring the first field of `/proc/<pid>/maps`.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let AddressRange { begin, end } = self;
        write!(f, "{begin:x}-{end:x}")
    }
}
/// Represents the permissions associated with a map.
#[derive(Debug, Default, Deserialize, Serialize, PartialEq, Eq)]
pub struct Permissions {
/// Is this map readable?
pub readable: bool,
/// Is this map writable?
pub writable: bool,
/// Is this map executable?
pub executable: bool,
/// Is this map shared?
pub shared: bool,
/// Is this map private (i.e., copy-on-write)?
pub private: bool,
}
impl fmt::Display for Permissions {
    /// Renders the permissions as a `/proc/<pid>/maps`-style mask,
    /// e.g. `r-xp` or `rw-s`: the first three columns are the read/write/
    /// execute bits, the fourth is `s` (shared) or `p` (private).
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let mut mask = String::with_capacity(4);

        mask.push(if self.readable { 'r' } else { '-' });
        mask.push(if self.writable { 'w' } else { '-' });
        // BUG FIX: this previously rendered the executable bit as 'e',
        // which does not match the procfs format and cannot be re-parsed
        // by `Map::parse` (which checks for b'x').
        mask.push(if self.executable { 'x' } else { '-' });
        mask.push(if self.shared { 's' } else { 'p' });

        write!(f, "{}", mask)
    }
}
/// Represents the device associated with a map.
#[derive(Debug, Deserialize, Serialize, PartialEq, Eq)]
pub struct Device {
/// The device's major number.
pub major: u64,
/// The device's minor number.
pub minor: u64,
}
impl fmt::Display for Device {
    /// Renders the device as zero-padded hex `major:minor` (e.g. `08:11`),
    /// matching the device field of `/proc/<pid>/maps`.
    ///
    /// Previously this printed decimal with a `-` separator (`08-17` for
    /// the 08:11 fixture), which is inconsistent with the format that
    /// `Map::parse` consumes (hex numbers separated by a colon).
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{:02x}:{:02x}", self.major, self.minor)
    }
}
/// Represents a map, i.e. a region of program memory.
#[derive(Debug, Deserialize, Serialize, PartialEq, Eq)]
pub struct Map {
/// The map's address range.
pub address_range: AddressRange,
/// The map's permissions.
pub permissions: Permissions,
/// The offset of the map within its source.
pub offset: u64,
/// The device that the map's inode belongs on.
pub device: Device,
/// The map's inode (or 0 if inapplicable).
pub inode: u64,
/// The map's pathname field.
pub pathname: Pathname,
}
impl Default for Map {
    /// Returns a zeroed `Map`: empty address range, no permissions, device
    /// 0:0, inode 0, and an anonymous (`Mmap`) pathname. Used as the
    /// starting state that `Map::parse` fills in field by field.
    fn default() -> Self {
        Map {
            address_range: AddressRange { begin: 0, end: 0 },
            permissions: Default::default(),
            offset: 0,
            device: Device { major: 0, minor: 0 },
            inode: 0,
            pathname: Pathname::Mmap,
        }
    }
}
impl Map {
    /// Parses a single line of `/proc/<pid>/maps` into a `Map`.
    ///
    /// Returns `Error::ParseError` if the line doesn't match the pest
    /// grammar, or `Error::WidthError` if a numeric field overflows `u64`.
    fn parse(line: &str) -> Result<Map, Error> {
        // NOTE(ww): The map rule is singular, so this next + unwrap is safe after
        // a successful parse.
        let parsed = MapParser::parse(Rule::map, line)?.next().unwrap();

        // Start from the zeroed default and fill in each field as its rule
        // is encountered.
        let mut map: Map = Default::default();
        for entry in parsed.into_inner() {
            match entry.as_rule() {
                Rule::address_range => {
                    // Two hex endpoints, e.g. `5608dd391000-5608dd3be000`.
                    let mut address_range = entry.into_inner();

                    map.address_range.begin =
                        u64::from_str_radix(address_range.next().unwrap().as_str(), 16)?;
                    map.address_range.end =
                        u64::from_str_radix(address_range.next().unwrap().as_str(), 16)?;
                }
                Rule::permissions => {
                    // Fixed four-character mask, e.g. `r-xp`.
                    let permissions = entry.as_str().as_bytes();

                    map.permissions.readable = permissions[0] == b'r';
                    map.permissions.writable = permissions[1] == b'w';
                    map.permissions.executable = permissions[2] == b'x';
                    map.permissions.shared = permissions[3] == b's';
                    map.permissions.private = !map.permissions.shared;
                }
                Rule::offset => {
                    let offset = entry.as_str();
                    map.offset = u64::from_str_radix(offset, 16)?;
                }
                Rule::device => {
                    // Hex `major:minor` pair, e.g. `08:11`.
                    let mut device = entry.into_inner();

                    map.device.major = u64::from_str_radix(device.next().unwrap().as_str(), 16)?;
                    map.device.minor = u64::from_str_radix(device.next().unwrap().as_str(), 16)?;
                }
                Rule::inode => {
                    // The inode is the only decimal field on the line.
                    map.inode = entry.as_str().parse()?;
                }
                Rule::pathname => {
                    let pathname = entry.as_str();

                    if pathname.is_empty() {
                        // An empty path indicates an mmap'd region.
                        map.pathname = Pathname::Mmap;
                    } else if PSUEDO_PATH_MAP.contains_key(pathname) {
                        // There are some pseudo-files that we know; use their enum variants
                        // if we see them.
                        map.pathname = PSUEDO_PATH_MAP.get(pathname).unwrap().clone();
                    } else if pathname.starts_with('[') && pathname.ends_with(']') {
                        // There are probably other pseudo-files that we don't know;
                        // if we see something that looks like one, mark it as such.
                        map.pathname = Pathname::OtherPseudo(pathname.into());
                    } else {
                        // Finally, treat anything else like a path.
                        // As proc(5) notes, there are a few ambiguities here with escaped
                        // newlines and the "(deleted)" suffix; leave these to the user to figure out.
                        map.pathname = Pathname::Path(pathname.into());
                    }
                }
                // NOTE(ww): There are other rules, but we should never be able to match them in this context.
                _ => {
                    unreachable!();
                }
            }
        }

        Ok(map)
    }
}
/// A wrapper structure for consuming individual `Map`s from a reader.
pub struct MapsLines<T> {
    lines: Lines<T>,
}

impl<T> MapsLines<T> {
    /// Creates a new `MapsLines` over the given `lines` iterator.
    pub fn new(lines: Lines<T>) -> MapsLines<T> {
        MapsLines { lines }
    }
}

impl<T> Iterator for MapsLines<T>
where
    T: BufRead,
{
    type Item = Result<Map, Error>;

    /// Yields the next parsed `Map`, forwarding any I/O error from the
    /// underlying line iterator.
    fn next(&mut self) -> Option<Self::Item> {
        let line = self.lines.next()?;
        Some(match line {
            Ok(line) => Map::parse(&line),
            Err(e) => Err(e.into()),
        })
    }
}
/// A wrapper structure for consuming individual `Map`s from a reader.
pub struct Maps<T: BufRead> {
    reader: T,
}

impl<T: BufRead> Maps<T> {
    /// Creates a new `Maps` over the given `reader`.
    pub fn new(reader: T) -> Maps<T> {
        Maps { reader }
    }
}

impl<T: BufRead> Iterator for Maps<T> {
    type Item = Result<Map, Error>;

    fn next(&mut self) -> Option<Self::Item> {
        let mut buf = String::new();
        match self.reader.read_line(&mut buf) {
            // EOF: the reader is exhausted.
            Ok(0) => None,
            Err(e) => Some(Err(e.into())),
            Ok(_) => {
                // NOTE(ww): Annoying: the Lines iterator yields lines without
                // their trailing delimiters, but read_line includes them.
                let line = buf.strip_suffix('\n').unwrap_or(&buf);
                Some(Map::parse(line))
            }
        }
    }
}
/// Returns an iterable `Maps` for the given pid.
pub fn from_pid(pid: pid_t) -> Result<Maps<BufReader<File>>, Error> {
let path = Path::new("/proc").join(pid.to_string()).join("maps");
from_path(path)
}
/// Returns an iterable `Maps` parsed from the given file.
pub fn from_path<P: AsRef<Path>>(path: P) -> Result<Maps<BufReader<File>>, Error> {
let reader = {
let f = File::open(path)?;
BufReader::new(f)
};
Ok(Maps::new(reader))
}
/// Returns an iterable `Maps` parsed from the given string.
pub fn from_str(maps_data: &str) -> Maps<&[u8]> {
Maps::new(maps_data.as_bytes())
}
/// Returns an iterable `Maps` parsed from the given [Lines].
pub fn from_lines<T>(maps_lines: Lines<T>) -> MapsLines<T> {
MapsLines::new(maps_lines)
}
#[cfg(test)]
mod tests {
use std::fs;
use glob::glob;
use super::*;
#[test]
fn test_parse_map() {
let map =
Map::parse("5608dd391000-5608dd3be000 r--p 00000000 08:11 6572575 /bin/bash").unwrap();
assert_eq!(map.address_range.begin, 0x5608dd391000);
assert_eq!(map.address_range.end, 0x5608dd3be000);
assert!(map.permissions.readable);
assert!(!map.permissions.writable);
assert!(!map.permissions.executable);
assert!(!map.permissions.shared);
assert!(map.permissions.private);
assert_eq!(map.offset, 0);
assert_eq!(map.device.major, 8);
assert_eq!(map.device.minor, 17);
assert_eq!(map.inode, 6572575);
assert_eq!(map.pathname, Pathname::Path("/bin/bash".into()));
}
#[test]
fn test_reference_inputs() {
let test_data = Path::new(env!("CARGO_MANIFEST_DIR")).join("test_data");
for maps_input in glob(test_data.join("*.maps").to_str().unwrap()).unwrap() {
let maps_input = maps_input.unwrap();
let reference_output = maps_input.with_extension("json");
let maps = from_path(&maps_input).unwrap().collect::<Vec<_>>();
let expected_maps: Vec<Map> =
serde_json::from_str(&fs::read_to_string(reference_output).unwrap()).unwrap();
assert_eq!(maps.len(), expected_maps.len());
for (map, emap) in maps.iter().zip(expected_maps.iter()) {
assert_eq!(map.as_ref().unwrap(), emap);
}
}
// TODO(ww): Add some invalid reference inputs.
}
}
| woodruffw/procmaps.rs | 15 | A small Rust library for reading process maps from procfs | Rust | woodruffw | William Woodruff | astral-sh |
pyrage-stubs/pyrage/__init__.pyi | Python | from io import BufferedIOBase
from typing import Sequence, Union
from pyrage import passphrase, plugin, ssh, x25519
from pyrage.plugin import IdentityPluginV1, RecipientPluginV1
from pyrage.ssh import Identity as SSHIdentity
from pyrage.ssh import Recipient as SSHRecipient
from pyrage.x25519 import Identity as X25519Identity
from pyrage.x25519 import Recipient as X25519Recipient
_Identity = Union[SSHIdentity, X25519Identity, IdentityPluginV1]
_Recipient = Union[SSHRecipient, X25519Recipient, RecipientPluginV1]
__all__ = (
"ssh",
"x25519",
"passphrase",
"plugin",
"encrypt",
"encrypt_file",
"encrypt_io",
"decrypt",
"decrypt_file",
"decrypt_io",
"RecipientError",
"IdentityError",
"EncryptError",
"DecryptError",
)
# Raised when a recipient cannot be parsed or constructed.
class RecipientError(Exception): ...

# Raised when an identity cannot be parsed or constructed.
class IdentityError(Exception): ...

# Raised when encryption fails.
class EncryptError(Exception): ...

# Raised when decryption fails (e.g. no matching identity).
class DecryptError(Exception): ...

# Encrypts `plaintext` to one or more recipients, returning the age
# ciphertext (ASCII-armored when `armored` is True).
def encrypt(
    plaintext: bytes,
    recipients: Sequence[_Recipient],
    armored: bool = False,
) -> bytes: ...

# Encrypts the file at `infile` to the recipients, writing to `outfile`.
def encrypt_file(
    infile: str,
    outfile: str,
    recipients: Sequence[_Recipient],
    armored: bool = False,
) -> None: ...

# NOTE(review): annotated as returning bytes, but the parallel `decrypt_io`
# returns None and this presumably writes its output to `out_io` —
# confirm the return type against the Rust implementation.
def encrypt_io(
    in_io: BufferedIOBase,
    out_io: BufferedIOBase,
    recipients: Sequence[_Recipient],
    armored: bool = False,
) -> bytes: ...

# Decrypts `ciphertext` with any of the supplied identities.
def decrypt(ciphertext: bytes, identities: Sequence[_Identity]) -> bytes: ...

# Decrypts the file at `infile`, writing the plaintext to `outfile`.
def decrypt_file(
    infile: str, outfile: str, identities: Sequence[_Identity]
) -> None: ...

# Decrypts from `in_io`, writing the plaintext to `out_io`.
def decrypt_io(
    in_io: BufferedIOBase, out_io: BufferedIOBase, identities: Sequence[_Identity]
) -> None: ...
| woodruffw/pyrage | 80 | Python bindings for rage (age in Rust) | Rust | woodruffw | William Woodruff | astral-sh |
pyrage-stubs/pyrage/passphrase.pyi | Python | def encrypt(plaintext: bytes, passphrase: str, armored: bool = False) -> bytes: ...
def decrypt(ciphertext: bytes, passphrase: str, armored: bool = False) -> bytes: ...
| woodruffw/pyrage | 80 | Python bindings for rage (age in Rust) | Rust | woodruffw | William Woodruff | astral-sh |
pyrage-stubs/pyrage/plugin.pyi | Python | from __future__ import annotations
from typing import Sequence, Self, Optional, Protocol
class Callbacks(Protocol):
def display_message(self, message: str) -> None:
...
def confirm(self, message: str, yes_string: str, no_string: Optional[str]) -> Optional[bool]:
...
def request_public_string(self, description: str) -> Optional[str]:
...
def request_passphrase(self, description: str) -> Optional[str]:
...
class Recipient:
@classmethod
def from_str(cls, v: str) -> Recipient:
...
def plugin(self) -> str:
...
class RecipientPluginV1:
def __new__(cls, plugin_name: str, recipients: Sequence[Recipient], identities: Sequence[Identity], callbacks: Callbacks) -> Self:
...
class Identity:
@classmethod
def from_str(cls, v: str) -> Identity:
...
@classmethod
def default_for_plugin(cls, plugin: str) -> Identity:
...
def plugin(self) -> str:
...
class IdentityPluginV1:
def __new__(cls, plugin_name: str, identities: Sequence[Identity], callbacks: Callbacks) -> Self:
...
| woodruffw/pyrage | 80 | Python bindings for rage (age in Rust) | Rust | woodruffw | William Woodruff | astral-sh |
pyrage-stubs/pyrage/ssh.pyi | Python | from __future__ import annotations
class Identity:
@classmethod
def from_buffer(cls, buf: bytes) -> Identity:
...
class Recipient:
@classmethod
def from_str(cls, v: str) -> Recipient:
...
| woodruffw/pyrage | 80 | Python bindings for rage (age in Rust) | Rust | woodruffw | William Woodruff | astral-sh |
pyrage-stubs/pyrage/x25519.pyi | Python | from __future__ import annotations
class Identity:
@classmethod
def generate(cls) -> Identity:
...
@classmethod
def from_str(cls, v: str) -> Identity:
...
def to_public(self) -> Recipient:
...
class Recipient:
@classmethod
def from_str(cls, v: str) -> Recipient:
...
| woodruffw/pyrage | 80 | Python bindings for rage (age in Rust) | Rust | woodruffw | William Woodruff | astral-sh |
pyrage-stubs/setup.py | Python | from setuptools import setup
# editable installs don't work with pyproject.toml + setuptools yet,
# so we need this stub.
setup()
| woodruffw/pyrage | 80 | Python bindings for rage (age in Rust) | Rust | woodruffw | William Woodruff | astral-sh |
src/lib.rs | Rust | #![deny(unsafe_code)]
use std::collections::HashSet;
use std::io::Write;
use std::{fs::File, io::Read};
use age::{
armor::ArmoredReader, armor::ArmoredWriter, armor::Format, DecryptError as RageDecryptError,
EncryptError as RageEncryptError, Encryptor, Identity, Recipient,
};
use age_core::format::{FileKey, Stanza};
use pyo3::{
create_exception,
exceptions::{PyException, PyTypeError},
prelude::*,
py_run,
types::PyBytes,
};
use pyo3_file::PyFileLikeObject;
mod passphrase;
mod plugin;
mod ssh;
mod x25519;
// These exceptions are raised by the `pyrage.ssh` and `pyrage.x25519` APIs,
// where appropriate.
create_exception!(pyrage, RecipientError, PyException);
create_exception!(pyrage, IdentityError, PyException);
// This is a wrapper trait for age's `Recipient`, providing trait downcasting.
//
// We need this so that we can pass multiple different types of recipients
// into the Python-level `encrypt` API.
trait PyrageRecipient: Recipient {
fn as_recipient(self: Box<Self>) -> Box<dyn Recipient>;
}
// This is a wrapper trait for age's `Identity`, providing trait downcasting.
//
// We need this so that we can pass multiple different types of identities
// into the Python-level `decrypt` API.
trait PyrageIdentity: Identity {
fn as_identity(&self) -> &dyn Identity;
}
// This macro generates two trait impls for each passed in type:
//
// * An age `Recipient` impl, using the underlying trait impl.
// * A `PyrageRecipient` impl, by consuming the instance and downcasting.
macro_rules! recipient_traits {
    ($($t:ty),+) => {
        $(
            impl Recipient for $t {
                // Delegate key wrapping to the wrapped age recipient (`self.0`).
                fn wrap_file_key(&self, file_key: &FileKey) -> Result<(Vec<Stanza>, HashSet<String>), RageEncryptError> {
                    self.0.wrap_file_key(file_key)
                }
            }

            impl PyrageRecipient for $t {
                fn as_recipient(self: Box<Self>) -> Box<dyn Recipient> {
                    // The `+ Send` cast is coerced back down to the
                    // `Box<dyn Recipient>` return type.
                    self as Box<dyn Recipient + Send>
                }
            }
        )*
    }
}

recipient_traits!(ssh::Recipient, x25519::Recipient, plugin::RecipientPluginV1);
// This macro generates two trait impls for each passed in type:
//
// * An age `Identity` impl, using the underlying trait impl.
// * A `PyrageIdentity` impl, by borrowing the instance and downcasting.
macro_rules! identity_traits {
    ($($t:ty),+) => {
        $(
            impl Identity for $t {
                // Delegate stanza unwrapping to the wrapped age identity (`self.0`).
                fn unwrap_stanza(&self, stanza: &Stanza) -> Option<Result<FileKey, RageDecryptError>> {
                    self.0.unwrap_stanza(stanza)
                }
            }

            impl PyrageIdentity for $t {
                fn as_identity(&self) -> &dyn Identity {
                    self as &dyn Identity
                }
            }
        )*
    }
}

identity_traits!(ssh::Identity, x25519::Identity, plugin::IdentityPluginV1);
// This is where the magic happens, and why we need to do the trait dance
// above: `FromPyObject` is a third-party trait, so we need to implement it
// for `Box<dyn PyrageRecipient>` instead of `Box<dyn Recipient>`.
//
// The implementation itself is straightforward: we try each concrete
// recipient type in turn, and box the first one that extracts successfully.
impl<'source> FromPyObject<'source> for Box<dyn PyrageRecipient> {
    fn extract_bound(ob: &Bound<'source, PyAny>) -> PyResult<Self> {
        if let Ok(r) = ob.extract::<x25519::Recipient>() {
            return Ok(Box::new(r) as Box<dyn PyrageRecipient>);
        }
        if let Ok(r) = ob.extract::<ssh::Recipient>() {
            return Ok(Box::new(r) as Box<dyn PyrageRecipient>);
        }
        if let Ok(r) = ob.extract::<plugin::RecipientPluginV1>() {
            return Ok(Box::new(r) as Box<dyn PyrageRecipient>);
        }
        Err(PyTypeError::new_err(
            "invalid type (expected a recipient type)",
        ))
    }
}
// Similar to the above: we try each concrete identity type in turn, and box
// the first one that extracts successfully.
impl<'source> FromPyObject<'source> for Box<dyn PyrageIdentity> {
    fn extract_bound(ob: &Bound<'source, PyAny>) -> PyResult<Self> {
        if let Ok(i) = ob.extract::<x25519::Identity>() {
            return Ok(Box::new(i) as Box<dyn PyrageIdentity>);
        }
        if let Ok(i) = ob.extract::<ssh::Identity>() {
            return Ok(Box::new(i) as Box<dyn PyrageIdentity>);
        }
        if let Ok(i) = ob.extract::<plugin::IdentityPluginV1>() {
            return Ok(Box::new(i) as Box<dyn PyrageIdentity>);
        }
        Err(PyTypeError::new_err(
            "invalid type (expected an identity type)",
        ))
    }
}
create_exception!(pyrage, EncryptError, PyException);
/// Encrypts `plaintext` to all of the given recipients, returning the
/// ciphertext as `bytes`.
///
/// Raises `EncryptError` if no recipients are given or if encryption fails.
#[pyfunction]
#[pyo3(signature = (plaintext, recipients, armored=false))]
fn encrypt<'p>(
    py: Python<'p>,
    plaintext: &[u8],
    recipients: Vec<Box<dyn PyrageRecipient>>,
    armored: bool,
) -> PyResult<Bound<'p, PyBytes>> {
    // This turns each `dyn PyrageRecipient` into a `dyn Recipient`, which
    // is what the underlying `age` API expects.
    let recipients = recipients
        .into_iter()
        .map(|pr| pr.as_recipient())
        .collect::<Vec<_>>();
    let encryptor = Encryptor::with_recipients(recipients.iter().map(|r| r.as_ref()))
        .map_err(|_| EncryptError::new_err("expected at least one recipient"))?;
    let mut encrypted = vec![];
    // Select the output format once instead of duplicating the wrapping
    // logic per branch; `ArmoredWriter` with `Format::Binary` is a
    // pass-through.
    let format = if armored { Format::AsciiArmor } else { Format::Binary };
    let mut writer = encryptor
        .wrap_output(ArmoredWriter::wrap_output(&mut encrypted, format)?)
        .map_err(|e| EncryptError::new_err(e.to_string()))?;
    writer
        .write_all(plaintext)
        .map_err(|e| EncryptError::new_err(e.to_string()))?;
    // Finish both layers (the age stream, then the armorer) so that all
    // trailing data is emitted.
    writer
        .finish()
        .map_err(|e| EncryptError::new_err(e.to_string()))?
        .finish()
        .map_err(|e| EncryptError::new_err(e.to_string()))?;
    // TODO: Avoid this copy. Maybe PyBytes::new_with?
    Ok(PyBytes::new(py, &encrypted))
}
#[pyfunction]
#[pyo3(signature = (infile, outfile, recipients, armored=false))]
fn encrypt_file(
infile: String,
outfile: String,
recipients: Vec<Box<dyn PyrageRecipient>>,
armored: bool,
) -> PyResult<()> {
// This turns each `dyn PyrageRecipient` into a `dyn Recipient`, which
// is what the underlying `age` API expects.
let recipients = recipients
.into_iter()
.map(|pr| pr.as_recipient())
.collect::<Vec<_>>();
let reader = File::open(infile)?;
let writer = File::create(outfile)?;
let mut reader = std::io::BufReader::new(reader);
let mut writer = std::io::BufWriter::new(writer);
let encryptor = Encryptor::with_recipients(recipients.iter().map(|r| r.as_ref()))
.map_err(|_| EncryptError::new_err("expected at least one recipient"))?;
let mut writer = match armored {
true => encryptor
.wrap_output(ArmoredWriter::wrap_output(&mut writer, Format::AsciiArmor)?)
.map_err(|e| EncryptError::new_err(e.to_string()))?,
false => encryptor
.wrap_output(ArmoredWriter::wrap_output(&mut writer, Format::Binary)?)
.map_err(|e| EncryptError::new_err(e.to_string()))?,
};
std::io::copy(&mut reader, &mut writer).map_err(|e| EncryptError::new_err(e.to_string()))?;
writer
.finish()
.map_err(|e| EncryptError::new_err(e.to_string()))?
.finish()
.map_err(|e| EncryptError::new_err(e.to_string()))?;
Ok(())
}
create_exception!(pyrage, DecryptError, PyException);
/// Decrypts `ciphertext` using any of the given identities, returning the
/// plaintext as `bytes`. Raises `DecryptError` on failure.
#[pyfunction]
fn decrypt<'p>(
    py: Python<'p>,
    ciphertext: &[u8],
    identities: Vec<Box<dyn PyrageIdentity>>,
) -> PyResult<Bound<'p, PyBytes>> {
    // `ArmoredReader` transparently handles both armored and binary inputs.
    let decryptor = age::Decryptor::new(ArmoredReader::new(ciphertext))
        .map_err(|e| DecryptError::new_err(e.to_string()))?;
    let mut reader = decryptor
        .decrypt(identities.iter().map(|pi| pi.as_ref().as_identity()))
        .map_err(|e| DecryptError::new_err(e.to_string()))?;
    let mut decrypted = Vec::new();
    reader
        .read_to_end(&mut decrypted)
        .map_err(|e| DecryptError::new_err(e.to_string()))?;
    // TODO: Avoid this copy. Maybe PyBytes::new_with?
    Ok(PyBytes::new(py, &decrypted))
}
#[pyfunction]
fn decrypt_file(
infile: String,
outfile: String,
identities: Vec<Box<dyn PyrageIdentity>>,
) -> PyResult<()> {
let identities = identities.iter().map(|pi| pi.as_ref().as_identity());
let reader = File::open(infile)?;
let writer = File::create(outfile)?;
let reader = std::io::BufReader::new(reader);
let mut writer = std::io::BufWriter::new(writer);
let decryptor = age::Decryptor::new_buffered(ArmoredReader::new(reader))
.map_err(|e| DecryptError::new_err(e.to_string()))?;
let mut reader = decryptor
.decrypt(identities)
.map_err(|e| DecryptError::new_err(e.to_string()))?;
std::io::copy(&mut reader, &mut writer)?;
Ok(())
}
/// Adapts an arbitrary Python object into a `PyFileLikeObject`.
///
/// Read-only adapters require a `read` method; writable adapters require
/// `write`. `seek` and `fileno` are never required.
fn from_pyobject(file: PyObject, read_only: bool) -> PyResult<PyFileLikeObject> {
    // is a file-like
    PyFileLikeObject::with_requirements(file, read_only, !read_only, false, false)
}
#[pyfunction]
#[pyo3(signature = (reader, writer, recipients, armored=false))]
fn encrypt_io(
reader: PyObject,
writer: PyObject,
recipients: Vec<Box<dyn PyrageRecipient>>,
armored: bool,
) -> PyResult<()> {
// This turns each `dyn PyrageRecipient` into a `dyn Recipient`, which
// is what the underlying `age` API expects.
let recipients = recipients
.into_iter()
.map(|pr| pr.as_recipient())
.collect::<Vec<_>>();
let reader = from_pyobject(reader, true)?;
let writer = from_pyobject(writer, false)?;
let mut reader = std::io::BufReader::new(reader);
let mut writer = std::io::BufWriter::new(writer);
let encryptor = Encryptor::with_recipients(recipients.iter().map(|r| r.as_ref()))
.map_err(|_| EncryptError::new_err("expected at least one recipient"))?;
let mut writer = match armored {
true => encryptor
.wrap_output(ArmoredWriter::wrap_output(&mut writer, Format::AsciiArmor)?)
.map_err(|e| EncryptError::new_err(e.to_string()))?,
false => encryptor
.wrap_output(ArmoredWriter::wrap_output(&mut writer, Format::Binary)?)
.map_err(|e| EncryptError::new_err(e.to_string()))?,
};
std::io::copy(&mut reader, &mut writer).map_err(|e| EncryptError::new_err(e.to_string()))?;
writer
.finish()
.map_err(|e| EncryptError::new_err(e.to_string()))?
.finish()
.map_err(|e| EncryptError::new_err(e.to_string()))?;
Ok(())
}
#[pyfunction]
fn decrypt_io(
reader: PyObject,
writer: PyObject,
identities: Vec<Box<dyn PyrageIdentity>>,
) -> PyResult<()> {
let identities = identities.iter().map(|pi| pi.as_ref().as_identity());
let reader = from_pyobject(reader, true)?;
let writer = from_pyobject(writer, false)?;
let reader = std::io::BufReader::new(reader);
let mut writer = std::io::BufWriter::new(writer);
let decryptor = age::Decryptor::new_buffered(ArmoredReader::new(reader))
.map_err(|e| DecryptError::new_err(e.to_string()))?;
let mut reader = decryptor
.decrypt(identities)
.map_err(|e| DecryptError::new_err(e.to_string()))?;
std::io::copy(&mut reader, &mut writer)?;
Ok(())
}
/// Top-level initializer for the `pyrage` extension module.
///
/// Registers each Rust submodule (x25519, ssh, passphrase, plugin), every
/// exported exception type, and all of the free functions exposed to Python.
#[pymodule]
fn pyrage(py: Python, m: &Bound<'_, PyModule>) -> PyResult<()> {
    // HACK(ww): pyO3 modules are not packages, so we need this nasty
    // `py_run!` hack to support `from pyrage import ...` and similar
    // import patterns.
    let x25519 = x25519::module(py)?;
    py_run!(
        py,
        x25519,
        "import sys; sys.modules['pyrage.x25519'] = x25519"
    );
    m.add_submodule(&x25519)?;

    let ssh = ssh::module(py)?;
    py_run!(py, ssh, "import sys; sys.modules['pyrage.ssh'] = ssh");
    m.add_submodule(&ssh)?;

    let passphrase = passphrase::module(py)?;
    py_run!(
        py,
        passphrase,
        "import sys; sys.modules['pyrage.passphrase'] = passphrase"
    );
    m.add_submodule(&passphrase)?;

    let plugin = plugin::module(py)?;
    py_run!(
        py,
        plugin,
        "import sys; sys.modules['pyrage.plugin'] = plugin"
    );
    m.add_submodule(&plugin)?;

    // Exceptions and functions, grouped roughly by operation.
    m.add("IdentityError", py.get_type::<IdentityError>())?;
    m.add("RecipientError", py.get_type::<RecipientError>())?;
    m.add("EncryptError", py.get_type::<EncryptError>())?;
    m.add_wrapped(wrap_pyfunction!(encrypt))?;
    m.add_wrapped(wrap_pyfunction!(encrypt_file))?;
    m.add_wrapped(wrap_pyfunction!(encrypt_io))?;
    m.add("DecryptError", py.get_type::<DecryptError>())?;
    m.add_wrapped(wrap_pyfunction!(decrypt))?;
    m.add_wrapped(wrap_pyfunction!(decrypt_file))?;
    m.add_wrapped(wrap_pyfunction!(decrypt_io))?;
    Ok(())
}
| woodruffw/pyrage | 80 | Python bindings for rage (age in Rust) | Rust | woodruffw | William Woodruff | astral-sh |
src/passphrase.rs | Rust | use std::{
io::{Read, Write},
iter,
};
use age::{
armor::ArmoredReader, armor::ArmoredWriter, armor::Format, scrypt, Decryptor, Encryptor,
};
use pyo3::{prelude::*, types::PyBytes};
use crate::{DecryptError, EncryptError};
/// Encrypts `plaintext` under a scrypt-derived key from `passphrase`,
/// returning the ciphertext as `bytes`.
#[pyfunction]
#[pyo3(signature = (plaintext, passphrase, armored=false))]
fn encrypt<'p>(
    py: Python<'p>,
    plaintext: &[u8],
    passphrase: &str,
    armored: bool,
) -> PyResult<Bound<'p, PyBytes>> {
    let encryptor = Encryptor::with_user_passphrase(passphrase.into());
    let mut encrypted = vec![];
    // Select the output format once instead of duplicating the wrapping
    // logic per branch; `ArmoredWriter` with `Format::Binary` is a
    // pass-through.
    let format = if armored { Format::AsciiArmor } else { Format::Binary };
    let armor = ArmoredWriter::wrap_output(&mut encrypted, format)
        .map_err(|e| EncryptError::new_err(e.to_string()))?;
    let mut writer = encryptor
        .wrap_output(armor)
        .map_err(|e| EncryptError::new_err(e.to_string()))?;
    writer
        .write_all(plaintext)
        .map_err(|e| EncryptError::new_err(e.to_string()))?;
    // Finish both layers (the age stream, then the armorer).
    writer
        .finish()
        .map_err(|e| EncryptError::new_err(e.to_string()))?
        .finish()
        .map_err(|e| EncryptError::new_err(e.to_string()))?;
    Ok(PyBytes::new(py, &encrypted))
}
/// Decrypts `ciphertext` using a scrypt identity derived from `passphrase`,
/// returning the plaintext as `bytes`. Raises `DecryptError` on failure.
#[pyfunction]
fn decrypt<'p>(
    py: Python<'p>,
    ciphertext: &[u8],
    passphrase: &str,
) -> PyResult<Bound<'p, PyBytes>> {
    let identity = scrypt::Identity::new(passphrase.into());
    let decryptor = Decryptor::new_buffered(ArmoredReader::new(ciphertext))
        .map_err(|e| DecryptError::new_err(e.to_string()))?;
    let mut reader = decryptor
        .decrypt(iter::once(&identity as _))
        .map_err(|e| DecryptError::new_err(e.to_string()))?;
    let mut decrypted = Vec::new();
    reader
        .read_to_end(&mut decrypted)
        .map_err(|e| DecryptError::new_err(e.to_string()))?;
    Ok(PyBytes::new(py, &decrypted))
}
/// Builds the `pyrage.passphrase` submodule.
pub(crate) fn module(py: Python<'_>) -> PyResult<Bound<'_, PyModule>> {
    let m = PyModule::new(py, "passphrase")?;
    m.add_wrapped(wrap_pyfunction!(encrypt))?;
    m.add_wrapped(wrap_pyfunction!(decrypt))?;
    Ok(m)
}
| woodruffw/pyrage | 80 | Python bindings for rage (age in Rust) | Rust | woodruffw | William Woodruff | astral-sh |
src/plugin.rs | Rust | use std::str::FromStr;
use std::sync::Arc;
use pyo3::{prelude::*, types::PyType};
use crate::{DecryptError, EncryptError, IdentityError, RecipientError};
/// Hack, because the orphan rule would prevent us from deriving a
/// foreign trait on a foreign object. Instead, define a newtype.
///
/// Inner type is PyAny, because we do duck-typing at runtime, and
/// declaring a protocol in the type stubs.
#[derive(Clone)]
pub(crate) struct PyCallbacks(Py<PyAny>);

impl PyCallbacks {
    /// Wraps an arbitrary Python object. The object is expected (but not
    /// checked here) to provide the callback methods invoked below.
    fn new(inner: Bound<'_, PyAny>) -> PyResult<Self> {
        Ok(Self(inner.unbind()))
    }
}
// Since we have no way to pass errors from these callbacks, we might
// as well panic.
//
// These callbacks don't look like they're supposed to fail anyway.
impl age::Callbacks for PyCallbacks {
    /// Forwards an informational message to the Python object's
    /// `display_message` method. Panics if the Python call raises.
    fn display_message(&self, message: &str) {
        Python::with_gil(|py| {
            self.0
                .call_method1(py, pyo3::intern!(py, "display_message"), (message,))
                .expect("`display_message` callback error")
        });
    }

    /// Asks the Python object's `confirm` method a yes/no question.
    /// Panics if the call raises or returns something other than an
    /// `Optional[bool]`.
    fn confirm(&self, message: &str, yes_string: &str, no_string: Option<&str>) -> Option<bool> {
        Python::with_gil(|py| {
            self.0
                .call_method1(
                    py,
                    pyo3::intern!(py, "confirm"),
                    (message, yes_string, no_string),
                )
                .expect("`confirm` callback error")
                .extract::<Option<bool>>(py)
        })
        .expect("type error in `confirm` callback")
    }

    /// Requests a non-secret string from the Python object's
    /// `request_public_string` method.
    fn request_public_string(&self, description: &str) -> Option<String> {
        Python::with_gil(|py| {
            self.0
                .call_method1(
                    py,
                    pyo3::intern!(py, "request_public_string"),
                    (description,),
                )
                .expect("`request_public_string` callback error")
                .extract::<Option<String>>(py)
        })
        .expect("type error in `request_public_string` callback")
    }

    /// Requests a passphrase from the Python object's `request_passphrase`
    /// method and wraps it in a `SecretString`.
    // NOTE(review): the intermediate `String` extracted from Python is not
    // zeroized before the `SecretString` conversion — confirm that's OK.
    fn request_passphrase(&self, description: &str) -> Option<age::secrecy::SecretString> {
        Python::with_gil(|py| {
            self.0
                .call_method1(py, pyo3::intern!(py, "request_passphrase"), (description,))
                .expect("`request_passphrase` callback error")
                .extract::<Option<String>>(py)
        })
        .expect("type error in `request_passphrase` callback")
        .map(age::secrecy::SecretString::from)
    }
}
/// A Python wrapper for an age plugin recipient.
#[pyclass(module = "pyrage.plugin")]
#[derive(Clone)]
pub(crate) struct Recipient(pub(crate) age::plugin::Recipient);

#[pymethods]
impl Recipient {
    /// Parses a plugin recipient from its string encoding.
    ///
    /// Raises `RecipientError` if parsing fails.
    #[classmethod]
    fn from_str(_cls: &Bound<'_, PyType>, v: &str) -> PyResult<Self> {
        age::plugin::Recipient::from_str(v)
            .map(Self)
            .map_err(RecipientError::new_err)
    }

    /// Returns the name of the plugin that handles this recipient.
    fn plugin(&self) -> String {
        self.0.plugin().to_owned()
    }

    fn __str__(&self) -> String {
        self.0.to_string()
    }
}
/// A Python wrapper for an age plugin identity.
#[pyclass(module = "pyrage.plugin")]
#[derive(Clone)]
pub(crate) struct Identity(pub(crate) age::plugin::Identity);

#[pymethods]
impl Identity {
    /// Parses a plugin identity from its string encoding.
    ///
    /// Raises `IdentityError` if parsing fails.
    #[classmethod]
    fn from_str(_cls: &Bound<'_, PyType>, v: &str) -> PyResult<Self> {
        age::plugin::Identity::from_str(v)
            .map(Self)
            .map_err(|e| IdentityError::new_err(e.to_string()))
    }

    /// Constructs the default identity for the named plugin.
    #[classmethod]
    fn default_for_plugin(_cls: &Bound<'_, PyType>, plugin: &str) -> Self {
        Self(age::plugin::Identity::default_for_plugin(plugin))
    }

    /// Returns the name of the plugin that handles this identity.
    fn plugin(&self) -> String {
        self.0.plugin().to_owned()
    }

    fn __str__(&self) -> String {
        self.0.to_string()
    }
}
/// A Python wrapper for a v1 recipient plugin instance, usable anywhere a
/// recipient is accepted by the top-level `encrypt` APIs.
#[pyclass(module = "pyrage.plugin")]
#[derive(Clone)]
pub(crate) struct RecipientPluginV1(pub(crate) Arc<age::plugin::RecipientPluginV1<PyCallbacks>>);

#[pymethods]
impl RecipientPluginV1 {
    /// Creates a recipient plugin handle for `plugin_name`, encrypting to
    /// the given recipients and identities and routing interactive prompts
    /// through `callbacks`.
    ///
    /// Raises `EncryptError` if the plugin cannot be set up.
    #[new]
    #[pyo3(
        text_signature = "(plugin_name: str, recipients: typing.Sequence[Recipient], identities: typing.Sequence[Identity], callbacks: Callbacks)"
    )]
    fn new(
        _py: Python<'_>,
        plugin_name: &str,
        recipients: Vec<Recipient>,
        identities: Vec<Identity>,
        callbacks: Bound<'_, PyAny>,
    ) -> PyResult<Self> {
        age::plugin::RecipientPluginV1::new(
            plugin_name,
            // Unwrap the Python newtypes into the underlying age types.
            recipients
                .into_iter()
                .map(|i| i.0)
                .collect::<Vec<_>>()
                .as_slice(),
            identities
                .into_iter()
                .map(|i| i.0)
                .collect::<Vec<_>>()
                .as_slice(),
            PyCallbacks::new(callbacks)?,
        )
        .map(Arc::new)
        .map(Self)
        .map_err(|err| EncryptError::new_err(err.to_string()))
    }
}
/// A Python wrapper for a v1 identity plugin instance, usable anywhere an
/// identity is accepted by the top-level `decrypt` APIs.
#[pyclass(module = "pyrage.plugin")]
#[derive(Clone)]
pub(crate) struct IdentityPluginV1(pub(crate) Arc<age::plugin::IdentityPluginV1<PyCallbacks>>);

#[pymethods]
impl IdentityPluginV1 {
    /// Creates an identity plugin handle for `plugin_name`, decrypting with
    /// the given identities and routing interactive prompts through
    /// `callbacks`.
    ///
    /// Raises `DecryptError` if the plugin cannot be set up.
    #[new]
    #[pyo3(
        text_signature = "(plugin_name: str, identities: typing.Sequence[Identity], callbacks: Callbacks)"
    )]
    fn new(
        _py: Python<'_>,
        plugin_name: &str,
        identities: Vec<Identity>,
        callbacks: Bound<'_, PyAny>,
    ) -> PyResult<Self> {
        age::plugin::IdentityPluginV1::new(
            plugin_name,
            // Unwrap the Python newtypes into the underlying age types.
            identities
                .into_iter()
                .map(|i| i.0)
                .collect::<Vec<_>>()
                .as_slice(),
            PyCallbacks::new(callbacks)?,
        )
        .map(Arc::new)
        .map(Self)
        .map_err(|err| DecryptError::new_err(err.to_string()))
    }
}
/// Builds the `pyrage.plugin` submodule.
pub(crate) fn module(py: Python<'_>) -> PyResult<Bound<'_, PyModule>> {
    let m = PyModule::new(py, "plugin")?;
    m.add_class::<Recipient>()?;
    m.add_class::<Identity>()?;
    m.add_class::<RecipientPluginV1>()?;
    m.add_class::<IdentityPluginV1>()?;
    Ok(m)
}
| woodruffw/pyrage | 80 | Python bindings for rage (age in Rust) | Rust | woodruffw | William Woodruff | astral-sh |
src/ssh.rs | Rust | use std::str::FromStr;
use pyo3::{prelude::*, types::PyType};
use crate::{IdentityError, RecipientError};
/// A Python wrapper for an SSH public key used as an age recipient.
#[pyclass(module = "pyrage.ssh")]
#[derive(Clone)]
pub(crate) struct Recipient(pub(crate) age::ssh::Recipient);

#[pymethods]
impl Recipient {
    /// Parses a recipient from an SSH public key string.
    ///
    /// Raises `RecipientError` if the key cannot be parsed.
    #[classmethod]
    fn from_str(_cls: &Bound<'_, PyType>, v: &str) -> PyResult<Self> {
        let recipient = age::ssh::Recipient::from_str(v)
            .map_err(|e| RecipientError::new_err(format!("invalid public key: {:?}", e)))?;
        Ok(Self(recipient))
    }
}
/// A Python wrapper for an SSH private key used as an age identity.
#[pyclass(module = "pyrage.ssh")]
#[derive(Clone)]
pub(crate) struct Identity(pub(crate) age::ssh::Identity);

#[pymethods]
impl Identity {
    /// Parses an identity from the raw bytes of an SSH private key.
    ///
    /// Only unencrypted keys are accepted: passphrase-protected or
    /// otherwise unsupported keys raise `IdentityError`.
    #[classmethod]
    fn from_buffer(_cls: &Bound<'_, PyType>, buf: &[u8]) -> PyResult<Self> {
        let identity = age::ssh::Identity::from_buffer(buf, None)
            .map_err(|e| IdentityError::new_err(e.to_string()))?;
        match identity {
            age::ssh::Identity::Unencrypted(_) => Ok(Self(identity)),
            age::ssh::Identity::Encrypted(_) => {
                Err(IdentityError::new_err("ssh key must be decrypted first"))
            }
            age::ssh::Identity::Unsupported(uk) => {
                // Unsupported doesn't have a Display impl, only a hardcoded `display` function.
                Err(IdentityError::new_err(format!("unsupported key: {:?}", uk)))
            }
        }
    }
}
/// Builds the `pyrage.ssh` submodule.
pub(crate) fn module(py: Python<'_>) -> PyResult<Bound<'_, PyModule>> {
    let m = PyModule::new(py, "ssh")?;
    m.add_class::<Recipient>()?;
    m.add_class::<Identity>()?;
    Ok(m)
}
| woodruffw/pyrage | 80 | Python bindings for rage (age in Rust) | Rust | woodruffw | William Woodruff | astral-sh |
src/x25519.rs | Rust | use std::str::FromStr;
use age::secrecy::ExposeSecret;
use pyo3::{prelude::*, types::PyType};
use crate::{IdentityError, RecipientError};
/// A Python wrapper for a native age x25519 recipient (public key).
#[pyclass(module = "pyrage.x25519")]
#[derive(Clone)]
pub(crate) struct Recipient(pub(crate) age::x25519::Recipient);

#[pymethods]
impl Recipient {
    /// Parses a recipient from its Bech32 string form (`age1...`).
    ///
    /// Raises `RecipientError` if parsing fails.
    #[classmethod]
    fn from_str(_cls: &Bound<'_, PyType>, v: &str) -> PyResult<Self> {
        age::x25519::Recipient::from_str(v)
            .map(Self)
            .map_err(RecipientError::new_err)
    }

    fn __str__(&self) -> String {
        self.0.to_string()
    }
}
/// A Python wrapper for a native age x25519 identity (secret key).
#[pyclass(module = "pyrage.x25519")]
#[derive(Clone)]
pub(crate) struct Identity(pub(crate) age::x25519::Identity);

#[pymethods]
impl Identity {
    /// Generates a fresh random identity.
    #[classmethod]
    fn generate(_cls: &Bound<'_, PyType>) -> Self {
        Self(age::x25519::Identity::generate())
    }

    /// Parses an identity from its Bech32 string form (`AGE-SECRET-KEY-...`).
    ///
    /// Raises `IdentityError` if parsing fails.
    #[classmethod]
    fn from_str(_cls: &Bound<'_, PyType>, v: &str) -> PyResult<Self> {
        let identity = age::x25519::Identity::from_str(v)
            .map_err(|e| IdentityError::new_err(e.to_string()))?;
        Ok(Self(identity))
    }

    /// Returns the recipient (public key) for this identity.
    fn to_public(&self) -> Recipient {
        Recipient(self.0.to_public())
    }

    // Deliberately exposes the secret key material as a string; callers
    // are responsible for handling it carefully.
    fn __str__(&self) -> String {
        self.0.to_string().expose_secret().into()
    }
}
/// Builds the `pyrage.x25519` submodule.
pub(crate) fn module(py: Python<'_>) -> PyResult<Bound<'_, PyModule>> {
    let m = PyModule::new(py, "x25519")?;
    m.add_class::<Recipient>()?;
    m.add_class::<Identity>()?;
    Ok(m)
}
| woodruffw/pyrage | 80 | Python bindings for rage (age in Rust) | Rust | woodruffw | William Woodruff | astral-sh |
test/test_passphrase.py | Python | import unittest
from parameterized import parameterized
from pyrage import passphrase
class TestPassphrase(unittest.TestCase):
    """Round-trip tests for the passphrase-based encrypt/decrypt API."""

    @parameterized.expand([(False,), (True,)])
    def test_roundtrip(self, armored):
        """Encrypting then decrypting with the same passphrase recovers the input."""
        secret = b"junk"
        ciphertext = passphrase.encrypt(secret, "some password", armored=armored)
        recovered = passphrase.decrypt(ciphertext, "some password")
        self.assertEqual(secret, recovered)
| woodruffw/pyrage | 80 | Python bindings for rage (age in Rust) | Rust | woodruffw | William Woodruff | astral-sh |
test/test_pyrage.py | Python | import os
import tempfile
import unittest
from io import BytesIO
from parameterized import parameterized
import pyrage
from .utils import ssh_keypair
class TestPyrage(unittest.TestCase):
    """End-to-end tests for the top-level pyrage encrypt/decrypt APIs."""

    def test_encrypt_fails_with_no_receipients(self):
        """Encrypting with an empty recipient list raises EncryptError."""
        with self.assertRaisesRegex(
            pyrage.EncryptError, "expected at least one recipient"
        ):
            pyrage.encrypt(b"test", [])

    @parameterized.expand([(False,), (True,)])
    def test_roundtrip(self, armored):
        """In-memory encrypt -> decrypt round-trip, binary and armored."""
        identity = pyrage.x25519.Identity.generate()
        recipient = identity.to_public()
        encrypted = pyrage.encrypt(b"test", [recipient], armored=armored)
        decrypted = pyrage.decrypt(encrypted, [identity])
        self.assertEqual(b"test", decrypted)

    @parameterized.expand([(False,), (True,)])
    def test_roundtrip_io_fh(self, armored):
        """encrypt_io/decrypt_io round-trip using real (temp) file handles."""
        identity = pyrage.x25519.Identity.generate()
        recipient = identity.to_public()
        with tempfile.TemporaryFile() as unencrypted:
            unencrypted.write(b"test")
            # Rewind before each read/write so the streams start at offset 0.
            unencrypted.seek(0)
            with tempfile.TemporaryFile() as encrypted:
                pyrage.encrypt_io(unencrypted, encrypted, [recipient], armored=armored)
                encrypted.seek(0)
                with tempfile.TemporaryFile() as decrypted:
                    pyrage.decrypt_io(encrypted, decrypted, [identity])
                    decrypted.seek(0)
                    unencrypted.seek(0)
                    self.assertEqual(unencrypted.read(), decrypted.read())

    @parameterized.expand([(False,), (True,)])
    def test_roundtrip_io_bytesio(self, armored):
        """encrypt_io/decrypt_io round-trip using in-memory BytesIO streams."""
        identity = pyrage.x25519.Identity.generate()
        recipient = identity.to_public()
        unencrypted = BytesIO(b"test")
        encrypted = BytesIO()
        decrypted = BytesIO()
        pyrage.encrypt_io(unencrypted, encrypted, [recipient], armored=armored)
        encrypted.seek(0)
        pyrage.decrypt_io(encrypted, decrypted, [identity])
        decrypted.seek(0)
        unencrypted.seek(0)
        self.assertEqual(unencrypted.read(), decrypted.read())

    def test_roundtrip_io_fail(self):
        """Non-file-like inputs/outputs to the IO APIs raise TypeError."""
        identity = pyrage.x25519.Identity.generate()
        recipient = identity.to_public()
        with self.assertRaises(TypeError):
            input = "test"
            output = BytesIO()
            pyrage.encrypt_io(input, output, [recipient])
        with self.assertRaises(TypeError):
            input = BytesIO()
            output = "test"
            pyrage.encrypt_io(input, output, [recipient])
        with self.assertRaises(TypeError):
            input = "test"
            output = BytesIO()
            pyrage.decrypt_io(input, output, [recipient])
        with self.assertRaises(TypeError):
            input = BytesIO()
            output = "test"
            pyrage.decrypt_io(input, output, [recipient])

    @parameterized.expand([(False,), (True,)])
    def test_roundtrip_file(self, armored):
        """encrypt_file/decrypt_file round-trip using paths in a temp dir."""
        identity = pyrage.x25519.Identity.generate()
        recipient = identity.to_public()
        with tempfile.TemporaryDirectory() as tempdir:
            unencrypted = os.path.join(tempdir, "unencrypted")
            encrypted = os.path.join(tempdir, "encrypted")
            decrypted = os.path.join(tempdir, "decrypted")
            with open(unencrypted, "wb") as file:
                file.write(b"test")
            pyrage.encrypt_file(unencrypted, encrypted, [recipient], armored=armored)
            pyrage.decrypt_file(encrypted, decrypted, [identity])
            with open(unencrypted, "rb") as file1:
                with open(decrypted, "rb") as file2:
                    self.assertEqual(file1.read(), file2.read())

    def test_decrypt_fails_wrong_recipient(self):
        """Decryption fails with the wrong identity, succeeds if any matches."""
        alice = pyrage.x25519.Identity.generate()
        bob = pyrage.x25519.Identity.generate()
        # alice encrypts to herself
        encrypted = pyrage.encrypt(b"test", [alice.to_public()])
        # bob tries to decrypt and fails
        with self.assertRaisesRegex(pyrage.DecryptError, "No matching keys found"):
            pyrage.decrypt(encrypted, [bob])
        # one key matches, so decryption succeeds
        decrypted = pyrage.decrypt(encrypted, [alice, bob])
        self.assertEqual(b"test", decrypted)

    @parameterized.expand([(False,), (True,)])
    def test_roundtrip_matrix(self, armored):
        """Encrypt to a mix of x25519 and SSH recipients; decrypt with each."""
        identities = []
        recipients = []
        age_identity = pyrage.x25519.Identity.generate()
        identities.append(age_identity)
        age_recipient = age_identity.to_public()
        recipients.append(age_recipient)
        for filename in ["ed25519", "rsa4096", "rsa2048"]:
            pubkey, privkey = ssh_keypair(filename)
            identities.append(pyrage.ssh.Identity.from_buffer(privkey.encode()))
            recipients.append(pyrage.ssh.Recipient.from_str(pubkey))
        # Encrypt to all recipients, decode using each identity.
        encrypted = pyrage.encrypt(b"test matrix", recipients, armored=armored)
        for identity in identities:
            self.assertEqual(b"test matrix", pyrage.decrypt(encrypted, [identity]))
if __name__ == "__main__":
unittest.main()
| woodruffw/pyrage | 80 | Python bindings for rage (age in Rust) | Rust | woodruffw | William Woodruff | astral-sh |
test/test_ssh.py | Python | import unittest
from pyrage import RecipientError, ssh
from .utils import ssh_keypair
class TestIdentity(unittest.TestCase):
    """Tests for parsing SSH private keys into identities."""

    def test_from_buffer(self):
        """Each supported private key asset parses into an ssh.Identity."""
        for name in ["ed25519", "rsa4096", "rsa2048"]:
            _pub, priv = ssh_keypair(name)
            parsed = ssh.Identity.from_buffer(priv.encode())
            self.assertIsInstance(parsed, ssh.Identity)
class TestRecipient(unittest.TestCase):
    """Tests for parsing SSH public keys into recipients."""

    def test_from_str(self):
        """Each supported public key asset parses into an ssh.Recipient."""
        for name in ["ed25519", "rsa4096", "rsa2048"]:
            pub, _priv = ssh_keypair(name)
            parsed = ssh.Recipient.from_str(pub)
            self.assertIsInstance(parsed, ssh.Recipient)

    def test_from_str_invalid(self):
        """Garbage input raises RecipientError."""
        with self.assertRaisesRegex(RecipientError, "invalid SSH recipient"):
            ssh.Recipient.from_str("invalid ssh pubkey")
| woodruffw/pyrage | 80 | Python bindings for rage (age in Rust) | Rust | woodruffw | William Woodruff | astral-sh |
test/test_x25519.py | Python | import unittest
from pyrage import x25519, IdentityError, RecipientError
class TestIdentity(unittest.TestCase):
    """Tests for x25519 identity generation and parsing."""

    def test_generate(self):
        """Generated identities and their recipients stringify with the expected prefixes."""
        identity = x25519.Identity.generate()
        self.assertIsInstance(identity, x25519.Identity)
        self.assertTrue(str(identity).startswith("AGE-SECRET-KEY"))
        recipient = identity.to_public()
        self.assertTrue(str(recipient).startswith("age"))

    def test_from_str(self):
        """An identity survives a str() round-trip."""
        original = x25519.Identity.generate()
        reparsed = x25519.Identity.from_str(str(original))
        self.assertIsInstance(reparsed, x25519.Identity)

    def test_from_str_invalid(self):
        """Garbage input raises IdentityError."""
        with self.assertRaisesRegex(IdentityError, "invalid Bech32 encoding"):
            x25519.Identity.from_str("BAD-PREFIX")
class TestRecipient(unittest.TestCase):
    """Tests for x25519 recipient parsing."""

    def test_from_str(self):
        """A valid Bech32 recipient parses and round-trips through str()."""
        encoded = "age1zvkyg2lqzraa2lnjvqej32nkuu0ues2s82hzrye869xeexvn73equnujwj"
        recipient = x25519.Recipient.from_str(encoded)
        self.assertIsInstance(recipient, x25519.Recipient)
        self.assertEqual(str(recipient), encoded)

    def test_from_str_invalid(self):
        """Garbage input raises RecipientError."""
        with self.assertRaisesRegex(RecipientError, "invalid Bech32 encoding"):
            x25519.Recipient.from_str("badprefix")
if __name__ == "__main__":
unittest.main()
| woodruffw/pyrage | 80 | Python bindings for rage (age in Rust) | Rust | woodruffw | William Woodruff | astral-sh |
test/utils.py | Python | from pathlib import Path
_HERE = Path(__file__).parent
_ASSETS = _HERE / "assets"
assert _ASSETS.is_dir(), "missing test assets directory"
def ssh_keypair(name):
    """Return ``(public_key_text, private_key_text)`` for the named test asset."""
    public = (_ASSETS / f"{name}.pub").read_text()
    private = (_ASSETS / name).read_text()
    return (public, private)
| woodruffw/pyrage | 80 | Python bindings for rage (age in Rust) | Rust | woodruffw | William Woodruff | astral-sh |
shaq/__init__.py | Python | __version__ = "0.0.5"
| woodruffw/shaq | 116 | A CLI client for Shazam | Python | woodruffw | William Woodruff | astral-sh |
shaq/__main__.py | Python | if __name__ == "__main__":
from shaq._cli import main
main()
| woodruffw/shaq | 116 | A CLI client for Shazam | Python | woodruffw | William Woodruff | astral-sh |
shaq/_cli.py | Python | import argparse
import asyncio
import json
import logging
import os
import shutil
import sys
import wave
from collections.abc import Iterator
from contextlib import contextmanager
from io import BytesIO
from pathlib import Path
from typing import Any
import pyaudio
from pydub import AudioSegment
from rich import progress
from rich.console import Console
from rich.logging import RichHandler
from rich.status import Status
from shazamio import Serialize, Shazam
logging.basicConfig(
level=os.environ.get("SHAQ_LOGLEVEL", "INFO").upper(),
format="%(message)s",
datefmt="[%X]",
)
_DEFAULT_CHUNK_SIZE = 1024
_FORMAT = pyaudio.paInt16
_DEFAULT_CHANNELS = 1
_DEFAULT_SAMPLE_RATE = 16000
_DEFAULT_DURATION = 10
logger = logging.getLogger(__name__)
@contextmanager
def _console() -> Iterator[Console]:
    """
    Temporarily dups and nulls the standard streams, while yielding a
    rich `Console` on the dup'd stderr.

    This is done because of PyAudio's misbehaving internals.

    See: https://stackoverflow.com/questions/67765911
    """
    try:
        # Save stdout and stderr, then clobber them.
        # NOTE(review): if `os.dup` or `os.open` fails partway through, the
        # `finally` block below references names that were never bound
        # (NameError) — worth hardening.
        dup_fds = (os.dup(sys.stdout.fileno()), os.dup(sys.stderr.fileno()))
        null_fds = tuple(os.open(os.devnull, os.O_WRONLY) for _ in range(2))
        os.dup2(null_fds[0], sys.stdout.fileno())
        os.dup2(null_fds[1], sys.stderr.fileno())
        # The Console writes to the *saved* stderr fd, so user-facing output
        # still reaches the terminal while the real streams are nulled.
        dup_stderr = os.fdopen(dup_fds[1], mode="w")
        yield Console(file=dup_stderr)
    finally:
        # Restore the original stdout and stderr; close everything except
        # the original FDs.
        # NOTE(review): `dup_stderr` wraps dup_fds[1] and is never closed;
        # its eventual finalizer may close the fd a second time — confirm.
        os.dup2(dup_fds[0], sys.stdout.fileno())
        os.dup2(dup_fds[1], sys.stderr.fileno())
        for fd in [*null_fds, *dup_fds]:
            os.close(fd)
@contextmanager
def _pyaudio() -> Iterator[pyaudio.PyAudio]:
    """Yields a PyAudio handle, guaranteeing `terminate()` on exit."""
    # Construct *before* the try: if `PyAudio()` itself raises, the old
    # placement left `p` unbound and the `finally` crashed with NameError.
    p = pyaudio.PyAudio()
    try:
        yield p
    finally:
        p.terminate()
def _listen(console: Console, args: argparse.Namespace) -> bytearray:
    """Records ~`args.duration` seconds from the default microphone and
    returns the captured audio as an in-memory WAV byte buffer."""
    with _pyaudio() as p, BytesIO() as io, wave.open(io, "wb") as wav:
        # Use the same parameters as shazamio uses internally for audio
        # normalization, to reduce unnecessary transcoding.
        wav.setnchannels(args.channels)
        wav.setsampwidth(p.get_sample_size(_FORMAT))
        wav.setframerate(args.sample_rate)

        stream = p.open(format=_FORMAT, channels=args.channels, rate=args.sample_rate, input=True)
        try:
            # Each iteration reads one chunk, so rate/chunk * duration
            # iterations yield roughly `duration` seconds of audio.
            for _ in progress.track(
                range(0, args.sample_rate // args.chunk_size * args.duration),
                description="shaq is listening...",
                console=console,
            ):
                wav.writeframes(stream.read(args.chunk_size))
        finally:
            # Always release the input stream, even if a read fails mid-way;
            # previously an exception here leaked the open stream.
            stream.close()

        # TODO: Optimize if necessary; this makes at least one pointless copy.
        return bytearray(io.getvalue())
def _from_file(console: Console, args: argparse.Namespace) -> AudioSegment:
    """Loads `args.input` and returns at most its first `args.duration` seconds."""
    with Status(f"Extracting from {args.input}", console=console):
        segment = AudioSegment.from_file(args.input)
    # pydub measures things in milliseconds
    return segment[: args.duration * 1000]
async def _shaq(console: Console, args: argparse.Namespace) -> dict[str, Any]:
    """Captures (or loads) audio per `args` and asks Shazam to recognize it."""
    sample: bytearray | AudioSegment
    if args.listen:
        sample = _listen(console, args)
    else:
        sample = _from_file(console, args)
    shazam = Shazam(language="en-US", endpoint_country="US")
    return await shazam.recognize_song(sample, proxy=args.proxy)  # type: ignore
def _parser() -> argparse.ArgumentParser:
    """Builds the shaq command-line parser.

    Exactly one of `--listen` / `--input` is required; everything else is
    optional.
    """
    parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    # Input source: microphone or file, mutually exclusive and required.
    input_group = parser.add_mutually_exclusive_group(required=True)
    input_group.add_argument(
        "--listen", action="store_true", help="detect from the system's microphone"
    )
    input_group.add_argument("--input", type=Path, help="detect from the given audio input file")
    parser.add_argument(
        "-d",
        "--duration",
        metavar="SECS",
        type=int,
        default=_DEFAULT_DURATION,
        help="only analyze the first SECS of the input (microphone or file)",
    )
    parser.add_argument(
        "-j", "--json", action="store_true", help="emit Shazam's response as JSON on stdout"
    )
    parser.add_argument("--albumcover", action="store_true", help="return url to HD album cover")
    # Tuning knobs: recording parameters and request proxying.
    advanced_group = parser.add_argument_group(
        title="Advanced Options",
        description="Advanced users only: options to tweak recording, transcoding, etc. behavior.",
    )
    advanced_group.add_argument(
        "--chunk-size",
        type=int,
        default=_DEFAULT_CHUNK_SIZE,
        help="read from the microphone in chunks of this size; only affects --listen",
    )
    advanced_group.add_argument(
        "--channels",
        type=int,
        choices=(1, 2),
        default=_DEFAULT_CHANNELS,
        help="the number of channels to use; only affects --listen",
    )
    advanced_group.add_argument(
        "--sample-rate",
        type=int,
        default=_DEFAULT_SAMPLE_RATE,
        help="the sample rate to use; only affects --listen",
    )
    advanced_group.add_argument(
        "--proxy",
        type=str,
        help="send the request to a proxy server",
    )
    return parser
def main() -> None:
    """CLI entry point: recognize a track from a file or the microphone and report it."""
    args = _parser().parse_args()

    with _console() as console:
        logger.addHandler(RichHandler(console=console))
        logger.debug(f"parsed {args=}")

        # Audio decoding (via pydub) requires ffmpeg; fail early if absent.
        if not shutil.which("ffmpeg"):
            console.print("[red]Fatal: ffmpeg not found on $PATH[/red]")
            sys.exit(1)

        try:
            raw = asyncio.run(_shaq(console, args))
            track = Serialize.full_track(raw)
        except KeyboardInterrupt:
            console.print("[red]Interrupted.[/red]")
            sys.exit(2)

        if args.json:
            json.dump(raw, sys.stdout, indent=2)
        else:
            # `track` was already deserialized above; no need to recompute
            # `Serialize.full_track(raw)` here.
            if not track.matches:
                print("No matches.")
            else:
                print(f"Track: {track.track.title}")
                print(f"Artist: {track.track.subtitle}")
                if args.albumcover:
                    if "images" in raw["track"]:
                        album_cover = raw["track"]["images"]["coverart"]
                        # Forces the shazam image server to fetch a
                        # high-resolution album cover.
                        album_cover_hq = album_cover.replace("/400x400cc.jpg", "/1000x1000cc.png")
                        print(f"Album Cover: {album_cover_hq}")

        # Exit nonzero when Shazam produced no match, so scripts can branch on it.
        if not track.matches:
            sys.exit(1)
| woodruffw/shaq | 116 | A CLI client for Shazam | Python | woodruffw | William Woodruff | astral-sh |
assets/static/index.js | JavaScript | const sortListAlpha = list => [...list].sort((a, b) => {
const A = a.textContent.trim(), B = b.textContent.trim();
return (A < B) ? -1 : (A > B) ? 1 : 0;
});
// Return a copy of `list` ordered by descending topic count, where each
// item's count lives in its ".til-tag-count" descendant's text.
const sortListTopicCount = list => [...list].sort((x, y) => {
  const countOf = el => parseInt(el.querySelector(".til-tag-count").textContent, 10);
  return countOf(y) - countOf(x);
});
// Re-insert the topic list's <li> items in alphabetical order.
// (append() moves existing nodes, so this reorders in place.)
function sortAlpha() {
  const list = document.querySelector(".topic-list");
  list.append(...sortListAlpha(list.querySelectorAll("li")));
}
// Re-insert the topic list's <li> items ordered by descending topic count.
function sortCount() {
  const list = document.querySelector(".topic-list");
  list.append(...sortListTopicCount(list.querySelectorAll("li")));
}
| woodruffw/tiller | 11 | Tiller Tills TILs | Rust | woodruffw | William Woodruff | astral-sh |
assets/static/style.css | CSS | body {
font-family: "Helvetica", sans-serif;
hyphens: auto;
margin: auto;
padding: 1em;
max-width: 50em;
background-color: #F6F6F6;
}
/* Post title: hugs the subheader directly below it; `text-wrap: balance`
   evens out multi-line titles. */
h1.til-title {
    margin-bottom: 0px;
    text-wrap: balance;
}

/* Code blocks: boxed, with scrolling for long lines. */
pre {
    border: 1px solid black;
    padding: 15px;
    font-size: 14px;
    overflow: scroll;
}

hr {
    margin: auto;
    border: 1px dashed gray;
}

/* Tag list: items render as inline "pill" links, so suppress list styling. */
ul.topic-list {
    list-style: none;
    list-style-type: none;
    padding: 0;
}

li.topic-list-tag {
    display: inline-block;
    margin-right: 0.25em;
    margin-top: 0.25em;
}

/* Links: classic blue, colors inverted on hover. */
a {
    color: blue;
    background-color: transparent;
    text-decoration: none;
}

a:hover {
    color: #F6F6F6;
    background-color: #0000FF;
}

/* Tag "pill" links. */
a.til-tag {
    background-color: lightgray;
    font-size: small;
    border: 1px ridge blue;
    border-radius: 15px;
    padding: 5px 10px;
    display: inline-block;
}

a:hover.til-tag {
    background-color: darkblue;
}

/* Header rows: lay title/date/tags out on a single flex line. */
header.til-header,
div.til-subheader {
    display: flex;
    align-items: center;
    gap: 0.5em;
}

.til-date {
    font-style: italic;
}

button.sort {
    background-color: lightgreen;
}

blockquote {
    background: #f9f9f9;
    border-left: 10px solid #ccc;
    margin: 1.5em 10px;
    padding: 0.5em 10px;
}

.top-link {
    margin-right: 2em;
}
| woodruffw/tiller | 11 | Tiller Tills TILs | Rust | woodruffw | William Woodruff | astral-sh |
src/config.rs | Rust | use serde::{Deserialize, Serialize};
/// Site-level configuration, deserialized from `tiller.toml`.
#[derive(Serialize, Deserialize)]
pub(crate) struct Config {
    /// The site's base URL; normalized at startup to end with `/`
    /// (or replaced with `/` entirely in --dev mode).
    pub(crate) base_url: String,
    /// Optional Mastodon profile URL — presumably for a profile link in the
    /// rendered templates; TODO confirm against the templates.
    pub(crate) mastodon: Option<String>,
    /// Links rendered at the top of pages; defaults to empty when the key
    /// is absent from the config file.
    #[serde(default)]
    pub(crate) top_links: Vec<Link>,
}
/// A single navigational link (label + destination) from the config's
/// `top_links` list. Fields are private; templates access them via serde
/// serialization.
#[derive(Serialize, Deserialize)]
pub(crate) struct Link {
    title: String,
    url: String,
}
| woodruffw/tiller | 11 | Tiller Tills TILs | Rust | woodruffw | William Woodruff | astral-sh |
src/main.rs | Rust | use std::{fs, path::PathBuf};
use anyhow::{anyhow, Context, Result};
use clap::Parser;
use config::Config;
mod config;
mod render;
mod tiller;
/// Yet another TIL tracker.
///
/// Command-line arguments, parsed by clap's derive API.
#[derive(Parser, Debug)]
#[command(version, about, long_about = None)]
struct Args {
    /// The directory to render from. Must contain a `tils` subdirectory.
    /// Defaults to $CWD/tils.
    #[arg(short, long)]
    indir: Option<PathBuf>,

    /// The directory to render into.
    /// Defaults to $CWD/site.
    #[arg(short, long)]
    outdir: Option<PathBuf>,

    /// Generates in 'dev' mode, meaning suitable for use with a local
    /// development HTTP server.
    #[arg(long)]
    dev: bool,
}
/// Entry point: resolve input/output directories, load the config and
/// optional index fragment, parse all TILs, and render the site.
fn main() -> Result<()> {
    let args = Args::parse();
    let cwd = std::env::current_dir()?;

    // Input directory: --indir, or the current working directory.
    let indir = match args.indir {
        Some(indir) => indir,
        None => cwd.clone(),
    };
    let tildir = indir.join("tils");

    // Optional Markdown fragment rendered at the top of the index page.
    let index = indir.join("_index.md");
    let index = match index.is_file() {
        true => Some(fs::read_to_string(index)?),
        false => None,
    };

    let mut config = toml::from_str::<Config>(
        &fs::read_to_string(indir.join("tiller.toml"))
            .with_context(|| "could not load config file")?,
    )?;

    if args.dev {
        config.base_url = "/".into();
    } else {
        // All URL joining assumes a terminating `/`.
        if !config.base_url.ends_with('/') {
            config.base_url.push('/');
        }
    }

    let outdir = match args.outdir {
        Some(outdir) => outdir,
        None => [cwd, "site".into()].iter().collect(),
    };

    if !tildir.is_dir() {
        return Err(anyhow!("expected directory at {tildir:?}"));
    }

    if !outdir.is_dir() {
        // `create_dir_all` also creates missing parents, so a nested
        // --outdir like `build/site` works on a fresh checkout (plain
        // `create_dir` would fail there).
        std::fs::create_dir_all(&outdir).with_context(|| "failed to create output directory")?;
    }

    let tiller = tiller::Tiller::new();
    let tils = tiller.till(&tildir)?;

    let renderer = render::Renderer::new(outdir, config, index, tils)?;
    renderer.render()
}
| woodruffw/tiller | 11 | Tiller Tills TILs | Rust | woodruffw | William Woodruff | astral-sh |
src/render.rs | Rust | use std::{collections::BTreeMap, path::PathBuf};
use anyhow::{Context, Result};
use comrak::{markdown_to_html, Options};
use handlebars::{handlebars_helper, Handlebars};
use rss::{CategoryBuilder, ChannelBuilder, ItemBuilder};
use rust_embed::Embed;
use serde::Serialize;
use syntect::highlighting::ThemeSet;
use crate::{
config::Config,
tiller::{Meta, TILs, TIL},
};
/// Handlebars page templates, embedded into the binary at compile time.
#[derive(Embed)]
#[folder = "assets/templates"]
#[include = "*.hbs"]
struct Templates;

/// Handlebars partials shared between templates, embedded at compile time.
#[derive(Embed)]
#[folder = "assets/partials"]
#[include = "*.hbs"]
struct Partials;

/// Static assets (CSS/JS) copied verbatim into the output directory.
#[derive(Embed)]
#[folder = "assets/static"]
struct Static;
/// Template context for the site index page.
#[derive(Serialize)]
struct Index<'a> {
    config: &'a Config,
    /// Pre-rendered HTML for the optional `_index.md` fragment.
    index_fragment: Option<&'a str>,
    /// Tag name -> number of TILs carrying that tag.
    tag_counts: BTreeMap<&'a str, usize>,
    /// The most recent TILs (the caller decides how many).
    recent: Vec<&'a TIL>,
}
/// Template context for a single tag's category page.
#[derive(Serialize)]
struct Category<'a> {
    config: &'a Config,
    tag: &'a str,
    tils: Vec<&'a TIL>,
}
/// Template context for an individual TIL's page: the TIL's metadata and
/// its pre-rendered HTML body, plus the site config.
#[derive(Serialize)]
struct TILPost<'a> {
    config: &'a Config,
    meta: &'a Meta,
    content: &'a str,
}

impl<'a> TILPost<'a> {
    /// Borrow a `TIL`'s parts into a post context for rendering.
    fn new(config: &'a Config, til: &'a TIL) -> Self {
        Self {
            config,
            meta: &til.meta,
            content: &til.content,
        }
    }
}
/// Renders the whole site (index, category pages, posts, RSS, static
/// assets) into `outdir` from the parsed TILs.
pub(crate) struct Renderer {
    outdir: PathBuf,
    config: Config,
    /// Optional index fragment, already converted from Markdown to HTML.
    index: Option<String>,
    tils: TILs,
    /// Handlebars registry pre-loaded with embedded templates and partials.
    hbs: Handlebars<'static>,
}
impl Renderer {
    /// Builds a renderer: registers the embedded templates/partials and a
    /// `slugify` helper, and pre-renders the optional index fragment from
    /// Markdown (with the same extensions used elsewhere in tiller).
    pub(crate) fn new(
        outdir: PathBuf,
        config: Config,
        index: Option<String>,
        tils: TILs,
    ) -> Result<Self> {
        let mut hbs = Handlebars::new();
        hbs.set_strict_mode(true);
        hbs.register_embed_templates::<Templates>()?;
        // Partials intentionally have .hbs removed since it's visual clutter
        // when referenced within the templates themselves.
        hbs.register_embed_templates_with_extension::<Partials>(".hbs")?;

        // Inject some useful helpers.
        handlebars_helper!(slugify: |x: String| slug::slugify(x));
        hbs.register_helper("slugify", Box::new(slugify));

        let mut options = Options::default();
        options.extension.footnotes = true;
        options.extension.strikethrough = true;
        options.extension.superscript = true;
        options.extension.underline = true;
        options.extension.table = true;

        let index = index.map(|i| markdown_to_html(&i, &options));

        Ok(Self {
            outdir,
            config,
            index,
            tils,
            hbs,
        })
    }

    /// Writes the entire site into `self.outdir`: static assets, the index
    /// page, per-tag category pages, per-TIL post pages, and an RSS feed.
    pub(crate) fn render(&self) -> Result<()> {
        // Static assets (CSS, JS).
        std::fs::write(
            self.outdir.join("style.css"),
            Static::get("style.css").unwrap().data,
        )?;
        std::fs::write(
            self.outdir.join("index.js"),
            Static::get("index.js").unwrap().data,
        )?;
        // Stylesheet for syntect's class-based code highlighting.
        std::fs::write(
            self.outdir.join("syntect.css"),
            syntect::html::css_for_theme_with_class_style(
                // TODO: Make this configurable.
                &ThemeSet::load_defaults().themes["Solarized (dark)"],
                syntect::html::ClassStyle::Spaced,
            )?,
        )?;

        // Index page.
        let index = Index {
            config: &self.config,
            index_fragment: self.index.as_deref(),
            tag_counts: self.tils.tag_counts(),
            recent: self.tils.by_age().take(20).collect(),
        };
        let index_html = self.hbs.render("index.hbs", &index)?;
        std::fs::write(self.outdir.join("index.html"), index_html)?;

        // Category pages: one directory per tag, each with an index.html.
        let categories_dir = self.outdir.join("category");
        std::fs::create_dir_all(&categories_dir)
            .with_context(|| "failed to create categories dir")?;
        for (tag, tils) in self.tils.by_tag() {
            let category_dir = categories_dir.join(tag);
            std::fs::create_dir_all(&category_dir)
                .with_context(|| "failed to create individual category dir")?;

            let category = Category {
                config: &self.config,
                tag,
                tils,
            };
            let category_html = self.hbs.render("category.hbs", &category)?;
            std::fs::write(category_dir.join("index.html"), category_html)?;
        }

        // Individual TILs: one directory per slugified title.
        let posts_dir = self.outdir.join("post");
        std::fs::create_dir_all(&posts_dir).with_context(|| "failed to create posts dir")?;
        for til in self.tils.0.iter() {
            let slug = slug::slugify(&til.meta.title);
            let post_dir = posts_dir.join(&slug);
            std::fs::create_dir_all(&post_dir)
                .with_context(|| "failed to create individual post dir")?;

            let til_html = self
                .hbs
                .render("til.hbs", &TILPost::new(&self.config, til))?;
            std::fs::write(post_dir.join("index.html"), &til_html)?;
        }

        // RSS feed: the 20 most recent TILs.
        // TODO: Per-category feeds?
        let mut items = vec![];
        for til in self.tils.by_age().take(20) {
            items.push(
                ItemBuilder::default()
                    .categories(
                        til.meta
                            .tags
                            .iter()
                            .map(|t| CategoryBuilder::default().name(t).build())
                            .collect::<Vec<_>>(),
                    )
                    .title(til.meta.title.clone())
                    // This is technically wrong, since RSS requires RFC 822
                    // timestamps. But I can't be bothered to munge into
                    // such an annoying format.
                    .pub_date(til.meta.date.clone())
                    .content(til.content.clone())
                    .build(),
            );
        }
        let channel = ChannelBuilder::default()
            .title("TILs")
            .link(&self.config.base_url)
            .items(items)
            .build();
        std::fs::write(self.outdir.join("feed.rss"), channel.to_string())?;

        Ok(())
    }
}
| woodruffw/tiller | 11 | Tiller Tills TILs | Rust | woodruffw | William Woodruff | astral-sh |
src/tiller.rs | Rust | use std::{
collections::{BTreeMap, BTreeSet},
path::Path,
};
use anyhow::{anyhow, Result};
use comrak::{
markdown_to_html_with_plugins, options,
plugins::syntect::{SyntectAdapter, SyntectAdapterBuilder},
Options,
};
use gray_matter::{engine::YAML, Matter};
use serde::{Deserialize, Serialize};
/// A Tiller tills TILs.
///
/// Holds the (reusable) front-matter parser, Markdown options, and the
/// syntect adapter used to highlight fenced code blocks.
pub(crate) struct Tiller {
    matter: Matter<YAML>,
    md_options: Options<'static>,
    md_adapter: SyntectAdapter,
}
impl Tiller {
    /// Creates a `Tiller` with the Markdown extensions this site uses
    /// (footnotes, strikethrough, superscript, underline, tables) and a
    /// CSS-class-emitting syntect adapter for code-fence highlighting.
    pub(crate) fn new() -> Self {
        // TODO: Consider making the theme configurable.
        let mut options = Options::default();
        options.extension.footnotes = true;
        options.extension.strikethrough = true;
        options.extension.superscript = true;
        options.extension.underline = true;
        options.extension.table = true;

        Self {
            matter: Matter::<YAML>::new(),
            md_options: options,
            md_adapter: SyntectAdapterBuilder::new().css().build(),
        }
    }

    /// Parses every `*.md` file in `tildir` into a [`TIL`] (YAML front
    /// matter + Markdown body rendered to HTML) and returns them sorted by
    /// date, ascending.
    ///
    /// # Errors
    ///
    /// Fails if the directory can't be read, a file can't be read, its
    /// front matter can't be parsed, or the front matter is missing.
    pub(crate) fn till(&self, tildir: &Path) -> Result<TILs> {
        let mut tils = vec![];
        for til_file in tildir.read_dir()? {
            let til_file = til_file?.path();
            if !til_file.to_string_lossy().ends_with(".md") {
                continue;
            }

            let raw_til = std::fs::read_to_string(&til_file)?;
            let parsed = self
                .matter
                .parse::<Meta>(&raw_til)
                .map_err(|_| anyhow!("couldn't parse front matter"))?;

            let mut plugins = options::Plugins::default();
            plugins.render.codefence_syntax_highlighter = Some(&self.md_adapter);
            let content =
                markdown_to_html_with_plugins(&parsed.content, &self.md_options, &plugins);

            tils.push(TIL {
                meta: parsed.data.ok_or(anyhow!("missing front matter data"))?,
                content,
            })
        }

        // TODO: impl Ord for TIL
        // `String` is totally ordered, so compare dates with `Ord::cmp`
        // directly instead of `partial_cmp(..).unwrap()`.
        tils.sort_unstable_by(|a, b| a.meta.date.cmp(&b.meta.date));

        Ok(TILs(tils))
    }
}
/// A TIL's YAML front matter.
#[derive(Deserialize, Debug, Clone, Serialize)]
pub(crate) struct Meta {
    pub(crate) title: String,
    /// Tags, deduplicated and kept sorted by the set.
    pub(crate) tags: BTreeSet<String>,
    /// Date as a string; sorted lexicographically elsewhere, so this is
    /// assumed to be an ISO-8601-style date — TODO confirm.
    pub(crate) date: String,
    /// Optional origin/source link for the TIL.
    pub(crate) origin: Option<String>,
}
#[allow(clippy::upper_case_acronyms)]
#[derive(Debug, Clone, Serialize)]
/// A single "TIL".
pub(crate) struct TIL {
    pub(crate) meta: Meta,
    /// The TIL body, already rendered from Markdown to HTML.
    pub(crate) content: String,
}

/// All parsed TILs, sorted by date (ascending) at construction time.
pub(crate) struct TILs(pub(crate) Vec<TIL>);
impl TILs {
    /// Returns all TILs, newest first.
    pub(crate) fn by_age(&self) -> impl Iterator<Item = &TIL> {
        let mut sorted = self.0.iter().collect::<Vec<_>>();
        // `String: Ord`, so compare directly instead of `partial_cmp(..).unwrap()`.
        sorted.sort_by(|a, b| a.meta.date.cmp(&b.meta.date));
        sorted.reverse();
        sorted.into_iter()
    }

    /// Every tag that appears on at least one TIL (with repeats; callers
    /// dedupe via map insertion).
    fn tags(&self) -> impl Iterator<Item = &str> {
        self.0
            .iter()
            .flat_map(|til| til.meta.tags.iter())
            .map(|s| s.as_str())
    }

    /// Groups TILs by tag; within each tag, TILs are ordered newest first.
    pub(crate) fn by_tag(&self) -> BTreeMap<&str, Vec<&TIL>> {
        let mut tils_by_tag = BTreeMap::new();
        for tag in self.tags() {
            let mut tils = self
                .0
                .iter()
                .filter(|til| til.meta.tags.contains(tag))
                .collect::<Vec<_>>();
            tils.sort_by(|a, b| a.meta.date.cmp(&b.meta.date));
            tils.reverse();
            tils_by_tag.insert(tag, tils);
        }
        tils_by_tag
    }

    /// Number of TILs per tag, keyed (and therefore sorted) by tag name.
    pub(crate) fn tag_counts(&self) -> BTreeMap<&str, usize> {
        self.by_tag()
            .into_iter()
            .map(|(tag, tils)| (tag, tils.len()))
            .collect()
    }
}
| woodruffw/tiller | 11 | Tiller Tills TILs | Rust | woodruffw | William Woodruff | astral-sh |
src/main.rs | Rust | #![forbid(unsafe_code)]
use std::fs;
use std::io::{self, Read};
use anyhow::{Context, Result};
use clap::Parser;
/// Convert TOML to JSON
///
/// Command-line arguments; name/version/about are pulled from Cargo
/// metadata at compile time.
#[derive(Parser)]
#[command(name = env!("CARGO_PKG_NAME"))]
#[command(version = env!("CARGO_PKG_VERSION"))]
#[command(about = env!("CARGO_PKG_DESCRIPTION"))]
struct Args {
    /// pretty print the JSON
    #[arg(short, long)]
    pretty: bool,

    /// the TOML to convert
    /// ("-" means read from stdin, which is also the default)
    #[arg(default_value = "-")]
    input: String,
}
/// Read TOML from stdin or a file, parse it, and emit it as JSON on stdout.
fn main() -> Result<()> {
    let args = Args::parse();

    // Collect the entire input up front: `toml` only parses in-memory
    // strings, so there is nothing to gain from streaming the read side.
    let input_src = &args.input;
    let input_buf = if input_src == "-" {
        let mut buf = String::new();
        io::stdin()
            .read_to_string(&mut buf)
            .with_context(|| "failed to collect stdin")?;
        buf
    } else {
        fs::read_to_string(input_src)
            .with_context(|| format!("failed to collect from input: {input_src}"))?
    };

    // Parse into a generic `toml::Value`, since we accept arbitrary valid TOML.
    let value = toml::from_str::<toml::Value>(&input_buf)
        .with_context(|| format!("parsing TOML from {input_src} failed"))?;

    // Serialize back out as JSON; `serde_json` streams directly to stdout.
    let result = if args.pretty {
        serde_json::to_writer_pretty(io::stdout(), &value)
    } else {
        serde_json::to_writer(io::stdout(), &value)
    };
    result.with_context(|| "JSON serialization and/or stdout streaming failed")?;

    Ok(())
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Sanity-check the clap derive: `debug_assert` panics if the argument
    /// definitions are internally inconsistent.
    #[test]
    fn test_args() {
        use clap::CommandFactory;
        Args::command().debug_assert();
    }
}
| woodruffw/toml2json | 92 | A very small CLI for converting TOML to JSON | Rust | woodruffw | William Woodruff | astral-sh |
build.rs | Rust | use std::env;
use std::fs;
use std::io::{BufRead, BufReader, BufWriter, Write};
use std::path::Path;
use phf_codegen::Map;
use quote::quote;
/* This build script contains a "parser" for the USB ID database.
* "Parser" is in scare-quotes because it's really a line matcher with a small amount
* of context needed for pairing nested entities (e.g. devices) with their parents (e.g. vendors).
*/
// these are the definitions for the generated maps that will be written to the source file
// Each prologue is the `static` declaration that precedes its generated
// phf map in the emitted source file; the map literal is appended after it.
const VENDOR_PROLOGUE: &str = "static USB_IDS: phf::Map<u16, Vendor> = ";
const CLASS_PROLOGUE: &str = "static USB_CLASSES: phf::Map<u8, Class> = ";
const AUDIO_TERMINAL_PROLOGUE: &str = "static USB_AUDIO_TERMINALS: phf::Map<u16, AudioTerminal> = ";
const HID_ID_PROLOGUE: &str = "static USB_HID_IDS: phf::Map<u8, Hid> = ";
const HID_R_PROLOGUE: &str = "static USB_HID_R_TYPES: phf::Map<u8, HidItemType> = ";
const BIAS_PROLOGUE: &str = "static USB_BIASES: phf::Map<u8, Bias> = ";
const PHY_PROLOGUE: &str = "static USB_PHYS: phf::Map<u8, Phy> = ";
const HUT_PROLOGUE: &str = "static USB_HUTS: phf::Map<u8, HidUsagePage> = ";
const LANG_PROLOGUE: &str = "static USB_LANGS: phf::Map<u16, Language> = ";
const HID_CC_PROLOGUE: &str = "static USB_HID_CCS: phf::Map<u8, HidCountryCode> = ";
const TERMINAL_PROLOGUE: &str = "static USB_VIDEO_TERMINALS: phf::Map<u16, VideoTerminal> = ";
/// Anything with a numeric ID that can be used as a phf map key.
trait CgEntry<T> {
    fn id(&self) -> T;
}

/// A vendor record with its nested devices (codegen-side representation).
struct CgVendor {
    id: u16,
    name: String,
    devices: Vec<CgDevice>,
}

/// A device record with its nested interfaces.
struct CgDevice {
    id: u16,
    name: String,
    interfaces: Vec<CgInterface>,
}

/// A class record with its nested sub-classes.
struct CgClass {
    id: u8,
    name: String,
    sub_classes: Vec<CgSubClass>,
}

type CgSubClass = CgParentType<u8, CgProtocol>;

/// Generic "entity with children" record (e.g. sub-class -> protocols,
/// usage page -> usages, language -> dialects).
struct CgParentType<T, C> {
    id: T,
    name: String,
    children: Vec<C>,
}

impl<T: Copy, C: CgEntry<T>> CgEntry<T> for CgParentType<T, C> {
    fn id(&self) -> T {
        self.id
    }
}

/// Generic leaf record: just an ID and a name.
struct CgType<T> {
    id: T,
    name: String,
}

impl<T: Copy> CgEntry<T> for CgType<T> {
    fn id(&self) -> T {
        self.id
    }
}

// Aliases naming each record kind in the usb.ids file.
type CgInterface = CgType<u8>;
type CgProtocol = CgType<u8>;
type CgAtType = CgType<u16>;
type CgHidType = CgType<u8>;
type CgRType = CgType<u8>;
type CgRBiasType = CgType<u8>;
type CgPhyType = CgType<u8>;
type CgHidUsage = CgType<u16>;
type CgHut = CgParentType<u8, CgHidUsage>;
type CgDialect = CgType<u8>;
type CgLang = CgParentType<u16, CgDialect>;
type CgCountryCode = CgType<u8>;
type CgTerminalType = CgType<u16>;
/// Parser state parses only the type for the current section, this is because some
/// parsers are ambiguous without context; device.interface == subclass.protocol for example.
///
/// Each variant carries: the phf map being built, the in-progress entry
/// (flushed when the next entry or section starts), and — for nested
/// sections — the ID of the current child being appended to.
enum ParserState<'a> {
    Vendors(Map<'a, u16>, Option<CgVendor>, u16),
    Classes(Map<'a, u8>, Option<CgClass>, u8),
    AtType(Map<'a, u16>, Option<CgAtType>),
    HidType(Map<'a, u8>, Option<CgHidType>),
    RType(Map<'a, u8>, Option<CgRType>),
    BiasType(Map<'a, u8>, Option<CgRBiasType>),
    PhyType(Map<'a, u8>, Option<CgPhyType>),
    HutType(Map<'a, u8>, Option<CgHut>),
    Lang(Map<'a, u16>, Option<CgLang>),
    CountryCode(Map<'a, u8>, Option<CgCountryCode>),
    TerminalType(Map<'a, u16>, Option<CgTerminalType>),
}
impl<'a> ParserState<'a> {
    /// Return the prologue string for the current state; the type definition
    fn prologue_str(&self) -> &'static str {
        match self {
            ParserState::Vendors(_, _, _) => VENDOR_PROLOGUE,
            ParserState::Classes(_, _, _) => CLASS_PROLOGUE,
            ParserState::AtType(_, _) => AUDIO_TERMINAL_PROLOGUE,
            ParserState::HidType(_, _) => HID_ID_PROLOGUE,
            ParserState::RType(_, _) => HID_R_PROLOGUE,
            ParserState::BiasType(_, _) => BIAS_PROLOGUE,
            ParserState::PhyType(_, _) => PHY_PROLOGUE,
            ParserState::HutType(_, _) => HUT_PROLOGUE,
            ParserState::Lang(_, _) => LANG_PROLOGUE,
            ParserState::CountryCode(_, _) => HID_CC_PROLOGUE,
            ParserState::TerminalType(_, _) => TERMINAL_PROLOGUE,
        }
    }

    /// Emit any pending entries to the map
    ///
    /// Flushes the in-progress entry (if any) into the state's phf map; a
    /// no-op when there is no pending entry.
    fn emit(&mut self) {
        match self {
            ParserState::Vendors(m, Some(vendor), _) => {
                m.entry(vendor.id, quote!(#vendor).to_string());
            }
            ParserState::Classes(m, Some(class), _) => {
                m.entry(class.id, quote!(#class).to_string());
            }
            ParserState::AtType(m, Some(t)) | ParserState::TerminalType(m, Some(t)) => {
                m.entry(t.id(), quote!(#t).to_string());
            }
            ParserState::HidType(m, Some(t))
            | ParserState::RType(m, Some(t))
            | ParserState::BiasType(m, Some(t))
            | ParserState::CountryCode(m, Some(t))
            | ParserState::PhyType(m, Some(t)) => {
                m.entry(t.id(), quote!(#t).to_string());
            }
            ParserState::HutType(m, Some(t)) => {
                m.entry(t.id, quote!(#t).to_string());
            }
            ParserState::Lang(m, Some(t)) => {
                m.entry(t.id, quote!(#t).to_string());
            }
            _ => {}
        }
    }

    /// Detects the next state based on the header line
    ///
    /// Not very efficient but since it only checks # lines and required length it is not terrible
    ///
    /// On a recognized section header, finalizes the current state (writing
    /// its map to `output`) and returns the fresh state for the new section.
    fn next_from_header(&mut self, line: &str, output: &mut impl Write) -> Option<ParserState<'a>> {
        if line.len() < 7 || !line.starts_with('#') {
            return None;
        }
        // Match on the first 7 bytes of the header comment, which uniquely
        // identifies each section in the usb.ids file.
        match &line[..7] {
            "# C cla" => {
                self.finalize(output);
                Some(ParserState::Classes(Map::<u8>::new(), None, 0u8))
            }
            "# AT te" => {
                self.finalize(output);
                Some(ParserState::AtType(Map::<u16>::new(), None))
            }
            "# HID d" => {
                self.finalize(output);
                Some(ParserState::HidType(Map::<u8>::new(), None))
            }
            "# R ite" => {
                self.finalize(output);
                Some(ParserState::RType(Map::<u8>::new(), None))
            }
            "# BIAS " => {
                self.finalize(output);
                Some(ParserState::BiasType(Map::<u8>::new(), None))
            }
            "# PHY i" => {
                self.finalize(output);
                Some(ParserState::PhyType(Map::<u8>::new(), None))
            }
            "# HUT h" => {
                self.finalize(output);
                Some(ParserState::HutType(Map::<u8>::new(), None))
            }
            "# L lan" => {
                self.finalize(output);
                Some(ParserState::Lang(Map::<u16>::new(), None))
            }
            "# HCC c" => {
                self.finalize(output);
                Some(ParserState::CountryCode(Map::<u8>::new(), None))
            }
            "# VT te" => {
                self.finalize(output);
                Some(ParserState::TerminalType(Map::<u16>::new(), None))
            }
            _ => None,
        }
    }

    /// Process a line of input for the current state
    fn process(&mut self, line: &str) {
        if line.is_empty() || line.starts_with('#') {
            return;
        }
        // Switch parser state based on line prefix and current state
        // this relies on ordering of classes and types in the file...
        match self {
            ParserState::Vendors(m, ref mut curr_vendor, ref mut curr_device_id) => {
                if let Ok((name, id)) = parser::vendor(line) {
                    // New vendor: flush the previous one first.
                    if let Some(cv) = curr_vendor {
                        m.entry(cv.id, quote!(#cv).to_string());
                    }

                    // Set our new vendor as the current vendor.
                    *curr_vendor = Some(CgVendor {
                        id,
                        name: name.into(),
                        devices: vec![],
                    });
                // We should always have a current vendor; failure here indicates a malformed input.
                } else {
                    let curr_vendor = curr_vendor
                        .as_mut()
                        .expect("No parent vendor whilst parsing vendors");
                    if let Ok((name, id)) = parser::device(line) {
                        curr_vendor.devices.push(CgDevice {
                            id,
                            name: name.into(),
                            interfaces: vec![],
                        });
                        *curr_device_id = id;
                    } else if let Ok((name, id)) = parser::interface(line) {
                        let curr_device = curr_vendor
                            .devices
                            .iter_mut()
                            .find(|d| d.id == *curr_device_id)
                            .expect("No parent device whilst parsing interfaces");
                        curr_device.interfaces.push(CgInterface {
                            id,
                            name: name.into(),
                        });
                    }
                }
            }
            ParserState::Classes(m, ref mut curr_class, ref mut curr_class_id) => {
                if let Ok((name, id)) = parser::class(line) {
                    if let Some(cv) = curr_class {
                        m.entry(cv.id, quote!(#cv).to_string());
                    }

                    // Set our new class as the current class.
                    *curr_class = Some(CgClass {
                        id,
                        name: name.into(),
                        sub_classes: vec![],
                    });
                } else {
                    let curr_class = curr_class
                        .as_mut()
                        .expect("No parent class whilst parsing classes");
                    if let Ok((name, id)) = parser::sub_class(line) {
                        curr_class.sub_classes.push(CgSubClass {
                            id,
                            name: name.into(),
                            children: vec![],
                        });
                        *curr_class_id = id;
                    } else if let Ok((name, id)) = parser::protocol(line) {
                        let curr_device = curr_class
                            .sub_classes
                            .iter_mut()
                            .find(|d| d.id == *curr_class_id)
                            .expect("No parent sub-class whilst parsing protocols");
                        curr_device.children.push(CgProtocol {
                            id,
                            name: name.into(),
                        });
                    }
                }
            }
            ParserState::AtType(m, ref mut current) => {
                let (name, id) =
                    parser::audio_terminal_type(line).expect("Invalid audio terminal line");
                if let Some(cv) = current {
                    m.entry(cv.id, quote!(#cv).to_string());
                }

                // Set our new class as the current class.
                *current = Some(CgAtType {
                    id,
                    name: name.into(),
                });
            }
            ParserState::HidType(m, ref mut current) => {
                let (name, id) = parser::hid_type(line).expect("Invalid hid type line");
                if let Some(cv) = current {
                    m.entry(cv.id, quote!(#cv).to_string());
                }

                // Set our new class as the current class.
                *current = Some(CgHidType {
                    id,
                    name: name.into(),
                });
            }
            ParserState::RType(m, ref mut current) => {
                let (name, id) = parser::hid_item_type(line).expect("Invalid hid item type line");
                if let Some(cv) = current {
                    m.entry(cv.id, quote!(#cv).to_string());
                }

                // Set our new class as the current class.
                *current = Some(CgRType {
                    id,
                    name: name.into(),
                });
            }
            ParserState::BiasType(m, ref mut current) => {
                let (name, id) = parser::bias_type(line).expect("Invalid bias type line");
                if let Some(cv) = current {
                    m.entry(cv.id, quote!(#cv).to_string());
                }

                // Set our new class as the current class.
                *current = Some(CgRBiasType {
                    id,
                    name: name.into(),
                });
            }
            ParserState::PhyType(m, ref mut current) => {
                let (name, id) = parser::phy_type(line).expect("Invalid phy type line");
                if let Some(cv) = current {
                    m.entry(cv.id, quote!(#cv).to_string());
                }

                // Set our new class as the current class.
                *current = Some(CgPhyType {
                    id,
                    name: name.into(),
                });
            }
            ParserState::HutType(m, ref mut current) => {
                if let Ok((name, id)) = parser::hut_type(line) {
                    if let Some(cv) = current {
                        m.entry(cv.id, quote!(#cv).to_string());
                    }

                    // Set our new class as the current class.
                    *current = Some(CgHut {
                        id,
                        name: name.into(),
                        children: vec![],
                    });
                } else {
                    let curr_hut = current.as_mut().expect("No parent hut whilst parsing huts");
                    if let Ok((name, id)) = parser::hid_usage_name(line) {
                        curr_hut.children.push(CgHidUsage {
                            id,
                            name: name.into(),
                        });
                    }
                }
            }
            ParserState::Lang(m, ref mut current) => {
                if let Ok((name, id)) = parser::language(line) {
                    if let Some(cv) = current {
                        m.entry(cv.id, quote!(#cv).to_string());
                    }

                    // Set our new class as the current class.
                    *current = Some(CgLang {
                        id,
                        name: name.into(),
                        children: vec![],
                    });
                } else {
                    let curr_lang = current
                        .as_mut()
                        .expect("No parent lang whilst parsing langs");
                    if let Ok((name, id)) = parser::dialect(line) {
                        curr_lang.children.push(CgDialect {
                            id,
                            name: name.into(),
                        });
                    }
                }
            }
            ParserState::CountryCode(m, ref mut current) => {
                let (name, id) = parser::country_code(line).expect("Invalid country code line");
                if let Some(cv) = current {
                    m.entry(cv.id, quote!(#cv).to_string());
                }

                // Set our new class as the current class.
                *current = Some(CgCountryCode {
                    id,
                    name: name.into(),
                });
            }
            ParserState::TerminalType(m, ref mut current) => {
                let (name, id) = parser::terminal_type(line).expect("Invalid terminal type line");
                if let Some(cv) = current {
                    m.entry(cv.id, quote!(#cv).to_string());
                }

                // Set our new class as the current class.
                *current = Some(CgTerminalType {
                    id,
                    name: name.into(),
                });
            }
        }
    }

    /// Emit the prologue and map to the output file.
    ///
    /// Should only be called once per state, used before switching.
    fn finalize(&mut self, output: &mut impl Write) {
        // Emit any pending contained within
        self.emit();
        // Write the prologue
        writeln!(output, "{}", self.prologue_str()).unwrap();
        // And the map itself
        match self {
            ParserState::Vendors(m, _, _) => {
                writeln!(output, "{};", m.build()).unwrap();
            }
            ParserState::Classes(m, _, _) => {
                writeln!(output, "{};", m.build()).unwrap();
            }
            ParserState::AtType(m, _) | ParserState::TerminalType(m, _) => {
                writeln!(output, "{};", m.build()).unwrap();
            }
            ParserState::HidType(m, _)
            | ParserState::RType(m, _)
            | ParserState::BiasType(m, _)
            | ParserState::CountryCode(m, _)
            | ParserState::PhyType(m, _) => {
                writeln!(output, "{};", m.build()).unwrap();
            }
            ParserState::HutType(m, _) => {
                writeln!(output, "{};", m.build()).unwrap();
            }
            ParserState::Lang(m, _) => {
                writeln!(output, "{};", m.build()).unwrap();
            }
        }
    }

    /// Return the next state for the current state based on the standard ordering of the file
    ///
    /// Not as robust as the next_from_header but at lot less overhead. The issue is reliably detecting the end of a section; # comments are not reliable as there are some '# typo?' strings
    #[allow(dead_code)]
    fn next(&mut self, output: &mut impl Write) -> Option<ParserState<'a>> {
        self.finalize(output);
        match self {
            ParserState::Vendors(_, _, _) => {
                Some(ParserState::Classes(Map::<u8>::new(), None, 0u8))
            }
            ParserState::Classes(_, _, _) => Some(ParserState::AtType(Map::<u16>::new(), None)),
            ParserState::AtType(_, _) => Some(ParserState::HidType(Map::<u8>::new(), None)),
            ParserState::HidType(_, _) => Some(ParserState::RType(Map::<u8>::new(), None)),
            ParserState::RType(_, _) => Some(ParserState::BiasType(Map::<u8>::new(), None)),
            ParserState::BiasType(_, _) => Some(ParserState::PhyType(Map::<u8>::new(), None)),
            ParserState::PhyType(_, _) => Some(ParserState::HutType(Map::<u8>::new(), None)),
            ParserState::HutType(_, _) => Some(ParserState::Lang(Map::<u16>::new(), None)),
            ParserState::Lang(_, _) => Some(ParserState::CountryCode(Map::<u8>::new(), None)),
            ParserState::CountryCode(_, _) => {
                Some(ParserState::TerminalType(Map::<u16>::new(), None))
            }
            ParserState::TerminalType(_, _) => None,
        }
    }
}
#[allow(clippy::redundant_field_names)]
/// Build-script entry point: stream `src/usb.ids` through the state
/// machine and write the generated phf maps to `$OUT_DIR/usb_ids.cg.rs`.
fn main() {
    let out_dir = env::var_os("OUT_DIR").unwrap();
    let src_path = Path::new("src/usb.ids");
    let dest_path = Path::new(&out_dir).join("usb_ids.cg.rs");

    let input = {
        let f = fs::File::open(src_path).unwrap();
        BufReader::new(f)
    };

    let mut output = {
        let f = fs::File::create(dest_path).unwrap();
        BufWriter::new(f)
    };

    // Parser state machine starts with vendors (first in file)
    let mut parser_state: ParserState = ParserState::Vendors(Map::<u16>::new(), None, 0u16);

    // `map_while(Result::ok)` stops at the first I/O error instead of
    // silently skipping it (`flatten()` would skip errors, and can loop
    // forever on a persistent read error).
    for line in input.lines().map_while(Result::ok) {
        // Check for a state change based on the header comments
        if let Some(next_state) = parser_state.next_from_header(&line, &mut output) {
            parser_state = next_state;
        }
        // Process line for current parser
        parser_state.process(&line);
    }

    // Last call for last parser in file
    parser_state.finalize(&mut output);

    println!("cargo:rerun-if-changed=build.rs");
    println!("cargo:rerun-if-changed=src/usb.ids");
}
mod parser {
    //! Line parsers for the `usb.ids` record formats: each function matches
    //! one record kind by its prefix/indentation, parses the fixed-width hex
    //! ID, and leaves the name as the unconsumed remainder of the line.

    use std::num::ParseIntError;

    use nom::bytes::complete::{tag, take};
    use nom::character::complete::{hex_digit1, tab};
    use nom::combinator::{all_consuming, map_parser, map_res};
    use nom::sequence::{delimited, terminated};
    use nom::{IResult, Parser};

    /// Builds a parser for a `size`-digit hex ID, converted via the
    /// supplied `from_str_radix` (e.g. `u16::from_str_radix`).
    fn id<T, F>(size: usize, from_str_radix: F) -> impl Fn(&str) -> IResult<&str, T>
    where
        F: Fn(&str, u32) -> Result<T, ParseIntError>,
    {
        move |input| {
            map_res(map_parser(take(size), all_consuming(hex_digit1)), |input| {
                from_str_radix(input, 16)
            })
            .parse(input)
        }
    }

    // Vendor section: `XXXX  name`, with devices indented one tab and
    // interfaces two tabs.
    pub fn vendor(input: &str) -> IResult<&str, u16> {
        let id = id(4, u16::from_str_radix);
        terminated(id, tag("  ")).parse(input)
    }

    pub fn device(input: &str) -> IResult<&str, u16> {
        let id = id(4, u16::from_str_radix);
        delimited(tab, id, tag("  ")).parse(input)
    }

    pub fn interface(input: &str) -> IResult<&str, u8> {
        let id = id(2, u8::from_str_radix);
        delimited(tag("\t\t"), id, tag("  ")).parse(input)
    }

    // Class section: `C XX  name`, sub-classes one tab, protocols two tabs.
    pub fn class(input: &str) -> IResult<&str, u8> {
        let id = id(2, u8::from_str_radix);
        delimited(tag("C "), id, tag("  ")).parse(input)
    }

    pub fn sub_class(input: &str) -> IResult<&str, u8> {
        let id = id(2, u8::from_str_radix);
        delimited(tab, id, tag("  ")).parse(input)
    }

    pub fn protocol(input: &str) -> IResult<&str, u8> {
        let id = id(2, u8::from_str_radix);
        delimited(tag("\t\t"), id, tag("  ")).parse(input)
    }

    // Flat sections, each identified by its line prefix.
    pub fn audio_terminal_type(input: &str) -> IResult<&str, u16> {
        let id = id(4, u16::from_str_radix);
        delimited(tag("AT "), id, tag("  ")).parse(input)
    }

    pub fn hid_type(input: &str) -> IResult<&str, u8> {
        let id = id(2, u8::from_str_radix);
        delimited(tag("HID "), id, tag("  ")).parse(input)
    }

    pub fn hid_item_type(input: &str) -> IResult<&str, u8> {
        let id = id(2, u8::from_str_radix);
        delimited(tag("R "), id, tag("  ")).parse(input)
    }

    pub fn bias_type(input: &str) -> IResult<&str, u8> {
        let id = id(1, u8::from_str_radix);
        delimited(tag("BIAS "), id, tag("  ")).parse(input)
    }

    pub fn phy_type(input: &str) -> IResult<&str, u8> {
        let id = id(2, u8::from_str_radix);
        delimited(tag("PHY "), id, tag("  ")).parse(input)
    }

    // HUT section: usage pages with nested (tab-indented, 3-digit) usages.
    pub fn hut_type(input: &str) -> IResult<&str, u8> {
        let id = id(2, u8::from_str_radix);
        delimited(tag("HUT "), id, tag("  ")).parse(input)
    }

    pub fn hid_usage_name(input: &str) -> IResult<&str, u16> {
        let id = id(3, u16::from_str_radix);
        delimited(tab, id, tag("  ")).parse(input)
    }

    // Language section: languages with nested (tab-indented) dialects.
    pub fn language(input: &str) -> IResult<&str, u16> {
        let id = id(4, u16::from_str_radix);
        delimited(tag("L "), id, tag("  ")).parse(input)
    }

    pub fn dialect(input: &str) -> IResult<&str, u8> {
        let id = id(2, u8::from_str_radix);
        delimited(tab, id, tag("  ")).parse(input)
    }

    pub fn country_code(input: &str) -> IResult<&str, u8> {
        let id = id(2, u8::from_str_radix);
        delimited(tag("HCC "), id, tag("  ")).parse(input)
    }

    pub fn terminal_type(input: &str) -> IResult<&str, u16> {
        let id = id(4, u16::from_str_radix);
        delimited(tag("VT "), id, tag("  ")).parse(input)
    }
}
impl quote::ToTokens for CgVendor {
    /// Expands this vendor into a `Vendor { .. }` literal, with each of its
    /// devices expanded into a nested `Device { .. }` literal.
    fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
        let vendor_id = &self.id;
        let name = &self.name;
        let devices = self.devices.iter().map(|device| {
            let CgDevice { id, name, interfaces } = device;
            quote! {
                Device { vendor_id: #vendor_id, id: #id, name: #name, interfaces: &[#(#interfaces),*] }
            }
        });
        tokens.extend(quote! {
            Vendor { id: #vendor_id, name: #name, devices: &[#(#devices),*] }
        });
    }
}
impl quote::ToTokens for CgClass {
    /// Expands this class into a `Class { .. }` literal, with each of its
    /// subclasses expanded into a nested `SubClass { .. }` literal.
    fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
        let class_id = &self.id;
        let name = &self.name;
        let sub_classes = self.sub_classes.iter().map(|sub_class| {
            let CgSubClass { id, name, children } = sub_class;
            quote! {
                SubClass { class_id: #class_id, id: #id, name: #name, protocols: &[#(#children),*] }
            }
        });
        tokens.extend(quote! {
            Class { id: #class_id, name: #name, sub_classes: &[#(#sub_classes),*] }
        });
    }
}
impl<T: quote::ToTokens, C: quote::ToTokens> quote::ToTokens for CgParentType<T, C> {
    /// Expands into a `UsbIdWithChildren { .. }` literal with its children inlined.
    fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
        let (id, name, children) = (&self.id, &self.name, &self.children);
        tokens.extend(quote! {
            UsbIdWithChildren { id: #id, name: #name, children: &[#(#children),*] }
        });
    }
}
impl<T: quote::ToTokens> quote::ToTokens for CgType<T> {
    /// Expands into a plain `UsbId { .. }` literal.
    fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
        let (id, name) = (&self.id, &self.name);
        tokens.extend(quote! {
            UsbId { id: #id, name: #name }
        });
    }
}
| woodruffw/usb-ids.rs | 25 | Cross-platform Rust wrappers for the USB ID Repository | Rust | woodruffw | William Woodruff | astral-sh |
src/lib.rs | Rust | //!
//! Rust wrappers for the [USB ID Repository](http://www.linux-usb.org/usb-ids.html).
//!
//! The USB ID Repository is the canonical source of USB device information for most
//! Linux userspaces; this crate vendors the USB ID database to allow non-Linux hosts to
//! access the same canonical information.
//!
//! # Usage
//!
//! Iterating over all known vendors:
//!
//! ```rust
//! use usb_ids::Vendors;
//!
//! for vendor in Vendors::iter() {
//! for device in vendor.devices() {
//! println!("vendor: {}, device: {}", vendor.name(), device.name());
//! }
//! }
//! ```
//!
//! Iterating over all known classes:
//!
//! ```rust
//! use usb_ids::Classes;
//!
//! for class in Classes::iter() {
//! println!("class: {}", class.name());
//! for subclass in class.sub_classes() {
//! println!("\tsubclass: {}", subclass.name());
//! for protocol in subclass.protocols() {
//! println!("\t\tprotocol: {}", protocol.name());
//! }
//! }
//! }
//! ```
//!
//! See the individual documentation for each structure for more details.
//!
#![warn(missing_docs)]
include!(concat!(env!("OUT_DIR"), "/usb_ids.cg.rs"));
/// Represents a generic USB ID in the USB database.
///
/// Not designed to be used directly; use one of the type aliases instead.
///
/// The const parameter `ID` is a unique tag (see the `*_TAG` constants below)
/// that keeps each alias a distinct type, so each alias can carry its own
/// [`FromId`] implementation.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct UsbId<const ID: u8, T> {
    // Numeric ID as stored in the database.
    id: T,
    // Human-readable name; `'static` because the database is compiled in.
    name: &'static str,
}
impl<const ID: u8, T: Copy> UsbId<ID, T> {
    /// Returns the type's ID.
    pub fn id(&self) -> T {
        self.id
    }
    /// Returns the type's name.
    pub fn name(&self) -> &'static str {
        self.name
    }
}
/// Represents a generic USB ID in the USB database with children IDs.
///
/// Not designed to be used directly; use one of the type aliases instead.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct UsbIdWithChildren<T: Copy, C: 'static> {
    // Numeric ID as stored in the database.
    id: T,
    // Human-readable name; `'static` because the database is compiled in.
    name: &'static str,
    // Child entries (e.g. the usages of a usage page, the dialects of a language).
    children: &'static [C],
}
impl<T: Copy, C: 'static> UsbIdWithChildren<T, C> {
    /// Returns the type's ID.
    pub fn id(&self) -> T {
        self.id
    }
    /// Returns the type's name.
    pub fn name(&self) -> &'static str {
        self.name
    }
    /// Returns an iterator over the type's children.
    ///
    /// Private: aliases re-expose this under a domain-specific name
    /// (e.g. [`HidUsagePage::usages`], [`Language::dialects`]).
    fn children(&self) -> impl Iterator<Item = &'static C> {
        self.children.iter()
    }
}
/// An abstraction for iterating over all vendors in the USB database.
pub struct Vendors;
impl Vendors {
    /// Returns an iterator over all vendors in the USB database.
    ///
    /// `USB_IDS` is a static lookup table generated at build time (see the
    /// `include!` at the top of this file).
    pub fn iter() -> impl Iterator<Item = &'static Vendor> {
        USB_IDS.values()
    }
}
/// An abstraction for iterating over all classes in the USB database.
pub struct Classes;
impl Classes {
    /// Returns an iterator over all classes in the USB database.
    pub fn iter() -> impl Iterator<Item = &'static Class> {
        USB_CLASSES.values()
    }
}
/// An abstraction for iterating over all languages in the USB database.
///
/// ```
/// use usb_ids::Languages;
/// for language in Languages::iter() {
///     println!("language: {}", language.name());
///     for dialect in language.dialects() {
///         println!("\tdialect: {}", dialect.name());
///     }
/// }
/// ```
pub struct Languages;
impl Languages {
    /// Returns an iterator over all languages in the USB database.
    pub fn iter() -> impl Iterator<Item = &'static Language> {
        USB_LANGS.values()
    }
}
/// An abstraction for iterating over all HID usage pages in the USB database.
///
/// ```
/// use usb_ids::HidUsagePages;
///
/// for page in HidUsagePages::iter() {
///     println!("page: {}", page.name());
///     for usage in page.usages() {
///         println!("\tusage: {}", usage.name());
///     }
/// }
/// ```
pub struct HidUsagePages;
impl HidUsagePages {
    /// Returns an iterator over all HID usage pages in the USB database.
    pub fn iter() -> impl Iterator<Item = &'static HidUsagePage> {
        USB_HUTS.values()
    }
}
/// Represents a USB device vendor in the USB database.
///
/// Every device vendor has a vendor ID, a pretty name, and a
/// list of associated [`Device`]s.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct Vendor {
    // 16-bit vendor ID (the "VID").
    id: u16,
    name: &'static str,
    devices: &'static [Device],
}
impl Vendor {
    /// Returns the vendor's ID.
    pub fn id(&self) -> u16 {
        self.id
    }
    /// Returns the vendor's name.
    pub fn name(&self) -> &'static str {
        self.name
    }
    /// Returns an iterator over the vendor's [`Device`]s.
    pub fn devices(&self) -> impl Iterator<Item = &'static Device> {
        self.devices.iter()
    }
}
/// Represents a single device in the USB database.
///
/// Every device has a corresponding vendor, a device ID, a pretty name,
/// and a list of associated [`Interface`]s.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct Device {
    // ID of the owning vendor; used for the O(1) back-lookup in `vendor()`.
    vendor_id: u16,
    // 16-bit device/"product" ID (the "PID").
    id: u16,
    name: &'static str,
    interfaces: &'static [Interface],
}
impl Device {
    /// Returns the [`Device`] corresponding to the given vendor and product IDs,
    /// or `None` if no such device exists in the DB.
    ///
    /// ```
    /// use usb_ids::Device;
    /// let device = Device::from_vid_pid(0x1d6b, 0x0003).unwrap();
    /// assert_eq!(device.name(), "3.0 root hub");
    /// ```
    pub fn from_vid_pid(vid: u16, pid: u16) -> Option<&'static Device> {
        Vendor::from_id(vid)?.devices().find(|device| device.id == pid)
    }
    /// Returns the [`Vendor`] that this device belongs to.
    ///
    /// Looking up a vendor by device is cheap (`O(1)`).
    pub fn vendor(&self) -> &'static Vendor {
        // `vendor_id` comes from the generated tables, so the back-lookup is
        // expected to always succeed.
        USB_IDS.get(&self.vendor_id).unwrap()
    }
    /// Returns a tuple of (vendor id, device/"product" id) for this device.
    ///
    /// This is convenient for interactions with other USB libraries.
    pub fn as_vid_pid(&self) -> (u16, u16) {
        (self.vendor_id, self.id)
    }
    /// Returns the device's ID.
    pub fn id(&self) -> u16 {
        self.id
    }
    /// Returns the device's name.
    pub fn name(&self) -> &'static str {
        self.name
    }
    /// Returns an iterator over the device's [`Interface`]s.
    ///
    /// **NOTE**: The USB database does not include interface information for
    /// most devices. This list is not authoritative.
    pub fn interfaces(&self) -> impl Iterator<Item = &'static Interface> {
        self.interfaces.iter()
    }
}
/// Represents an interface to a USB device in the USB database.
///
/// Every interface has an interface ID (which is an index on the device)
/// and a pretty name.
///
/// **NOTE**: The USB database is not a canonical or authoritative source
/// of interface information for devices. Users who wish to discover interfaces
/// on their USB devices should query those devices directly.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct Interface {
    // Index of the interface on its device.
    id: u8,
    name: &'static str,
}
impl Interface {
    /// Returns the interface's ID.
    pub fn id(&self) -> u8 {
        self.id
    }
    /// Returns the interface's name.
    pub fn name(&self) -> &'static str {
        self.name
    }
}
/// Represents a USB device class in the USB database.
///
/// Every device class has a class ID, a pretty name, and a
/// list of associated [`SubClass`]s.
///
/// ```
/// use usb_ids::{Class, Classes, FromId};
/// let class = Class::from_id(0x03).unwrap();
/// assert_eq!(class.name(), "Human Interface Device");
/// ```
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct Class {
    // Base class code (first element of the class code triplet).
    id: u8,
    name: &'static str,
    sub_classes: &'static [SubClass],
}
impl Class {
    /// Returns the class's ID.
    pub fn id(&self) -> u8 {
        self.id
    }
    /// Returns the class's name.
    pub fn name(&self) -> &'static str {
        self.name
    }
    /// Returns an iterator over the class's [`SubClass`]s.
    pub fn sub_classes(&self) -> impl Iterator<Item = &'static SubClass> {
        self.sub_classes.iter()
    }
}
/// Represents a class subclass in the USB database. Subclasses are part of the
/// USB class code triplet (base class, subclass, protocol).
///
/// Contained within a [`Class`] and may contain a list of associated
/// [`Protocol`]s.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct SubClass {
    // ID of the owning class; used for the O(1) back-lookup in `class()`.
    class_id: u8,
    id: u8,
    name: &'static str,
    protocols: &'static [Protocol],
}
impl SubClass {
    /// Returns the [`SubClass`] corresponding to the given class and subclass IDs,
    /// or `None` if no such subclass exists in the DB.
    ///
    /// ```
    /// use usb_ids::SubClass;
    /// let subclass = SubClass::from_cid_scid(0x02, 0x03).unwrap();
    /// assert_eq!(subclass.name(), "Telephone");
    ///
    /// assert!(SubClass::from_cid_scid(0x3c, 0x02).is_none());
    /// ```
    pub fn from_cid_scid(class_id: u8, id: u8) -> Option<&'static Self> {
        let class = Class::from_id(class_id);
        class.and_then(|c| c.sub_classes().find(|s| s.id == id))
    }
    /// Returns the [`Class`] that this subclass belongs to.
    ///
    /// Looking up a class by subclass is cheap (`O(1)`).
    ///
    /// ```
    /// use usb_ids::SubClass;
    /// let subclass = SubClass::from_cid_scid(0x02, 0x03).unwrap();
    /// let class = subclass.class();
    /// assert_eq!(class.id(), 0x02);
    /// ```
    pub fn class(&self) -> &'static Class {
        USB_CLASSES.get(&self.class_id).unwrap()
    }
    /// Returns a tuple of (class id, subclass id) for this subclass.
    ///
    /// This is convenient for interactions with other USB libraries.
    pub fn as_cid_scid(&self) -> (u8, u8) {
        (self.class_id, self.id)
    }
    /// Returns the subclass' ID.
    pub fn id(&self) -> u8 {
        self.id
    }
    /// Returns the subclass' name.
    pub fn name(&self) -> &'static str {
        self.name
    }
    /// Returns an iterator over the subclass' [`Protocol`]s.
    ///
    /// **NOTE**: Neither the USB database nor USB-IF includes protocol
    /// information for all subclasses. This list is not authoritative.
    pub fn protocols(&self) -> impl Iterator<Item = &'static Protocol> {
        self.protocols.iter()
    }
}
/// These are tags for UsbId type aliases to make them unique and allow a
/// [`FromId`] for each alias. The values are arbitrary but must be unique.
///
/// [`std::marker::PhantomData`] would be nicer but was unable to figure out a
/// generic way to add the _tag: PhantomData in the ToToken trait
/// implementation within build.rs
const PROTOCOL_TAG: u8 = 0; // `Protocol`
const AT_TAG: u8 = 1; // `AudioTerminal`
const HID_TAG: u8 = 2; // `Hid`
const HID_TYPE_TAG: u8 = 3; // `HidItemType`
const HID_USAGE_TAG: u8 = 4; // `HidUsage`
const BIAS_TAG: u8 = 5; // `Bias`
const PHY_TAG: u8 = 6; // `Phy`
const DIALECT_TAG: u8 = 7; // `Dialect`
const HCC_TAG: u8 = 8; // `HidCountryCode`
const VT_TAG: u8 = 9; // `VideoTerminal`
/// Represents a subclass protocol in the USB database.
///
/// Protocols are part of the USB class code triplet (base class, subclass,
/// protocol), contained within a [`SubClass`].
pub type Protocol = UsbId<PROTOCOL_TAG, u8>;
impl Protocol {
    /// Returns the [`Protocol`] corresponding to the given class, subclass, and protocol IDs,
    /// or `None` if no such protocol exists in the DB.
    ///
    /// ```
    /// use usb_ids::Protocol;
    /// let protocol = Protocol::from_cid_scid_pid(0x02, 0x02, 0x05).unwrap();
    /// assert_eq!(protocol.name(), "AT-commands (3G)");
    /// ```
    pub fn from_cid_scid_pid(class_id: u8, subclass_id: u8, id: u8) -> Option<&'static Self> {
        SubClass::from_cid_scid(class_id, subclass_id)?
            .protocols()
            .find(|protocol| protocol.id == id)
    }
}
/// Represents an audio terminal type in the USB database.
///
/// ```
/// use usb_ids::{AudioTerminal, FromId};
/// let audio_terminal = AudioTerminal::from_id(0x0201).unwrap();
/// assert_eq!(audio_terminal.name(), "Microphone");
/// ```
pub type AudioTerminal = UsbId<AT_TAG, u16>;
/// Represents a HID descriptor type in the USB database.
///
/// ```
/// use usb_ids::{Hid, FromId};
/// let hid = Hid::from_id(0x22).unwrap();
/// assert_eq!(hid.name(), "Report");
/// ```
pub type Hid = UsbId<HID_TAG, u8>;
/// Represents a HID descriptor item type in the USB database.
///
/// ```
/// use usb_ids::{HidItemType, FromId};
/// let hid_item_type = HidItemType::from_id(0xb4).unwrap();
/// assert_eq!(hid_item_type.name(), "Pop");
/// ```
pub type HidItemType = UsbId<HID_TYPE_TAG, u8>;
/// Represents a HID usage page in the USB database.
///
/// Every HID usage page has a usage page ID, a pretty name, and a list of
/// associated [`HidUsage`]s.
///
/// ```
/// use usb_ids::{HidUsagePage, FromId};
/// let hid_usage_page = HidUsagePage::from_id(0x01).unwrap();
/// assert_eq!(hid_usage_page.name(), "Generic Desktop Controls");
///
/// for usage in hid_usage_page.usages() {
///     println!("usage: {}", usage.name());
/// }
/// ```
pub type HidUsagePage = UsbIdWithChildren<u8, HidUsage>;
impl HidUsagePage {
    /// Returns an iterator over the page's [`HidUsage`]s.
    ///
    /// Domain-specific alias for the private `children()` accessor.
    pub fn usages(&self) -> impl Iterator<Item = &'static HidUsage> {
        self.children()
    }
}
/// Represents a HID usage type in the USB database.
///
/// ```
/// use usb_ids::{HidUsage, HidUsagePage, FromId};
///
/// let hid_usage_page = HidUsagePage::from_id(0x01).unwrap();
/// for usage in hid_usage_page.usages() {
///     println!("usage: {}", usage.name());
/// }
/// ```
pub type HidUsage = UsbId<HID_USAGE_TAG, u16>;
impl HidUsage {
    /// Returns the [`HidUsage`] corresponding to the given usage page and usage ID,
    /// or `None` if no such usage exists in the DB.
    ///
    /// ```
    /// use usb_ids::HidUsage;
    /// let hid_usage = HidUsage::from_pageid_uid(0x01, 0x002).unwrap();
    /// assert_eq!(hid_usage.name(), "Mouse");
    /// ```
    pub fn from_pageid_uid(page_id: u8, id: u16) -> Option<&'static Self> {
        HidUsagePage::from_id(page_id)
            .and_then(|page| page.children().find(|usage| usage.id() == id))
    }
}
/// Represents physical descriptor bias type in the USB database.
///
/// ```
/// use usb_ids::{Bias, FromId};
/// let bias = Bias::from_id(0x02).unwrap();
/// assert_eq!(bias.name(), "Left Hand");
/// ```
pub type Bias = UsbId<BIAS_TAG, u8>;
/// Represents physical descriptor item type in the USB database.
///
/// ```
/// use usb_ids::{Phy, FromId};
/// let phy = Phy::from_id(0x25).unwrap();
/// assert_eq!(phy.name(), "Fifth Toe");
/// ```
pub type Phy = UsbId<PHY_TAG, u8>;
/// Represents a language type in the USB database.
///
/// Languages have a language ID, a pretty name, and a list of associated
/// [`Dialect`]s.
///
/// ```
/// use usb_ids::{Language, FromId};
/// let language = Language::from_id(0x000c).unwrap();
/// assert_eq!(language.name(), "French");
///
/// for dialect in language.dialects() {
///     println!("dialect: {}", dialect.name());
/// }
/// ```
pub type Language = UsbIdWithChildren<u16, Dialect>;
impl Language {
    /// Returns an iterator over the language's [`Dialect`]s.
    ///
    /// Domain-specific alias for the private `children()` accessor.
    pub fn dialects(&self) -> impl Iterator<Item = &'static Dialect> {
        self.children()
    }
}
/// Represents a language dialect in the USB database.
///
/// ```
/// use usb_ids::{Dialect, Language, FromId};
/// let lang = Language::from_id(0x0007).unwrap();
///
/// println!("language: {}", lang.name());
/// for dialect in lang.dialects() {
///     println!("\tdialect: {}", dialect.name());
/// }
/// ```
pub type Dialect = UsbId<DIALECT_TAG, u8>;
impl Dialect {
    /// Returns the [`Dialect`] corresponding to the given language and dialect IDs,
    /// or `None` if no such dialect exists in the DB.
    ///
    /// ```
    /// use usb_ids::Dialect;
    /// let dialect = Dialect::from_lid_did(0x0007, 0x02).unwrap();
    /// assert_eq!(dialect.name(), "Swiss");
    /// ```
    pub fn from_lid_did(language_id: u16, id: u8) -> Option<&'static Self> {
        Language::from_id(language_id)
            .and_then(|language| language.children().find(|dialect| dialect.id() == id))
    }
}
/// Represents a HID descriptor country code in the USB database.
///
/// ```
/// use usb_ids::{HidCountryCode, FromId};
/// let hid_country_code = HidCountryCode::from_id(0x29).unwrap();
/// assert_eq!(hid_country_code.name(), "Switzerland");
/// ```
pub type HidCountryCode = UsbId<HCC_TAG, u8>;
/// Represents a video class terminal type in the USB database.
///
/// ```
/// use usb_ids::{VideoTerminal, FromId};
/// let video_terminal = VideoTerminal::from_id(0x0101).unwrap();
/// assert_eq!(video_terminal.name(), "USB Streaming");
/// ```
pub type VideoTerminal = UsbId<VT_TAG, u16>;
/// A convenience trait for retrieving a top-level entity (like a [`Vendor`]) from the USB
/// database by its unique ID.
///
/// Implemented for every type that has its own top-level lookup table.
///
/// ```
/// use usb_ids::{FromId, Vendor};
/// let vendor = Vendor::from_id(0x1d6b).unwrap();
/// assert_eq!(vendor.name(), "Linux Foundation");
/// ```
pub trait FromId<T> {
    /// Returns the entity corresponding to `id`, or `None` if none exists.
    fn from_id(id: T) -> Option<&'static Self>;
}
// Each impl below queries the matching static table generated at build time
// (see the `include!` at the top of this file).
impl FromId<u16> for Vendor {
    fn from_id(id: u16) -> Option<&'static Self> {
        USB_IDS.get(&id)
    }
}
impl FromId<u8> for Class {
    fn from_id(id: u8) -> Option<&'static Self> {
        USB_CLASSES.get(&id)
    }
}
impl FromId<u16> for AudioTerminal {
    fn from_id(id: u16) -> Option<&'static Self> {
        USB_AUDIO_TERMINALS.get(&id)
    }
}
impl FromId<u8> for Hid {
    fn from_id(id: u8) -> Option<&'static Self> {
        USB_HID_IDS.get(&id)
    }
}
impl FromId<u8> for HidItemType {
    fn from_id(id: u8) -> Option<&'static Self> {
        USB_HID_R_TYPES.get(&id)
    }
}
impl FromId<u8> for HidUsagePage {
    fn from_id(id: u8) -> Option<&'static Self> {
        USB_HUTS.get(&id)
    }
}
impl FromId<u8> for Bias {
    fn from_id(id: u8) -> Option<&'static Self> {
        USB_BIASES.get(&id)
    }
}
impl FromId<u8> for Phy {
    fn from_id(id: u8) -> Option<&'static Self> {
        USB_PHYS.get(&id)
    }
}
impl FromId<u16> for Language {
    fn from_id(id: u16) -> Option<&'static Self> {
        USB_LANGS.get(&id)
    }
}
impl FromId<u8> for HidCountryCode {
    fn from_id(id: u8) -> Option<&'static Self> {
        USB_HID_CCS.get(&id)
    }
}
impl FromId<u16> for VideoTerminal {
    fn from_id(id: u16) -> Option<&'static Self> {
        USB_VIDEO_TERMINALS.get(&id)
    }
}
#[cfg(test)]
mod tests {
    //! Smoke tests asserting against specific entries in the vendored
    //! database; expectations may need updating when the database is
    //! refreshed.
    use super::*;
    #[test]
    fn test_from_id() {
        let vendor = Vendor::from_id(0x1d6b).unwrap();
        assert_eq!(vendor.name(), "Linux Foundation");
        assert_eq!(vendor.id(), 0x1d6b);
    }
    #[test]
    fn test_vendor_devices() {
        let vendor = Vendor::from_id(0x1d6b).unwrap();
        for device in vendor.devices() {
            // Every device must round-trip back to its owning vendor.
            assert_eq!(device.vendor(), vendor);
            assert!(!device.name().is_empty());
        }
    }
    #[test]
    fn test_from_vid_pid() {
        let device = Device::from_vid_pid(0x1d6b, 0x0003).unwrap();
        assert_eq!(device.name(), "3.0 root hub");
        let (vid, pid) = device.as_vid_pid();
        assert_eq!(vid, device.vendor().id());
        assert_eq!(pid, device.id());
        // `as_vid_pid` and `from_vid_pid` must be inverses.
        let device2 = Device::from_vid_pid(vid, pid).unwrap();
        assert_eq!(device, device2);
        let last_device = Device::from_vid_pid(0xffee, 0x0100).unwrap();
        assert_eq!(
            last_device.name(),
            "Card Reader Controller RTS5101/RTS5111/RTS5116"
        );
    }
    #[test]
    fn test_class_from_id() {
        let class = Class::from_id(0x03).unwrap();
        assert_eq!(class.name(), "Human Interface Device");
        assert_eq!(class.id(), 0x03);
    }
    #[test]
    fn test_subclass_from_cid_scid() {
        let subclass = SubClass::from_cid_scid(0x03, 0x01).unwrap();
        assert_eq!(subclass.name(), "Boot Interface Subclass");
        assert_eq!(subclass.id(), 0x01);
    }
    #[test]
    fn test_protocol_from_cid_scid_pid() {
        let protocol = Protocol::from_cid_scid_pid(0x03, 0x01, 0x01).unwrap();
        assert_eq!(protocol.name(), "Keyboard");
        assert_eq!(protocol.id(), 0x01);
        let protocol = Protocol::from_cid_scid_pid(0x07, 0x01, 0x03).unwrap();
        assert_eq!(protocol.name(), "IEEE 1284.4 compatible bidirectional");
        assert_eq!(protocol.id(), 0x03);
        let protocol = Protocol::from_cid_scid_pid(0xff, 0xff, 0xff).unwrap();
        // check last entry for parsing
        assert_eq!(protocol.name(), "Vendor Specific Protocol");
        assert_eq!(protocol.id(), 0xff);
    }
    #[test]
    fn test_at_from_id() {
        let at = AudioTerminal::from_id(0x0713).unwrap();
        assert_eq!(at.name(), "Synthesizer");
        assert_eq!(at.id(), 0x0713);
    }
    #[test]
    fn test_hid_from_id() {
        let hid = Hid::from_id(0x23).unwrap();
        assert_eq!(hid.name(), "Physical");
        assert_eq!(hid.id(), 0x23);
    }
    #[test]
    fn test_hid_type_from_id() {
        let hid_type = HidItemType::from_id(0xc0).unwrap();
        assert_eq!(hid_type.name(), "End Collection");
        assert_eq!(hid_type.id(), 0xc0);
    }
    #[test]
    fn test_bias_from_id() {
        let bias = Bias::from_id(0x04).unwrap();
        assert_eq!(bias.name(), "Either Hand");
        assert_eq!(bias.id(), 0x04);
    }
    #[test]
    fn test_phy_from_id() {
        let phy = Phy::from_id(0x27).unwrap();
        assert_eq!(phy.name(), "Cheek");
        assert_eq!(phy.id(), 0x27);
    }
    #[test]
    fn test_hid_usages_from_id() {
        let hid_usage_page = HidUsagePage::from_id(0x0d).unwrap();
        assert_eq!(hid_usage_page.name(), "Digitizer");
        assert_eq!(hid_usage_page.id(), 0x0d);
        let hid_usage = HidUsage::from_pageid_uid(0x0d, 0x01).unwrap();
        assert_eq!(hid_usage.name(), "Digitizer");
        assert_eq!(hid_usage.id(), 0x01);
    }
    #[test]
    fn test_language_from_id() {
        let language = Language::from_id(0x0007).unwrap();
        assert_eq!(language.name(), "German");
        assert_eq!(language.id(), 0x0007);
        let dialect = language.dialects().find(|d| d.id() == 0x02).unwrap();
        assert_eq!(dialect.name(), "Swiss");
        assert_eq!(dialect.id(), 0x02);
    }
    #[test]
    fn test_hid_country_code_from_id() {
        let hid_country_code = HidCountryCode::from_id(0x29).unwrap();
        assert_eq!(hid_country_code.name(), "Switzerland");
        assert_eq!(hid_country_code.id(), 0x29);
        let hid_country_code = HidCountryCode::from_id(0x00).unwrap();
        assert_eq!(hid_country_code.name(), "Not supported");
    }
    #[test]
    fn test_video_terminal_from_id() {
        let video_terminal = VideoTerminal::from_id(0x0100).unwrap();
        assert_eq!(video_terminal.name(), "USB Vendor Specific");
        assert_eq!(video_terminal.id(), 0x0100);
        let video_terminal = VideoTerminal::from_id(0x0403).unwrap();
        assert_eq!(video_terminal.name(), "Component Video");
    }
}
| woodruffw/usb-ids.rs | 25 | Cross-platform Rust wrappers for the USB ID Repository | Rust | woodruffw | William Woodruff | astral-sh |
index.d.ts | TypeScript | import type {VFileMessage} from 'vfile-message'
export {deadOrAlive, defaultAnchorAllowlist, defaultSleep} from './lib/index.js'
/**
 * Allow extra anchors.
 * The first item is a regular expression to match URLs (origin and path,
 * so without search or hash),
 * and the second item is a regular expression to match hashes (without `#`).
 * When both match,
 * the hash is allowed,
 * and no `missing-anchor` error is used.
 */
export type AnchorAllow = [url: RegExp, anchor: RegExp]
/**
 * Configuration.
 */
export interface Options {
  /**
   * Allow anchors (default: `defaultAnchorAllowlist`);
   * each tuple is checked to match URLs (origin and path,
   * so without search or hash),
   * and then to match hashes (without `#`);
   * when both match,
   * the hash is allowed,
   * and no `missing-anchor` error is used.
   */
  anchorAllowlist?: ReadonlyArray<Readonly<AnchorAllow>> | null | undefined
  /**
   * Check whether URL hashes point to elements (default: `true`).
   */
  checkAnchor?: boolean | null | undefined
  /**
   * Find URLs in the final resource (default: `true`);
   * currently applies to HTML.
   */
  findUrls?: boolean | null | undefined
  /**
   * Follow HTML redirects (default: `true`);
   * a `<meta content=0;to http-equiv=refresh>` can be useful for static sites
   * such as those on GH pages.
   */
  followMetaHttpEquiv?: boolean | null | undefined
  /**
   * Inclusive maximum redirects to follow (default: `5`).
   */
  maxRedirects?: number | null | undefined
  /**
   * Inclusive maximum number to try again on failures (default: `1`).
   */
  maxRetries?: number | null | undefined
  /**
   * Accept `user-content-` prefix in `id` on elements (default: `true`).
   */
  resolveClobberPrefix?: boolean | null | undefined
  /**
   * Calculate milliseconds to sleep between tries (default: `defaultSleep`).
   */
  sleep?: Sleep | null | undefined
  /**
   * Timeout for HTTP request in milliseconds (default: `3000`).
   */
  timeout?: number | null | undefined
  /**
   * User agent (default: `'Mozilla/5.0 … Safari/537.36'`, a modern Chrome on macOS user agent).
   */
  userAgent?: string | null | undefined
}
/**
 * Result for a URL that is alive.
 */
interface ResultAlive {
  /**
   * Messages collected while checking (warnings and similar; none fatal for
   * an alive result).
   */
  messages: Array<VFileMessage>
  /**
   * Whether all redirects were permanent.
   */
  permanent: boolean | undefined
  /**
   * Status.
   */
  status: 'alive'
  /**
   * Final URL (after following redirects).
   */
  url: string
  /**
   * Further URLs if `findUrls: true` and the resource was HTML.
   */
  urls: Set<string> | undefined
}
/**
 * Result for a URL that is dead.
 */
interface ResultDead {
  /**
   * Messages where the first is the fatal error explaining why the URL is
   * dead.
   */
  messages: [VFileMessage, ...Array<VFileMessage>]
  /**
   * Whether all redirects were permanent.
   */
  permanent: boolean | undefined
  /**
   * Status.
   */
  status: 'dead'
  /**
   * Final URL if alive; always `undefined` for a dead result.
   */
  url: undefined
  /**
   * Further URLs if `findUrls: true` and the resource was HTML.
   */
  urls: Set<string> | undefined
}
/**
 * Result: either alive (with the final URL) or dead (with a fatal message
 * first).
 */
export type Result = ResultAlive | ResultDead
/**
 * Calculate milliseconds to sleep between tries.
 */
export type Sleep = (retries: number) => number
| wooorm/dead-or-alive | 50 | check if urls are dead or alive | JavaScript | wooorm | Titus | |
index.js | JavaScript | // Note: types exposed from `index.d.ts`.
export {deadOrAlive, defaultAnchorAllowlist, defaultSleep} from './lib/index.js'
| wooorm/dead-or-alive | 50 | check if urls are dead or alive | JavaScript | wooorm | Titus | |
lib/anchors.js | JavaScript | /**
* @import {Element, Root} from 'hast'
*/
/**
* @typedef Anchor
* @property {Element} [systemId]
* @property {Element} [systemName]
* @property {Element} [userId]
* @property {Element} [userName]
*
* @typedef Options
* @property {boolean} resolveClobberPrefix
* Accept `user-content-` prefix on elements.
*/
import {visit} from 'unist-util-visit'
// Prefix stripped from `id`s when `resolveClobberPrefix` is on.
const clobberPrefix = 'user-content-'
/**
 * Collect every anchor target (element `id`s and `<a name>`s) in a tree.
 *
 * @param {Root} tree
 * @param {Options} options
 * @returns {Map<string, Anchor>}
 */
export function getAnchors(tree, options) {
  /** @type {Map<string, Anchor>} */
  const anchors = new Map()

  visit(tree, 'element', function (node) {
    if (node.properties.id) {
      record('Id', String(node.properties.id), node)
    }

    // Continue walking: `id` is preferred.
    if (node.tagName === 'a' && node.properties.name) {
      record('Name', String(node.properties.name), node)
    }
  })

  return anchors

  /**
   * Register one anchor value, classifying it as system- or user-generated.
   *
   * @param {'Id' | 'Name'} field
   * @param {string} value
   * @param {Element} node
   */
  function record(field, value, node) {
    /** @type {'system' | 'user'} */
    let kind = 'system'

    if (options.resolveClobberPrefix && value.startsWith(clobberPrefix)) {
      value = value.slice(clobberPrefix.length)
      kind = 'user'
    }

    let anchor = anchors.get(value)

    if (anchor === undefined) {
      anchor = {}
      anchors.set(value, anchor)
    }

    anchor[`${kind}${field}`] = node
  }
}
| wooorm/dead-or-alive | 50 | check if urls are dead or alive | JavaScript | wooorm | Titus | |
lib/fetch.default.js | JavaScript | const fetch_ = fetch
export {fetch_ as fetch}
| wooorm/dead-or-alive | 50 | check if urls are dead or alive | JavaScript | wooorm | Titus | |
lib/fetch.node.js | JavaScript | // Note: we use `undici` as it supports mocking.
export {fetch} from 'undici'
| wooorm/dead-or-alive | 50 | check if urls are dead or alive | JavaScript | wooorm | Titus | |
lib/index.js | JavaScript | /**
* @import {AnchorAllow, Options, Result, Sleep} from 'dead-or-alive'
* @import {Root} from 'hast'
*/
/**
* @typedef State
* State.
* @property {ReadonlyArray<Readonly<AnchorAllow>>} anchorAllowlist
* Allow anchors.
* @property {boolean} checkAnchor
* Check whether URL hashes point to elements.
* @property {boolean} findUrls
* Find links in the final resource.
* @property {boolean} followMetaHttpEquiv
* Follow HTML redirects.
* @property {number} maxRedirects
* Maximum redirects to follow, inclusive.
* @property {number} maxRetries
* Maximum number to try again on failures, inclusive.
* @property {Array<VFileMessage>} messages
* Collected messages.
* @property {boolean | undefined} permanent
* Whether all redirects were permanent.
* @property {boolean} resolveClobberPrefix
* Accept `user-content-` prefix in `id` on elements.
* @property {number} redirects
* Number of redirects.
* @property {number} retries
* Number of retries.
* @property {Sleep} sleep
* Calculate milliseconds to sleep between tries.
* @property {number} timeout
* Timeout for HTTP request in milliseconds.
* @property {Set<string> | undefined} urls
* Further URLs, if `findUrls: true`.
* @property {string} userAgent
* User agent.
*/
import {parse} from 'fast-content-type-parse'
import {fromHtmlIsomorphic} from 'hast-util-from-html-isomorphic'
import {isElement} from 'hast-util-is-element'
import {select} from 'hast-util-select'
import {urlAttributes} from 'html-url-attributes'
import {visit} from 'unist-util-visit'
import {VFileMessage} from 'vfile-message'
import {getAnchors} from './anchors.js'
import {propose} from './propose.js'
import {sharedDeclarativeRefresh} from './shared-declarative-refresh.js'
import {fetch} from '#fetch'
/** @type {Readonly<Options>} */
const emptyOptions = {}
const defaultUserAgent =
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/117.0.0.0 Safari/537.36'
const documentation = 'https://github.com/wooorm/dead-or-alive'
const listFormat = new Intl.ListFormat('en', {
style: 'long',
type: 'disjunction'
})
/**
 * Check if a url is dead or alive.
 *
 * ###### Notes
 *
 * To improve performance,
 * decrease `maxRetries` and/or decrease the value used
 * for `sleep`.
 * The normal behavior is to assume connections might be flaky and to sleep a
 * while and retry a couple times.
 *
 * If you do not care about HTML redirects,
 * whether anchors work,
 * and what further URLs are used on,
 * you can pass `checkAnchor: false`,
 * `findUrls: false`,
 * and `followMetaHttpEquiv: false`,
 * which enables a fast path without parsing HTML.
 *
 * @param {Readonly<URL> | string} href
 *   URL.
 * @param {Readonly<Options> | null | undefined} [options]
 *   Configuration (optional).
 * @returns {Promise<Result>}
 *   Result.
 */
export async function deadOrAlive(href, options) {
  const settings = options || emptyOptions

  // Resolve every option to its default inline.
  /** @type {State} */
  const state = {
    anchorAllowlist: settings.anchorAllowlist || defaultAnchorAllowlist,
    checkAnchor: settings.checkAnchor !== false,
    findUrls: settings.findUrls !== false,
    followMetaHttpEquiv: settings.followMetaHttpEquiv !== false,
    maxRedirects:
      typeof settings.maxRedirects === 'number' ? settings.maxRedirects : 5,
    maxRetries:
      typeof settings.maxRetries === 'number' ? settings.maxRetries : 1,
    messages: [],
    permanent: undefined,
    redirects: 0,
    resolveClobberPrefix: settings.resolveClobberPrefix !== false,
    retries: 0,
    sleep: settings.sleep || defaultSleep,
    timeout: typeof settings.timeout === 'number' ? settings.timeout : 3000,
    urls: undefined,
    userAgent: settings.userAgent || defaultUserAgent
  }

  try {
    // Note: an invalid `href` string also throws here, yielding a dead result.
    const url = await deadOrAliveInternal(
      state,
      typeof href === 'string' ? new URL(href) : href
    )

    return {
      messages: state.messages,
      permanent: state.permanent,
      status: 'alive',
      url: url.href,
      urls: state.urls
    }
  } catch (error) {
    // The thrown message is the fatal reason; it goes first.
    const cause = /** @type {VFileMessage} */ (error)

    return {
      messages: [cause, ...state.messages],
      permanent: state.permanent,
      status: 'dead',
      url: undefined,
      urls: state.urls
    }
  }
}
/**
 * Allow certain anchors.
 *
 * This currently allows text fragments everywhere
 * (hashes starting with `:~:`, matched against any URL).
 *
 * @type {ReadonlyArray<Readonly<AnchorAllow>>}
 */
export const defaultAnchorAllowlist = [[/./, /^:~:/]]
/**
 * Calculate milliseconds to sleep between tries.
 *
 * Cubic backoff, defined as `retries ** 3 * 1000`:
 * the 1st retry sleeps 1s,
 * the 2nd 8s,
 * the 3rd 27s,
 * and so on.
 *
 * @param {number} retries
 *   Retry count.
 * @returns {number}
 *   Milliseconds to sleep.
 */
export function defaultSleep(retries) {
  // Cube the attempt number, then scale seconds to milliseconds.
  const cubed = retries * retries * retries
  return cubed * 1000
}
/**
 * Check if one URL is dead or alive, following HTTP redirects.
 *
 * Performs a manual-redirect `GET`, retries transient failures (per
 * `state.maxRetries` / `state.sleep`), and dispatches the final response to
 * `handleTextHtml` or `handleUnknown`.
 *
 * @param {State} state
 *   Info passed around (mutated: `retries`, `redirects`, `permanent`,
 *   `messages`).
 * @param {Readonly<URL>} url
 *   URL.
 * @returns {Promise<URL>}
 *   Final URL.
 */
async function deadOrAliveInternal(state, url) {
  if (state.redirects > state.maxRedirects) {
    const message = new VFileMessage(
      'Unexpected redirect to `' + url.href + '`, too many redirects',
      {ruleId: 'max-redirect', source: 'dead-or-alive'}
    )
    message.url = documentation + '#' + message.ruleId
    message.fatal = true
    throw message
  }
  /** @type {Awaited<ReturnType<typeof fetch>>} */
  let response
  // Create a manually abortable fetch,
  // instead of `AbortSignal.timeout(state.timeout)`.
  // This way we only abort slow requests; not the other work.
  const controller = new AbortController()
  const id = setTimeout(function () {
    controller.abort()
  }, state.timeout)
  try {
    response = await fetch(url, {
      headers: {
        // Fix: the header must be spelled `User-Agent`; an object key such
        // as `userAgent` is sent as the bogus `useragent` header instead.
        'User-Agent': state.userAgent,
        // <https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Upgrade-Insecure-Requests>
        'Upgrade-Insecure-Requests': '1',
        accept:
          'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
        'accept-encoding': 'gzip',
        'accept-language': 'en-US,en;q=0.9'
      },
      method: 'GET',
      redirect: 'manual',
      signal: controller.signal
    })
  } catch (error) {
    if (state.retries < state.maxRetries) return retry(state, url)
    const cause = /** @type {Error} */ (error)
    const message = new VFileMessage(
      'Unexpected error fetching `' + url.href + '`',
      {cause, ruleId: 'fetch', source: 'dead-or-alive'}
    )
    message.url = documentation + '#' + message.ruleId
    message.fatal = true
    throw message
  } finally {
    // Always cancel the abort timer — also when `fetch` threw — so a dead
    // timer cannot linger for up to `state.timeout` after failure.
    clearTimeout(id)
  }
  // Reset retries if successful.
  state.retries = 0
  if (response.status >= 300 && response.status < 400) {
    const location = response.headers.get('location')
    if (location) {
      const redirect = new URL(location, url)
      // `301`/`308` are permanent; anything else (302, 303, 307) is not.
      // `permanent` only flips to `true` if it was never set, so one
      // temporary redirect in a chain marks the whole result temporary.
      if (response.status === 301 || response.status === 308) {
        if (state.permanent === undefined) state.permanent = true
      } else {
        state.permanent = false
      }
      if (url.hash) {
        const message = new VFileMessage(
          'Unexpected hash in URL `' +
            url.href +
            '` that redirects to `' +
            redirect.href +
            '` losing the hash, remove the hash from the original URL',
          {
            ruleId: 'lost-hash-with-redirect',
            source: 'dead-or-alive'
          }
        )
        message.url = documentation + '#' + message.ruleId
        state.messages.push(message)
      }
      state.redirects++
      return deadOrAliveInternal(state, redirect)
    }
  }
  if (!response.ok) {
    if (
      state.retries < state.maxRetries &&
      // When the server says the client is wrong, we don’t try again.
      (response.status < 400 || response.status >= 500)
    ) {
      return retry(state, url)
    }
    const message = new VFileMessage(
      'Unexpected not ok response `' +
        response.status +
        '` (`' +
        response.statusText +
        '`) on `' +
        response.url +
        '`',
      {ruleId: 'dead', source: 'dead-or-alive'}
    )
    message.url = documentation + '#' + message.ruleId
    message.fatal = true
    throw message
  }
  // Note: defaulting to HTML might not be great?
  const contentType = response.headers.get('content-type') || undefined
  if (contentType) {
    const type = parse(contentType)
    if (type.type === 'text/html') {
      return handleTextHtml(state, url, response)
    }
  }
  return handleUnknown(state, url, response, contentType)
}
/**
 * Sleep (per `state.sleep`) and then try the same URL again.
 *
 * @param {State} state
 *   Info passed around (mutated: `retries`).
 * @param {Readonly<URL>} url
 *   URL.
 * @returns {Promise<URL>}
 *   Final URL.
 */
async function retry(state, url) {
  state.retries++
  const delay = state.sleep(state.retries)
  await new Promise(function (resolve) {
    setTimeout(resolve, delay)
  })
  return deadOrAliveInternal(state, url)
}
/**
 * Handle an HTML response: follow `meta[http-equiv=refresh]` redirects,
 * optionally collect further URLs from the document, and check that the
 * fragment (hash) of the original URL matches an element.
 *
 * @param {State} state
 *   Info passed around (mutated: `urls`, `permanent`, `redirects`,
 *   `messages`).
 * @param {Readonly<URL>} url
 *   Original request URL (its hash, if any, is checked).
 * @param {Response} response
 *   Response whose content type is `text/html`.
 * @returns {Promise<URL>}
 *   Final URL.
 */
// eslint-disable-next-line complexity
async function handleTextHtml(state, url, response) {
  // If we don’t need to dig into the HTML, we’re done.
  if (
    !(
      (state.checkAnchor && url.hash) ||
      state.findUrls ||
      state.followMetaHttpEquiv
    )
  ) {
    return new URL(response.url)
  }
  const text = await response.text()
  const tree = fromHtmlIsomorphic(text)
  if (state.followMetaHttpEquiv) {
    const meta = select('meta[http-equiv=refresh]', tree)
    // Note: this also throws a proper `VFileMessage` when an invalid URL
    // is defined in the HTML.
    const redirect =
      meta && meta.properties.content
        ? sharedDeclarativeRefresh(
            String(meta.properties.content),
            new URL(response.url)
          )
        : undefined
    if (redirect) {
      // As with HTTP redirects, a fragment cannot survive the redirect;
      // warn (non-fatal) when one would be lost.
      if (state.checkAnchor && url.hash) {
        const message = new VFileMessage(
          'Unexpected hash in URL `' +
            url.href +
            '` that redirects with `meta[http-equiv=refresh]` to `' +
            redirect.href +
            '` losing the hash, remove the hash from the original URL',
          {
            ruleId: 'lost-hash-with-meta-http-equiv',
            source: 'dead-or-alive'
          }
        )
        message.url = documentation + '#' + message.ruleId
        state.messages.push(message)
      }
      // Treat all HTML redirects as non-permanent.
      state.permanent = false
      state.redirects++
      return deadOrAliveInternal(state, redirect)
    }
  }
  if (state.findUrls) {
    state.urls = findUrls(url, tree)
  }
  if (state.checkAnchor && url.hash) {
    const responseUrl = new URL(response.url)
    const baseUrl = responseUrl.origin + responseUrl.pathname
    const fragment = url.hash.slice(1)
    const result = new URL(response.url)
    result.hash = url.hash
    // Allowlisted fragments (such as text fragments) need no matching
    // element; accept them right away.
    for (const [urlRe, fragmentRe] of state.anchorAllowlist) {
      if (urlRe.test(baseUrl) && fragmentRe.test(fragment)) {
        return result
      }
    }
    // NOTE(review): `getAnchors` is defined elsewhere in this file;
    // presumably it maps fragment → nodes matched by `id`/`a[name]`, with
    // and without the clobber prefix — confirm against its definition.
    const anchors = getAnchors(tree, state)
    const match = anchors.get(fragment) || {}
    const node =
      match.systemId || match.systemName || match.userId || match.userName
    if (node) {
      return result
    }
    // No anchor found: include suggestions of similar existing anchors in
    // the fatal message.
    const proposals = listFormat.format(
      propose(fragment, [...anchors.keys()]).map((d) => '`' + d + '`')
    )
    const message = new VFileMessage(
      'Unexpected missing anchor element on `' +
        response.url +
        '` for fragment `' +
        fragment +
        '`, remove if unneeded or refer to an existing element' +
        (proposals ? ' such as ' + proposals : ''),
      {ruleId: 'missing-anchor', source: 'dead-or-alive'}
    )
    message.url = documentation + '#' + message.ruleId
    message.fatal = true
    throw message
  }
  // Allow the hash to remain.
  return new URL(response.url + url.hash)
}
/**
 * Handle a response that is not HTML.
 *
 * Warns (non-fatal) when the original URL carried a hash, because a
 * fragment cannot resolve in non-HTML content.
 *
 * @param {State} state
 *   Info passed around (mutated: `messages`).
 * @param {Readonly<URL>} url
 *   Original request URL.
 * @param {Response} response
 *   Response.
 * @param {string | undefined} contentType
 *   Content type of the response, if any.
 * @returns {URL}
 *   Final URL.
 */
function handleUnknown(state, url, response, contentType) {
  const hasHash = Boolean(url.hash)

  if (hasHash && state.checkAnchor) {
    const message = new VFileMessage(
      'Unexpected hash in URL `' +
        url.href +
        '` to non-html (`' +
        contentType +
        '`) losing the hash, remove the hash from the original URL',
      {ruleId: 'lost-hash-with-non-html', source: 'dead-or-alive'}
    )
    message.url = documentation + '#' + message.ruleId
    state.messages.push(message)
  }

  return new URL(response.url)
}
/**
 * Collect the absolute URLs referenced by elements in a tree.
 *
 * @param {Readonly<URL>} url
 *   Base URL to resolve relative values against.
 * @param {Root} tree
 *   Tree to search.
 * @returns {Set<string>}
 *   Absolute URLs.
 */
function findUrls(url, tree) {
  /** @type {Set<string>} */
  const urls = new Set()

  visit(tree, 'element', function (node) {
    // Look at every own property that is a known URL-carrying attribute
    // valid on this particular element.
    for (const key of Object.keys(node.properties)) {
      if (
        !Object.hasOwn(urlAttributes, key) ||
        !isElement(node, urlAttributes[key])
      ) {
        continue
      }

      const value = node.properties[key]

      if (Array.isArray(value)) {
        for (const item of value) add(item)
      } else if (typeof value === 'string') {
        add(value)
      }
    }
  })

  return urls

  /**
   * @param {boolean | number | string} value
   * @returns {undefined}
   */
  function add(value) {
    if (typeof value !== 'string') return

    try {
      urls.add(new URL(value, url).href)
    } catch {
      // Note: we currently silently bail on invalid URLs.
    }
  }
}
| wooorm/dead-or-alive | 50 | check if urls are dead or alive | JavaScript | wooorm | Titus | |
lib/propose.js | JavaScript | /**
* @typedef {[value: string, score: number]} ValueScoreTuple
*/
import {levenshteinEditDistance} from 'levenshtein-edit-distance'
// Only keep proposals whose edit distance is below half the value's length.
const relativeThreshold = 0.5
// Maximum number of proposals returned.
const max = 4
/**
 * Suggest up to `max` values from `ideas` most similar to `value`.
 *
 * @param {string} value
 *   Value to compare against.
 * @param {ReadonlyArray<string>} ideas
 *   Candidate values.
 * @returns {Array<string>}
 *   Closest candidates, best first.
 */
export function propose(value, ideas) {
  // Score each idea, order by score, drop poor matches, keep the values.
  const scored = ideas.map((d) => score(value, d))
  scored.sort(sort)
  return scored.filter(filter).map(pick).slice(0, max)
}
/**
 * Pair a candidate with its edit distance relative to `value`'s length.
 *
 * @param {string} value
 *   Value to compare against.
 * @param {string} d
 *   Candidate.
 * @returns {ValueScoreTuple}
 *   Candidate and its relative score (lower is closer).
 */
function score(value, d) {
  const distance = levenshteinEditDistance(value, d)
  return [d, distance / value.length]
}
/**
 * Comparator ordering tuples by ascending score.
 *
 * @param {ValueScoreTuple} a
 *   Left tuple.
 * @param {ValueScoreTuple} b
 *   Right tuple.
 * @returns {number}
 *   Negative when `a` scores lower, positive when higher, `0` when equal.
 */
function sort(a, b) {
  const [, scoreA] = a
  const [, scoreB] = b
  return scoreA - scoreB
}
/**
 * Whether a tuple's score is good enough to propose.
 *
 * @param {ValueScoreTuple} d
 *   Tuple.
 * @returns {boolean}
 *   Whether the score is below the threshold.
 */
function filter(d) {
  const [, relativeScore] = d
  return relativeScore < relativeThreshold
}
/**
 * Extract the value from a tuple, discarding the score.
 *
 * @param {ValueScoreTuple} d
 *   Tuple.
 * @returns {string}
 *   Value.
 */
function pick(d) {
  const [value] = d
  return value
}
| wooorm/dead-or-alive | 50 | check if urls are dead or alive | JavaScript | wooorm | Titus | |
lib/shared-declarative-refresh.js | JavaScript | import {VFileMessage} from 'vfile-message'
/**
 * Implementation of <https://html.spec.whatwg.org/multipage/semantics.html#shared-declarative-refresh-steps>.
 *
 * Parses the `content` value of a `meta[http-equiv=refresh]` element
 * (such as `5; url=/next`) and returns the redirect target, if any.
 * The numbered comments below refer to the steps in the WHATWG spec.
 *
 * @param {string} input
 *   `content` attribute value.
 * @param {Readonly<URL>} from
 *   Base URL the target is resolved against.
 * @returns {URL | undefined}
 *   Redirect target, or `undefined` when `input` declares no (usable) URL.
 */
export function sharedDeclarativeRefresh(input, from) {
  // 2.
  let position = 0
  // 3.
  skipAsciiWhitespace()
  // 4.
  let before = position
  // 5. Skip time.
  while (position < input.length && asciiDigit(input.charCodeAt(position))) {
    position++
  }
  // 6. and 6.1: there must be at least one digit or a leading dot.
  if (position === before && !dot(input.charCodeAt(position))) {
    return
  }
  // 7. (unneeded).
  // 8. Discard more digits and dots.
  while (
    position < input.length &&
    asciiDigitOrDot(input.charCodeAt(position))
  ) {
    position++
  }
  // 9. (unneeded).
  // 10.
  before = position
  if (position < input.length) {
    // 10.2.
    skipAsciiWhitespace()
    // 10.3.
    if (commaOrSemicolon(input.charCodeAt(position))) {
      position++
    }
    // 10.4.
    skipAsciiWhitespace()
  }
  // 10.1: if no `,` or `;` was found, exit; or: 11.0.
  if (before === position || position === input.length) return
  // 11.1.
  let urlString = input.slice(position)
  let quote = 0
  // 11.2: steps 11.2–11.7 skip an optional (case-insensitive) `url=`
  // prefix; bailing out of any step parses the rest as the URL itself.
  let code = input.charCodeAt(position)
  if (code !== 85 /* `U` */ && code !== 117 /* `u` */) return skipQuotes()
  position++
  // 11.3.
  code = input.charCodeAt(position)
  if (code !== 82 /* `R` */ && code !== 114 /* `r` */) return parse()
  position++
  // 11.4.
  code = input.charCodeAt(position)
  if (code !== 76 /* `L` */ && code !== 108 /* `l` */) return parse()
  position++
  // 11.5.
  skipAsciiWhitespace()
  // 11.6.
  if (input.charCodeAt(position) !== 61 /* `=` */) return parse()
  position++
  // 11.7.
  skipAsciiWhitespace()
  // 11.8.
  return skipQuotes()
  // 11.8: strip a matching pair of single or double quotes, if present.
  function skipQuotes() {
    const code = input.charCodeAt(position)
    if (code === 34 /* `"` */ || code === 39 /* `'` */) {
      quote = code
      position++
    }
    // 11.9.
    urlString = input.slice(position)
    // 11.10: cut at the closing quote (content after it is ignored).
    if (quote) {
      const index = urlString.indexOf(String.fromCharCode(quote))
      if (index !== -1) urlString = urlString.slice(0, index)
    }
    return parse()
  }
  // Resolve the extracted string against `from`; an invalid URL is a
  // fatal, documented `VFileMessage` rather than a silent `undefined`.
  function parse() {
    try {
      return new URL(urlString, from)
    } catch (error) {
      const cause = /** @type {Error} */ (error)
      const message = new VFileMessage(
        'Unexpected invalid URL `' +
          urlString +
          '` in `content` on `meta[http-equiv=refresh] relative to `' +
          from.href +
          '`',
        {cause, ruleId: 'shared-declarative-refresh', source: 'dead-or-alive'}
      )
      message.url = 'https://github.com/wooorm/dead-or-alive#' + message.ruleId
      message.fatal = true
      throw message
    }
  }
  // Advance `position` past any run of ASCII whitespace.
  function skipAsciiWhitespace() {
    while (
      position < input.length &&
      asciiWhitespace(input.charCodeAt(position))
    ) {
      position++
    }
  }
  /**
   * @param {number} code
   * @returns {boolean}
   */
  function asciiDigit(code) {
    return code >= 48 /* `0` */ && code <= 57
  }
  /**
   * @param {number} code
   * @returns {boolean}
   */
  function asciiDigitOrDot(code) {
    return asciiDigit(code) || dot(code)
  }
  /**
   * @param {number} code
   * @returns {boolean}
   */
  function asciiWhitespace(code) {
    return (
      code === 9 /* `\t` */ ||
      code === 10 /* `\n` */ ||
      code === 12 /* `\f` */ ||
      code === 13 /* `\r` */ ||
      code === 32 /* ` ` */
    )
  }
  /**
   * @param {number} code
   * @returns {boolean}
   */
  function dot(code) {
    return code === 46 /* `.` */
  }
  /**
   * @param {number} code
   * @returns {boolean}
   */
  function commaOrSemicolon(code) {
    return code === 44 /* `,` */ || code === 59 /* `;` */
  }
}
| wooorm/dead-or-alive | 50 | check if urls are dead or alive | JavaScript | wooorm | Titus | |
test.js | JavaScript | /**
* @import {VFileMessage} from 'vfile-message'
*/
import assert from 'node:assert/strict'
import test from 'node:test'
import {deadOrAlive} from 'dead-or-alive'
import {MockAgent, getGlobalDispatcher, setGlobalDispatcher} from 'undici'
import {sharedDeclarativeRefresh} from './lib/shared-declarative-refresh.js'
import {propose} from './lib/propose.js'
test('core', async function (t) {
  await t.test('should expose the public api', async function () {
    // Keep this list in sync with the package's named exports.
    assert.deepEqual(Object.keys(await import('dead-or-alive')).sort(), [
      'deadOrAlive',
      'defaultAnchorAllowlist',
      'defaultSleep'
    ])
  })
})
// Note: these tests hit the live network (github.com, mdn.io) and can be
// slow or flaky when offline or when those sites change.
test('deadOrAlive (real)', async function (t) {
  await t.test('should work for a real url', async function () {
    const result = await deadOrAlive('https://github.com')
    assert.equal(result.status, 'alive')
    assert.equal(result.url, 'https://github.com/')
    assert.equal(result.messages.length, 0)
  })
  await t.test('should work for a real `http` url', async function () {
    // `http:` is expected to be upgraded/redirected to `https:`.
    const result = await deadOrAlive('http://github.com')
    assert.equal(result.status, 'alive')
    assert.equal(result.url, 'https://github.com/')
    assert.equal(result.messages.length, 0)
  })
  await t.test('should work for a redirecting real url', async function () {
    const result = await deadOrAlive('https://mdn.io')
    assert.equal(result.status, 'alive')
    assert.equal(
      result.url,
      'https://developer.mozilla.org/en-US/docs/Web/JavaScript'
    )
    assert.equal(result.messages.length, 0)
  })
  await t.test('should work for a real url w/ anchor', async function () {
    const result = await deadOrAlive(
      'https://github.com/wooorm/dead-or-alive#dead-or-alive'
    )
    assert.equal(result.status, 'alive')
    assert.equal(
      result.url,
      'https://github.com/wooorm/dead-or-alive#dead-or-alive'
    )
    assert.equal(result.messages.length, 0)
  })
  await t.test(
    'should fail for a missing real url w/ anchor',
    async function () {
      // To do: when released, use that.
      const result = await deadOrAlive(
        'https://github.com/wooorm/dead-or-alive#deader-live'
      )
      assert.equal(result.status, 'dead')
      assert.equal(result.messages.length, 1)
      const message = result.messages[0]
      assert.equal(
        message.reason,
        'Unexpected missing anchor element on `https://github.com/wooorm/dead-or-alive` for fragment `deader-live`, remove if unneeded or refer to an existing element such as `dead-or-alive`'
      )
      assert.equal(message.ruleId, 'missing-anchor')
      assert.equal(message.source, 'dead-or-alive')
    }
  )
})
test('deadOrAlive (mocked)', async function (t) {
await t.test('should work w/ a 200', async function () {
const globalDispatcher = getGlobalDispatcher()
const mockAgent = new MockAgent()
mockAgent.enableNetConnect(/(?=a)b/)
setGlobalDispatcher(mockAgent)
const interceptable = mockAgent.get('https://example.com')
interceptable.intercept({path: '/'}).reply(200, 'ok')
const result = await deadOrAlive('https://example.com')
await mockAgent.close()
await setGlobalDispatcher(globalDispatcher)
assert.equal(result.status, 'alive')
assert.equal(result.permanent, undefined)
assert.equal(result.url, 'https://example.com/')
assert.equal(result.messages.length, 0)
})
await t.test('should work w/ URLs', async function () {
const globalDispatcher = getGlobalDispatcher()
const mockAgent = new MockAgent()
mockAgent.enableNetConnect(/(?=a)b/)
setGlobalDispatcher(mockAgent)
const interceptable = mockAgent.get('https://example.com')
interceptable.intercept({path: '/'}).reply(200, 'ok')
const result = await deadOrAlive(new URL('https://example.com'))
await mockAgent.close()
await setGlobalDispatcher(globalDispatcher)
assert.equal(result.status, 'alive')
assert.equal(result.url, 'https://example.com/')
assert.equal(result.messages.length, 0)
})
await t.test('should work w/ a timeout', async function () {
const globalDispatcher = getGlobalDispatcher()
const mockAgent = new MockAgent()
mockAgent.enableNetConnect(/(?=a)b/)
setGlobalDispatcher(mockAgent)
const interceptable = mockAgent.get('https://example.com')
interceptable.intercept({path: '/'}).reply(200).delay(100)
const result = await deadOrAlive('https://example.com', {timeout: 50})
await mockAgent.close()
await setGlobalDispatcher(globalDispatcher)
assert.equal(result.status, 'dead')
assert.equal(result.messages.length, 1)
const message = result.messages[0]
assert.equal(
message.reason,
'Unexpected error fetching `https://example.com/`'
)
assert.equal(message.ruleId, 'fetch')
assert.equal(message.source, 'dead-or-alive')
})
await t.test('should work w/ retries', async function () {
const globalDispatcher = getGlobalDispatcher()
const mockAgent = new MockAgent()
mockAgent.enableNetConnect(/(?=a)b/)
setGlobalDispatcher(mockAgent)
const interceptable = mockAgent.get('https://example.com')
let called = false
interceptable
.intercept({
path(path) {
if (path === '/') {
if (called) {
return true
}
called = true
}
return false
}
})
.reply(200)
const result = await deadOrAlive('https://example.com', {
sleep: shortSleep
})
await mockAgent.close()
await setGlobalDispatcher(globalDispatcher)
assert.equal(result.status, 'alive')
assert.equal(result.url, 'https://example.com/')
assert.equal(result.messages.length, 0)
})
await t.test('should work w/ a 404', async function () {
const globalDispatcher = getGlobalDispatcher()
const mockAgent = new MockAgent()
mockAgent.enableNetConnect(/(?=a)b/)
setGlobalDispatcher(mockAgent)
const interceptable = mockAgent.get('https://example.com')
interceptable.intercept({path: '/'}).reply(404)
const result = await deadOrAlive('https://example.com')
await mockAgent.close()
await setGlobalDispatcher(globalDispatcher)
assert.equal(result.status, 'dead')
assert.equal(result.messages.length, 1)
assert.equal(result.permanent, undefined)
const message = result.messages[0]
assert.equal(
message.reason,
'Unexpected not ok response `404` (`Not Found`) on `https://example.com/`'
)
assert.equal(message.ruleId, 'dead')
assert.equal(message.source, 'dead-or-alive')
})
await t.test('should work w/ a 500', async function () {
const globalDispatcher = getGlobalDispatcher()
const mockAgent = new MockAgent()
mockAgent.enableNetConnect(/(?=a)b/)
setGlobalDispatcher(mockAgent)
const interceptable = mockAgent.get('https://example.com')
interceptable.intercept({path: '/'}).reply(500)
const result = await deadOrAlive('https://example.com', {sleep: shortSleep})
await mockAgent.close()
await setGlobalDispatcher(globalDispatcher)
assert.equal(result.status, 'dead')
assert.equal(result.messages.length, 1)
assert.equal(result.permanent, undefined)
const message = result.messages[0]
assert.equal(
message.reason,
'Unexpected error fetching `https://example.com/`'
)
assert.equal(message.ruleId, 'fetch')
assert.equal(message.source, 'dead-or-alive')
})
await t.test('should work w/ a 301', async function () {
const globalDispatcher = getGlobalDispatcher()
const mockAgent = new MockAgent()
mockAgent.enableNetConnect(/(?=a)b/)
setGlobalDispatcher(mockAgent)
const interceptable = mockAgent.get('https://example.com')
interceptable
.intercept({path: '/'})
.reply(301, '', {headers: {Location: '/to'}})
interceptable
.intercept({path: '/to'})
.reply(200, 'ok', {headers: {'Content-type': 'text/html'}})
const result = await deadOrAlive('https://example.com')
await mockAgent.close()
await setGlobalDispatcher(globalDispatcher)
assert.equal(result.status, 'alive')
assert.equal(result.url, 'https://example.com/to')
assert.equal(result.messages.length, 0)
assert.equal(result.permanent, true)
})
await t.test('should work w/ a 302', async function () {
const globalDispatcher = getGlobalDispatcher()
const mockAgent = new MockAgent()
mockAgent.enableNetConnect(/(?=a)b/)
setGlobalDispatcher(mockAgent)
const interceptable = mockAgent.get('https://example.com')
interceptable
.intercept({path: '/'})
.reply(302, '', {headers: {Location: '/to'}})
interceptable
.intercept({path: '/to'})
.reply(200, 'ok', {headers: {'Content-type': 'text/html'}})
const result = await deadOrAlive('https://example.com')
await mockAgent.close()
await setGlobalDispatcher(globalDispatcher)
assert.equal(result.status, 'alive')
assert.equal(result.url, 'https://example.com/to')
assert.equal(result.messages.length, 0)
assert.equal(result.permanent, false)
})
await t.test('should work w/ 5 redirects (default)', async function () {
const globalDispatcher = getGlobalDispatcher()
const mockAgent = new MockAgent()
mockAgent.enableNetConnect(/(?=a)b/)
setGlobalDispatcher(mockAgent)
const interceptable = mockAgent.get('https://example.com')
const max = 5
let index = 0
while (index < max) {
interceptable
.intercept({path: '/' + index})
.reply(301, '', {headers: {Location: '/' + (index + 1)}})
index++
}
interceptable
.intercept({path: '/' + max})
.reply(200, 'ok', {headers: {'Content-type': 'text/html'}})
const result = await deadOrAlive('https://example.com/0')
await mockAgent.close()
await setGlobalDispatcher(globalDispatcher)
assert.equal(result.status, 'alive')
assert.equal(result.url, 'https://example.com/' + max)
assert.equal(result.messages.length, 0)
assert.equal(result.permanent, true)
})
await t.test('should fail w/ 6 redirects (default)', async function () {
const globalDispatcher = getGlobalDispatcher()
const mockAgent = new MockAgent()
mockAgent.enableNetConnect(/(?=a)b/)
setGlobalDispatcher(mockAgent)
const interceptable = mockAgent.get('https://example.com')
const max = 6
let index = 0
while (index < max) {
interceptable
.intercept({path: '/' + index})
.reply(301, '', {headers: {Location: '/' + (index + 1)}})
index++
}
interceptable
.intercept({path: '/' + max})
.reply(200, 'ok', {headers: {'Content-type': 'text/html'}})
const result = await deadOrAlive('https://example.com/0')
await mockAgent.close()
await setGlobalDispatcher(globalDispatcher)
assert.equal(result.status, 'dead')
assert.equal(result.messages.length, 1)
assert.equal(result.permanent, true)
const message = result.messages[0]
assert.equal(
message.reason,
'Unexpected redirect to `https://example.com/6`, too many redirects'
)
assert.equal(message.ruleId, 'max-redirect')
assert.equal(message.source, 'dead-or-alive')
})
await t.test('should support `maxRedirects`', async function () {
const globalDispatcher = getGlobalDispatcher()
const mockAgent = new MockAgent()
mockAgent.enableNetConnect(/(?=a)b/)
setGlobalDispatcher(mockAgent)
const interceptable = mockAgent.get('https://example.com')
const max = 10
let index = 0
while (index < max) {
interceptable
.intercept({path: '/' + index})
.reply(301, '', {headers: {Location: '/' + (index + 1)}})
index++
}
interceptable
.intercept({path: '/' + max})
.reply(200, 'ok', {headers: {'Content-type': 'text/html'}})
const result = await deadOrAlive('https://example.com/0', {
maxRedirects: 7
})
await mockAgent.close()
await setGlobalDispatcher(globalDispatcher)
assert.equal(result.status, 'dead')
assert.equal(result.messages.length, 1)
assert.equal(result.permanent, true)
const message = result.messages[0]
assert.equal(
message.reason,
'Unexpected redirect to `https://example.com/8`, too many redirects'
)
assert.equal(message.ruleId, 'max-redirect')
assert.equal(message.source, 'dead-or-alive')
})
await t.test('should warn w/ a redirect w/ hash', async function () {
const globalDispatcher = getGlobalDispatcher()
const mockAgent = new MockAgent()
mockAgent.enableNetConnect(/(?=a)b/)
setGlobalDispatcher(mockAgent)
const interceptable = mockAgent.get('https://example.com')
interceptable
.intercept({path: '/'})
.reply(301, '', {headers: {Location: '/to'}})
interceptable
.intercept({path: '/to'})
.reply(200, 'ok', {headers: {'Content-type': 'text/html'}})
const result = await deadOrAlive('https://example.com#hi')
await mockAgent.close()
await setGlobalDispatcher(globalDispatcher)
assert.equal(result.status, 'alive')
assert.equal(result.url, 'https://example.com/to')
assert.equal(result.permanent, true)
assert.equal(result.messages.length, 1)
const message = result.messages[0]
assert.equal(
message.reason,
'Unexpected hash in URL `https://example.com/#hi` that redirects to `https://example.com/to` losing the hash, remove the hash from the original URL'
)
assert.equal(message.ruleId, 'lost-hash-with-redirect')
assert.equal(message.source, 'dead-or-alive')
})
await t.test('should work with an HTML redirect', async function () {
const globalDispatcher = getGlobalDispatcher()
const mockAgent = new MockAgent()
mockAgent.enableNetConnect(/(?=a)b/)
setGlobalDispatcher(mockAgent)
const interceptable = mockAgent.get('https://example.com')
interceptable
.intercept({path: '/'})
.reply(
200,
'<!doctypehtml><title>Redirecting…</title><meta http-equiv=refresh content="0;url=https://example.com/i/was/redirected/">',
{headers: {'Content-type': 'text/html'}}
)
interceptable
.intercept({path: '/i/was/redirected/'})
.reply(200, 'ok', {headers: {'Content-type': 'text/html'}})
const result = await deadOrAlive('https://example.com')
await mockAgent.close()
await setGlobalDispatcher(globalDispatcher)
assert.equal(result.status, 'alive')
assert.equal(result.url, 'https://example.com/i/was/redirected/')
assert.equal(result.messages.length, 0)
})
await t.test(
'should not follow HTML redirects w/ `followMetaHttpEquiv: false`',
async function () {
const globalDispatcher = getGlobalDispatcher()
const mockAgent = new MockAgent()
mockAgent.enableNetConnect(/(?=a)b/)
setGlobalDispatcher(mockAgent)
const interceptable = mockAgent.get('https://example.com')
interceptable
.intercept({path: '/'})
.reply(
200,
'<!doctypehtml><title>Redirecting…</title><meta http-equiv=refresh content="0;url=https://example.com/i/was/redirected/">',
{headers: {'Content-type': 'text/html'}}
)
const result = await deadOrAlive('https://example.com', {
followMetaHttpEquiv: false
})
await mockAgent.close()
await setGlobalDispatcher(globalDispatcher)
assert.equal(result.status, 'alive')
assert.equal(result.url, 'https://example.com/')
assert.equal(result.messages.length, 0)
}
)
await t.test(
'should ignore an HTML redirect w/o `content`',
async function () {
const globalDispatcher = getGlobalDispatcher()
const mockAgent = new MockAgent()
mockAgent.enableNetConnect(/(?=a)b/)
setGlobalDispatcher(mockAgent)
const interceptable = mockAgent.get('https://example.com')
interceptable
.intercept({path: '/'})
.reply(
200,
'<!doctypehtml><title>Redirecting…</title><meta http-equiv=refresh>',
{headers: {'Content-type': 'text/html'}}
)
const result = await deadOrAlive('https://example.com')
await mockAgent.close()
await setGlobalDispatcher(globalDispatcher)
assert.equal(result.status, 'alive')
assert.equal(result.url, 'https://example.com/')
assert.equal(result.messages.length, 0)
}
)
await t.test('should warn w/ HTML redirect losing hash', async function () {
const globalDispatcher = getGlobalDispatcher()
const mockAgent = new MockAgent()
mockAgent.enableNetConnect(/(?=a)b/)
setGlobalDispatcher(mockAgent)
const interceptable = mockAgent.get('https://example.com')
interceptable
.intercept({path: '/'})
.reply(
200,
'<!doctypehtml><title>Redirecting…</title><meta http-equiv=refresh content="0;/to/">',
{headers: {'Content-type': 'text/html'}}
)
interceptable
.intercept({path: '/to/'})
.reply(200, 'ok', {headers: {'Content-type': 'text/html'}})
const result = await deadOrAlive('https://example.com#hi')
await mockAgent.close()
await setGlobalDispatcher(globalDispatcher)
assert.equal(result.status, 'alive')
assert.equal(result.url, 'https://example.com/to/')
assert.equal(result.messages.length, 1)
const message = result.messages[0]
assert.equal(
message.reason,
'Unexpected hash in URL `https://example.com/#hi` that redirects with `meta[http-equiv=refresh]` to `https://example.com/to/` losing the hash, remove the hash from the original URL'
)
assert.equal(message.ruleId, 'lost-hash-with-meta-http-equiv')
assert.equal(message.source, 'dead-or-alive')
})
await t.test('should find an anchor by `id`', async function () {
const globalDispatcher = getGlobalDispatcher()
const mockAgent = new MockAgent()
mockAgent.enableNetConnect(/(?=a)b/)
setGlobalDispatcher(mockAgent)
const interceptable = mockAgent.get('https://example.com')
interceptable
.intercept({path: '/'})
.reply(200, '<!doctypehtml><title></title><h1 id=hi>hi</h1>', {
headers: {'Content-type': 'text/html'}
})
const result = await deadOrAlive('https://example.com#hi')
await mockAgent.close()
await setGlobalDispatcher(globalDispatcher)
assert.equal(result.status, 'alive')
assert.equal(result.url, 'https://example.com/#hi')
assert.equal(result.messages.length, 0)
})
await t.test(
'should find an anchor by `id` (clobber prefix)',
async function () {
const globalDispatcher = getGlobalDispatcher()
const mockAgent = new MockAgent()
mockAgent.enableNetConnect(/(?=a)b/)
setGlobalDispatcher(mockAgent)
const interceptable = mockAgent.get('https://example.com')
interceptable
.intercept({path: '/'})
.reply(
200,
'<!doctypehtml><title></title><h1 id=user-content-hi>hi</h1>',
{headers: {'Content-type': 'text/html'}}
)
const result = await deadOrAlive('https://example.com#hi')
await mockAgent.close()
await setGlobalDispatcher(globalDispatcher)
assert.equal(result.status, 'alive')
assert.equal(result.url, 'https://example.com/#hi')
assert.equal(result.messages.length, 0)
}
)
await t.test('should find an anchor by `a[name]`', async function () {
const globalDispatcher = getGlobalDispatcher()
const mockAgent = new MockAgent()
mockAgent.enableNetConnect(/(?=a)b/)
setGlobalDispatcher(mockAgent)
const interceptable = mockAgent.get('https://example.com')
interceptable
.intercept({path: '/'})
.reply(200, '<!doctypehtml><title></title><a name=hi>hi</a>', {
headers: {'Content-type': 'text/html'}
})
const result = await deadOrAlive('https://example.com#hi')
await mockAgent.close()
await setGlobalDispatcher(globalDispatcher)
assert.equal(result.status, 'alive')
assert.equal(result.url, 'https://example.com/#hi')
assert.equal(result.messages.length, 0)
})
await t.test(
'should find an anchor by `a[name]` (clobber prefix)',
async function () {
const globalDispatcher = getGlobalDispatcher()
const mockAgent = new MockAgent()
mockAgent.enableNetConnect(/(?=a)b/)
setGlobalDispatcher(mockAgent)
const interceptable = mockAgent.get('https://example.com')
interceptable
.intercept({path: '/'})
.reply(
200,
'<!doctypehtml><title></title><a name=user-content-hi>hi</a>',
{headers: {'Content-type': 'text/html'}}
)
const result = await deadOrAlive('https://example.com#hi')
await mockAgent.close()
await setGlobalDispatcher(globalDispatcher)
assert.equal(result.status, 'alive')
assert.equal(result.url, 'https://example.com/#hi')
assert.equal(result.messages.length, 0)
}
)
await t.test('should find similar anchors', async function () {
const globalDispatcher = getGlobalDispatcher()
const mockAgent = new MockAgent()
mockAgent.enableNetConnect(/(?=a)b/)
setGlobalDispatcher(mockAgent)
const interceptable = mockAgent.get('https://example.com')
interceptable
.intercept({path: '/'})
.reply(
200,
'<!doctypehtml><title></title><h1 id=aaab>hi</h1><h1 id=user-content-aaac>hi</h1><a name=aaad>hi</a><a name=user-content-aaae>hi</a>',
{headers: {'Content-type': 'text/html'}}
)
const result = await deadOrAlive('https://example.com#aaaa')
await mockAgent.close()
await setGlobalDispatcher(globalDispatcher)
assert.equal(result.status, 'dead')
assert.equal(result.messages.length, 1)
const message = result.messages[0]
assert.equal(
message.reason,
'Unexpected missing anchor element on `https://example.com/` for fragment `aaaa`, remove if unneeded or refer to an existing element such as `aaab`, `aaac`, `aaad`, or `aaae`'
)
assert.equal(message.ruleId, 'missing-anchor')
assert.equal(message.source, 'dead-or-alive')
})
await t.test('should allow text fragments by default', async function () {
const globalDispatcher = getGlobalDispatcher()
const mockAgent = new MockAgent()
mockAgent.enableNetConnect(/(?=a)b/)
setGlobalDispatcher(mockAgent)
const interceptable = mockAgent.get('https://example.com')
interceptable
.intercept({path: '/'})
.reply(200, 'ok', {headers: {'Content-type': 'text/html'}})
const result = await deadOrAlive('https://example.com#:~:text=hi')
await mockAgent.close()
await setGlobalDispatcher(globalDispatcher)
assert.equal(result.status, 'alive')
assert.equal(result.url, 'https://example.com/#:~:text=hi')
assert.equal(result.messages.length, 0)
})
await t.test('should find similar anchors (none)', async function () {
const globalDispatcher = getGlobalDispatcher()
const mockAgent = new MockAgent()
mockAgent.enableNetConnect(/(?=a)b/)
setGlobalDispatcher(mockAgent)
const interceptable = mockAgent.get('https://example.com')
interceptable
.intercept({path: '/'})
.reply(200, '<!doctypehtml><title></title><h1 id=xxxx>hi</h1>', {
headers: {'Content-type': 'text/html'}
})
const result = await deadOrAlive('https://example.com#aaaa')
await mockAgent.close()
await setGlobalDispatcher(globalDispatcher)
assert.equal(result.status, 'dead')
assert.equal(result.messages.length, 1)
const message = result.messages[0]
assert.equal(
message.reason,
'Unexpected missing anchor element on `https://example.com/` for fragment `aaaa`, remove if unneeded or refer to an existing element'
)
assert.equal(message.ruleId, 'missing-anchor')
assert.equal(message.source, 'dead-or-alive')
})
await t.test('should work w/o `Content-type`', async function () {
const globalDispatcher = getGlobalDispatcher()
const mockAgent = new MockAgent()
mockAgent.enableNetConnect(/(?=a)b/)
setGlobalDispatcher(mockAgent)
const interceptable = mockAgent.get('https://example.com')
interceptable.intercept({path: '/'}).reply(200, 'hi')
const result = await deadOrAlive('https://example.com')
await mockAgent.close()
await setGlobalDispatcher(globalDispatcher)
assert.equal(result.status, 'alive')
assert.equal(result.url, 'https://example.com/')
assert.equal(result.messages.length, 0)
})
await t.test('should work with non-html', async function () {
const globalDispatcher = getGlobalDispatcher()
const mockAgent = new MockAgent()
mockAgent.enableNetConnect(/(?=a)b/)
setGlobalDispatcher(mockAgent)
const interceptable = mockAgent.get('https://example.com')
interceptable.intercept({path: '/'}).reply(200, '*{color:red}', {
headers: {'Content-type': 'text/css'}
})
const result = await deadOrAlive('https://example.com')
await mockAgent.close()
await setGlobalDispatcher(globalDispatcher)
assert.equal(result.status, 'alive')
assert.equal(result.url, 'https://example.com/')
assert.equal(result.messages.length, 0)
})
await t.test('should warn about anchors into non-html', async function () {
const globalDispatcher = getGlobalDispatcher()
const mockAgent = new MockAgent()
mockAgent.enableNetConnect(/(?=a)b/)
setGlobalDispatcher(mockAgent)
const interceptable = mockAgent.get('https://example.com')
interceptable.intercept({path: '/'}).reply(200, '*{color:red}', {
headers: {'Content-type': 'text/css'}
})
const result = await deadOrAlive('https://example.com#hi')
await mockAgent.close()
await setGlobalDispatcher(globalDispatcher)
assert.equal(result.status, 'alive')
assert.equal(result.url, 'https://example.com/')
assert.equal(result.messages.length, 1)
const message = result.messages[0]
assert.equal(
message.reason,
'Unexpected hash in URL `https://example.com/#hi` to non-html (`text/css`) losing the hash, remove the hash from the original URL'
)
assert.equal(message.ruleId, 'lost-hash-with-non-html')
assert.equal(message.source, 'dead-or-alive')
})
await t.test('should find further URLs', async function () {
const globalDispatcher = getGlobalDispatcher()
const mockAgent = new MockAgent()
mockAgent.enableNetConnect(/(?=a)b/)
setGlobalDispatcher(mockAgent)
const interceptable = mockAgent.get('https://example.com')
interceptable
.intercept({path: '/'})
.reply(200, '<a href=/>a</a><q cite=#a>b</q><a ping="b c">c</a>', {
headers: {'Content-type': 'text/html'}
})
const result = await deadOrAlive('https://example.com')
await mockAgent.close()
await setGlobalDispatcher(globalDispatcher)
assert.equal(result.status, 'alive')
assert.equal(result.url, 'https://example.com/')
assert.equal(result.messages.length, 0)
assert.deepEqual(result.urls ? [...result.urls] : [], [
'https://example.com/',
'https://example.com/#a',
'https://example.com/b',
'https://example.com/c'
])
})
await t.test('should ignore broken further URLs', async function () {
const globalDispatcher = getGlobalDispatcher()
const mockAgent = new MockAgent()
mockAgent.enableNetConnect(/(?=a)b/)
setGlobalDispatcher(mockAgent)
const interceptable = mockAgent.get('https://example.com')
interceptable
.intercept({path: '/'})
.reply(200, '<a href=https://exa[mple.org>a</a>', {
headers: {'Content-type': 'text/html'}
})
const result = await deadOrAlive('https://example.com')
await mockAgent.close()
await setGlobalDispatcher(globalDispatcher)
assert.equal(result.status, 'alive')
assert.equal(result.url, 'https://example.com/')
assert.equal(result.messages.length, 0)
assert.deepEqual(result.urls ? [...result.urls] : [], [])
})
await t.test(
'should not find further urls w/ `findUrls: false`',
async function () {
const globalDispatcher = getGlobalDispatcher()
const mockAgent = new MockAgent()
mockAgent.enableNetConnect(/(?=a)b/)
setGlobalDispatcher(mockAgent)
const interceptable = mockAgent.get('https://example.com')
interceptable.intercept({path: '/'}).reply(200, '<a href=a>b</a>', {
headers: {'Content-type': 'text/html'}
})
const result = await deadOrAlive('https://example.com', {findUrls: false})
await mockAgent.close()
await setGlobalDispatcher(globalDispatcher)
assert.equal(result.status, 'alive')
assert.equal(result.url, 'https://example.com/')
assert.equal(result.messages.length, 0)
assert.equal(result.urls, undefined)
}
)
await t.test('should be fast if html is not needed', async function () {
const globalDispatcher = getGlobalDispatcher()
const mockAgent = new MockAgent()
mockAgent.enableNetConnect(/(?=a)b/)
setGlobalDispatcher(mockAgent)
const interceptable = mockAgent.get('https://example.com')
interceptable
.intercept({path: '/'})
.reply(200, 'ok', {headers: {'Content-type': 'text/html'}})
const result = await deadOrAlive('https://example.com', {
checkAnchor: false,
findUrls: false,
followMetaHttpEquiv: false
})
await mockAgent.close()
await setGlobalDispatcher(globalDispatcher)
assert.equal(result.status, 'alive')
assert.equal(result.url, 'https://example.com/')
assert.equal(result.messages.length, 0)
assert.equal(result.urls, undefined)
})
await t.test('should be fast if retries are not needed', async function () {
const globalDispatcher = getGlobalDispatcher()
const mockAgent = new MockAgent()
mockAgent.enableNetConnect(/(?=a)b/)
setGlobalDispatcher(mockAgent)
const result = await deadOrAlive('https://example.com', {
maxRetries: 0
})
await mockAgent.close()
await setGlobalDispatcher(globalDispatcher)
assert.equal(result.status, 'dead')
assert.equal(result.url, undefined)
assert.equal(result.urls, undefined)
const message = result.messages[0]
assert.equal(
message.reason,
'Unexpected error fetching `https://example.com/`'
)
assert.equal(message.ruleId, 'fetch')
assert.equal(message.source, 'dead-or-alive')
})
await t.test('should be fast if sleep is fast', async function () {
const globalDispatcher = getGlobalDispatcher()
const mockAgent = new MockAgent()
mockAgent.enableNetConnect(/(?=a)b/)
setGlobalDispatcher(mockAgent)
const result = await deadOrAlive('https://example.com', {
sleep() {
return 40
}
})
await mockAgent.close()
await setGlobalDispatcher(globalDispatcher)
assert.equal(result.status, 'dead')
assert.equal(result.url, undefined)
assert.equal(result.urls, undefined)
const message = result.messages[0]
assert.equal(
message.reason,
'Unexpected error fetching `https://example.com/`'
)
assert.equal(message.ruleId, 'fetch')
assert.equal(message.source, 'dead-or-alive')
})
})
// Tests for the internal `sharedDeclarativeRefresh` parser, which reads the
// `content` of a `<meta http-equiv=refresh>` (the WHATWG HTML “shared
// declarative refresh steps”) and resolves the target URL relative to `from`.
// Malformed values yield `undefined`; invalid resolved URLs throw.
test('sharedDeclarativeRefresh (internal util)', async function (t) {
  // Base URL that relative targets resolve against in every case below.
  const from = new URL('https://example.com/from')
  await t.test('should work', async function () {
    assert.equal(
      sharedDeclarativeRefresh('0;/to', from)?.href,
      'https://example.com/to'
    )
  })
  await t.test('should support initial whitespace', async function () {
    assert.equal(
      sharedDeclarativeRefresh('\t 0;/to', from)?.href,
      'https://example.com/to'
    )
  })
  // The leading time may be any digit/dot run — only its presence matters.
  await t.test('should work w/ float', async function () {
    assert.equal(
      sharedDeclarativeRefresh('1.234;/to', from)?.href,
      'https://example.com/to'
    )
  })
  await t.test('should work w/ missing integer', async function () {
    assert.equal(
      sharedDeclarativeRefresh('.;/to', from)?.href,
      'https://example.com/to'
    )
  })
  await t.test('should fail w/o time', async function () {
    assert.equal(sharedDeclarativeRefresh(';/to', from), undefined)
  })
  await t.test('should fail w/o separator', async function () {
    assert.equal(sharedDeclarativeRefresh('0?/to', from), undefined)
  })
  await t.test(
    'should fail w/o continuation after separator',
    async function () {
      assert.equal(sharedDeclarativeRefresh('0;', from), undefined)
    }
  )
  // Either `;` or `,` separates the time from the URL, with optional
  // whitespace on both sides.
  await t.test('should work w/ semicolon separator', async function () {
    assert.equal(
      sharedDeclarativeRefresh('0;to', from)?.href,
      'https://example.com/to'
    )
  })
  await t.test('should work w/ comma separator', async function () {
    assert.equal(
      sharedDeclarativeRefresh('0,to', from)?.href,
      'https://example.com/to'
    )
  })
  await t.test('should work w/ whitespace before separator', async function () {
    assert.equal(
      sharedDeclarativeRefresh('0 ,to', from)?.href,
      'https://example.com/to'
    )
  })
  await t.test('should work w/ whitespace after separator', async function () {
    assert.equal(
      sharedDeclarativeRefresh('0, to', from)?.href,
      'https://example.com/to'
    )
  })
  // Quoted URLs: a matching closing quote truncates; a missing one is fine.
  await t.test('should work w/ double quotes', async function () {
    assert.equal(
      sharedDeclarativeRefresh('0;"to"', from)?.href,
      'https://example.com/to'
    )
  })
  await t.test('should work w/ single quotes', async function () {
    assert.equal(
      sharedDeclarativeRefresh("0;'to'", from)?.href,
      'https://example.com/to'
    )
  })
  await t.test('should work w/ quotes, truncating at end', async function () {
    assert.equal(
      sharedDeclarativeRefresh("0;'to'c", from)?.href,
      'https://example.com/to'
    )
  })
  await t.test('should work w/ quotes, w/o end', async function () {
    assert.equal(
      sharedDeclarativeRefresh("0;'to", from)?.href,
      'https://example.com/to'
    )
  })
  // Optional case-insensitive `url=` prefix; only stripped when followed by
  // `=` (optionally surrounded by whitespace), otherwise taken literally.
  await t.test('should work w/ `url` prefix', async function () {
    assert.equal(
      sharedDeclarativeRefresh('0;url=to', from)?.href,
      'https://example.com/to'
    )
  })
  await t.test('should work w/ `url` prefix (uppercase)', async function () {
    assert.equal(
      sharedDeclarativeRefresh('0;URL=to', from)?.href,
      'https://example.com/to'
    )
  })
  await t.test('should work w/ `url` prefix (funkycase)', async function () {
    assert.equal(
      sharedDeclarativeRefresh('0;uRl=to', from)?.href,
      'https://example.com/to'
    )
  })
  await t.test('should work w/o prefix, w/ `u`', async function () {
    assert.equal(
      sharedDeclarativeRefresh('0;us', from)?.href,
      'https://example.com/us'
    )
  })
  await t.test('should work w/o prefix, w/ `ur`', async function () {
    assert.equal(
      sharedDeclarativeRefresh('0;ura', from)?.href,
      'https://example.com/ura'
    )
  })
  await t.test('should work w/o prefix, w/ `url` (no `=`)', async function () {
    assert.equal(
      sharedDeclarativeRefresh('0;urlang', from)?.href,
      'https://example.com/urlang'
    )
  })
  await t.test(
    'should work w/ whitespace before `=` in prefix',
    async function () {
      assert.equal(
        sharedDeclarativeRefresh('0;url =to', from)?.href,
        'https://example.com/to'
      )
    }
  )
  await t.test(
    'should work w/ whitespace after `=` in prefix',
    async function () {
      assert.equal(
        sharedDeclarativeRefresh('0;url= to', from)?.href,
        'https://example.com/to'
      )
    }
  )
  await t.test('should work w/ quotes after prefix', async function () {
    assert.equal(
      sharedDeclarativeRefresh('0;url="to"', from)?.href,
      'https://example.com/to'
    )
  })
  await t.test('should work w/ a different domain', async function () {
    assert.equal(
      sharedDeclarativeRefresh('0;https://example.org/stuff', from)?.href,
      'https://example.org/stuff'
    )
  })
  // A syntactically present but unparseable URL is a thrown `VFileMessage`,
  // not `undefined`.
  await t.test('should throw on invalid URLs', async function () {
    try {
      sharedDeclarativeRefresh('0;url="https://exa[mple.org"', from)
      assert.fail()
    } catch (error) {
      const message = /** @type {VFileMessage} */ (error)
      assert.equal(
        message.reason,
        'Unexpected invalid URL `https://exa[mple.org` in `content` on `meta[http-equiv=refresh] relative to `https://example.com/from`'
      )
      assert.equal(message.ruleId, 'shared-declarative-refresh')
      assert.equal(message.source, 'dead-or-alive')
    }
  })
})
// Tests for the internal `propose` helper, which suggests candidates from
// `options` that are close to the given string (presumably edit-distance
// based — see the implementation); used for “did you mean …” anchor hints.
test('propose (internal util)', async function (t) {
  await t.test('should work when empty', async function () {
    assert.deepEqual(propose('woolwork', []), [])
  })
  await t.test('should work (1)', async function () {
    assert.deepEqual(
      propose('woolwork', ['woolworker', 'woolworking', 'woolwinder']),
      ['woolworker', 'woolworking']
    )
  })
  await t.test('should work (2)', async function () {
    // A less similar input widens the set of acceptable proposals.
    assert.deepEqual(
      propose('woolworkir', ['woolworker', 'woolworking', 'woolwinder']),
      ['woolworker', 'woolworking', 'woolwinder']
    )
  })
})
/**
 * Quadratic backoff that keeps retry waits short (for tests).
 *
 * @param {number} retries
 *   Number of retries so far.
 * @returns {number}
 *   Milliseconds to wait: `retries² × 100`.
 */
function shortSleep(retries) {
  const base = 100
  return base * retries * retries
}
| wooorm/dead-or-alive | 50 | check if urls are dead or alive | JavaScript | wooorm | Titus | |
benches/bench.rs | Rust | use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion};
use std::fs;
/// Benchmark `markdown::to_html` against this repository’s `readme.md`.
fn readme(c: &mut Criterion) {
    let input = fs::read_to_string("readme.md").unwrap();
    let id = BenchmarkId::new("readme", "readme");
    c.bench_with_input(id, &input, |bencher, text| {
        bencher.iter(|| markdown::to_html(text));
    });
}
// fn one_and_a_half_mb(c: &mut Criterion) {
// let doc = fs::read_to_string("../a-dump-of-markdown/markdown.md").unwrap();
// let mut group = c.benchmark_group("giant");
// group.sample_size(10);
// group.bench_with_input(BenchmarkId::new("giant", "1.5 mb"), &doc, |b, s| {
// b.iter(|| markdown::to_html(s));
// });
// group.finish();
// }
// , one_and_a_half_mb
criterion_group!(benches, readme);
criterion_main!(benches);
| wooorm/markdown-rs | 1,459 | CommonMark compliant markdown parser in Rust with ASTs and extensions | Rust | wooorm | Titus | |
examples/lib.rs | Rust | fn main() -> Result<(), markdown::message::Message> {
    // Turn on debugging.
    // You can show it with `RUST_LOG=debug cargo run --features log --example lib`
    env_logger::init();
    // Safely turn (untrusted?) markdown into HTML.
    println!("{:?}", markdown::to_html("## Hello, *world*!"));
    // Turn trusted markdown into HTML.
    // `allow_dangerous_html`/`allow_dangerous_protocol` pass raw HTML and any
    // URL protocol through unsanitized — only use them on trusted input.
    println!(
        "{:?}",
        markdown::to_html_with_options(
            "<div style=\"color: goldenrod\">\n\n# Hi, *Saturn*! 🪐\n\n</div>",
            &markdown::Options {
                compile: markdown::CompileOptions {
                    allow_dangerous_html: true,
                    allow_dangerous_protocol: true,
                    ..markdown::CompileOptions::default()
                },
                ..markdown::Options::default()
            }
        )
    );
    // Support GFM extensions.
    // `?` propagates a parse `Message` to the caller (matches the return type).
    println!(
        "{}",
        markdown::to_html_with_options(
            "* [x] contact ~Mercury~Venus at hi@venus.com!",
            &markdown::Options::gfm()
        )?
    );
    // Access syntax tree and support MDX extensions:
    println!(
        "{:?}",
        markdown::to_mdast(
            "# <HelloMessage />, {username}!",
            &markdown::ParseOptions::mdx()
        )?
    );
    Ok(())
}
| wooorm/markdown-rs | 1,459 | CommonMark compliant markdown parser in Rust with ASTs and extensions | Rust | wooorm | Titus | |
fuzz/fuzz_targets/markdown_honggfuzz.rs | Rust | use honggfuzz::fuzz;
fn main() {
    // Honggfuzz drives this loop forever, handing each generated input to the
    // closure; only inputs that are valid UTF-8 are exercised.
    loop {
        fuzz!(|data: &[u8]| {
            if let Ok(s) = std::str::from_utf8(data) {
                // Results are deliberately discarded: the fuzzer only looks
                // for panics/crashes, not for correct output.
                let _ = markdown::to_html(s);
                let _ = markdown::to_html_with_options(s, &markdown::Options::gfm());
                let _ = markdown::to_mdast(s, &markdown::ParseOptions::default());
                let _ = markdown::to_mdast(s, &markdown::ParseOptions::gfm());
                let _ = markdown::to_mdast(s, &markdown::ParseOptions::mdx());
            }
        });
    }
}
| wooorm/markdown-rs | 1,459 | CommonMark compliant markdown parser in Rust with ASTs and extensions | Rust | wooorm | Titus | |
fuzz/fuzz_targets/markdown_libfuzz.rs | Rust | #![no_main]
use libfuzzer_sys::fuzz_target;
fuzz_target!(|data: &[u8]| {
    // libFuzzer entry point: only valid UTF-8 is fed to the parsers, and
    // results are discarded — the fuzzer only looks for panics/crashes.
    if let Ok(s) = std::str::from_utf8(data) {
        let _ = markdown::to_html(s);
        let _ = markdown::to_html_with_options(s, &markdown::Options::gfm());
        let _ = markdown::to_mdast(s, &markdown::ParseOptions::default());
        let _ = markdown::to_mdast(s, &markdown::ParseOptions::gfm());
        let _ = markdown::to_mdast(s, &markdown::ParseOptions::mdx());
    }
});
| wooorm/markdown-rs | 1,459 | CommonMark compliant markdown parser in Rust with ASTs and extensions | Rust | wooorm | Titus | |
generate/src/main.rs | Rust | // To regenerate, run the following from the repository root:
//
// ```sh
// cargo run --manifest-path generate/Cargo.toml
// ```
use regex::Regex;
use std::fs;
/// Regenerate the CommonMark test suite and the Unicode punctuation table.
#[tokio::main]
async fn main() {
    commonmark().await;
    punctuation().await;
}
/// Generate `tests/commonmark.rs` from the CommonMark 0.31.2 spec.
///
/// Downloads the spec once (cached in `commonmark-data.txt`), extracts every
/// 32-backtick `example` block together with the heading it appears under,
/// and emits one `assert_eq!` per example.
async fn commonmark() {
    let url = "https://raw.githubusercontent.com/commonmark/commonmark-spec/0.31.2/spec.txt";
    let data_url = "commonmark-data.txt";
    let code_url = "tests/commonmark.rs";
    // Use the cached spec if present, otherwise fetch and cache it.
    let value = if let Ok(value) = fs::read_to_string(data_url) {
        value
    } else {
        let value = reqwest::get(url).await.unwrap().text().await.unwrap();
        // `fs::write` only needs the bytes: pass by reference, no clone.
        fs::write(data_url, &value).unwrap();
        value
    };
    // Matches either a fenced example (32 backticks) or a heading line.
    let re = Regex::new(r"(?m)(?:^`{32} example\n[\s\S]*?\n`{32}$|^#{1,6} *(.*)$)").unwrap();
    let re_heading_prefix = Regex::new(r"#{1,6} ").unwrap();
    // A lone `.` line separates example input from expected output.
    let re_in_out = Regex::new(r"\n\.(?:\n|$)").unwrap();
    let mut current_heading = None;
    let mut number = 1;
    // Drop the appendix after the tests; restore tabs (shown as `→` in the spec).
    let value = Regex::new(r"<!-- END TESTS -->[\s\S]*")
        .unwrap()
        .replace(&value, "");
    let value = Regex::new(r"→").unwrap().replace_all(&value, "\t");
    let mut cases = vec![];
    for mat in re.find_iter(&value) {
        let mut lines = mat.as_str().lines().collect::<Vec<_>>();
        if lines.len() == 1 {
            // A heading: remember it as the section name for following examples.
            current_heading = Some(re_heading_prefix.replace(lines[0], "").to_string());
        } else {
            // An example: strip the fence lines, split input from expected
            // output, and emit an assertion labeled `section (number)`.
            lines.remove(0);
            lines.pop();
            let section = current_heading.as_ref().unwrap();
            let case = lines.join("\n");
            let parts = re_in_out.split(&case).collect::<Vec<_>>();
            let input = format!("{}\n", parts[0]);
            let output = if parts[1].is_empty() {
                "".into()
            } else {
                format!("{}\n", parts[1])
            };
            let test = format!("    assert_eq!(\n        to_html_with_options(\n            r###\"{}\"###,\n            &danger\n        )?,\n        r###\"{}\"###,\n        r###\"{} ({})\"###\n);", input, output, section, number);
            cases.push(test);
            number += 1;
        }
    }
    let doc = format!(
        "//! `CommonMark` test suite.
// > 👉 **Important**: this module is generated by `generate/src/main.rs`.
// > It is generated from the latest CommonMark website.
use markdown::{{message, to_html_with_options, CompileOptions, Options}};
use pretty_assertions::assert_eq;
#[rustfmt::skip]
#[test]
fn commonmark() -> Result<(), message::Message> {{
    let danger = Options {{
        compile: CompileOptions {{
            allow_dangerous_html: true,
            allow_dangerous_protocol: true,
            ..CompileOptions::default()
        }},
        ..Options::default()
    }};
{}
    Ok(())
}}
",
        cases.join("\n\n")
    );
    fs::write(code_url, doc).unwrap();
}
/// Generate `src/util/unicode.rs`: the table of characters `CommonMark`
/// treats as Unicode punctuation.
///
/// Downloads `UnicodeData.txt` once (cached in `unicode-data.txt`) and keeps
/// every code point whose general category is punctuation (`P*`) or symbol
/// (`S*`).
async fn punctuation() {
    let url = "https://www.unicode.org/Public/UCD/latest/ucd/UnicodeData.txt";
    let data_url = "unicode-data.txt";
    let code_url = "src/util/unicode.rs";
    // Use the cached data if present, otherwise fetch and cache it.
    let value = if let Ok(value) = fs::read_to_string(data_url) {
        value
    } else {
        let value = reqwest::get(url).await.unwrap().text().await.unwrap();
        // `fs::write` only needs the bytes: pass by reference, no clone.
        fs::write(data_url, &value).unwrap();
        value
    };
    // General categories to keep.
    let search = [
        "Pc", // Punctuation, Connector
        "Pd", // Punctuation, Dash
        "Pe", // Punctuation, Close
        "Pf", // Punctuation, FinalQuote
        "Pi", // Punctuation, InitialQuote
        "Po", // Punctuation, Other
        "Ps", // Punctuation, Open
        "Sc", // Symbol, Currency
        "Sk", // Symbol, Modifier
        "Sm", // Symbol, Math
        "So", // Symbol, Other
    ];
    // `UnicodeData.txt` is `;`-separated: field 0 is the code point, field 2
    // the general category. Read just those two fields lazily (no per-line
    // `Vec`); short or malformed lines are skipped instead of panicking.
    let found = value
        .lines()
        .filter_map(|line| {
            let mut cells = line.split(';');
            let code_point = cells.next()?;
            let category = cells.nth(1)?;
            search.contains(&category).then_some(code_point)
        })
        .collect::<Vec<_>>();
    let doc = format!(
        "//! Info on Unicode.
/// List of characters that are considered punctuation.
///
/// > 👉 **Important**: this module is generated by `generate/src/main.rs`.
/// > It is generated from the latest Unicode data.
///
/// Rust does not contain an `is_punctuation` method on `char`, while it does
/// support [`is_ascii_alphanumeric`](char::is_ascii_alphanumeric).
///
/// `CommonMark` handles attention (emphasis, strong) markers based on what
/// comes before or after them.
/// One such difference is if those characters are Unicode punctuation.
///
/// ## References
///
/// * [*§ 2.1 Characters and lines* in `CommonMark`](https://spec.commonmark.org/0.31.2/#unicode-punctuation-character)
pub static PUNCTUATION: [char; {}] = [
{}
];
",
        found.len(),
        found.iter().map(|d| format!("    '\\u{{{}}}',", d)).collect::<Vec<_>>().join("\n")
    );
    fs::write(code_url, doc).unwrap();
}
| wooorm/markdown-rs | 1,459 | CommonMark compliant markdown parser in Rust with ASTs and extensions | Rust | wooorm | Titus | |
mdast_util_to_markdown/src/association.rs | Rust | //! Traits for <https://github.com/syntax-tree/mdast#association>.
//!
//! JS equivalent: https://github.com/DefinitelyTyped/DefinitelyTyped/blob/70e1a4f/types/mdast/index.d.ts#L48.
use alloc::string::String;
use markdown::mdast::{Definition, ImageReference, LinkReference};
/// Getters shared by mdast nodes that are “associations”: nodes related to a
/// definition through an identifier (see the mdast `Association` interface).
pub trait Association {
    /// Normalized identifier used to match the association.
    fn identifier(&self) -> &String;
    /// Original, non-normalized label as authored, if any.
    fn label(&self) -> &Option<String>;
}
// The impls below simply expose each node’s own `identifier`/`label` fields.
impl Association for Definition {
    fn identifier(&self) -> &String {
        &self.identifier
    }
    fn label(&self) -> &Option<String> {
        &self.label
    }
}
impl Association for ImageReference {
    fn identifier(&self) -> &String {
        &self.identifier
    }
    fn label(&self) -> &Option<String> {
        &self.label
    }
}
impl Association for LinkReference {
    fn identifier(&self) -> &String {
        &self.identifier
    }
    fn label(&self) -> &Option<String> {
        &self.label
    }
}
| wooorm/markdown-rs | 1,459 | CommonMark compliant markdown parser in Rust with ASTs and extensions | Rust | wooorm | Titus | |
mdast_util_to_markdown/src/configure.rs | Rust | //! Configuration.
//!
//! JS equivalent: https://github.com/syntax-tree/mdast-util-to-markdown/blob/fd6a508/lib/types.js#L307.
/// Configuration for indent of lists.
#[derive(Clone, Copy)]
pub enum IndentOptions {
    /// Depends on the item and its parent list: uses `IndentOptions::One` if
    /// the item and list are tight and `IndentOptions::Tab` otherwise.
    Mixed,
    /// The size of the bullet plus one space.
    One,
    /// Tab stop.
    Tab,
}
/// Configuration.
///
/// Construct with `Options::default()` and override individual fields with
/// struct-update syntax (e.g. `Options { bullet: '-', ..Options::default() }`).
pub struct Options {
    /// Marker to use for bullets of items in unordered lists (`'*'`, `'+'`, or
    /// `'-'`, default: `'*'`).
    pub bullet: char,
    /// Marker to use for bullets of items in ordered lists (`'.'` or `')'`,
    /// default: `'.'`).
    pub bullet_ordered: char,
    /// Marker to use in certain cases where the primary bullet doesn’t work
    /// (`'*'`, `'+'`, or `'-'`, default: `'-'` when bullet is `'*'`, `'*'`
    /// otherwise).
    pub bullet_other: char,
    /// Whether to add the same number of number signs (`#`) at the end of an
    /// ATX heading as the opening sequence (`bool`, default: `false`).
    pub close_atx: bool,
    /// Marker to use for emphasis (`'*'` or `'_'`, default: `'*'`).
    pub emphasis: char,
    /// Marker to use for fenced code (``'`'`` or `'~'`, default: ``'`'``).
    pub fence: char,
    /// Whether to use fenced code always (`bool`, default: `true`).
    /// The default is to use fenced code if there is a language defined,
    /// if the code is empty,
    /// or if it starts or ends in blank lines.
    pub fences: bool,
    /// Whether to increment the counter of ordered lists items (`bool`,
    /// default: `true`).
    pub increment_list_marker: bool,
    /// How to indent the content of list items (default: `IndentOptions::One`).
    pub list_item_indent: IndentOptions,
    /// Marker to use for titles (`'"'` or `"'"`, default: `'"'`).
    pub quote: char,
    /// Whether to always use resource links (`bool`, default: `false`).
    /// The default is to use autolinks (`<https://example.com>`) when possible
    /// and resource links (`[text](url)`) otherwise.
    pub resource_link: bool,
    /// Marker to use for thematic breaks (`'*'`, `'-'`, or `'_'`, default:
    /// `'*'`).
    pub rule: char,
    /// Number of markers to use for thematic breaks (`u32`, default: `3`, min:
    /// `3`).
    pub rule_repetition: u32,
    /// Whether to add spaces between markers in thematic breaks (`bool`,
    /// default: `false`).
    pub rule_spaces: bool,
    /// Whether to use setext headings when possible (`bool`, default:
    /// `false`).
    /// The default is to always use ATX headings (`# heading`) instead of
    /// setext headings (`heading\n=======`).
    /// Setext headings cannot be used for empty headings or headings with a
    /// rank of three or more.
    pub setext: bool,
    /// Whether to support math (text) with a single dollar (`bool`, default: `true`).
    /// Single dollars work in Pandoc and many other places, but often interfere with “normal”
    /// dollars in text.
    /// If you turn this off, you can still use two or more dollars for text math.
    pub single_dollar_text_math: bool,
    /// Marker to use for strong (`'*'` or `'_'`, default: `'*'`).
    pub strong: char,
    /// Whether to join definitions without a blank line (`bool`, default:
    /// `false`).
    pub tight_definitions: bool,
}
impl Default for Options {
fn default() -> Self {
Self {
bullet: '*',
bullet_ordered: '.',
bullet_other: '-',
close_atx: false,
emphasis: '*',
fence: '`',
fences: true,
increment_list_marker: true,
list_item_indent: IndentOptions::One,
quote: '"',
resource_link: false,
rule: '*',
rule_repetition: 3,
rule_spaces: false,
setext: false,
single_dollar_text_math: true,
strong: '*',
tight_definitions: false,
}
}
}
| wooorm/markdown-rs | 1,459 | CommonMark compliant markdown parser in Rust with ASTs and extensions | Rust | wooorm | Titus | |
mdast_util_to_markdown/src/construct_name.rs | Rust | //! Names of the things being serialized.
//!
//! JS equivalent: https://github.com/syntax-tree/mdast-util-to-markdown/blob/fd6a508/index.d.ts#L18.
/// Name of a construct being serialized, tracked on the state’s stack (via
/// `enter`/`exit`) so handlers know which constructs are currently in scope.
///
/// `Debug` and `Eq` are derived alongside the original `Clone`/`PartialEq`:
/// the enum is fieldless, so both are free and make diagnostics easier.
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum ConstructName {
    /// Whole autolink.
    ///
    /// ```markdown
    /// > | <https://example.com> and <admin@example.com>
    ///     ^^^^^^^^^^^^^^^^^^^^^     ^^^^^^^^^^^^^^^^^^^
    /// ```
    Autolink,
    /// Whole block quote.
    ///
    /// ```markdown
    /// > | > a
    ///     ^^^
    /// > | b
    ///     ^
    /// ```
    Blockquote,
    /// Whole code (fenced).
    ///
    /// ````markdown
    /// > | ```js
    ///     ^^^^^
    /// > | console.log(1)
    ///     ^^^^^^^^^^^^^^
    /// > | ```
    ///     ^^^
    /// ````
    CodeFenced,
    /// Code (fenced) language, when fenced with grave accents.
    ///
    /// ````markdown
    /// > | ```js
    ///        ^^
    ///   | console.log(1)
    ///   | ```
    /// ````
    CodeFencedLangGraveAccent,
    /// Code (fenced) language, when fenced with tildes.
    ///
    /// ````markdown
    /// > | ~~~js
    ///        ^^
    ///   | console.log(1)
    ///   | ~~~
    /// ````
    CodeFencedLangTilde,
    /// Code (fenced) meta string, when fenced with grave accents.
    ///
    /// ````markdown
    /// > | ```js eval
    ///           ^^^^
    ///   | console.log(1)
    ///   | ```
    /// ````
    CodeFencedMetaGraveAccent,
    /// Code (fenced) meta string, when fenced with tildes.
    ///
    /// ````markdown
    /// > | ~~~js eval
    ///           ^^^^
    ///   | console.log(1)
    ///   | ~~~
    /// ````
    CodeFencedMetaTilde,
    /// Whole code (indented).
    ///
    /// ```markdown
    /// ␠␠␠␠console.log(1)
    /// ^^^^^^^^^^^^^^^^^^
    /// ```
    CodeIndented,
    /// Whole definition.
    ///
    /// ```markdown
    /// > | [a]: b "c"
    ///     ^^^^^^^^^^
    /// ```
    Definition,
    /// Destination (literal) (occurs in definition, image, link).
    ///
    /// ```markdown
    /// > | [a]: <b> "c"
    ///          ^^^
    /// > | a ![b](<c> "d") e
    ///            ^^^
    /// ```
    DestinationLiteral,
    /// Destination (raw) (occurs in definition, image, link).
    ///
    /// ```markdown
    /// > | [a]: b "c"
    ///          ^
    /// > | a ![b](c "d") e
    ///            ^
    /// ```
    DestinationRaw,
    /// Emphasis.
    ///
    /// ```markdown
    /// > | *a*
    ///     ^^^
    /// ```
    Emphasis,
    /// Whole heading (atx).
    ///
    /// ```markdown
    /// > | # alpha
    ///     ^^^^^^^
    /// ```
    HeadingAtx,
    /// Whole heading (setext).
    ///
    /// ```markdown
    /// > | alpha
    ///     ^^^^^
    /// > | =====
    ///     ^^^^^
    /// ```
    HeadingSetext,
    /// Whole image.
    ///
    /// ```markdown
    /// > | ![a](b)
    ///     ^^^^^^^
    /// > | ![c]
    ///     ^^^^
    /// ```
    Image,
    /// Whole image reference.
    ///
    /// ```markdown
    /// > | ![a]
    ///     ^^^^
    /// ```
    ImageReference,
    /// Label (occurs in definitions, image reference, image, link reference,
    /// link).
    ///
    /// ```markdown
    /// > | [a]: b "c"
    ///     ^^^
    /// > | a [b] c
    ///       ^^^
    /// > | a ![b][c] d
    ///       ^^^^
    /// > | a [b](c) d
    ///       ^^^
    /// ```
    Label,
    /// Whole link.
    ///
    /// ```markdown
    /// > | [a](b)
    ///     ^^^^^^
    /// > | [c]
    ///     ^^^
    /// ```
    Link,
    /// Whole link reference.
    ///
    /// ```markdown
    /// > | [a]
    ///     ^^^
    /// ```
    LinkReference,
    /// List.
    ///
    /// ```markdown
    /// > | * a
    ///     ^^^
    /// > | 1. b
    ///     ^^^^
    /// ```
    List,
    /// List item.
    ///
    /// ```markdown
    /// > | * a
    ///     ^^^
    /// > | 1. b
    ///     ^^^^
    /// ```
    ListItem,
    /// Math (flow).
    ///
    /// ```markdown
    /// > | $$
    ///     ^^
    /// > | a
    ///     ^
    /// > | $$
    ///     ^^
    /// ```
    MathFlow,
    /// Math (flow) meta flag.
    ///
    /// ```markdown
    /// > | $$a
    ///       ^
    ///   | b
    ///   | $$
    /// ```
    MathFlowMeta,
    /// Paragraph.
    ///
    /// ```markdown
    /// > | a b
    ///     ^^^
    /// > | c.
    ///     ^^
    /// ```
    Paragraph,
    /// Phrasing (occurs in headings, paragraphs, etc).
    ///
    /// ```markdown
    /// > | a
    ///     ^
    /// ```
    Phrasing,
    /// Reference (occurs in image, link).
    ///
    /// ```markdown
    /// > | [a][]
    ///        ^^
    /// ```
    Reference,
    /// Strong.
    ///
    /// ```markdown
    /// > | **a**
    ///     ^^^^^
    /// ```
    Strong,
    /// Title using single quotes (occurs in definition, image, link).
    ///
    /// ```markdown
    /// > | [a](b 'c')
    ///           ^^^
    /// ```
    TitleApostrophe,
    /// Title using double quotes (occurs in definition, image, link).
    ///
    /// ```markdown
    /// > | [a](b "c")
    ///           ^^^
    /// ```
    TitleQuote,
}
| wooorm/markdown-rs | 1,459 | CommonMark compliant markdown parser in Rust with ASTs and extensions | Rust | wooorm | Titus | |
mdast_util_to_markdown/src/handle/blockquote.rs | Rust | //! JS equivalent: https://github.com/syntax-tree/mdast-util-to-markdown/blob/main/lib/handle/blockquote.js
use super::Handle;
use crate::{
construct_name::ConstructName,
state::{Info, State},
};
use alloc::string::String;
use markdown::{
mdast::{Blockquote, Node},
message::Message,
};
impl Handle for Blockquote {
    /// Serialize a block quote.
    ///
    /// Serializes the children as flow content, then prefixes every line with
    /// `>` (plus a space on non-blank lines) via `map`.
    fn handle(
        &self,
        state: &mut State,
        _info: &Info,
        _parent: Option<&Node>,
        node: &Node,
    ) -> Result<alloc::string::String, Message> {
        // Track that we are inside a block quote while serializing children,
        // so nested constructs can escape/adjust accordingly.
        state.enter(ConstructName::Blockquote);
        let value = state.container_flow(node)?;
        let value = state.indent_lines(&value, map);
        state.exit();
        Ok(value)
    }
}
/// Prefix one serialized line with a block quote marker.
///
/// Blank lines get a bare `>`; all other lines get `> `.
fn map(line: &str, _index: usize, blank: bool) -> String {
    let prefix = if blank { ">" } else { "> " };
    let mut out = String::with_capacity(prefix.len() + line.len());
    out.push_str(prefix);
    out.push_str(line);
    out
}
| wooorm/markdown-rs | 1,459 | CommonMark compliant markdown parser in Rust with ASTs and extensions | Rust | wooorm | Titus | |
mdast_util_to_markdown/src/handle/break.rs | Rust | //! JS equivalent: https://github.com/syntax-tree/mdast-util-to-markdown/blob/main/lib/handle/break.js
use super::Handle;
use crate::{
state::{Info, State},
util::pattern_in_scope::pattern_in_scope,
};
use alloc::string::ToString;
use markdown::{
mdast::{Break, Node},
message::Message,
};
impl Handle for Break {
    /// Serialize a hard break.
    ///
    /// Normally emits a backslash plus line ending (`"\\\n"`). When a line
    /// ending is not allowed here — an unsafe pattern with character `'\n'`
    /// whose construct is in scope (setext headings, tables) — a space is
    /// emitted instead, or nothing at all if `info.before` already contains a
    /// space or tab.
    // NOTE(review): assumes `info.before` holds the text just before this
    // node — confirm against `State`’s callers.
    fn handle(
        &self,
        state: &mut State,
        info: &Info,
        _parent: Option<&Node>,
        _node: &Node,
    ) -> Result<alloc::string::String, Message> {
        for pattern in state.r#unsafe.iter() {
            // If we can’t put eols in this construct (setext headings, tables), use a
            // space instead.
            if pattern.character == '\n' && pattern_in_scope(&state.stack, pattern) {
                let space_or_tab = info.before.chars().any(|c| c == '\t' || c == ' ');
                if space_or_tab {
                    // Whitespace already present: emit nothing to avoid doubling it.
                    return Ok("".to_string());
                }
                return Ok(" ".to_string());
            }
        }
        Ok("\\\n".to_string())
    }
}
| wooorm/markdown-rs | 1,459 | CommonMark compliant markdown parser in Rust with ASTs and extensions | Rust | wooorm | Titus | |
mdast_util_to_markdown/src/handle/code.rs | Rust | //! JS equivalent: https://github.com/syntax-tree/mdast-util-to-markdown/blob/main/lib/handle/code.js
use super::Handle;
use crate::{
construct_name::ConstructName,
state::{Info, State},
util::{
check_fence::check_fence, format_code_as_indented::format_code_as_indented,
longest_char_streak::longest_char_streak, safe::SafeConfig,
},
};
use alloc::{
format,
string::{String, ToString},
};
use markdown::{
mdast::{Code, Node},
message::Message,
};
impl Handle for Code {
    /// Serialize a code block.
    ///
    /// Uses indented code when `format_code_as_indented` allows it; otherwise
    /// fenced code with the configured marker, using a fence long enough to
    /// contain the longest marker streak inside the value.
    fn handle(
        &self,
        state: &mut State,
        _info: &Info,
        _parent: Option<&Node>,
        _node: &Node,
    ) -> Result<alloc::string::String, Message> {
        let marker = check_fence(state)?;
        if format_code_as_indented(self, state) {
            state.enter(ConstructName::CodeIndented);
            let value = state.indent_lines(&self.value, map);
            state.exit();
            return Ok(value);
        }
        // The fence must be strictly longer than any streak of the marker in
        // the value, and at least three characters long.
        let sequence = marker
            .to_string()
            .repeat((longest_char_streak(&self.value, marker) + 1).max(3));
        state.enter(ConstructName::CodeFenced);
        let mut value = sequence.clone();
        if let Some(lang) = &self.lang {
            let code_fenced_lang_construct = if marker == '`' {
                ConstructName::CodeFencedLangGraveAccent
            } else {
                ConstructName::CodeFencedLangTilde
            };
            state.enter(code_fenced_lang_construct);
            // Escape the language so it cannot break out of the info string.
            value.push_str(&state.safe(lang, &SafeConfig::new(&value, " ", Some('`'))));
            state.exit();
            // The meta string is only emitted when a language is present.
            if let Some(meta) = &self.meta {
                let code_fenced_meta_construct = if marker == '`' {
                    ConstructName::CodeFencedMetaGraveAccent
                } else {
                    ConstructName::CodeFencedMetaTilde
                };
                state.enter(code_fenced_meta_construct);
                value.push(' ');
                value.push_str(&state.safe(meta, &SafeConfig::new(&value, "\n", Some('`'))));
                state.exit();
            }
        }
        value.push('\n');
        if !self.value.is_empty() {
            value.push_str(&self.value);
            value.push('\n');
        }
        value.push_str(&sequence);
        state.exit();
        Ok(value)
    }
}
/// Indent one line of an indented code block by four spaces; blank lines
/// become empty.
fn map(line: &str, _index: usize, blank: bool) -> String {
    if blank {
        return String::new();
    }
    let mut out = String::with_capacity(4 + line.len());
    out.push_str("    ");
    out.push_str(line);
    out
}
| wooorm/markdown-rs | 1,459 | CommonMark compliant markdown parser in Rust with ASTs and extensions | Rust | wooorm | Titus | |
mdast_util_to_markdown/src/handle/definition.rs | Rust | //! JS equivalent: https://github.com/syntax-tree/mdast-util-to-markdown/blob/main/lib/handle/definition.js
use super::Handle;
use crate::{
construct_name::ConstructName,
state::{Info, State},
util::{
check_quote::check_quote, contains_control_or_whitespace::contains_control_or_whitespace,
safe::SafeConfig,
},
};
use alloc::string::String;
use markdown::{
mdast::{Definition, Node},
message::Message,
};
impl Handle for Definition {
    /// Serialize a definition: `[label]: <url> "title"`.
    fn handle(
        &self,
        state: &mut State,
        _info: &Info,
        _parent: Option<&Node>,
        _node: &Node,
    ) -> Result<alloc::string::String, Message> {
        let quote = check_quote(state)?;
        state.enter(ConstructName::Definition);
        state.enter(ConstructName::Label);
        let mut value = String::from('[');
        value.push_str(&state.safe(
            &state.association(self),
            &SafeConfig::new(&value, "]", None),
        ));
        value.push_str("]: ");
        state.exit();
        // An empty URL, or one containing control characters or whitespace,
        // must be wrapped in angle brackets.
        if self.url.is_empty() || contains_control_or_whitespace(&self.url) {
            state.enter(ConstructName::DestinationLiteral);
            value.push('<');
            value.push_str(&state.safe(&self.url, &SafeConfig::new(&value, ">", None)));
            value.push('>');
        } else {
            state.enter(ConstructName::DestinationRaw);
            // A space follows when a title comes next.
            let after = if self.title.is_some() { " " } else { ")" };
            value.push_str(&state.safe(&self.url, &SafeConfig::new(&value, after, None)));
        }
        state.exit();
        if let Some(title) = &self.title {
            let title_construct_name = if quote == '"' {
                ConstructName::TitleQuote
            } else {
                ConstructName::TitleApostrophe
            };
            state.enter(title_construct_name);
            value.push(' ');
            value.push(quote);
            // A `char` needs at most four bytes in UTF-8.
            let mut before_buffer = [0u8; 4];
            let before = quote.encode_utf8(&mut before_buffer);
            // NOTE(review): the first `SafeConfig` argument is `self.url`
            // rather than `value`, and the quote is passed in the slot that is
            // `after` elsewhere — confirm this matches the JS
            // `state.safe(node.title, {before: value, after: quote})` call.
            value.push_str(&state.safe(title, &SafeConfig::new(&self.url, before, None)));
            value.push(quote);
            state.exit();
        }
        state.exit();
        Ok(value)
    }
}
| wooorm/markdown-rs | 1,459 | CommonMark compliant markdown parser in Rust with ASTs and extensions | Rust | wooorm | Titus | |
mdast_util_to_markdown/src/handle/emphasis.rs | Rust | //! JS equivalent: https://github.com/syntax-tree/mdast-util-to-markdown/blob/main/lib/handle/emphasis.js
use super::Handle;
use crate::{
construct_name::ConstructName,
state::{Info, State},
util::check_emphasis::check_emphasis,
};
use alloc::format;
use markdown::{
mdast::{Emphasis, Node},
message::Message,
};
impl Handle for Emphasis {
    /// Serialize emphasis: the configured marker on each side of the
    /// phrasing children.
    fn handle(
        &self,
        state: &mut State,
        info: &Info,
        _parent: Option<&Node>,
        node: &Node,
    ) -> Result<alloc::string::String, Message> {
        let marker = check_emphasis(state)?;
        state.enter(ConstructName::Emphasis);
        let inner = state.container_phrasing(node, info)?;
        let value = format!("{}{}{}", marker, inner, marker);
        state.exit();
        Ok(value)
    }
}
/// Return the configured emphasis marker (`state.options.emphasis`) that
/// emphasis serialization starts with, without serializing.
pub fn peek_emphasis(state: &State) -> char {
    state.options.emphasis
}
| wooorm/markdown-rs | 1,459 | CommonMark compliant markdown parser in Rust with ASTs and extensions | Rust | wooorm | Titus | |
mdast_util_to_markdown/src/handle/heading.rs | Rust | //! JS equivalent: https://github.com/syntax-tree/mdast-util-to-markdown/blob/main/lib/handle/heading.js
use super::Handle;
use crate::{
construct_name::ConstructName,
state::{Info, State},
util::format_heading_as_setext::format_heading_as_setext,
};
use alloc::format;
use markdown::{
mdast::{Heading, Node},
message::Message,
};
impl Handle for Heading {
    /// Serialize a heading, as setext (`===`/`---` underline) when
    /// `format_heading_as_setext` allows it, otherwise as ATX (`#` prefix).
    fn handle(
        &self,
        state: &mut State,
        _info: &Info,
        _parent: Option<&Node>,
        node: &Node,
    ) -> Result<alloc::string::String, Message> {
        // Markdown only has heading ranks 1 through 6.
        let rank = self.depth.clamp(1, 6);
        if format_heading_as_setext(self, state) {
            state.enter(ConstructName::HeadingSetext);
            state.enter(ConstructName::Phrasing);
            let mut value = state.container_phrasing(node, &Info::new("\n", "\n"))?;
            state.exit();
            state.exit();
            let underline_char = if rank == 1 { "=" } else { "-" };
            // Find where the last line of the content starts, so the
            // underline is only as long as that line.
            let last_line_rank = value
                .rfind('\n')
                .unwrap_or(0)
                .max(value.rfind('\r').unwrap_or(0));
            let last_line_rank = if last_line_rank > 0 {
                last_line_rank + 1
            } else {
                0
            };
            // NOTE(review): the length is in bytes, so non-ASCII content gets
            // a longer underline than its character count — harmless for
            // validity, but confirm parity with the JS (UTF-16) behavior.
            let setext_underline = underline_char.repeat(value.len() - last_line_rank);
            value.push('\n');
            value.push_str(&setext_underline);
            return Ok(value);
        }
        let sequence = "#".repeat(rank as usize);
        state.enter(ConstructName::HeadingAtx);
        state.enter(ConstructName::Phrasing);
        let mut value = state.container_phrasing(node, &Info::new("# ", "\n"))?;
        // A leading space/tab would be eaten when parsing ATX; encode it as a
        // character reference instead.
        if let Some(first_char) = value.chars().nth(0) {
            if first_char == ' ' || first_char == '\t' {
                let hex_code = u32::from(first_char);
                value = format!("&#x{:X};{}", hex_code, &value[1..])
            }
        }
        if value.is_empty() {
            value.push_str(&sequence);
        } else {
            value = format!("{} {}", &sequence, value);
        }
        // `close_atx` appends a closing sequence: `# heading #`.
        if state.options.close_atx {
            value.push(' ');
            value.push_str(&sequence);
        }
        state.exit();
        state.exit();
        Ok(value)
    }
}
| wooorm/markdown-rs | 1,459 | CommonMark compliant markdown parser in Rust with ASTs and extensions | Rust | wooorm | Titus | |
mdast_util_to_markdown/src/handle/html.rs | Rust | //! JS equivalent: https://github.com/syntax-tree/mdast-util-to-markdown/blob/main/lib/handle/html.js
use super::Handle;
use crate::state::{Info, State};
use markdown::{
mdast::{Html, Node},
message::Message,
};
impl Handle for Html {
    /// Serialize raw HTML: the value is emitted verbatim, nothing is escaped.
    fn handle(
        &self,
        _state: &mut State,
        _info: &Info,
        _parent: Option<&Node>,
        _node: &Node,
    ) -> Result<alloc::string::String, Message> {
        let raw = self.value.clone();
        Ok(raw)
    }
}
/// Raw HTML always begins with `<`.
pub fn peek_html() -> char {
    '<'
}
| wooorm/markdown-rs | 1,459 | CommonMark compliant markdown parser in Rust with ASTs and extensions | Rust | wooorm | Titus | |
mdast_util_to_markdown/src/handle/image.rs | Rust | //! JS equivalent: https://github.com/syntax-tree/mdast-util-to-markdown/blob/main/lib/handle/image.js
use super::Handle;
use crate::{
construct_name::ConstructName,
state::{Info, State},
util::{
check_quote::check_quote, contains_control_or_whitespace::contains_control_or_whitespace,
safe::SafeConfig,
},
};
use alloc::string::String;
use markdown::{
mdast::{Image, Node},
message::Message,
};
impl Handle for Image {
    /// Serialize an image: `![alt](url "title")`.
    fn handle(
        &self,
        state: &mut State,
        _info: &Info,
        _parent: Option<&Node>,
        _node: &Node,
    ) -> Result<alloc::string::String, Message> {
        let quote = check_quote(state)?;
        state.enter(ConstructName::Image);
        state.enter(ConstructName::Label);
        let mut value = String::new();
        value.push_str("![");
        value.push_str(&state.safe(&self.alt, &SafeConfig::new(value.as_str(), "]", None)));
        value.push_str("](");
        state.exit();
        // An empty URL with a title, or a URL containing control characters
        // or whitespace, must be enclosed in angle brackets.
        if self.url.is_empty() && self.title.is_some() || contains_control_or_whitespace(&self.url)
        {
            state.enter(ConstructName::DestinationLiteral);
            value.push('<');
            value.push_str(&state.safe(&self.url, &SafeConfig::new(&value, ">", None)));
            value.push('>');
        } else {
            state.enter(ConstructName::DestinationRaw);
            // A space separates URL and title; `)` closes the resource.
            let after = if self.title.is_some() { " " } else { ")" };
            value.push_str(&state.safe(&self.url, &SafeConfig::new(&value, after, None)));
        }
        state.exit();
        if let Some(title) = &self.title {
            let title_construct_name = if quote == '"' {
                ConstructName::TitleQuote
            } else {
                ConstructName::TitleApostrophe
            };
            state.enter(title_construct_name);
            value.push(' ');
            value.push(quote);
            // A `char` needs at most four bytes in UTF-8.
            let mut before_buffer = [0u8; 4];
            let before = quote.encode_utf8(&mut before_buffer);
            // NOTE(review): first `SafeConfig` argument is `self.url` rather
            // than `value` — confirm against the JS equivalent (same pattern
            // as `definition.rs` and `link.rs`).
            value.push_str(&state.safe(title, &SafeConfig::new(&self.url, before, None)));
            value.push(quote);
            state.exit();
        }
        value.push(')');
        state.exit();
        Ok(value)
    }
}
/// Images always begin with `!`.
pub fn peek_image() -> char {
    '!'
}
| wooorm/markdown-rs | 1,459 | CommonMark compliant markdown parser in Rust with ASTs and extensions | Rust | wooorm | Titus | |
mdast_util_to_markdown/src/handle/image_reference.rs | Rust | //! JS equivalent: https://github.com/syntax-tree/mdast-util-to-markdown/blob/main/lib/handle/image-reference.js
use super::Handle;
use crate::{
construct_name::ConstructName,
state::{Info, State},
util::safe::SafeConfig,
};
use alloc::string::String;
use core::mem;
use markdown::{
mdast::{ImageReference, Node, ReferenceKind},
message::Message,
};
impl Handle for ImageReference {
    /// Serialize an image reference: `![alt][ref]`, `![alt][]`, or `![alt]`.
    fn handle(
        &self,
        state: &mut State,
        _info: &Info,
        _parent: Option<&Node>,
        _node: &Node,
    ) -> Result<alloc::string::String, Message> {
        state.enter(ConstructName::ImageReference);
        state.enter(ConstructName::Label);
        let mut value = String::from("![");
        let alt = state.safe(&self.alt, &SafeConfig::new(&value, "]", None));
        value.push_str(&alt);
        value.push_str("][");
        state.exit();
        // The reference is escaped in a clean scope, unaffected by enclosing
        // constructs: save and restore the construct stack around it.
        let old_stack = mem::take(&mut state.stack);
        state.enter(ConstructName::Reference);
        let reference = state.safe(
            &state.association(self),
            &SafeConfig::new(&value, "]", None),
        );
        state.exit();
        state.stack = old_stack;
        state.exit();
        // Full form when required, or when collapsing would be ambiguous
        // (empty alt, or alt differing from the reference).
        if matches!(self.reference_kind, ReferenceKind::Full) || alt.is_empty() || alt != reference
        {
            value.push_str(&reference);
            value.push(']');
        } else if matches!(self.reference_kind, ReferenceKind::Shortcut) {
            // Shortcut (`![alt]`): drop the `[` just opened.
            value.pop();
        } else {
            // Collapsed (`![alt][]`).
            value.push(']');
        }
        Ok(value)
    }
}
/// Image references always begin with `!`.
pub fn peek_image_reference() -> char {
    '!'
}
| wooorm/markdown-rs | 1,459 | CommonMark compliant markdown parser in Rust with ASTs and extensions | Rust | wooorm | Titus | |
mdast_util_to_markdown/src/handle/inline_code.rs | Rust | //! JS equivalent: https://github.com/syntax-tree/mdast-util-to-markdown/blob/main/lib/handle/inline-code.js
use super::Handle;
use crate::state::{Info, State};
use alloc::{format, string::String};
use markdown::{
mdast::{InlineCode, Node},
message::Message,
};
use regex::Regex;
impl Handle for InlineCode {
    /// Serialize inline code, growing the backtick fence until that length
    /// does not occur inside the value, and padding with spaces when needed.
    fn handle(
        &self,
        state: &mut State,
        _info: &Info,
        _parent: Option<&Node>,
        _node: &Node,
    ) -> Result<alloc::string::String, Message> {
        let mut value = self.value.clone();
        let mut sequence = String::from('`');
        // Grow the fence while a streak of exactly its length occurs inside
        // the value.
        let mut grave_accent_match = Regex::new(&format!(r"(^|[^`]){}([^`]|$)", sequence)).unwrap();
        while grave_accent_match.is_match(&value) {
            sequence.push('`');
            grave_accent_match = Regex::new(&format!(r"(^|[^`]){}([^`]|$)", sequence)).unwrap();
        }
        // True when the value has at least one non-whitespace character.
        let no_whitespaces = !value.chars().all(char::is_whitespace);
        let starts_with_whitespace = value.starts_with(char::is_whitespace);
        let ends_with_whitespace = value.ends_with(char::is_whitespace);
        let starts_with_tick = value.starts_with('`');
        let ends_with_tick = value.ends_with('`');
        // Pad with a space on both sides when the value borders whitespace at
        // both ends or a backtick at either end.
        if no_whitespaces
            && ((starts_with_whitespace && ends_with_whitespace)
                || starts_with_tick
                || ends_with_tick)
        {
            value = format!("{}{}{}", ' ', value, ' ');
        }
        // Replace matches of at-break unsafe patterns (line endings) with a
        // space, because inline code cannot safely contain them everywhere.
        for pattern in &mut state.r#unsafe {
            if !pattern.at_break {
                continue;
            }
            State::compile_pattern(pattern);
            if let Some(regex) = &pattern.compiled {
                while let Some(m) = regex.find(&value) {
                    let position = m.start();
                    // NOTE(review): `value[position..m.len()]` looks like it
                    // should be a one-character slice at `position`
                    // (`position..position + 1`) for this CR/LF collapse —
                    // confirm; as written it can slice a wrong range.
                    let position = if position > 0
                        && &value[position..m.len()] == "\n"
                        && &value[position - 1..position] == "\r"
                    {
                        position - 1
                    } else {
                        position
                    };
                    value.replace_range(position..m.start() + 1, " ");
                }
            }
        }
        Ok(format!("{}{}{}", sequence, value, sequence))
    }
}
/// Inline code always begins with a grave accent.
pub fn peek_inline_code() -> char {
    '`'
}
| wooorm/markdown-rs | 1,459 | CommonMark compliant markdown parser in Rust with ASTs and extensions | Rust | wooorm | Titus | |
mdast_util_to_markdown/src/handle/inline_math.rs | Rust | //! JS equivalent: https://github.com/syntax-tree/mdast-util-math/blob/main/lib/index.js#L241
use super::Handle;
use crate::state::{Info, State};
use alloc::format;
use markdown::{
mdast::{InlineMath, Node},
message::Message,
};
use regex::Regex;
impl Handle for InlineMath {
    /// Serialize inline math, growing the dollar fence until that length does
    /// not occur inside the value.
    fn handle(
        &self,
        state: &mut State,
        _info: &Info,
        _parent: Option<&Node>,
        _node: &Node,
    ) -> Result<alloc::string::String, Message> {
        // At least two dollars, unless single-dollar math is enabled.
        let mut size: usize = if !state.options.single_dollar_text_math {
            2
        } else {
            1
        };
        let pattern = format!("(^|[^$]){}([^$]|$)", "\\$".repeat(size));
        let mut dollar_sign_match = Regex::new(&pattern).unwrap();
        while dollar_sign_match.is_match(&self.value) {
            size += 1;
            let pattern = format!("(^|[^$]){}([^$]|$)", "\\$".repeat(size));
            dollar_sign_match = Regex::new(&pattern).unwrap();
        }
        let sequence = "$".repeat(size);
        // True when the value has at least one non-whitespace character.
        let no_whitespaces = !self.value.chars().all(char::is_whitespace);
        let starts_with_whitespace = self.value.starts_with(char::is_whitespace);
        let ends_with_whitespace = self.value.ends_with(char::is_whitespace);
        let starts_with_dollar = self.value.starts_with('$');
        let ends_with_dollar = self.value.ends_with('$');
        let mut value = self.value.clone();
        // Pad with a space on both sides when the value borders whitespace at
        // both ends or a dollar sign at either end.
        if no_whitespaces
            && ((starts_with_whitespace && ends_with_whitespace)
                || starts_with_dollar
                || ends_with_dollar)
        {
            value = format!(" {} ", value);
        }
        // Replace matches of at-break unsafe patterns (line endings) with a
        // space; same logic as `inline_code.rs`.
        for pattern in &mut state.r#unsafe {
            if !pattern.at_break {
                continue;
            }
            State::compile_pattern(pattern);
            if let Some(regex) = &pattern.compiled {
                while let Some(m) = regex.find(&value) {
                    let position = m.start();
                    // NOTE(review): `value[position..m.len()]` looks like it
                    // should be `position..position + 1` — confirm (same
                    // suspicious slice as `inline_code.rs`).
                    let position = if position > 0
                        && &value[position..m.len()] == "\n"
                        && &value[position - 1..position] == "\r"
                    {
                        position - 1
                    } else {
                        position
                    };
                    value.replace_range(position..m.start() + 1, " ");
                }
            }
        }
        Ok(format!("{}{}{}", sequence, value, sequence))
    }
}
/// Inline math always begins with a dollar sign.
pub fn peek_inline_math() -> char {
    '$'
}
| wooorm/markdown-rs | 1,459 | CommonMark compliant markdown parser in Rust with ASTs and extensions | Rust | wooorm | Titus | |
mdast_util_to_markdown/src/handle/link.rs | Rust | //! JS equivalent: https://github.com/syntax-tree/mdast-util-to-markdown/blob/main/lib/handle/link.js
use super::Handle;
use crate::{
construct_name::ConstructName,
state::{Info, State},
util::{
check_quote::check_quote, contains_control_or_whitespace::contains_control_or_whitespace,
format_link_as_auto_link::format_link_as_auto_link, safe::SafeConfig,
},
};
use alloc::string::String;
use core::mem;
use markdown::{
mdast::{Link, Node},
message::Message,
};
impl Handle for Link {
    /// Serialize a link: `<url>` when it can be an autolink, otherwise
    /// `[text](url "title")`.
    fn handle(
        &self,
        state: &mut State,
        _info: &Info,
        _parent: Option<&Node>,
        node: &Node,
    ) -> Result<alloc::string::String, Message> {
        let quote = check_quote(state)?;
        if format_link_as_auto_link(self, node, state) {
            // Autolinks are serialized in a clean scope: save and restore the
            // construct stack around them.
            let old_stack = mem::take(&mut state.stack);
            state.enter(ConstructName::Autolink);
            let mut value = String::from("<");
            value.push_str(&state.container_phrasing(node, &Info::new(&value, ">"))?);
            value.push('>');
            state.exit();
            state.stack = old_stack;
            return Ok(value);
        }
        state.enter(ConstructName::Link);
        state.enter(ConstructName::Label);
        let mut value = String::from("[");
        value.push_str(&state.container_phrasing(node, &Info::new(&value, "]("))?);
        value.push_str("](");
        state.exit();
        // An empty URL with a title, or a URL containing control characters
        // or whitespace, must be enclosed in angle brackets.
        if self.url.is_empty() && self.title.is_some() || contains_control_or_whitespace(&self.url)
        {
            state.enter(ConstructName::DestinationLiteral);
            value.push('<');
            value.push_str(&state.safe(&self.url, &SafeConfig::new(&value, ">", None)));
            value.push('>');
        } else {
            state.enter(ConstructName::DestinationRaw);
            // A space separates URL and title; `)` closes the resource.
            let after = if self.title.is_some() { " " } else { ")" };
            value.push_str(&state.safe(&self.url, &SafeConfig::new(&value, after, None)))
        }
        state.exit();
        if let Some(title) = &self.title {
            let title_construct_name = if quote == '"' {
                ConstructName::TitleQuote
            } else {
                ConstructName::TitleApostrophe
            };
            state.enter(title_construct_name);
            value.push(' ');
            value.push(quote);
            // A `char` needs at most four bytes in UTF-8.
            let mut before_buffer = [0u8; 4];
            let before = quote.encode_utf8(&mut before_buffer);
            // NOTE(review): first `SafeConfig` argument is `self.url` rather
            // than `value` — confirm against the JS equivalent.
            value.push_str(&state.safe(title, &SafeConfig::new(&self.url, before, None)));
            value.push(quote);
            state.exit();
        }
        value.push(')');
        state.exit();
        Ok(value)
    }
}
/// First character reported for a serialized link.
///
/// NOTE(review): the JS equivalent (`handle/link.js`) returns `'<'` for
/// autolinks — the actual first character of `<url>` — whereas `'>'` is
/// returned here. Confirm whether that is intentional.
pub fn peek_link(link: &Link, node: &Node, state: &State) -> char {
    if format_link_as_auto_link(link, node, state) {
        '>'
    } else {
        '['
    }
}
| wooorm/markdown-rs | 1,459 | CommonMark compliant markdown parser in Rust with ASTs and extensions | Rust | wooorm | Titus | |
mdast_util_to_markdown/src/handle/link_reference.rs | Rust | //! JS equivalent: https://github.com/syntax-tree/mdast-util-to-markdown/blob/main/lib/handle/link-reference.js
use super::Handle;
use crate::{
construct_name::ConstructName,
state::{Info, State},
util::safe::SafeConfig,
};
use alloc::string::String;
use core::mem;
use markdown::{
mdast::{LinkReference, Node, ReferenceKind},
message::Message,
};
impl Handle for LinkReference {
    /// Serialize a link reference: `[text][ref]`, `[text][]`, or `[text]`.
    fn handle(
        &self,
        state: &mut State,
        _info: &Info,
        _parent: Option<&Node>,
        node: &Node,
    ) -> Result<alloc::string::String, Message> {
        state.enter(ConstructName::LinkReference);
        state.enter(ConstructName::Label);
        let mut value = String::from("[");
        let text = state.container_phrasing(node, &Info::new(&value, "]"))?;
        value.push_str(&text);
        value.push_str("][");
        state.exit();
        // The reference is escaped in a clean scope: save and restore the
        // construct stack around it.
        let old_stack = mem::take(&mut state.stack);
        state.enter(ConstructName::Reference);
        let reference = state.safe(
            &state.association(self),
            &SafeConfig::new(&value, "]", None),
        );
        state.exit();
        state.stack = old_stack;
        state.exit();
        // Full form when required, or when collapsing would be ambiguous
        // (empty text, or text differing from the reference).
        if matches!(self.reference_kind, ReferenceKind::Full)
            || text.is_empty()
            || text != reference
        {
            value.push_str(&reference);
            value.push(']');
        } else if matches!(self.reference_kind, ReferenceKind::Shortcut) {
            // Shortcut (`[text]`): drop the `[` just opened.
            value.pop();
        } else {
            // Collapsed (`[text][]`).
            value.push(']');
        }
        Ok(value)
    }
}
/// Link references always begin with `[`.
pub fn peek_link_reference() -> char {
    '['
}
| wooorm/markdown-rs | 1,459 | CommonMark compliant markdown parser in Rust with ASTs and extensions | Rust | wooorm | Titus | |
mdast_util_to_markdown/src/handle/list.rs | Rust | //! JS equivalent: https://github.com/syntax-tree/mdast-util-to-markdown/blob/main/lib/handle/list.js
use super::Handle;
use crate::{
construct_name::ConstructName,
state::{Info, State},
util::{
check_bullet::check_bullet, check_bullet_ordered::check_bullet_ordered,
check_bullet_other::check_bullet_other, check_rule::check_rule,
},
};
use markdown::{
mdast::{List, Node},
message::Message,
};
impl Handle for List {
    /// Serialize a list, choosing a bullet/marker and switching to the
    /// alternative marker when the preferred one would be ambiguous.
    fn handle(
        &self,
        state: &mut State,
        _info: &Info,
        _parent: Option<&Node>,
        node: &Node,
    ) -> Result<alloc::string::String, Message> {
        state.enter(ConstructName::List);
        let bullet_current = state.bullet_current;
        let mut bullet = if self.ordered {
            check_bullet_ordered(state)?
        } else {
            check_bullet(state)?
        };
        // The fallback marker: the other ordered delimiter, or the configured
        // "other" unordered bullet.
        let bullet_other = if self.ordered {
            if bullet == '.' {
                ')'
            } else {
                '.'
            }
        } else {
            check_bullet_other(state)?
        };
        // Alternate with the marker of the previously serialized list so two
        // adjacent lists don't merge.
        let mut use_different_marker = false;
        if let Some(bullet_last_used) = state.bullet_last_used {
            use_different_marker = bullet == bullet_last_used;
        }
        if !self.ordered {
            let is_valid_bullet = bullet == '*' || bullet == '-';
            let is_within_bounds = state.stack.len() >= 4 && state.index_stack.len() >= 3;
            let first_list_item_has_no_children = !self.children.is_empty()
                && self.children[0]
                    .children()
                    .map(|inner| inner.is_empty())
                    .expect("There's at least one list item.");
            // An empty first item of a list that is itself the first content
            // of a list item (two levels deep) would be ambiguous with the
            // current `*`/`-` marker — use the other one.
            if is_valid_bullet
                && is_within_bounds
                && first_list_item_has_no_children
                && state.stack[state.stack.len() - 1] == ConstructName::List
                && state.stack[state.stack.len() - 2] == ConstructName::ListItem
                && state.stack[state.stack.len() - 3] == ConstructName::List
                && state.stack[state.stack.len() - 4] == ConstructName::ListItem
                && state.index_stack[state.index_stack.len() - 1] == 0
                && state.index_stack[state.index_stack.len() - 2] == 0
                && state.index_stack[state.index_stack.len() - 3] == 0
            {
                use_different_marker = true;
            }
            // If the bullet equals the thematic-break marker and some item
            // starts with a thematic break, the bullet must change too.
            if check_rule(state)? == bullet {
                for child in self.children.iter() {
                    if let Some(child_children) = child.children() {
                        if !child_children.is_empty()
                            && matches!(child, Node::ListItem(_))
                            && matches!(child_children[0], Node::ThematicBreak(_))
                        {
                            use_different_marker = true;
                            break;
                        }
                    }
                }
            }
        }
        if use_different_marker {
            bullet = bullet_other;
        }
        // Expose the chosen bullet to `ListItem::handle`, then restore.
        state.bullet_current = Some(bullet);
        let value = state.container_flow(node)?;
        state.bullet_last_used = Some(bullet);
        state.bullet_current = bullet_current;
        state.exit();
        Ok(value)
    }
}
| wooorm/markdown-rs | 1,459 | CommonMark compliant markdown parser in Rust with ASTs and extensions | Rust | wooorm | Titus | |
mdast_util_to_markdown/src/handle/list_item.rs | Rust | //! JS equivalent: https://github.com/syntax-tree/mdast-util-to-markdown/blob/main/lib/handle/list-item.js
use super::Handle;
use crate::{
configure::IndentOptions,
construct_name::ConstructName,
state::{Info, State},
util::check_bullet::check_bullet,
};
use alloc::{
format,
string::{String, ToString},
};
use markdown::{
mdast::{ListItem, Node},
message::Message,
};
impl Handle for ListItem {
    /// Serialize a list item: its flow content, indented past the bullet.
    fn handle(
        &self,
        state: &mut State,
        _info: &Info,
        parent: Option<&Node>,
        node: &Node,
    ) -> Result<alloc::string::String, Message> {
        let list_item_indent = state.options.list_item_indent;
        // Use the bullet chosen by the enclosing list, or the configured one.
        let mut bullet = state
            .bullet_current
            .unwrap_or(check_bullet(state)?)
            .to_string();
        // Ordered lists prepend the item number before the delimiter, counted
        // from `list.start` (default 1), incremented per item when enabled.
        if let Some(Node::List(list)) = parent {
            if list.ordered {
                let bullet_number = if let Some(start) = list.start {
                    start as usize
                } else {
                    1
                };
                if state.options.increment_list_marker {
                    if let Some(position_node) = list.children.iter().position(|x| *x == *node) {
                        bullet = format!("{}{}", bullet_number + position_node, bullet);
                    }
                } else {
                    bullet = format!("{}{}", bullet_number, bullet);
                }
            }
        }
        // Indent: marker width plus one space, possibly rounded up to a tab
        // stop depending on `list_item_indent`.
        let mut size = bullet.len() + 1;
        let should_compute_size = match list_item_indent {
            IndentOptions::Mixed => {
                // Mixed: tab-align only spread lists/items.
                if let Some(Node::List(list)) = parent {
                    list.spread || self.spread
                } else {
                    self.spread
                }
            }
            IndentOptions::Tab => true,
            _ => false,
        };
        if should_compute_size {
            size = compute_size(size);
        }
        state.enter(ConstructName::ListItem);
        let value = state.container_flow(node)?;
        // First line: bullet plus padding. Continuation lines: `size` spaces.
        // Blank lines: no indent at all.
        let value = state.indent_lines(&value, |line, index, blank| {
            if index > 0 {
                if blank {
                    String::from(line)
                } else {
                    let blank = " ".repeat(size);
                    let mut result = String::with_capacity(blank.len() + line.len());
                    result.push_str(&blank);
                    result.push_str(line);
                    result
                }
            } else if blank {
                let mut result = String::with_capacity(bullet.len() + line.len());
                result.push_str(&bullet);
                result.push_str(line);
                result
            } else {
                // size - bullet.len() will never panic because size > bullet.len() always.
                let blank = " ".repeat(size - bullet.len());
                let mut result = String::with_capacity(blank.len() + line.len() + bullet.len());
                result.push_str(&bullet);
                result.push_str(&blank);
                result.push_str(line);
                result
            }
        });
        state.exit();
        Ok(value)
    }
}
/// Round `a` up to the next multiple of four (the tab size used for
/// list-item indentation).
fn compute_size(a: usize) -> usize {
    let whole_tabs = a.div_ceil(4);
    whole_tabs * 4
}
| wooorm/markdown-rs | 1,459 | CommonMark compliant markdown parser in Rust with ASTs and extensions | Rust | wooorm | Titus | |
mdast_util_to_markdown/src/handle/math.rs | Rust | //! JS equivalent: https://github.com/syntax-tree/mdast-util-math/blob/main/lib/index.js#L204
use super::Handle;
use crate::{
construct_name::ConstructName,
state::{Info, State},
util::{longest_char_streak::longest_char_streak, safe::SafeConfig},
};
use alloc::string::String;
use markdown::{
mdast::{Math, Node},
message::Message,
};
impl Handle for Math {
    /// Serialize flow math as a dollar-fenced block, with a fence one longer
    /// than the longest `$` streak in the value (minimum two).
    fn handle(
        &self,
        state: &mut State,
        _info: &Info,
        _parent: Option<&Node>,
        _node: &Node,
    ) -> Result<alloc::string::String, Message> {
        let fence_len = (longest_char_streak(&self.value, '$') + 1).max(2);
        let sequence = "$".repeat(fence_len);
        state.enter(ConstructName::MathFlow);
        let mut value = sequence.clone();
        if let Some(meta) = &self.meta {
            state.enter(ConstructName::MathFlowMeta);
            let safe_meta = state.safe(meta, &SafeConfig::new(&value, "\n", Some('$')));
            value.push_str(&safe_meta);
            state.exit();
        }
        value.push('\n');
        if !self.value.is_empty() {
            value.push_str(&self.value);
            value.push('\n');
        }
        // Closing fence.
        value.push_str(&sequence);
        state.exit();
        Ok(value)
    }
}
| wooorm/markdown-rs | 1,459 | CommonMark compliant markdown parser in Rust with ASTs and extensions | Rust | wooorm | Titus | |
mdast_util_to_markdown/src/handle/mod.rs | Rust | use crate::{state::Info, State};
use alloc::string::String;
use markdown::{mdast::Node, message::Message};
mod blockquote;
mod r#break;
mod code;
mod definition;
pub mod emphasis;
mod heading;
pub mod html;
pub mod image;
pub mod image_reference;
pub mod inline_code;
pub mod inline_math;
pub mod link;
pub mod link_reference;
mod list;
mod list_item;
mod math;
mod paragraph;
mod root;
pub mod strong;
mod text;
mod thematic_break;
/// Serialize one kind of mdast node to markdown.
pub trait Handle {
    /// Serialize this node to markdown, given the serialization `state`, the
    /// surrounding text `info`, the optional `parent`, and `node` (the same
    /// node as `self`, wrapped as `Node` — as passed by the impls in this
    /// module).
    fn handle(
        &self,
        state: &mut State,
        info: &Info,
        parent: Option<&Node>,
        node: &Node,
    ) -> Result<String, Message>;
}
| wooorm/markdown-rs | 1,459 | CommonMark compliant markdown parser in Rust with ASTs and extensions | Rust | wooorm | Titus | |
mdast_util_to_markdown/src/handle/paragraph.rs | Rust | //! JS equivalent: https://github.com/syntax-tree/mdast-util-to-markdown/blob/main/lib/handle/paragraph.js
use super::Handle;
use crate::{
construct_name::ConstructName,
state::{Info, State},
};
use markdown::{
mdast::{Node, Paragraph},
message::Message,
};
impl Handle for Paragraph {
    /// Serialize a paragraph: its phrasing children, wrapped in the
    /// `Paragraph` and `Phrasing` constructs.
    fn handle(
        &self,
        state: &mut State,
        info: &Info,
        _parent: Option<&Node>,
        node: &Node,
    ) -> Result<alloc::string::String, Message> {
        state.enter(ConstructName::Paragraph);
        state.enter(ConstructName::Phrasing);
        let text = state.container_phrasing(node, info)?;
        state.exit();
        state.exit();
        Ok(text)
    }
}
| wooorm/markdown-rs | 1,459 | CommonMark compliant markdown parser in Rust with ASTs and extensions | Rust | wooorm | Titus | |
mdast_util_to_markdown/src/handle/root.rs | Rust | //! JS equivalent: https://github.com/syntax-tree/mdast-util-to-markdown/blob/main/lib/handle/root.js
use super::Handle;
use crate::state::{Info, State};
use alloc::string::String;
use markdown::{
mdast::{Node, Root},
message::Message,
};
impl Handle for Root {
    /// Serialize the root: as phrasing when any direct child is phrasing
    /// content, otherwise as flow content.
    fn handle(
        &self,
        state: &mut State,
        info: &Info,
        _parent: Option<&Node>,
        node: &Node,
    ) -> Result<String, Message> {
        if self.children.iter().any(phrasing) {
            state.container_phrasing(node, info)
        } else {
            state.container_flow(node)
        }
    }
}
// JS: <https://github.com/syntax-tree/mdast-util-phrasing>.
/// Whether `child` is phrasing content.
///
/// Note: `html` nodes are ambiguous and are treated as non-phrasing here.
fn phrasing(child: &Node) -> bool {
    match child {
        Node::Break(_)
        | Node::Emphasis(_)
        | Node::Image(_)
        | Node::ImageReference(_)
        | Node::InlineCode(_)
        | Node::InlineMath(_)
        | Node::Link(_)
        | Node::LinkReference(_)
        | Node::Strong(_)
        | Node::Text(_) => true,
        _ => false,
    }
}
| wooorm/markdown-rs | 1,459 | CommonMark compliant markdown parser in Rust with ASTs and extensions | Rust | wooorm | Titus | |
mdast_util_to_markdown/src/handle/strong.rs | Rust | //! JS equivalent: https://github.com/syntax-tree/mdast-util-to-markdown/blob/main/lib/handle/strong.js
use super::Handle;
use crate::{
construct_name::ConstructName,
state::{Info, State},
util::check_strong::check_strong,
};
use alloc::format;
use markdown::{
mdast::{Node, Strong},
message::Message,
};
impl Handle for Strong {
    /// Serialize strong emphasis: the marker doubled on each side of the
    /// phrasing children.
    fn handle(
        &self,
        state: &mut State,
        info: &Info,
        _parent: Option<&Node>,
        node: &Node,
    ) -> Result<alloc::string::String, Message> {
        let marker = check_strong(state)?;
        state.enter(ConstructName::Strong);
        let inner = state.container_phrasing(node, info)?;
        let value = format!("{0}{0}{1}{0}{0}", marker, inner);
        state.exit();
        Ok(value)
    }
}
/// Return the configured strong marker (`state.options.strong`) that strong
/// serialization starts with, without serializing.
pub fn peek_strong(state: &State) -> char {
    state.options.strong
}
| wooorm/markdown-rs | 1,459 | CommonMark compliant markdown parser in Rust with ASTs and extensions | Rust | wooorm | Titus | |
mdast_util_to_markdown/src/handle/text.rs | Rust | //! JS equivalent: https://github.com/syntax-tree/mdast-util-to-markdown/blob/main/lib/handle/text.js
use super::Handle;
use crate::{
state::{Info, State},
util::safe::SafeConfig,
};
use markdown::{
mdast::{Node, Text},
message::Message,
};
impl Handle for Text {
    /// Serialize literal text, escaping characters that are unsafe in the
    /// context described by `info`.
    fn handle(
        &self,
        state: &mut State,
        info: &Info,
        _parent: Option<&Node>,
        _node: &Node,
    ) -> Result<alloc::string::String, Message> {
        let config = SafeConfig::new(info.before, info.after, None);
        Ok(state.safe(&self.value, &config))
    }
}
| wooorm/markdown-rs | 1,459 | CommonMark compliant markdown parser in Rust with ASTs and extensions | Rust | wooorm | Titus | |
mdast_util_to_markdown/src/handle/thematic_break.rs | Rust | //! JS equivalent: https://github.com/syntax-tree/mdast-util-to-markdown/blob/main/lib/handle/thematic-break.js
use super::Handle;
use crate::{
state::{Info, State},
util::{check_rule::check_rule, check_rule_repetition::check_rule_repetition},
};
use alloc::format;
use markdown::{
mdast::{Node, ThematicBreak},
message::Message,
};
impl Handle for ThematicBreak {
    /// Serialize a thematic break: the rule marker repeated, optionally
    /// separated by spaces (`rule_spaces`), with no trailing space.
    fn handle(
        &self,
        state: &mut State,
        _info: &Info,
        _parent: Option<&Node>,
        _node: &Node,
    ) -> Result<alloc::string::String, Message> {
        let marker = check_rule(state)?;
        let repetition = check_rule_repetition(state)? as usize;
        let separator = if state.options.rule_spaces { " " } else { "" };
        let mut value = format!("{}{}", marker, separator).repeat(repetition);
        if state.options.rule_spaces {
            // Drop the space after the final marker.
            value.pop();
        }
        Ok(value)
    }
}
| wooorm/markdown-rs | 1,459 | CommonMark compliant markdown parser in Rust with ASTs and extensions | Rust | wooorm | Titus | |
mdast_util_to_markdown/src/lib.rs | Rust | //! API.
//!
//! JS equivalent: https://github.com/syntax-tree/mdast-util-to-markdown/blob/main/lib/index.js.
#![no_std]
use alloc::string::String;
pub use configure::{IndentOptions, Options};
use markdown::{mdast::Node, message::Message};
use state::{Info, State};
extern crate alloc;
mod association;
mod configure;
mod construct_name;
mod handle;
mod state;
mod r#unsafe;
mod util;
/// Turn an mdast syntax tree into markdown.
///
/// Convenience wrapper around [`to_markdown_with_options`] with
/// `Options::default()`.
pub fn to_markdown(tree: &Node) -> Result<String, Message> {
    to_markdown_with_options(tree, &Options::default())
}
/// Turn an mdast syntax tree, with options, into markdown.
///
/// The result is guaranteed to end with a line ending unless it is empty.
pub fn to_markdown_with_options(tree: &Node, options: &Options) -> Result<String, Message> {
    let mut state = State::new(options);
    let mut result = state.handle(tree, &Info::new("\n", "\n"), None)?;
    // Append a final line ending when the (non-empty) output lacks one.
    match result.chars().last() {
        None | Some('\n') | Some('\r') => {}
        Some(_) => result.push('\n'),
    }
    Ok(result)
}
| wooorm/markdown-rs | 1,459 | CommonMark compliant markdown parser in Rust with ASTs and extensions | Rust | wooorm | Titus | |
mdast_util_to_markdown/src/state.rs | Rust | //! State.
//!
//! JS equivalent: https://github.com/syntax-tree/mdast-util-to-markdown/blob/fd6a508/lib/types.js#L195.
use crate::{
association::Association,
construct_name::ConstructName,
handle::{
emphasis::peek_emphasis, html::peek_html, image::peek_image,
image_reference::peek_image_reference, inline_code::peek_inline_code,
inline_math::peek_inline_math, link::peek_link, link_reference::peek_link_reference,
strong::peek_strong, Handle,
},
r#unsafe::Unsafe,
util::{
format_code_as_indented::format_code_as_indented,
format_heading_as_setext::format_heading_as_setext,
pattern_in_scope::pattern_in_scope,
safe::{escape_backslashes, EscapeInfos, SafeConfig},
},
Options,
};
use alloc::{
boxed::Box,
collections::BTreeMap,
format,
string::{String, ToString},
vec::Vec,
};
use markdown::{mdast::Node, message::Message};
use regex::{Captures, Regex, RegexBuilder};
/// The text immediately around the node being serialized, used to decide
/// what needs escaping (see `Text::handle` and `Break::handle`).
pub struct Info<'a> {
    /// Characters after the node.
    pub after: &'a str,
    /// Characters before the node.
    pub before: &'a str,
}
#[derive(Debug)]
/// Different ways to join two (container, flow) nodes.
enum Join {
    /// Join the two nodes with `1` blank line.
    Break,
    /// Join the two nodes with an HTML comment (presumably `<!---->`; the
    /// emission site is outside this view — confirm).
    HtmlComment,
    /// Join the two nodes with `d` blank lines.
    Lines(usize),
}
/// Serialization state threaded through all handlers.
pub struct State<'a> {
    /// Marker used by the list currently being serialized (set by
    /// `List::handle`, read by `ListItem::handle`).
    pub bullet_current: Option<char>,
    /// Marker used by the previously serialized list, so adjacent lists can
    /// alternate markers.
    pub bullet_last_used: Option<char>,
    /// Child indices at each container depth currently being serialized
    /// (inspected in `List::handle`) — exact invariant maintained elsewhere;
    /// confirm against `container_flow`/`container_phrasing`.
    pub index_stack: Vec<usize>,
    /// User-provided serialization options.
    pub options: &'a Options,
    /// Stack of constructs currently being serialized (innermost last).
    pub stack: Vec<ConstructName>,
    /// Patterns that are unsafe to emit verbatim in certain contexts.
    pub r#unsafe: Vec<Unsafe<'a>>,
}
impl<'a> Info<'a> {
    /// Create info from the text `before` and `after` the current node.
    pub fn new(before: &'a str, after: &'a str) -> Self {
        Info { after, before }
    }
}
impl<'a> State<'a> {
    /// JS equivalent: <https://github.com/syntax-tree/mdast-util-to-markdown/blob/main/lib/util/association.js>.
    ///
    /// Get an identifier-like string from an association (a label/identifier
    /// pair): prefer the label as-is; otherwise decode character escapes and
    /// character references in the identifier.
    pub fn association(&self, node: &impl Association) -> String {
        // A present label (or an empty identifier) is returned unchanged.
        if node.label().is_some() || node.identifier().is_empty() {
            return node.label().clone().unwrap_or_default();
        }
        // Group 1: a character escape (`\X`); group 2: a character reference
        // (`&name;`, `&#123;`, or `&#x1F;`), case-insensitive.
        // NOTE(review): this regex is rebuilt on every call; `alloc`-only code
        // has no `LazyLock`, but callers in hot paths may want to cache it.
        let character_escape_or_reference =
            RegexBuilder::new(r"\\([!-/:-@\[-`{-~])|&(#(?:\d{1,7}|x[\da-f]{1,6})|[\da-z]{1,31});")
                .case_insensitive(true)
                .build()
                .unwrap();
        character_escape_or_reference
            .replace_all(node.identifier(), Self::decode)
            .into_owned()
    }
/// JS equivalent: <https://github.com/syntax-tree/mdast-util-to-markdown/blob/fd6a508/lib/util/container-flow.js#L66>.
fn between(&self, left: &Node, right: &Node, parent: &Node, results: &mut String) {
if self.options.tight_definitions {
Self::set_between(&self.tight_definition(left, right), results)
} else {
Self::set_between(&self.join_defaults(left, right, parent), results)
}
}
/// JS equivalent: <https://github.com/syntax-tree/mdast-util-to-markdown/blob/fd6a508/lib/util/compile-pattern.js>.
pub fn compile_pattern(pattern: &mut Unsafe) {
if pattern.compiled.is_none() {
let mut pattern_to_compile = String::new();
if let Some(pattern_before) = pattern.before {
pattern_to_compile.push('(');
if pattern.at_break {
pattern_to_compile.push_str("[\\r\\n][\\t ]*");
}
pattern_to_compile.push_str("(?:");
pattern_to_compile.push_str(pattern_before);
pattern_to_compile.push(')');
pattern_to_compile.push(')');
} else if pattern.at_break {
pattern_to_compile.push('(');
pattern_to_compile.push_str("[\\r\\n][\\t ]*");
pattern_to_compile.push(')');
}
if matches!(
pattern.character,
'|' | '\\'
| '{'
| '}'
| '('
| ')'
| '['
| ']'
| '^'
| '$'
| '+'
| '*'
| '?'
| '.'
| '-'
) {
pattern_to_compile.push('\\');
}
pattern_to_compile.push(pattern.character);
if let Some(pattern_after) = pattern.after {
pattern_to_compile.push_str("(?:");
pattern_to_compile.push_str(pattern_after);
pattern_to_compile.push(')');
}
pattern.set_compiled(
Regex::new(&pattern_to_compile).expect("A valid unsafe regex pattern"),
);
}
}
/// JS equivalent: <https://github.com/syntax-tree/mdast-util-to-markdown/blob/main/lib/util/container-flow.js>.
pub fn container_flow(&mut self, parent: &Node) -> Result<String, Message> {
let children = parent.children().expect("The node to be a flow parent.");
if children.is_empty() {
return Ok(String::new());
}
let mut results: String = String::new();
let mut children_iter = children.iter().peekable();
let mut index = 0;
self.index_stack.push(0);
while let Some(child) = children_iter.next() {
if index > 0 {
let top = self
.index_stack
.last_mut()
.expect("The stack is populated with at least one child position");
*top = index;
}
if !matches!(child, Node::List(_)) {
self.bullet_last_used = None;
}
results.push_str(&self.handle(child, &Info::new("\n", "\n"), Some(parent))?);
if let Some(next_child) = children_iter.peek() {
self.between(child, next_child, parent, &mut results);
}
index += 1;
}
self.index_stack.pop();
Ok(results)
}
/// JS equivalent: <https://github.com/syntax-tree/mdast-util-to-markdown/blob/main/lib/util/container-phrasing.js>.
pub fn container_phrasing(&mut self, parent: &Node, info: &Info) -> Result<String, Message> {
let children = parent
.children()
.expect("The node to be a phrasing parent.");
if children.is_empty() {
return Ok(String::new());
}
let mut results: String = String::new();
let mut index = 0;
let mut children_iter = children.iter().peekable();
self.index_stack.push(0);
while let Some(child) = children_iter.next() {
if index > 0 {
let top = self
.index_stack
.last_mut()
.expect("The stack is populated with at least one child position");
*top = index;
}
let mut new_info = Info::new(info.before, info.after);
let mut buffer = [0u8; 4];
if let Some(child) = children_iter.peek() {
if let Some(first_char) = self.peek_node(child) {
new_info.after = first_char.encode_utf8(&mut buffer);
} else {
new_info.after = self
.handle(child, &Info::new("", ""), Some(parent))?
.chars()
.nth(0)
.unwrap_or_default()
.encode_utf8(&mut buffer);
}
}
// In some cases, html (text) can be found in phrasing right after an eol.
// When we’d serialize that, in most cases that would be seen as html
// (flow).
// As we can’t escape or so to prevent it from happening, we take a somewhat
// reasonable approach: replace that eol with a space.
// See: <https://github.com/syntax-tree/mdast-util-to-markdown/issues/15>
if !results.is_empty() {
if info.before == "\r" || info.before == "\n" && matches!(child, Node::Html(_)) {
// TODO Remove this check here it might not be needed since we're
// checking for the before info.
if results.ends_with('\n') || results.ends_with('\r') {
results.pop();
if results.ends_with('\r') {
results.pop();
}
}
results.push(' ');
new_info.before = " ";
} else {
new_info.before = &results;
}
}
results.push_str(&self.handle(child, &new_info, Some(parent))?);
index += 1;
}
self.index_stack.pop();
Ok(results)
}
/// JS equvialent: <https://github.com/micromark/micromark/blob/main/packages/micromark-util-decode-string/dev/index.js>.
fn decode(caps: &Captures) -> String {
if let Some(first_cap) = caps.get(1) {
return String::from(first_cap.as_str());
}
if let Some(head) = &caps[2].chars().nth(0) {
if *head == '#' {
let radix = match caps[2].chars().nth(1) {
Some('x') | Some('X') => 16,
_ => 10,
};
let capture = &caps[2];
let numeric_encoded = if radix == 16 {
&capture[2..]
} else {
&capture[1..]
};
return markdown::decode_numeric(numeric_encoded, radix);
}
}
markdown::decode_named(&caps[2], true).unwrap_or(caps[0].to_string())
}
/// No real JS equivalent, it’s written inline.
fn encode_char(character: char) -> String {
let hex_code = u32::from(character);
format!("&#x{:X};", hex_code)
}
    /// Enter a construct: push it onto the construct stack (see `Self::exit`).
    pub fn enter(&mut self, name: ConstructName) {
        self.stack.push(name);
    }
    /// Exit the most recently entered construct.
    pub fn exit(&mut self) {
        self.stack.pop();
    }
    /// No JS equivalent.
    ///
    /// Serialize one `node` by dispatching to its construct handler.
    ///
    /// `info` carries the surrounding context and `parent` the enclosing
    /// node; node types without a handler here produce an error message.
    pub fn handle(
        &mut self,
        node: &Node,
        info: &Info,
        parent: Option<&Node>,
    ) -> Result<String, Message> {
        match node {
            Node::Break(r#break) => r#break.handle(self, info, parent, node),
            Node::Blockquote(block_quote) => block_quote.handle(self, info, parent, node),
            Node::Code(code) => code.handle(self, info, parent, node),
            Node::Definition(definition) => definition.handle(self, info, parent, node),
            Node::Emphasis(emphasis) => emphasis.handle(self, info, parent, node),
            Node::Heading(heading) => heading.handle(self, info, parent, node),
            Node::Html(html) => html.handle(self, info, parent, node),
            Node::ImageReference(image_reference) => {
                image_reference.handle(self, info, parent, node)
            }
            Node::Image(image) => image.handle(self, info, parent, node),
            Node::InlineCode(inline_code) => inline_code.handle(self, info, parent, node),
            Node::LinkReference(link_reference) => link_reference.handle(self, info, parent, node),
            Node::Link(link) => link.handle(self, info, parent, node),
            Node::ListItem(list_item) => list_item.handle(self, info, parent, node),
            Node::List(list) => list.handle(self, info, parent, node),
            Node::Paragraph(paragraph) => paragraph.handle(self, info, parent, node),
            Node::Root(root) => root.handle(self, info, parent, node),
            Node::Strong(strong) => strong.handle(self, info, parent, node),
            Node::Text(text) => text.handle(self, info, parent, node),
            Node::ThematicBreak(thematic_break) => thematic_break.handle(self, info, parent, node),
            Node::Math(math) => math.handle(self, info, parent, node),
            Node::InlineMath(inline_math) => inline_math.handle(self, info, parent, node),
            // Anything else (e.g. extension nodes with no handler here) is
            // reported rather than silently dropped.
            _ => Err(Message {
                place: None,
                reason: format!("Unexpected node type `{:?}`", node),
                rule_id: Box::new("unexpected-node".into()),
                source: Box::new("mdast-util-to-markdown".into()),
            }),
        }
    }
/// JS equivalent: <https://github.com/syntax-tree/mdast-util-to-markdown/blob/main/lib/util/indent-lines.js>.
pub fn indent_lines(&self, value: &str, map: impl Fn(&str, usize, bool) -> String) -> String {
let mut result = String::new();
let mut start = 0;
let mut line = 0;
let eol = Regex::new(r"\r?\n|\r").unwrap();
for m in eol.captures_iter(value) {
let full_match = m.get(0).unwrap();
let value_slice = &value[start..full_match.start()];
result.push_str(&map(value_slice, line, value_slice.is_empty()));
result.push_str(full_match.as_str());
start = full_match.start() + full_match.len();
line += 1;
}
result.push_str(&map(&value[start..], line, value.is_empty()));
result
}
    /// No real JS equivalent, but see:
    /// <https://github.com/syntax-tree/mdast-util-to-markdown/blob/main/lib/join.js>.
    ///
    /// Decide how two adjacent flow nodes are joined by default.
    fn join_defaults(&self, left: &Node, right: &Node, parent: &Node) -> Join {
        // Indented code after a list, or after other indented code, gets an
        // HTML comment separator so the two are kept apart.
        // Note: the left-is-code check is intentionally nested inside
        // right-is-code — both sides must be (indented) code for it to apply.
        if let Node::Code(code) = right {
            if format_code_as_indented(code, self) && matches!(left, Node::List(_)) {
                return Join::HtmlComment;
            }
            if let Node::Code(code) = left {
                if format_code_as_indented(code, self) {
                    return Join::HtmlComment;
                }
            }
        }
        // Inside lists / list items, spreading decides the blank lines.
        if matches!(parent, Node::ListItem(_) | Node::List(_)) {
            // A paragraph followed by a paragraph, definition, or setext
            // heading always gets a blank line.
            if matches!(left, Node::Paragraph(_)) {
                if matches!(right, Node::Paragraph(_)) {
                    return Join::Break;
                }
                if matches!(right, Node::Definition(_)) {
                    return Join::Break;
                }
                if let Node::Heading(heading) = right {
                    if format_heading_as_setext(heading, self) {
                        return Join::Break;
                    }
                }
            }
            let spread = if let Node::List(list) = parent {
                list.spread
            } else if let Node::ListItem(list_item) = parent {
                list_item.spread
            } else {
                false
            };
            // Spread (loose) lists use one blank line; tight lists use none.
            if spread {
                return Join::Lines(1);
            }
            return Join::Lines(0);
        }
        Join::Break
    }
/// No JS equivalent.
pub fn new(options: &'a Options) -> Self {
State {
bullet_current: None,
bullet_last_used: None,
index_stack: Vec::new(),
options,
stack: Vec::new(),
r#unsafe: Unsafe::get_default_unsafe(options),
}
}
    /// No JS equivalent.
    ///
    /// Get the first character a node will serialize to, when that is cheaply
    /// known without serializing the node ("peeking"); `None` otherwise (the
    /// caller then falls back to a full serialization — see
    /// `container_phrasing`).
    fn peek_node(&self, node: &Node) -> Option<char> {
        match node {
            Node::Emphasis(_) => Some(peek_emphasis(self)),
            Node::Html(_) => Some(peek_html()),
            Node::ImageReference(_) => Some(peek_image_reference()),
            Node::Image(_) => Some(peek_image()),
            Node::InlineCode(_) => Some(peek_inline_code()),
            Node::LinkReference(_) => Some(peek_link_reference()),
            Node::Link(link) => Some(peek_link(link, node, self)),
            Node::Strong(_) => Some(peek_strong(self)),
            Node::InlineMath(_) => Some(peek_inline_math()),
            _ => None,
        }
    }
    /// JS equivalent: <https://github.com/syntax-tree/mdast-util-to-markdown/blob/main/lib/util/safe.js>.
    ///
    /// Make `input` safe for the current position: escape (with `\`) or
    /// encode (as `&#x…;`) every character that an in-scope unsafe pattern
    /// flags, given the text in `config.before` / `config.after` around it.
    pub fn safe(&mut self, input: &str, config: &SafeConfig) -> String {
        // Patterns need surrounding context, so match against the padded value.
        let value = format!("{}{}{}", config.before, input, config.after);
        let mut positions: Vec<usize> = Vec::new();
        let mut result: String = String::new();
        let mut infos: BTreeMap<usize, EscapeInfos> = BTreeMap::new();
        // Phase 1: collect every position (in `value`) that some in-scope
        // pattern wants escaped, remembering whether the match depended on
        // context before and/or after the character.
        for pattern in &mut self.r#unsafe {
            if !pattern_in_scope(&self.stack, pattern) {
                continue;
            }
            Self::compile_pattern(pattern);
            if let Some(regex) = &pattern.compiled {
                for m in regex.captures_iter(&value) {
                    let full_match = m.get(0).expect("Guaranteed to have a match");
                    // Length of the leading context group, if any.
                    let captured_group_len = m
                        .get(1)
                        .map(|captured_group| captured_group.len())
                        .unwrap_or(0);
                    let before = pattern.before.is_some() || pattern.at_break;
                    let after = pattern.after.is_some();
                    // Skip past the context group to the character itself.
                    let position = full_match.start() + if before { captured_group_len } else { 0 };
                    if positions.contains(&position) {
                        // Merge: context-dependence only survives when every
                        // pattern at this position agrees on it.
                        if let Some(entry) = infos.get_mut(&position) {
                            if entry.before && !before {
                                entry.before = false;
                            }
                            if entry.after && !after {
                                entry.after = false;
                            }
                        }
                    } else {
                        infos.insert(position, EscapeInfos { after, before });
                        positions.push(position);
                    }
                }
            }
        }
        positions.sort_unstable();
        // Phase 2: copy the user `input` (skipping the padding), inserting an
        // escape/encoding at each collected position.
        let mut start = config.before.len();
        let end = value.len() - config.after.len();
        for (index, position) in positions.iter().enumerate() {
            // Positions inside the padding belong to the caller's context.
            if *position < start || *position >= end {
                continue;
            }
            // If this character is supposed to be escaped because it has a condition on
            // the next character, and the next character is definitly being escaped,
            // then skip this escape.
            // This will never panic because the bounds are properly checked, and we
            // guarantee that the positions are already keys in the `infos` map before this
            // point in execution.
            if index + 1 < positions.len()
                && position + 1 < end
                && positions[index + 1] == position + 1
                && infos[position].after
                && !infos[&(position + 1)].before
                && !infos[&(position + 1)].after
                || index > 0
                    && positions[index - 1] == position - 1
                    && infos[position].before
                    && !infos[&(position - 1)].before
                    && !infos[&(position - 1)].after
            {
                continue;
            }
            // Copy (backslash-escaped) everything since the previous escape.
            if start != *position {
                result.push_str(&escape_backslashes(&value[start..*position], r"\"));
            }
            start = *position;
            // NOTE(review): `position` is a byte offset but `chars().nth`
            // counts characters — these only agree while the prefix is ASCII;
            // see the UTF-8 boundary fix-up below (GH-170). Confirm intended.
            let char_at_pos = value.chars().nth(*position);
            match char_at_pos {
                // ASCII punctuation can be backslash-escaped…
                Some('!'..='/') | Some(':'..='@') | Some('['..='`') | Some('{'..='~') => {
                    if let Some(encode) = &config.encode {
                        let character = char_at_pos.expect("To be a valid char");
                        // …unless the config singles it out for encoding.
                        if *encode != character {
                            result.push('\\');
                        } else {
                            let encoded_char = Self::encode_char(character);
                            result.push_str(&encoded_char);
                            start += character.len_utf8();
                        }
                    } else {
                        result.push('\\');
                    }
                }
                // Anything else must be encoded (a backslash would be literal).
                Some(character) => {
                    let encoded_char = Self::encode_char(character);
                    result.push_str(&encoded_char);
                    start += character.len_utf8();
                }
                _ => (),
            };
        }
        // Some of the operations above seem to end up right in a utf8 boundary
        // (see GH-170 for more info).
        // Move back.
        while !value.is_char_boundary(start) {
            start -= 1;
        }
        result.push_str(&escape_backslashes(&value[start..end], config.after));
        result
    }
/// No real JS equivalent, but see:
/// <https://github.com/syntax-tree/mdast-util-to-markdown/blob/fd6a508/lib/util/container-flow.js#L66>.
fn set_between(join: &Join, results: &mut String) {
if let Join::Break = join {
results.push_str("\n\n");
} else if let Join::Lines(n) = join {
if *n == 1 {
results.push_str("\n\n");
return;
}
results.push_str("\n".repeat(1 + n).as_ref());
} else if let Join::HtmlComment = join {
results.push_str("\n\n<!---->\n\n");
}
}
/// No real JS equivalent, but see:
/// <https://github.com/syntax-tree/mdast-util-to-markdown/blob/fd6a508/lib/util/container-flow.js#L66>.
fn tight_definition(&self, left: &Node, right: &Node) -> Join {
if matches!(left, Node::Definition(_)) && matches!(right, Node::Definition(_)) {
return Join::Lines(0);
}
Join::Break
}
}
| wooorm/markdown-rs | 1,459 | CommonMark compliant markdown parser in Rust with ASTs and extensions | Rust | wooorm | Titus | |
mdast_util_to_markdown/src/unsafe.rs | Rust | //! Unsafe patterns.
//!
//! JS equivalent: <https://github.com/syntax-tree/mdast-util-to-markdown/blob/main/lib/unsafe.js>.
//! Also: <https://github.com/syntax-tree/mdast-util-to-markdown/blob/fd6a508/lib/types.js#L287-L305>.
use crate::{construct_name::ConstructName, Options};
use alloc::{vec, vec::Vec};
use regex::Regex;
#[derive(Default)]
/// A character that is unsafe in certain contexts and the conditions under
/// which it must be escaped.
pub struct Unsafe<'a> {
    /// Regex source fragment the character must be followed by, if any.
    pub after: Option<&'a str>,
    /// Whether the pattern only applies after a line ending (plus optional
    /// whitespace) — see `State::compile_pattern`.
    pub at_break: bool,
    /// Regex source fragment the character must be preceded by, if any.
    pub before: Option<&'a str>,
    /// The unsafe character itself.
    pub character: char,
    /// Lazily compiled regex built from the fields above.
    pub(crate) compiled: Option<Regex>,
    /// Constructs the pattern applies in (empty means all).
    pub in_construct: Vec<ConstructName>,
    /// Constructs the pattern does not apply in.
    pub not_in_construct: Vec<ConstructName>,
}
impl<'a> Unsafe<'a> {
    /// Create a new, not-yet-compiled unsafe pattern.
    ///
    /// `before` / `after` are regex source fragments for required context;
    /// `in_construct` / `not_in_construct` scope where the pattern applies;
    /// `at_break` restricts it to positions after a line ending.
    pub fn new(
        character: char,
        before: Option<&'a str>,
        after: Option<&'a str>,
        in_construct: Vec<ConstructName>,
        not_in_construct: Vec<ConstructName>,
        at_break: bool,
    ) -> Self {
        Unsafe {
            after,
            at_break,
            before,
            character,
            compiled: None,
            in_construct,
            not_in_construct,
        }
    }
    /// Get the default unsafe patterns.
    ///
    /// Argument order below is
    /// `Unsafe::new(character, before, after, in_construct, not_in_construct, at_break)`.
    pub fn get_default_unsafe(options: &Options) -> Vec<Self> {
        // Constructs that are complete (atomic) phrasing spans; used as
        // `not_in_construct` exclusions for phrasing-scoped patterns.
        let full_phrasing_spans = vec![
            ConstructName::Autolink,
            ConstructName::DestinationLiteral,
            ConstructName::DestinationRaw,
            ConstructName::Reference,
            ConstructName::TitleApostrophe,
            ConstructName::TitleQuote,
        ];
        vec![
            // Whitespace next to line endings, and in fence info strings.
            Self::new(
                '\t',
                None,
                "[\\r\\n]".into(),
                vec![ConstructName::Phrasing],
                vec![],
                false,
            ),
            Self::new(
                '\t',
                "[\\r\\n]".into(),
                None,
                vec![ConstructName::Phrasing],
                vec![],
                false,
            ),
            Self::new(
                '\t',
                None,
                None,
                vec![
                    ConstructName::CodeFencedLangGraveAccent,
                    ConstructName::CodeFencedLangTilde,
                ],
                vec![],
                false,
            ),
            // Line endings inside single-line constructs.
            Self::new(
                '\r',
                None,
                None,
                vec![
                    ConstructName::CodeFencedLangGraveAccent,
                    ConstructName::CodeFencedLangTilde,
                    ConstructName::CodeFencedMetaGraveAccent,
                    ConstructName::CodeFencedMetaTilde,
                    ConstructName::DestinationLiteral,
                    ConstructName::HeadingAtx,
                    ConstructName::MathFlowMeta,
                ],
                vec![],
                false,
            ),
            Self::new(
                '\n',
                None,
                None,
                vec![
                    ConstructName::CodeFencedLangGraveAccent,
                    ConstructName::CodeFencedLangTilde,
                    ConstructName::CodeFencedMetaGraveAccent,
                    ConstructName::CodeFencedMetaTilde,
                    ConstructName::DestinationLiteral,
                    ConstructName::HeadingAtx,
                    ConstructName::MathFlowMeta,
                ],
                vec![],
                false,
            ),
            Self::new(
                ' ',
                None,
                "[\\r\\n]".into(),
                vec![ConstructName::Phrasing],
                vec![],
                false,
            ),
            Self::new(
                ' ',
                "[\\r\\n]".into(),
                None,
                vec![ConstructName::Phrasing],
                vec![],
                false,
            ),
            Self::new(
                ' ',
                None,
                None,
                vec![
                    ConstructName::CodeFencedLangGraveAccent,
                    ConstructName::CodeFencedLangTilde,
                ],
                vec![],
                false,
            ),
            // Punctuation that starts or delimits constructs.
            Self::new(
                '!',
                None,
                "\\[".into(),
                vec![ConstructName::Phrasing],
                full_phrasing_spans.clone(),
                false,
            ),
            Self::new(
                '\"',
                None,
                None,
                vec![ConstructName::TitleQuote],
                vec![],
                false,
            ),
            Self::new('#', None, None, vec![], vec![], true),
            Self::new(
                '#',
                None,
                "(?:[\r\n]|$)".into(),
                vec![ConstructName::HeadingAtx],
                vec![],
                false,
            ),
            Self::new(
                '&',
                None,
                "[#A-Za-z]".into(),
                vec![ConstructName::Phrasing],
                vec![],
                false,
            ),
            Self::new(
                '\'',
                None,
                None,
                vec![ConstructName::TitleApostrophe],
                vec![],
                false,
            ),
            Self::new(
                '(',
                None,
                None,
                vec![ConstructName::DestinationRaw],
                vec![],
                false,
            ),
            Self::new(
                '(',
                "\\]".into(),
                None,
                vec![ConstructName::Phrasing],
                full_phrasing_spans.clone(),
                false,
            ),
            Self::new(')', "\\d+".into(), None, vec![], vec![], true),
            Self::new(
                ')',
                None,
                None,
                vec![ConstructName::DestinationRaw],
                vec![],
                false,
            ),
            // List markers / emphasis at a break.
            Self::new('*', None, "(?:[ \t\r\n*])".into(), vec![], vec![], true),
            Self::new(
                '*',
                None,
                None,
                vec![ConstructName::Phrasing],
                full_phrasing_spans.clone(),
                false,
            ),
            Self::new('+', None, "(?:[ \t\r\n])".into(), vec![], vec![], true),
            Self::new('-', None, "(?:[ \t\r\n-])".into(), vec![], vec![], true),
            Self::new(
                '.',
                "\\d+".into(),
                "(?:[ \t\r\n]|$)".into(),
                vec![],
                vec![],
                true,
            ),
            // Angle brackets (html / autolinks / destinations).
            Self::new('<', None, "[!/?A-Za-z]".into(), vec![], vec![], true),
            Self::new(
                '<',
                None,
                "[!/?A-Za-z]".into(),
                vec![ConstructName::Phrasing],
                full_phrasing_spans.clone(),
                false,
            ),
            Self::new(
                '<',
                None,
                None,
                vec![ConstructName::DestinationLiteral],
                vec![],
                false,
            ),
            Self::new('=', None, None, vec![], vec![], true),
            Self::new('>', None, None, vec![], vec![], true),
            Self::new(
                '>',
                None,
                None,
                vec![ConstructName::DestinationLiteral],
                vec![],
                false,
            ),
            // Brackets (labels / references).
            Self::new('[', None, None, vec![], vec![], true),
            Self::new(
                '[',
                None,
                None,
                vec![ConstructName::Phrasing],
                full_phrasing_spans.clone(),
                false,
            ),
            Self::new(
                '[',
                None,
                None,
                vec![ConstructName::Label, ConstructName::Reference],
                vec![],
                false,
            ),
            Self::new(
                '\\',
                None,
                "[\\r\\n]".into(),
                vec![ConstructName::Phrasing],
                vec![],
                false,
            ),
            Self::new(
                ']',
                None,
                None,
                vec![ConstructName::Label, ConstructName::Reference],
                vec![],
                false,
            ),
            Self::new('_', None, None, vec![], vec![], true),
            Self::new(
                '_',
                None,
                None,
                vec![ConstructName::Phrasing],
                full_phrasing_spans.clone(),
                false,
            ),
            // Grave accents (code) and tildes.
            Self::new('`', None, None, vec![], vec![], true),
            Self::new(
                '`',
                None,
                None,
                vec![
                    ConstructName::CodeFencedLangGraveAccent,
                    ConstructName::CodeFencedMetaGraveAccent,
                ],
                vec![],
                false,
            ),
            Self::new(
                '`',
                None,
                None,
                vec![ConstructName::Phrasing],
                full_phrasing_spans.clone(),
                false,
            ),
            Self::new('~', None, None, vec![], vec![], true),
            // Dollar signs (math); single-dollar math widens the rule.
            Self::new(
                '$',
                None,
                if options.single_dollar_text_math {
                    None
                } else {
                    "\\$".into()
                },
                vec![ConstructName::Phrasing],
                vec![],
                false,
            ),
            Self::new(
                '$',
                None,
                None,
                vec![ConstructName::MathFlowMeta],
                vec![],
                false,
            ),
            Self::new('$', None, "\\$".into(), vec![], vec![], true),
        ]
    }
    /// Store the compiled regex for this pattern (see `State::compile_pattern`).
    pub(crate) fn set_compiled(&mut self, regex_pattern: Regex) {
        self.compiled = Some(regex_pattern);
    }
}
| wooorm/markdown-rs | 1,459 | CommonMark compliant markdown parser in Rust with ASTs and extensions | Rust | wooorm | Titus | |
mdast_util_to_markdown/src/util/check_bullet.rs | Rust | //! JS equivalent https://github.com/syntax-tree/mdast-util-to-markdown/blob/main/lib/util/check-bullet.js
use crate::state::State;
use alloc::{boxed::Box, format};
use markdown::message::Message;
/// Validate `options.bullet`, returning the marker or an error for anything
/// other than `*`, `+`, or `-`.
pub fn check_bullet(state: &mut State) -> Result<char, Message> {
    let marker = state.options.bullet;
    if matches!(marker, '*' | '+' | '-') {
        Ok(marker)
    } else {
        Err(Message {
            place: None,
            reason: format!(
                "Cannot serialize items with `{}` for `options.bullet`, expected `*`, `+`, or `-`",
                marker
            ),
            rule_id: Box::new("unexpected-marker".into()),
            source: Box::new("mdast-util-to-markdown".into()),
        })
    }
}
| wooorm/markdown-rs | 1,459 | CommonMark compliant markdown parser in Rust with ASTs and extensions | Rust | wooorm | Titus | |
mdast_util_to_markdown/src/util/check_bullet_ordered.rs | Rust | //! JS equivalent https://github.com/syntax-tree/mdast-util-to-markdown/blob/main/lib/util/check-bullet-ordered.js
use crate::state::State;
use alloc::{boxed::Box, format};
use markdown::message::Message;
/// Validate `options.bullet_ordered`, returning the marker or an error for
/// anything other than `.` or `)`.
pub fn check_bullet_ordered(state: &mut State) -> Result<char, Message> {
    let marker = state.options.bullet_ordered;
    if matches!(marker, '.' | ')') {
        Ok(marker)
    } else {
        Err(Message {
            place: None,
            reason: format!(
                "Cannot serialize items with `{}` for `options.bullet_ordered`, expected `.` or `)`",
                marker
            ),
            rule_id: Box::new("unexpected-marker".into()),
            source: Box::new("mdast-util-to-markdown".into()),
        })
    }
}
| wooorm/markdown-rs | 1,459 | CommonMark compliant markdown parser in Rust with ASTs and extensions | Rust | wooorm | Titus | |
mdast_util_to_markdown/src/util/check_bullet_other.rs | Rust | //! JS equivalent https://github.com/syntax-tree/mdast-util-to-markdown/blob/main/lib/util/check-bullet-other.js
use super::check_bullet::check_bullet;
use crate::state::State;
use alloc::{boxed::Box, format};
use markdown::message::Message;
/// Validate `options.bullet_other`, the alternative unordered list marker,
/// which must be a valid marker different from the primary bullet.
pub fn check_bullet_other(state: &mut State) -> Result<char, Message> {
    let bullet = check_bullet(state)?;
    // When the primary bullet is not `*`, the alternative is forced to `*`.
    let bullet_other = if bullet == '*' {
        state.options.bullet_other
    } else {
        '*'
    };
    if !matches!(bullet_other, '*' | '+' | '-') {
        return Err(Message {
            place: None,
            reason: format!(
                "Cannot serialize items with `{}` for `options.bullet_other`, expected `*`, `+`, or `-`",
                bullet_other
            ),
            rule_id: Box::new("unexpected-marker".into()),
            source: Box::new("mdast-util-to-markdown".into()),
        });
    }
    if bullet_other == bullet {
        return Err(Message {
            place: None,
            reason: format!(
                "Expected `bullet` (`{}`) and `bullet_other` (`{}`) to be different",
                bullet, bullet_other
            ),
            rule_id: Box::new("bullet-match-bullet_other".into()),
            source: Box::new("mdast-util-to-markdown".into()),
        });
    }
    Ok(bullet_other)
}
| wooorm/markdown-rs | 1,459 | CommonMark compliant markdown parser in Rust with ASTs and extensions | Rust | wooorm | Titus | |
mdast_util_to_markdown/src/util/check_emphasis.rs | Rust | //! JS equivalent https://github.com/syntax-tree/mdast-util-to-markdown/blob/main/lib/util/check-emphasis.js
use crate::state::State;
use alloc::{boxed::Box, format};
use markdown::message::Message;
/// Validate `options.emphasis`, returning the marker or an error for
/// anything other than `*` or `_`.
pub fn check_emphasis(state: &State) -> Result<char, Message> {
    let marker = state.options.emphasis;
    if matches!(marker, '*' | '_') {
        Ok(marker)
    } else {
        Err(Message {
            place: None,
            reason: format!(
                "Cannot serialize emphasis with `{}` for `options.emphasis`, expected `*`, or `_`",
                marker
            ),
            rule_id: Box::new("unexpected-marker".into()),
            source: Box::new("mdast-util-to-markdown".into()),
        })
    }
}
| wooorm/markdown-rs | 1,459 | CommonMark compliant markdown parser in Rust with ASTs and extensions | Rust | wooorm | Titus | |
mdast_util_to_markdown/src/util/check_fence.rs | Rust | //! JS equivalent https://github.com/syntax-tree/mdast-util-to-markdown/blob/main/lib/util/check-fence.js
use crate::state::State;
use alloc::{boxed::Box, format};
use markdown::message::Message;
/// Validate `options.fence`, returning the marker or an error for anything
/// other than `` ` `` or `~`.
pub fn check_fence(state: &mut State) -> Result<char, Message> {
    let marker = state.options.fence;
    if matches!(marker, '`' | '~') {
        Ok(marker)
    } else {
        Err(Message {
            place: None,
            reason: format!(
                "Cannot serialize code with `{}` for `options.fence`, expected `` ` `` or `~`",
                marker
            ),
            rule_id: Box::new("unexpected-marker".into()),
            source: Box::new("mdast-util-to-markdown".into()),
        })
    }
}
| wooorm/markdown-rs | 1,459 | CommonMark compliant markdown parser in Rust with ASTs and extensions | Rust | wooorm | Titus | |
mdast_util_to_markdown/src/util/check_quote.rs | Rust | //! JS equivalent https://github.com/syntax-tree/mdast-util-to-markdown/blob/main/lib/util/check-quote.js
use crate::state::State;
use alloc::{boxed::Box, format};
use markdown::message::Message;
/// Validate `options.quote`, returning the marker or an error for anything
/// other than `"` or `'`.
pub fn check_quote(state: &State) -> Result<char, Message> {
    let marker = state.options.quote;
    if matches!(marker, '"' | '\'') {
        Ok(marker)
    } else {
        Err(Message {
            place: None,
            reason: format!(
                "Cannot serialize title with `{}` for `options.quote`, expected `\"`, or `'`",
                marker
            ),
            rule_id: Box::new("unexpected-marker".into()),
            source: Box::new("mdast-util-to-markdown".into()),
        })
    }
}
| wooorm/markdown-rs | 1,459 | CommonMark compliant markdown parser in Rust with ASTs and extensions | Rust | wooorm | Titus | |
mdast_util_to_markdown/src/util/check_rule.rs | Rust | //! JS equivalent https://github.com/syntax-tree/mdast-util-to-markdown/blob/main/lib/util/check-rule.js
use crate::state::State;
use alloc::{boxed::Box, format};
use markdown::message::Message;
/// Validate `options.rule`, returning the marker or an error for anything
/// other than `*`, `-`, or `_`.
pub fn check_rule(state: &State) -> Result<char, Message> {
    let marker = state.options.rule;
    if matches!(marker, '*' | '-' | '_') {
        Ok(marker)
    } else {
        Err(Message {
            place: None,
            reason: format!(
                "Cannot serialize rules with `{}` for `options.rule`, expected `*`, `-`, or `_`",
                marker
            ),
            rule_id: Box::new("unexpected-marker".into()),
            source: Box::new("mdast-util-to-markdown".into()),
        })
    }
}
| wooorm/markdown-rs | 1,459 | CommonMark compliant markdown parser in Rust with ASTs and extensions | Rust | wooorm | Titus | |
mdast_util_to_markdown/src/util/check_rule_repetition.rs | Rust | //! JS equivalent https://github.com/syntax-tree/mdast-util-to-markdown/blob/main/lib/util/check-rule-repetition.js
use crate::state::State;
use alloc::{boxed::Box, format};
use markdown::message::Message;
/// Validate `options.rule_repetition`, which must be at least `3`.
pub fn check_rule_repetition(state: &State) -> Result<u32, Message> {
    let repetition = state.options.rule_repetition;
    if repetition >= 3 {
        Ok(repetition)
    } else {
        Err(Message {
            place: None,
            reason: format!(
                "Cannot serialize rules with repetition `{}` for `options.rule_repetition`, expected `3` or more",
                repetition
            ),
            rule_id: Box::new("unexpected-marker".into()),
            source: Box::new("mdast-util-to-markdown".into()),
        })
    }
}
| wooorm/markdown-rs | 1,459 | CommonMark compliant markdown parser in Rust with ASTs and extensions | Rust | wooorm | Titus | |
mdast_util_to_markdown/src/util/check_strong.rs | Rust | //! JS equivalent https://github.com/syntax-tree/mdast-util-to-markdown/blob/main/lib/util/check-strong.js
use crate::state::State;
use alloc::{boxed::Box, format};
use markdown::message::Message;
/// Validate `options.strong`, returning the marker or an error for anything
/// other than `*` or `_`.
pub fn check_strong(state: &State) -> Result<char, Message> {
    let marker = state.options.strong;
    if matches!(marker, '*' | '_') {
        Ok(marker)
    } else {
        Err(Message {
            place: None,
            reason: format!(
                "Cannot serialize strong with `{}` for `options.strong`, expected `*`, or `_`",
                marker
            ),
            rule_id: Box::new("unexpected-marker".into()),
            source: Box::new("mdast-util-to-markdown".into()),
        })
    }
}
| wooorm/markdown-rs | 1,459 | CommonMark compliant markdown parser in Rust with ASTs and extensions | Rust | wooorm | Titus |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.