repo stringlengths 6 65 | file_url stringlengths 81 311 | file_path stringlengths 6 227 | content stringlengths 0 32.8k | language stringclasses 1
value | license stringclasses 7
values | commit_sha stringlengths 40 40 | retrieved_at stringdate 2026-01-04 15:31:58 2026-01-04 20:25:31 | truncated bool 2
classes |
|---|---|---|---|---|---|---|---|---|
web-infra-dev/mdx-rs | https://github.com/web-infra-dev/mdx-rs/blob/04633f3cb2d8062e2578ea02213db42c2e4a5952/tasks/benchmark/src/main.rs | tasks/benchmark/src/main.rs | extern crate criterion;
extern crate mdx_rs;
extern crate pico_args;
use criterion::{BenchmarkId, Criterion, Throughput};
use mdx_rs::compile;
use pico_args::Arguments;
use std::fs::File;
use std::io::prelude::*;
/// Benchmark entry point: compiles a fixture markdown file with `mdx_rs::compile`
/// and reports byte throughput via criterion.
///
/// Supports `--save-baseline <name>` to store the run under a named baseline.
pub fn main() {
  let mut args = Arguments::from_env();
  let baseline: Option<String> = args.opt_value_from_str("--save-baseline").unwrap();
  let mut criterion = Criterion::default().without_plots();
  if let Some(baseline) = baseline {
    // `baseline` is already a String; no extra conversion needed.
    criterion = criterion.save_baseline(baseline);
  }
  // Path to the benchmark fixture.
  let path: &str = "benches/fixtures/example.md";
  // Read the whole fixture up front; throughput is measured in input bytes.
  let contents = std::fs::read_to_string(path).expect("Could not read file");
  let mut group = criterion.benchmark_group("mdx_rs");
  group.throughput(Throughput::Bytes(contents.len() as u64));
  group.bench_with_input(
    BenchmarkId::from_parameter("example.md"),
    &contents,
    |b, source_text| {
      // iter_with_large_drop keeps the (large) compile output out of the timing.
      b.iter_with_large_drop(|| {
        compile(source_text, "", false, "");
      })
    },
  );
  group.finish();
  drop(criterion);
}
| rust | MIT | 04633f3cb2d8062e2578ea02213db42c2e4a5952 | 2026-01-04T20:17:17.706328Z | false |
web-infra-dev/mdx-rs | https://github.com/web-infra-dev/mdx-rs/blob/04633f3cb2d8062e2578ea02213db42c2e4a5952/crates/plugin_html/src/lib.rs | crates/plugin_html/src/lib.rs | //! Author: sanyuan0704
//!
//! This plugin is used to serialize hast to html.
use hast::Node;
/// Render a hast property value as attribute text.
///
/// Only string and boolean values have a textual form here; every other
/// variant (e.g. space-separated lists) serializes to an empty string.
fn display_property_value(value: &hast::PropertyValue) -> String {
  match value {
    hast::PropertyValue::String(s) => s.to_owned(),
    hast::PropertyValue::Boolean(b) => format!("{}", b),
    _ => String::new(),
  }
}
/// Recursively serialize a hast node to an HTML string.
///
/// `script`/`style` elements are dropped entirely, the `className` property
/// is skipped, comments render as nothing, and unknown node kinds render as
/// an empty string.
pub fn mdx_plugin_html_impl(node: &Node) -> String {
  match node {
    Node::Element(element) => {
      // Never emit script/style elements.
      if element.tag_name == "script" || element.tag_name == "style" {
        return String::new();
      }
      // Serialize all properties except className as attributes.
      let attrs: String = element
        .properties
        .iter()
        .filter(|(key, _)| key != "className")
        .map(|(key, value)| format!(" {}=\"{}\"", key, display_property_value(value)))
        .collect();
      // Serialize children depth-first.
      let inner: String = element
        .children
        .iter()
        .map(mdx_plugin_html_impl)
        .collect();
      format!("<{}{}>{}</{}>", element.tag_name, attrs, inner, element.tag_name)
    }
    Node::Text(text) => text.value.clone(),
    Node::Comment(_) => String::new(),
    Node::Root(root) => root.children.iter().map(mdx_plugin_html_impl).collect(),
    _ => String::new(),
  }
}
/// Public entry point: serialize an entire hast tree to HTML.
pub fn mdx_plugin_html(node: &Node) -> String {
  mdx_plugin_html_impl(node)
}
#[cfg(test)]
mod tests {
  use super::*;

  /// A root > div[a="1"] > text tree serializes to plain HTML with the
  /// property rendered as an attribute.
  #[test]
  fn test_serialize_hast_to_html() {
    let text_node = Node::Text(hast::Text {
      value: "hello world".to_string(),
      position: None,
    });
    let div_node = Node::Element(hast::Element {
      tag_name: "div".to_string(),
      properties: vec![(
        "a".to_string(),
        hast::PropertyValue::String("1".to_string()),
      )],
      children: vec![text_node],
      position: None,
    });
    let tree = Node::Root(hast::Root {
      children: vec![div_node],
      position: None,
    });
    assert_eq!(mdx_plugin_html(&tree), "<div a=\"1\">hello world</div>");
  }
}
| rust | MIT | 04633f3cb2d8062e2578ea02213db42c2e4a5952 | 2026-01-04T20:17:17.706328Z | false |
web-infra-dev/mdx-rs | https://github.com/web-infra-dev/mdx-rs/blob/04633f3cb2d8062e2578ea02213db42c2e4a5952/crates/slugger/src/lib.rs | crates/slugger/src/lib.rs | //! You can understand the module as `github-slugger-rs`
//!
//! Author: sanyuan0704
//!
//! Port of <https://github.com/Flet/github-slugger>
use lazy_static::lazy_static;
use regex::Regex;
use std::collections::HashMap;
/// Stateful slugger: remembers every slug it has emitted so that repeated
/// inputs receive unique `-N` suffixes.
pub struct Slugger {
  // Maps an emitted slug to the collision counter kept on its base form.
  occurrences: HashMap<String, i32>,
}
lazy_static! {
  // Character class of everything the slugger strips or translates
  // (punctuation, symbols, control chars, separators, ...); ported from
  // github-slugger's regex.
  static ref REMOVE_RE: Regex = Regex::new(r"[\p{Other_Number}\p{Close_Punctuation}\p{Final_Punctuation}\p{Initial_Punctuation}\p{Open_Punctuation}\p{Other_Punctuation}\p{Dash_Punctuation}\p{Symbol}\p{Control}\p{Private_Use}\p{Format}\p{Unassigned}\p{Separator}]").unwrap();
}
fn normalize_slug(value: &str) -> String {
let s = REMOVE_RE.replace_all(value, |caps: ®ex::Captures| {
let c = caps.get(0).unwrap().as_str();
if c == " " || c == "-" {
"-".to_string()
} else if c.chars().all(|a| a.is_alphabetic()) {
c.to_string()
} else {
"".to_string()
}
});
s.replace(|c: char| c.is_whitespace(), "-")
}
impl Default for Slugger {
fn default() -> Self {
Self::new()
}
}
impl Slugger {
  /**
   * Create a new slugger with no remembered slugs.
   */
  pub fn new() -> Self {
    Slugger {
      occurrences: HashMap::new(),
    }
  }
  /**
   * Generate a unique slug.
   *
   * Tracks previously generated slugs: repeated calls with the same value
   * will result in different slugs (`foo`, `foo-1`, `foo-2`, ...).
   * Use the free `slug` function to get the same slug for the same value.
   */
  pub fn slug(&mut self, value: &str, maintain_case: bool) -> String {
    let mut result = if maintain_case {
      value.to_owned()
    } else {
      value.to_lowercase()
    };
    // Normalize the slug and use it as the base for counting
    result = normalize_slug(&result).to_string();
    let original_slug = result.clone();
    // On collision, bump the counter stored under the *base* slug and retry
    // with a `-N` suffix until the candidate is unused. The first loop check
    // compares `result == original_slug`, so whenever the loop is entered the
    // base entry exists and the unwrap below cannot fail.
    while self.occurrences.contains_key(&result) {
      let count = self.occurrences.get_mut(&original_slug).unwrap();
      *count += 1;
      result = format!("{}-{}", &original_slug, count);
    }
    // Record the final slug (counter 0) so future calls collide against it.
    self.occurrences.insert(result.clone(), 0);
    result
  }
  /**
   * Reset - Forget all previously generated slugs.
   *
   * @return ()
   */
  pub fn reset(&mut self) {
    self.occurrences.clear();
  }
}
/**
 * Generate a slug.
 *
 * Stateless: repeated calls with the same value always produce the exact
 * same slug. Use the `Slugger` struct when unique slugs are required.
 *
 * @param {String} value
 *   String of text to slugify
 * @param {bool} [maintain_case=false]
 *   Keep the current case, otherwise make all lowercase
 * @return {String}
 *   The slug string
 */
pub fn slug(value: &str, maintain_case: bool) -> String {
  let base = if maintain_case {
    value.to_owned()
  } else {
    value.to_lowercase()
  };
  normalize_slug(&base)
}
#[cfg(test)]
mod tests {
  use super::*;

  #[test]
  fn test_slugger() {
    // Repeated identical inputs receive increasing numeric suffixes.
    let mut slugger = Slugger::new();
    for expected in &["hello-world", "hello-world-1", "hello-world-2"] {
      assert_eq!(&slugger.slug("Hello World", false), expected);
    }
  }

  #[test]
  fn test_slugger_maintain_case() {
    // `maintain_case = true` keeps the original capitalization.
    let mut slugger = Slugger::new();
    for expected in &["Hello-World", "Hello-World-1", "Hello-World-2"] {
      assert_eq!(&slugger.slug("Hello World", true), expected);
    }
  }

  #[test]
  fn test_slugger_reset() {
    // After reset, previously produced slugs can be handed out again.
    let mut slugger = Slugger::new();
    assert_eq!(slugger.slug("Hello World", false), "hello-world");
    assert_eq!(slugger.slug("Hello World", false), "hello-world-1");
    slugger.reset();
    assert_eq!(slugger.slug("Hello World", false), "hello-world");
  }

  #[test]
  fn test_slug() {
    // The stateless helper is idempotent across calls.
    assert_eq!(slug("Hello World", false), "hello-world");
    assert_eq!(slug("Hello World", false), "hello-world");
    assert_eq!(slug("`Hello` **World**", false), "hello-world");
    assert_eq!(slug("export 'function'", false), "export-function");
  }

  #[test]
  fn test_slugger_with_similar_inputs() {
    // Inputs normalizing to the same base share one counter; a case-preserved
    // variant that differs ("Inline") starts its own sequence.
    let mut slugger = Slugger::new();
    assert_eq!(slugger.slug("inline", false), "inline");
    assert_eq!(slugger.slug("**inline**", false), "inline-1");
    assert_eq!(slugger.slug("*inline*", false), "inline-2");
    assert_eq!(slugger.slug("Inline", false), "inline-3");
    assert_eq!(slugger.slug("inline", true), "inline-4");
    assert_eq!(slugger.slug("Inline", true), "Inline");
  }
}
| rust | MIT | 04633f3cb2d8062e2578ea02213db42c2e4a5952 | 2026-01-04T20:17:17.706328Z | false |
web-infra-dev/mdx-rs | https://github.com/web-infra-dev/mdx-rs/blob/04633f3cb2d8062e2578ea02213db42c2e4a5952/crates/plugin_highlighter/src/lib.rs | crates/plugin_highlighter/src/lib.rs | //! Author: shulaoda
//!
//! This plugin is used to collect code lang in mdx.
use markdown::mdast::Node;
use std::collections::HashSet;
/// Collect the set of code-fence languages used directly under the root node.
///
/// Duplicates are removed via a `HashSet`, so the returned order is
/// unspecified. Non-root nodes yield an empty list.
pub fn mdx_plugin_highlighter(node: &Node) -> Vec<String> {
  let languages: HashSet<String> = match node {
    Node::Root(root) => root
      .children
      .iter()
      .filter_map(|child| match child {
        Node::Code(code) => code.lang.clone(),
        _ => None,
      })
      .collect(),
    _ => HashSet::new(),
  };
  languages.into_iter().collect()
}
#[cfg(test)]
mod tests {
  use super::*;
  use markdown::mdast;

  /// A single fenced code block tagged `markdown` yields exactly that language.
  #[test]
  fn test_mdx_plugin_highlighter() {
    let fenced = mdast::Node::Code(mdast::Code {
      lang: Some("markdown".into()),
      meta: None,
      value: "".into(),
      position: None,
    });
    let root = mdast::Node::Root(mdast::Root {
      children: vec![fenced],
      position: None,
    });
    assert_eq!(mdx_plugin_highlighter(&root), vec!["markdown".to_string()]);
  }
}
| rust | MIT | 04633f3cb2d8062e2578ea02213db42c2e4a5952 | 2026-01-04T20:17:17.706328Z | false |
web-infra-dev/mdx-rs | https://github.com/web-infra-dev/mdx-rs/blob/04633f3cb2d8062e2578ea02213db42c2e4a5952/crates/plugin_normalize_link/src/lib.rs | crates/plugin_normalize_link/src/lib.rs | #![allow(clippy::format_in_format_args)]
use std::path::Path;
// Links starting with any of these prefixes are returned untouched.
const PROTOCOLS: &[&str] = &["http://", "https://", "mailto:", "tel:", "javascript:", "#"];
// Prefix for generated image import identifiers: image_0, image_1, ...
const TEMP_VARIABLE: &str = "image_";
// Extensions treated as images (relative image links are kept as-is).
const IMAGE_EXTNAMES: &[&str] = &[".png", ".jpg", ".jpeg", ".gif", ".svg", ".webp"];
// Extensions stripped from markdown page links.
const MD_EXTNAMES: &[&str] = &[".md", ".mdx"];
/// Build an `import image_<index> from "<path>";` ESM node for an image.
///
/// Relative sources (starting with `.`) are rewritten through
/// `normalize_link`; any other source is used verbatim.
fn generate_ast_import(index: usize, root: &str, src: &str, filepath: &str) -> hast::MdxjsEsm {
  let import_path = if src.starts_with('.') {
    normalize_link(src, root, filepath)
  } else {
    src.to_string()
  };
  hast::MdxjsEsm {
    value: format!("import {}{} from \"{}\";", TEMP_VARIABLE, index, import_path),
    position: None,
    stops: vec![],
  }
}
/// Normalize a link found in a doc page.
///
/// - Links starting with a known protocol (or `#`) are returned untouched.
/// - Relative links (`./`, `../`) are resolved against the current page's
///   directory (relative to `root`) into absolute, root-relative routes.
/// - `.md`/`.mdx` extensions are stripped; relative image links are kept.
/// - Backslashes are converted to `/` (Windows paths).
fn normalize_link(url: &str, root: &str, filepath: &str) -> String {
  // If url includes following case, return directly:
  // http/https, mailto, tel, javascript, #
  if PROTOCOLS.iter().any(|protocol| url.starts_with(protocol)) {
    return url.to_owned();
  }
  // Keep the original for the warning message below.
  let raw_url = url.to_string();
  // parse extname and remove it
  let mut url = url.to_string();
  let root_path = Path::new(root);
  let file_path = Path::new(filepath);
  // find the extname(before hash)
  // first, find the hash
  let hash_index = url.rfind('#').unwrap_or(url.len());
  // then, find the extname (everything from the last '.' before the hash)
  let extname = match url[..hash_index].rfind('.') {
    Some(index) => url[index..hash_index].to_string().to_lowercase(),
    None => "".to_string(),
  };
  let is_image = IMAGE_EXTNAMES.contains(&extname.as_str());
  let is_md = MD_EXTNAMES.contains(&extname.as_str());
  // Only normalize when the page actually lives under `root`.
  if let Ok(relative_path) = file_path.strip_prefix(root_path) {
    if url.starts_with('.') {
      // If the url is a image and relative path, return directly
      if is_image {
        return url;
      }
      let mut base_dir = relative_path.parent().unwrap();
      if url.starts_with("./") {
        // NOTE(review): replace() removes every "./" occurrence, not only the
        // leading one; a link like "./a/./b" or "./../x" would lose interior
        // "./" sequences too — confirm inputs never contain them.
        url = url.replace("./", "");
      }
      // Walk up one directory per leading "../".
      while url.starts_with("../") {
        // only replace the first ../
        url.replace_range(0..3, "");
        match base_dir.parent() {
          Some(parent) => base_dir = parent,
          None => {
            // Ran out of ancestors: the link escapes the docs root.
            println!(
              "Warning: The link is invalid: {} because the target path is out of the root dir: {}",
              raw_url, root
            );
            break;
          }
        }
      }
      url = base_dir.join(Path::new(&url)).to_str().unwrap().to_string();
    }
    // remove the starting slash
    if url.starts_with('/') {
      url = url[1..].to_string();
    }
    // ensure the url starts with /
    if !url.starts_with('/') {
      url = format!("/{}", url);
    }
    // remove md and mdx extname
    // NOTE(review): replace() removes every occurrence of the extension
    // substring, not just the trailing one — a path segment literally named
    // e.g. "x.md" mid-path would also be altered. Verify against real inputs.
    if !extname.is_empty() && is_md {
      url = url.replace(&extname, "");
    }
  }
  // Replace all the \\ to / in windows
  url.replace('\\', "/")
}
/// Walk the hast tree, normalizing `<a href>` links and rewriting relative
/// `<img src>` values into generated ESM imports (collected in `images`).
///
/// Returns every normalized link encountered, in traversal order.
fn mdx_plugin_normalize_link_impl(
  node: &mut hast::Node,
  root: &str,
  filepath: &str,
  images: &mut Vec<hast::MdxjsEsm>,
) -> Vec<String> {
  let mut links = vec![];
  match node {
    hast::Node::Root(root_node) => {
      for child in root_node.children.iter_mut() {
        links.append(&mut mdx_plugin_normalize_link_impl(
          child, root, filepath, images,
        ));
      }
    }
    hast::Node::Element(element) => {
      if element.tag_name == "a" {
        // Get the href property
        let href = element.properties.iter().find(|(key, _)| key == "href");
        if let Some((_, hast::PropertyValue::String(href))) = href {
          let normalized_link = normalize_link(href, root, filepath);
          links.push(normalized_link.clone());
          // replace the href property (second find needed because the first
          // borrow was immutable)
          element
            .properties
            .iter_mut()
            .find(|(key, _)| key == "href")
            .unwrap()
            .1 = hast::PropertyValue::String(normalized_link);
        }
      }
      if element.tag_name == "img" {
        // Get the src and alt property
        let src = element.properties.iter().find(|(key, _)| key == "src");
        let alt = element.properties.iter().find(|(key, _)| key == "alt");
        // Then we will generate a mdxjsEsm node to import the image and push it into images
        if let Some((_, hast::PropertyValue::String(src))) = src {
          // Absolute or protocol-prefixed sources are left alone.
          // NOTE(review): this early return also skips recursing into this
          // element's children — confirm that is intended for <img>.
          if PROTOCOLS.iter().any(|protocol| src.starts_with(protocol)) || src.starts_with('/') {
            return links;
          }
          let index = images.len();
          images.push(generate_ast_import(index, root, src, filepath));
          // Here we have to transform the element type to MdxJsxElement instead of replace src property
          // because the hast parser will parse the src property as hast::PropertyValue::String
          // and we can't get the original value
          let new_node = hast::Node::MdxJsxElement(hast::MdxJsxElement {
            name: Some("img".to_string()),
            attributes: vec![
              hast::AttributeContent::Property(hast::MdxJsxAttribute {
                name: "src".to_string(),
                // src becomes the generated identifier, e.g. {image_0}.
                value: Some(hast::AttributeValue::Expression(
                  markdown::mdast::AttributeValueExpression {
                    value: format!("{}{}", TEMP_VARIABLE, index),
                    stops: vec![],
                  },
                )),
              }),
              hast::AttributeContent::Property(hast::MdxJsxAttribute {
                name: "alt".to_string(),
                // Preserve a string alt if present; other value kinds become "".
                value: alt.map(|(_, value)| match value {
                  hast::PropertyValue::String(v) => hast::AttributeValue::Literal(v.to_string()),
                  _ => hast::AttributeValue::Literal("".to_string()),
                }),
              }),
            ],
            children: element.children.clone(),
            position: None,
          });
          *node = new_node;
        }
      }
      // Recurse into whatever children the (possibly replaced) node now has.
      if let Some(children) = node.children_mut() {
        for child in children {
          links.append(&mut mdx_plugin_normalize_link_impl(
            child, root, filepath, images,
          ));
        }
      }
    }
    hast::Node::MdxJsxElement(element) => {
      if let Some(name) = &element.name {
        if name != "img" {
          return links;
        }
        // Get the src property
        let src: Option<&mut hast::AttributeContent> =
          element.attributes.iter_mut().find(|attr| match attr {
            hast::AttributeContent::Property(property) => property.name == "src",
            _ => false,
          });
        // Add import statement and replace the src property
        if let Some(hast::AttributeContent::Property(property)) = src {
          if let Some(hast::AttributeValue::Literal(value)) = &mut property.value {
            if PROTOCOLS.iter().any(|protocol| value.starts_with(protocol))
              || value.starts_with('/')
            {
              return links;
            }
            let index = images.len();
            images.push(generate_ast_import(index, root, value, filepath));
            property.value = Some(hast::AttributeValue::Expression(
              markdown::mdast::AttributeValueExpression {
                value: format!("{}{}", TEMP_VARIABLE, index),
                stops: vec![],
              },
            ));
          }
        }
      }
    }
    _ => {}
  }
  links
}
/// Normalize all links in the tree and prepend one ESM import per collected
/// image. Returns the normalized links.
pub fn mdx_plugin_normalize_link(node: &mut hast::Node, root: &str, filepath: &str) -> Vec<String> {
  let mut images: Vec<hast::MdxjsEsm> = Vec::new();
  let links = mdx_plugin_normalize_link_impl(node, root, filepath, &mut images);
  if let hast::Node::Root(root_node) = node {
    // Repeated insert(0, ..) prepends each import, so the final esm nodes
    // end up in the reverse of collection order (import order is irrelevant).
    for image in images.into_iter() {
      root_node.children.insert(0, hast::Node::MdxjsEsm(image));
    }
  }
  links
}
#[cfg(test)]
mod tests {
  use super::*;
  // Protocol-prefixed and hash links pass through untouched.
  #[test]
  fn test_external_link() {
    let root = "/Users/xxx/xxx/xxx/docs".to_string();
    let filepath = "/Users/xxx/xxx/xxx/docs/zh/guide/config.md".to_string();
    assert_eq!(
      normalize_link("http://example.com", &root, &filepath),
      "http://example.com".to_string()
    );
    assert_eq!(
      normalize_link("https://example.com", &root, &filepath),
      "https://example.com".to_string()
    );
    assert_eq!(
      normalize_link("mailto:xxx.com", &root, &filepath),
      "mailto:xxx.com".to_string()
    );
    assert_eq!(
      normalize_link("tel:xxx.com", &root, &filepath),
      "tel:xxx.com".to_string()
    );
    assert_eq!(
      normalize_link("javascript:void(0)", &root, &filepath,),
      "javascript:void(0)".to_string()
    );
    assert_eq!(normalize_link("#aaa", &root, &filepath), "#aaa".to_string());
  }
  // "./" resolves against the page directory; each "../" climbs one level.
  #[test]
  fn test_relative_link() {
    let root = "/Users/xxx/xxx/xxx/docs".to_string();
    let filepath = "/Users/xxx/xxx/xxx/docs/zh/guide/config.md".to_string();
    assert_eq!(
      normalize_link("./guide/config.md", &root, &filepath),
      "/zh/guide/guide/config".to_string()
    );
    assert_eq!(
      normalize_link("../guide/config.md", &root, &filepath),
      "/zh/guide/config".to_string()
    );
    assert_eq!(
      normalize_link("../../guide/config.md", &root, &filepath),
      "/guide/config".to_string()
    );
  }
  // Hash fragments survive; ".html" is not an MD extension so it is kept.
  #[test]
  fn test_link_with_hash() {
    let root = "/Users/xxx/xxx/xxx/docs".to_string();
    let filepath = "/Users/xxx/xxx/xxx/docs/zh/guide/config.md".to_string();
    assert_eq!(
      normalize_link("./guide/config.html#aaa", &root, &filepath),
      "/zh/guide/guide/config.html#aaa".to_string()
    );
    assert_eq!(
      normalize_link("./guide/config.html#tools.aaa", &root, &filepath),
      "/zh/guide/guide/config.html#tools.aaa".to_string()
    );
  }
  // Absolute links only lose their markdown extension.
  #[test]
  fn test_absolute_link() {
    let root = "/Users/xxx/xxx/xxx/docs".to_string();
    let filepath = "/Users/xxx/xxx/xxx/docs/zh/guide/config.md".to_string();
    assert_eq!(
      normalize_link("/zh/guide/config.md", &root, &filepath),
      "/zh/guide/config".to_string()
    );
    assert_eq!(
      normalize_link("/zh/guide/config", &root, &filepath),
      "/zh/guide/config".to_string()
    );
  }
  // A relative <img> becomes an import (first child) plus an expression src.
  #[test]
  fn test_img_element() {
    let root = "/Users/xxx/xxx/xxx/docs".to_string();
    let filepath = "/Users/xxx/xxx/xxx/docs/zh/guide/config.md".to_string();
    let mut node = hast::Node::Root(hast::Root {
      children: vec![hast::Node::MdxJsxElement(hast::MdxJsxElement {
        name: Some("img".to_string()),
        attributes: vec![hast::AttributeContent::Property(hast::MdxJsxAttribute {
          name: "src".to_string(),
          value: Some(hast::AttributeValue::Literal(
            "../../assets/a.png".to_string(),
          )),
        })],
        children: vec![],
        position: None,
      })],
      position: None,
    });
    let links = mdx_plugin_normalize_link(&mut node, &root, &filepath);
    // Images are not links, so nothing is collected.
    assert_eq!(links.len(), 0);
    if let hast::Node::Root(root) = node {
      assert_eq!(root.children.len(), 2);
      if let hast::Node::MdxjsEsm(esm) = &root.children[0] {
        assert_eq!(
          esm.value,
          "import image_0 from \"../../assets/a.png\";".to_string()
        );
      }
      if let hast::Node::MdxJsxElement(element) = &root.children[1] {
        assert_eq!(element.name, Some("img".to_string()));
        if let hast::AttributeContent::Property(property) = &element.attributes[0] {
          assert_eq!(property.name, "src".to_string());
          if let Some(hast::AttributeValue::Expression(expression)) = &property.value {
            assert_eq!(expression.value, "image_0".to_string());
          }
        }
      }
    }
  }
  // Only .md/.mdx are stripped; unknown "extensions" (dots in names) remain.
  #[test]
  fn test_remove_extname() {
    let root = "/Users/xxx/xxx/xxx/docs".to_string();
    let filepath = "/Users/xxx/xxx/xxx/docs/zh/guide/config.md".to_string();
    assert_eq!(
      normalize_link("./guide/config.md", &root, &filepath),
      "/zh/guide/guide/config".to_string()
    );
    assert_eq!(
      normalize_link("./guide/config.mdx", &root, &filepath),
      "/zh/guide/guide/config".to_string()
    );
    assert_eq!(
      normalize_link("./guide/config/webpack.resolve.alias", &root, &filepath),
      "/zh/guide/guide/config/webpack.resolve.alias".to_string()
    );
  }
}
| rust | MIT | 04633f3cb2d8062e2578ea02213db42c2e4a5952 | 2026-01-04T20:17:17.706328Z | false |
web-infra-dev/mdx-rs | https://github.com/web-infra-dev/mdx-rs/blob/04633f3cb2d8062e2578ea02213db42c2e4a5952/crates/plugin_container/src/lib.rs | crates/plugin_container/src/lib.rs | //! Author: sanyuan0704
//!
//! This plugin is used to parse the container in markdown.
#![allow(clippy::ptr_arg)]
// parse title from `title="xxx"` or `title=xxx` or `title='xxx'`
/// Extract the title value from a `title=...` meta string.
///
/// Accepts `title="xxx"`, `title='xxx'` or `title=xxx` (the bare form may be
/// terminated by the `}` that closes the container meta). Returns an empty
/// string for a bare `title=` with no value.
pub fn parse_title_from_meta(title_meta: &str) -> String {
  // Fix: the original `chars().nth(6).unwrap()` panicked when nothing
  // followed the 6-byte `title=` prefix.
  let quote = match title_meta.chars().nth(6) {
    Some(c) => c,
    None => return String::new(),
  };
  if quote != '"' && quote != '\'' {
    // Unquoted value: take everything up to (and excluding) a trailing `}`.
    let last_index = title_meta.rfind('}').unwrap_or(title_meta.len());
    title_meta[6..last_index].to_string()
  } else {
    // Quoted value: take everything up to the last matching quote.
    let rest = &title_meta[7..];
    let last_index = rest.rfind(quote).unwrap_or(rest.len());
    rest[..last_index].to_string()
  }
}
/// Parse `(type, title)` out of a container opener such as `:::tip Title`
/// or `:::note{title="Title"}`.
///
/// Only the first line is inspected. The type is the word right after `:::`;
/// the remainder (split on the first space or `{`) is the title, which may
/// itself be in `title=...` form.
pub fn parse_container_meta(meta: &str) -> (String, String) {
  let first_line = meta.lines().next().unwrap_or("");
  let after_marker = first_line
    .split_once(":::")
    .map_or("", |(_, rest)| rest)
    .trim_start();
  let mut parts = after_marker.splitn(2, |c| c == ' ' || c == '{');
  let container_type = parts.next().unwrap_or("").trim();
  let mut title = parts.next().unwrap_or("").to_string();
  // Titles may be written as `title="..."` / `title='...'` / `title=...`.
  if title.starts_with("title=") {
    title = parse_title_from_meta(&title);
  }
  (container_type.into(), title.trim().to_string())
}
/// Build the directive element for a parsed container.
///
/// Produces `<details><summary>…</summary>…</details>` for `details`
/// containers and `<div class="rspress-directive …">` wrappers otherwise,
/// with title and content children.
fn create_new_container_node(
  container_type: &str,
  container_title: &str,
  container_content: &Vec<hast::Node>,
) -> hast::Node {
  // With no explicit title, fall back to the ASCII-uppercased type name.
  let title_text = if container_title.is_empty() {
    container_type.to_ascii_uppercase()
  } else {
    container_title.to_string()
  };
  let container_type = container_type.to_lowercase();
  // `details` containers use native disclosure tags; all others are divs.
  let (root_tag_name, title_tag_name) = if container_type == "details" {
    ("details", "summary")
  } else {
    ("div", "div")
  };
  let title_node = hast::Node::Element(hast::Element {
    tag_name: title_tag_name.into(),
    properties: vec![(
      "className".into(),
      hast::PropertyValue::SpaceSeparated(vec!["rspress-directive-title".into()]),
    )],
    children: vec![hast::Node::Text(hast::Text {
      value: title_text,
      position: None,
    })],
    position: None,
  });
  let content_node = hast::Node::Element(hast::Element {
    tag_name: "div".into(),
    properties: vec![(
      "className".into(),
      hast::PropertyValue::SpaceSeparated(vec!["rspress-directive-content".into()]),
    )],
    children: container_content.clone(),
    position: None,
  });
  hast::Node::Element(hast::Element {
    tag_name: root_tag_name.into(),
    properties: vec![(
      "className".into(),
      hast::PropertyValue::SpaceSeparated(vec!["rspress-directive".into(), container_type]),
    )],
    children: vec![title_node, content_node],
    position: None,
  })
}
/// Wrap `children` in a fresh `<p>` element carrying the given `properties`.
fn wrap_node_with_paragraph(
  properties: &[(String, hast::PropertyValue)],
  children: &[hast::Node],
) -> hast::Node {
  hast::Node::Element(hast::Element {
    tag_name: "p".into(),
    properties: properties.to_vec(),
    children: children.to_vec(),
    position: None,
  })
}
/// Whether `container_type` (case-insensitively, ASCII) is a supported
/// directive type.
fn is_valid_container_type(container_type: &String) -> bool {
  // eq_ignore_ascii_case matches the previous clone + make_ascii_lowercase
  // semantics without allocating.
  const VALID_TYPES: [&str; 7] = [
    "tip", "note", "warning", "caution", "danger", "info", "details",
  ];
  VALID_TYPES
    .iter()
    .any(|valid| valid.eq_ignore_ascii_case(container_type))
}
/// Parse a GitHub-alert opener (`[!TYPE]`, possibly followed by text) into
/// `(type, remaining_text)`.
///
/// GitHub's syntax is strict: the marker must be a single `[!TYPE]` token on
/// the first non-empty line. Leading blank lines are skipped; everything
/// after the marker (same line and following lines) is joined with `\n`.
/// An unparsable first line yields `("", "")`.
fn parse_github_alerts_container_meta(meta: &str) -> (String, String) {
  let mut alert_type = String::new();
  let mut body = String::new();
  let mut seen_first_line = false;
  for line in meta.lines() {
    // Skip blank lines before the marker has been found.
    if alert_type.is_empty() && line.is_empty() {
      continue;
    }
    if alert_type.is_empty() && !seen_first_line {
      seen_first_line = true;
      let (head, rest) = line.trim().split_once(']').unwrap_or(("", ""));
      alert_type = head.replace("[!", "");
      body = rest.to_owned();
      if alert_type.is_empty() {
        // First non-empty line is not a valid marker: give up.
        break;
      }
      continue;
    }
    // Accumulate continuation lines, preserving line breaks.
    if body.is_empty() {
      body = line.to_owned();
    } else {
      body.push('\n');
      body.push_str(line);
    }
  }
  (alert_type, body)
}
/// Scan a sibling list for container syntax and fold each container's nodes
/// into a single directive element.
///
/// A small state machine tracks the open container: `container_content_start`
/// flips on at `:::type` (or a `> [!TYPE]` blockquote), content nodes are
/// accumulated, and `container_content_end` flips on at the closing `:::`
/// (blockquotes open and close in one node). On close, the covered range of
/// children is replaced by the built directive node. Children are also
/// traversed recursively before being inspected.
fn traverse_children(children: &mut Vec<hast::Node>) {
  let mut container_type = String::new();
  let mut container_title = String::new();
  let mut container_content = vec![];
  let mut container_content_start = false;
  let mut container_content_end = false;
  let mut container_content_start_index = 0;
  let mut container_content_end_index = 0;
  let mut index = 0;
  while index < children.len() {
    let child = &mut children[index];
    // Depth-first: handle nested containers before inspecting this node.
    traverse_node(child);
    if let hast::Node::Element(element) = child {
      // Meet the start of the container
      if !container_content_start {
        // e.g. :::tip
        if element.tag_name == "p" {
          if let Some(hast::Node::Text(text)) = element.children.first() {
            if text.value.starts_with(":::") {
              (container_type, container_title) = parse_container_meta(&text.value);
              if !is_valid_container_type(&container_type) {
                index += 1;
                continue;
              }
              // If the second element is MdxExpression, we parse the value and reassign the container_title
              if let Some(hast::Node::MdxExpression(expression)) = element.children.get(1) {
                container_title = parse_title_from_meta(&expression.value);
              }
              container_content_start = true;
              container_content_start_index = index;
              // :::tip\nThis is a tip
              // We should record the `This is a tip`
              for line in text.value.lines().skip(1) {
                if line.ends_with(":::") {
                  container_content_end = true;
                  container_content_end_index = index;
                  break;
                };
                container_content.push(wrap_node_with_paragraph(
                  &element.properties.clone(),
                  &[hast::Node::Text(hast::Text {
                    value: line.into(),
                    position: None,
                  })],
                ));
              }
            }
          }
        }
        // e.g. > [!tip]
        if element.tag_name == "blockquote" {
          // why use element.children.get(1)?
          // in crates/mdx_rs/mdast_util_to_hast.rs, method `transform_block_quote`
          // always insert Text { value: "\n".into(), position: None } in blockquote's children
          if let Some(hast::Node::Element(first_element)) = element.children.get(1) {
            if first_element.tag_name == "p" {
              if let Some(hast::Node::Text(text)) = first_element.children.first() {
                if text.value.trim().starts_with("[!") {
                  // split data if previous step parse in one line
                  // e.g <p>[!TIP] this is a tip</p>
                  let (self_container_type, remaining_data) =
                    parse_github_alerts_container_meta(&text.value);
                  if !is_valid_container_type(&self_container_type) {
                    index += 1;
                    continue;
                  }
                  // in this case, container_type as container_title
                  container_type = self_container_type.clone();
                  container_title = self_container_type.clone();
                  container_title.make_ascii_uppercase();
                  container_content_start = true;
                  container_content_start_index = index;
                  // reform paragraph tag: drop or rewrite the marker text node
                  let mut paragraph_children = first_element.children.clone();
                  if !remaining_data.is_empty() {
                    paragraph_children[0] = hast::Node::Text(hast::Text {
                      value: remaining_data,
                      position: None,
                    })
                  } else {
                    paragraph_children.remove(0);
                  }
                  // reform blockquote tag
                  let mut children = element.children.clone();
                  if paragraph_children.is_empty() {
                    children.remove(1);
                  } else {
                    // Fix: `¶graph_children` was a mojibake of
                    // `&paragraph_children` (`&para` -> `&para;`).
                    children[1] =
                      wrap_node_with_paragraph(&element.properties.clone(), &paragraph_children)
                  }
                  // A blockquote alert opens and closes in a single node.
                  container_content = children;
                  container_content_end = true;
                  container_content_end_index = index;
                }
              }
            }
          }
        }
      }
      // Collect the container content in current p tag
      if container_content_start && !container_content_end && !element.children.is_empty() {
        if element.tag_name == "p" {
          let mut fragments = vec![];
          for (i, child) in element.children.iter().enumerate() {
            // Skip the meta string and stop when we meet the end of container
            if i == 0 && index == container_content_start_index {
              continue;
            }
            if i == 1 && index == container_content_start_index {
              if let hast::Node::MdxExpression(expression) = child {
                if expression.value.starts_with("title=") {
                  continue;
                }
              }
            }
            if let hast::Node::Text(text) = child {
              if text.value.ends_with(":::") {
                // Keep any trailing text that precedes the closing marker.
                let extra_text = text.value.split(":::").next().unwrap_or("");
                if !extra_text.is_empty() {
                  fragments.push(hast::Node::Text(hast::Text {
                    value: extra_text.into(),
                    position: None,
                  }));
                }
                container_content_end = true;
                container_content_end_index = index;
                break;
              }
            }
            fragments.push(child.clone());
          }
          if !fragments.is_empty() {
            if index == container_content_start_index && !container_content.is_empty() {
              // Opening paragraph already produced content: merge into it.
              let first_node = container_content.first_mut().unwrap();
              let mut children = first_node.children().unwrap().to_vec();
              children.extend(fragments);
              *first_node.children_mut().unwrap() = children;
            } else {
              container_content.push(wrap_node_with_paragraph(
                &element.properties.clone(),
                &fragments,
              ));
            }
          }
        } else {
          container_content.push(child.clone());
        }
      }
      // Meet the end of the container
      if container_content_end {
        // We should remove the container content from the root children
        // And add the container element to the root children
        let new_container_children = create_new_container_node(
          container_type.as_str(),
          container_title.as_str(),
          &container_content,
        );
        children.drain(container_content_start_index..=container_content_end_index);
        children.insert(container_content_start_index, new_container_children);
        // Reset state and resume scanning right after the new node.
        container_title = String::new();
        container_content = vec![];
        container_content_start = false;
        container_content_end = false;
        index = container_content_start_index;
      }
      index += 1;
      continue;
    }
    index += 1;
    if container_content_start && !container_content_end {
      // Exclude the MdxExpression、MdxjsEsm Node
      if let hast::Node::MdxExpression(_) = child {
        continue;
      }
      container_content.push(child.clone());
    }
  }
}
/// Recurse into any node that owns children; leaf nodes are left untouched.
fn traverse_node(node: &mut hast::Node) {
  match node.children_mut() {
    Some(children) => traverse_children(children),
    None => {}
  }
}
/// Transform container syntax in the hast tree into directive elements.
///
/// Two syntaxes are recognized:
/// - fenced containers: a paragraph starting with `:::type [title]`, with
///   content up to a closing `:::`;
/// - GitHub alerts: a blockquote whose first paragraph starts with `[!type]`.
///
/// For example:
/// :::tip
/// this is a tip
/// :::
/// or
/// > [!tip]
/// > this is a tip
/// both become:
/// <div class="rspress-directive tip">
///   <div class="rspress-directive-title">TIP</div>
///   <div class="rspress-directive-content">
///     <p>this is a tip</p>
///   </div>
/// </div>
pub fn mdx_plugin_container(root: &mut hast::Node) {
  traverse_node(root);
}
#[cfg(test)]
mod tests {
use super::*;
// Double-quoted, single-quoted and bare title values all parse to the text.
#[test]
fn test_parse_title_from_meta() {
  assert_eq!(parse_title_from_meta(r#"title="Note""#), "Note");
  assert_eq!(parse_title_from_meta("title='Note'"), "Note");
  assert_eq!(parse_title_from_meta("title=Note"), "Note");
}
// Plain `:::type Title` and all `{title=...}` forms parse to (type, title).
#[test]
fn test_parse_container_meta() {
  assert_eq!(
    parse_container_meta(":::tip Note"),
    ("tip".into(), "Note".into()),
  );
  for meta in &[
    ":::note{title=\"Note\"}",
    ":::note{title='Note'}",
    ":::note{title=Note}",
  ] {
    assert_eq!(parse_container_meta(meta), ("note".into(), "Note".into()));
  }
}
// A bare type and every empty `{title=...}` form produce an empty title.
#[test]
fn test_parse_container_meta_with_empty_title() {
  assert_eq!(parse_container_meta(":::tip"), ("tip".into(), "".into()));
  for meta in &[
    ":::note{title=\"\"}",
    ":::note{title=''}",
    ":::note{title=}",
  ] {
    assert_eq!(parse_container_meta(meta), ("note".into(), "".into()));
  }
}
// A bare `:::` yields an empty type; empty titles behave as before.
#[test]
fn test_parse_container_meta_with_empty_type() {
  assert_eq!(parse_container_meta(":::"), ("".into(), "".into()));
  for meta in &[
    ":::note{title=\"\"}",
    ":::note{title=''}",
    ":::note{title=}",
  ] {
    assert_eq!(parse_container_meta(meta), ("note".into(), "".into()));
  }
}
#[test]
fn test_parse_container_meta_with_empty_type_and_title() {
assert_eq!(parse_container_meta(":::"), ("".into(), "".into()),);
assert_eq!(
parse_container_meta(":::note{title=\"\"}"),
("note".into(), "".into()),
);
assert_eq!(
parse_container_meta(":::note{title=\'\'}"),
("note".into(), "".into()),
);
assert_eq!(
parse_container_meta(":::note{title=}"),
("note".into(), "".into()),
);
}
#[test]
fn test_container_plugin_with_normal_title() {
let mut root = hast::Node::Root(hast::Root {
children: vec![
hast::Node::Element(hast::Element {
tag_name: "p".into(),
properties: vec![],
children: vec![hast::Node::Text(hast::Text {
value: ":::tip Note".into(),
position: None,
})],
position: None,
}),
hast::Node::Element(hast::Element {
tag_name: "p".into(),
properties: vec![],
children: vec![hast::Node::Text(hast::Text {
value: "This is a tip".into(),
position: None,
})],
position: None,
}),
hast::Node::Element(hast::Element {
tag_name: "p".into(),
properties: vec![],
children: vec![hast::Node::Text(hast::Text {
value: ":::".into(),
position: None,
})],
position: None,
}),
],
position: None,
});
mdx_plugin_container(&mut root);
assert_eq!(
root,
hast::Node::Root(hast::Root {
children: vec![hast::Node::Element(hast::Element {
tag_name: "div".into(),
properties: vec![(
"className".into(),
hast::PropertyValue::SpaceSeparated(vec!["rspress-directive".into(), "tip".into()])
),],
children: vec![
hast::Node::Element(hast::Element {
tag_name: "div".into(),
properties: vec![(
"className".into(),
hast::PropertyValue::SpaceSeparated(vec!["rspress-directive-title".into()])
)],
children: vec![hast::Node::Text(hast::Text {
value: "Note".into(),
position: None,
})],
position: None,
}),
hast::Node::Element(hast::Element {
tag_name: "div".into(),
properties: vec![(
"className".into(),
hast::PropertyValue::SpaceSeparated(vec!["rspress-directive-content".into()])
)],
children: vec![hast::Node::Element(hast::Element {
tag_name: "p".into(),
properties: vec![],
children: vec![hast::Node::Text(hast::Text {
value: "This is a tip".into(),
position: None,
})],
position: None,
})],
position: None,
})
],
position: None,
})],
position: None,
})
);
}
#[test]
fn test_container_plugin_with_empty_title() {
let mut root = hast::Node::Root(hast::Root {
children: vec![
hast::Node::Element(hast::Element {
tag_name: "p".into(),
properties: vec![],
children: vec![hast::Node::Text(hast::Text {
value: ":::tip".into(),
position: None,
})],
position: None,
}),
hast::Node::Element(hast::Element {
tag_name: "p".into(),
properties: vec![],
children: vec![hast::Node::Text(hast::Text {
value: "This is a tip".into(),
position: None,
})],
position: None,
}),
hast::Node::Element(hast::Element {
tag_name: "p".into(),
properties: vec![],
children: vec![hast::Node::Text(hast::Text {
value: ":::".into(),
position: None,
})],
position: None,
}),
],
position: None,
});
mdx_plugin_container(&mut root);
assert_eq!(
root,
hast::Node::Root(hast::Root {
children: vec![hast::Node::Element(hast::Element {
tag_name: "div".into(),
properties: vec![(
"className".into(),
hast::PropertyValue::SpaceSeparated(vec!["rspress-directive".into(), "tip".into()])
),],
children: vec![
hast::Node::Element(hast::Element {
tag_name: "div".into(),
properties: vec![(
"className".into(),
hast::PropertyValue::SpaceSeparated(vec!["rspress-directive-title".into()])
)],
children: vec![hast::Node::Text(hast::Text {
value: "TIP".into(),
position: None,
})],
position: None,
}),
hast::Node::Element(hast::Element {
tag_name: "div".into(),
properties: vec![(
"className".into(),
hast::PropertyValue::SpaceSeparated(vec!["rspress-directive-content".into()])
)],
children: vec![hast::Node::Element(hast::Element {
tag_name: "p".into(),
properties: vec![],
children: vec![hast::Node::Text(hast::Text {
value: "This is a tip".into(),
position: None,
})],
position: None,
})],
position: None,
})
],
position: None,
})],
position: None,
})
);
}
#[test]
fn test_container_plugin_with_title_assign() {
let mut root = hast::Node::Root(hast::Root {
children: vec![
hast::Node::Element(hast::Element {
tag_name: "p".into(),
properties: vec![],
children: vec![hast::Node::Text(hast::Text {
value: ":::tip{title=\"Note\"}".into(),
position: None,
})],
position: None,
}),
hast::Node::Element(hast::Element {
tag_name: "p".into(),
properties: vec![],
children: vec![hast::Node::Text(hast::Text {
value: "This is a tip".into(),
position: None,
})],
position: None,
}),
hast::Node::Element(hast::Element {
tag_name: "p".into(),
properties: vec![],
children: vec![hast::Node::Text(hast::Text {
value: ":::".into(),
position: None,
})],
position: None,
}),
],
position: None,
});
mdx_plugin_container(&mut root);
assert_eq!(
root,
hast::Node::Root(hast::Root {
children: vec![hast::Node::Element(hast::Element {
tag_name: "div".into(),
properties: vec![(
"className".into(),
hast::PropertyValue::SpaceSeparated(vec!["rspress-directive".into(), "tip".into()])
),],
children: vec![
hast::Node::Element(hast::Element {
tag_name: "div".into(),
properties: vec![(
"className".into(),
hast::PropertyValue::SpaceSeparated(vec!["rspress-directive-title".into()])
)],
children: vec![hast::Node::Text(hast::Text {
value: "Note".into(),
position: None,
})],
position: None,
}),
hast::Node::Element(hast::Element {
tag_name: "div".into(),
properties: vec![(
"className".into(),
hast::PropertyValue::SpaceSeparated(vec!["rspress-directive-content".into()])
)],
children: vec![hast::Node::Element(hast::Element {
tag_name: "p".into(),
properties: vec![],
children: vec![hast::Node::Text(hast::Text {
value: "This is a tip".into(),
position: None,
})],
position: None,
})],
position: None,
})
],
position: None,
})],
position: None,
})
);
}
#[test]
fn test_parse_github_alerts_container_meta() {
assert_eq!(
parse_github_alerts_container_meta("[!TIP]"),
("TIP".into(), "".into())
);
assert_eq!(
parse_github_alerts_container_meta("[!TIP this is tip block"),
("".into(), "".into())
);
assert_eq!(
parse_github_alerts_container_meta("[!TIP] this is tip block"),
("TIP".into(), " this is tip block".into())
);
}
#[test]
fn test_container_plugin_with_mdx_flow_in_content() {
let mut root = hast::Node::Root(hast::Root {
children: vec![
hast::Node::Element(hast::Element {
tag_name: "p".into(),
properties: vec![],
children: vec![hast::Node::Text(hast::Text {
value: ":::tip".into(),
position: None,
})],
position: None,
}),
hast::Node::MdxJsxElement(hast::MdxJsxElement {
name: Some("Rspack".into()),
attributes: vec![],
children: vec![],
position: None,
}),
hast::Node::Element(hast::Element {
tag_name: "p".into(),
properties: vec![],
children: vec![hast::Node::Text(hast::Text {
value: ":::".into(),
position: None,
})],
position: None,
}),
],
position: None,
});
mdx_plugin_container(&mut root);
assert_eq!(
root,
hast::Node::Root(hast::Root {
children: vec![hast::Node::Element(hast::Element {
tag_name: "div".into(),
properties: vec![(
"className".into(),
hast::PropertyValue::SpaceSeparated(vec!["rspress-directive".into(), "tip".into()])
),],
children: vec![
hast::Node::Element(hast::Element {
tag_name: "div".into(),
properties: vec![(
"className".into(),
hast::PropertyValue::SpaceSeparated(vec!["rspress-directive-title".into()])
)],
children: vec![hast::Node::Text(hast::Text {
value: "TIP".into(),
position: None,
})],
position: None,
}),
hast::Node::Element(hast::Element {
tag_name: "div".into(),
properties: vec![(
"className".into(),
hast::PropertyValue::SpaceSeparated(vec!["rspress-directive-content".into()])
)],
children: vec![hast::Node::MdxJsxElement(hast::MdxJsxElement {
name: Some("Rspack".into()),
attributes: vec![],
children: vec![],
position: None,
})],
position: None,
})
],
position: None,
})],
position: None,
})
);
}
#[test]
fn test_container_plugin_width_details_title() {
let mut root = hast::Node::Root(hast::Root {
children: vec![
hast::Node::Element(hast::Element {
tag_name: "p".into(),
properties: vec![],
children: vec![hast::Node::Text(hast::Text {
value: ":::details Note".into(),
position: None,
})],
position: None,
}),
hast::Node::Element(hast::Element {
tag_name: "p".into(),
properties: vec![],
children: vec![hast::Node::Text(hast::Text {
value: "This is a tip".into(),
position: None,
})],
position: None,
}),
hast::Node::Element(hast::Element {
tag_name: "p".into(),
properties: vec![],
children: vec![hast::Node::Text(hast::Text {
value: ":::".into(),
position: None,
})],
position: None,
}),
],
position: None,
});
mdx_plugin_container(&mut root);
assert_eq!(
root,
hast::Node::Root(hast::Root {
children: vec![hast::Node::Element(hast::Element {
tag_name: "details".into(),
properties: vec![(
"className".into(),
hast::PropertyValue::SpaceSeparated(vec!["rspress-directive".into(), "details".into()])
),],
children: vec![
hast::Node::Element(hast::Element {
tag_name: "summary".into(),
properties: vec![(
"className".into(),
hast::PropertyValue::SpaceSeparated(vec!["rspress-directive-title".into()])
)],
children: vec![hast::Node::Text(hast::Text {
value: "Note".into(),
position: None,
})],
position: None,
}),
hast::Node::Element(hast::Element {
tag_name: "div".into(),
properties: vec![(
"className".into(),
hast::PropertyValue::SpaceSeparated(vec!["rspress-directive-content".into()])
)],
children: vec![hast::Node::Element(hast::Element {
tag_name: "p".into(),
properties: vec![],
children: vec![hast::Node::Text(hast::Text {
value: "This is a tip".into(),
position: None,
})],
position: None,
})],
position: None,
})
],
position: None,
})],
position: None,
})
);
}
#[test]
fn test_container_plugin_with_github_alerts_title() {
let mut root = hast::Node::Root(hast::Root {
children: vec![hast::Node::Element(hast::Element {
tag_name: "blockquote".into(),
properties: vec![],
children: vec![
hast::Node::Text(hast::Text {
value: "\n".into(),
position: None,
}),
hast::Node::Element(hast::Element {
tag_name: "p".into(),
properties: vec![],
children: vec![hast::Node::Text(hast::Text {
value: "[!TIP]".into(),
position: None,
})],
position: None,
}),
],
position: None,
})],
position: None,
});
mdx_plugin_container(&mut root);
assert_eq!(
root,
hast::Node::Root(hast::Root {
children: vec![hast::Node::Element(hast::Element {
tag_name: "div".into(),
properties: vec![(
"className".into(),
hast::PropertyValue::SpaceSeparated(vec!["rspress-directive".into(), "tip".into()])
),],
children: vec![
hast::Node::Element(hast::Element {
tag_name: "div".into(),
properties: vec![(
"className".into(),
hast::PropertyValue::SpaceSeparated(vec!["rspress-directive-title".into()])
)],
children: vec![hast::Node::Text(hast::Text {
value: "TIP".into(),
position: None,
})],
position: None,
}),
hast::Node::Element(hast::Element {
tag_name: "div".into(),
properties: vec![(
"className".into(),
hast::PropertyValue::SpaceSeparated(vec!["rspress-directive-content".into()])
)],
children: vec![hast::Node::Text(hast::Text {
value: "\n".into(),
position: None,
}),],
position: None,
})
],
position: None,
})],
position: None,
})
);
}
#[test]
fn test_container_plugin_with_nested_in_list() {
let mut root = hast::Node::Root(hast::Root {
children: vec![
hast::Node::Element(hast::Element {
tag_name: "ol".into(),
properties: vec![],
children: vec![
hast::Node::Element(hast::Element {
tag_name: "p".into(),
properties: vec![],
children: vec![hast::Node::Text(hast::Text {
value: ":::tip Note".into(),
position: None,
})],
position: None,
}),
hast::Node::Element(hast::Element {
tag_name: "p".into(),
properties: vec![],
children: vec![hast::Node::Text(hast::Text {
value: "This is a tip".into(),
position: None,
})],
position: None,
}),
hast::Node::Element(hast::Element {
tag_name: "p".into(),
properties: vec![],
children: vec![hast::Node::Text(hast::Text {
value: ":::".into(),
| rust | MIT | 04633f3cb2d8062e2578ea02213db42c2e4a5952 | 2026-01-04T20:17:17.706328Z | true |
web-infra-dev/mdx-rs | https://github.com/web-infra-dev/mdx-rs/blob/04633f3cb2d8062e2578ea02213db42c2e4a5952/crates/plugin_toc/src/lib.rs | crates/plugin_toc/src/lib.rs | //! Author: sanyuan0704
//!
//! This plugin is used to generate toc in mdx. Features:
//! 1. Collect h2 ~ h4 as toc
//! 2. Collect h1 as title
//! 3. Support custom id, example: `# hello world {#custom-id}`
use markdown::mdast::{self, Heading};
use slugger::Slugger;
use std::vec;
use utils::extract_title_and_id;
/// A single entry of the generated table of contents.
#[derive(Debug, Clone)]
pub struct TocItem {
  /// Heading text, with inline markdown markers preserved (e.g. `**bold**`,
  /// `` `code` ``).
  pub text: String,
  /// Heading depth; only depths 2..=4 are collected (see `mdx_plugin_toc`).
  pub depth: u8,
  /// Anchor id: a custom `{#id}` when present, otherwise a generated slug.
  pub id: String,
}
/// Result of `mdx_plugin_toc`: the document title plus the collected toc.
pub struct TocResult {
  /// Text of the level-1 heading; empty when the document has no h1.
  pub title: String,
  /// Collected h2–h4 entries, in document order.
  pub toc: Vec<TocItem>,
}
/// Selects which mdast wrapper variant `extract_text_from_node` unwraps.
pub enum NodeType {
  Link,
  Strong,
  Emphasis,
  Delete,
}
/// Extract the plain text from the first child of a wrapper node.
///
/// `node_type` selects which wrapper variant is expected. Returns an empty
/// string when `node` is not that variant, when it has no children, or when
/// its first child is not plain text.
///
/// Fix: use `children.first()` instead of `children[0]` so wrapper nodes
/// with no children no longer panic.
pub fn extract_text_from_node(node: &mdast::Node, node_type: NodeType) -> String {
  match node_type {
    NodeType::Link => {
      if let mdast::Node::Link(link) = node {
        if let Some(mdast::Node::Text(text)) = link.children.first() {
          return text.value.clone();
        }
      }
    }
    NodeType::Strong => {
      if let mdast::Node::Strong(strong) = node {
        if let Some(mdast::Node::Text(text)) = strong.children.first() {
          return text.value.clone();
        }
      }
    }
    NodeType::Emphasis => {
      if let mdast::Node::Emphasis(emphasis) = node {
        // Emphasis wrapping Strong (`***hello***`): re-add the inner `**`
        // markers so the caller's surrounding `*…*` yields `***…***`.
        if let Some(inner @ mdast::Node::Strong(_)) = emphasis.children.first() {
          return format!("**{}**", extract_text_from_node(inner, NodeType::Strong));
        }
        if let Some(mdast::Node::Text(text)) = emphasis.children.first() {
          return text.value.clone();
        }
      }
    }
    NodeType::Delete => {
      if let mdast::Node::Delete(delete) = node {
        if let Some(mdast::Node::Text(text)) = delete.children.first() {
          return text.value.clone();
        }
      }
    }
  }
  String::new()
}
/// Render a heading's children back into a markdown-ish title string, and
/// pull out an optional `{#custom-id}` marker.
///
/// Inline markers (`**`, `*`, `~~`, backticks) are re-added around the
/// extracted text; unknown child nodes (e.g. JSX elements) are ignored.
/// Returns `(title, custom_id)`, where `custom_id` is empty when no marker
/// was found.
pub fn collect_title_in_mdast(heading: &mut Heading) -> (String, String) {
  let mut title = String::new();
  let mut custom_id = String::new();
  for child in heading.children.iter_mut() {
    match child {
      // Plain text may carry a trailing `{#custom-id}`; split it off.
      // (A later text child with an id overwrites an earlier one.)
      mdast::Node::Text(text) => {
        let (text_part, id_part) = extract_title_and_id(&text.value);
        title += &text_part;
        custom_id = id_part;
      }
      mdast::Node::Strong(_) => {
        let inner = extract_text_from_node(child, NodeType::Strong);
        title += &format!("**{}**", inner);
      }
      mdast::Node::Emphasis(_) => {
        let inner = extract_text_from_node(child, NodeType::Emphasis);
        title += &format!("*{}*", inner);
      }
      mdast::Node::Delete(_) => {
        let inner = extract_text_from_node(child, NodeType::Delete);
        title += &format!("~~{}~~", inner);
      }
      mdast::Node::InlineCode(code) => {
        title += &format!("`{}`", code.value);
      }
      mdast::Node::Link(_) => {
        title += &extract_text_from_node(child, NodeType::Link);
      }
      // Other inline nodes contribute nothing to the title.
      _ => {}
    }
  }
  (title.trim_end().to_string(), custom_id)
}
/// Walk the root's headings, collecting the document title (the h1) and a
/// table of contents built from h2–h4 headings.
///
/// A custom `{#id}` wins over a generated slug. Note that a slug is
/// generated for every heading without a custom id — including depths that
/// are not collected — before the depth filter is applied.
pub fn mdx_plugin_toc(node: &mut mdast::Node) -> TocResult {
  let mut title = String::new();
  let mut toc = Vec::new();
  let mut slugger = Slugger::new();
  if let mdast::Node::Root(root) = node {
    for child in root.children.iter_mut() {
      if let mdast::Node::Heading(heading) = child {
        let (text, custom_id) = collect_title_in_mdast(heading);
        if heading.depth == 1 {
          title = text.clone();
        }
        // Resolve the anchor id before filtering by depth, so the slugger
        // state advances for every heading.
        let id = if custom_id.is_empty() {
          slugger.slug(&text, false)
        } else {
          custom_id
        };
        // Only h2 ~ h4 end up in the toc.
        if (2..=4).contains(&heading.depth) {
          toc.push(TocItem {
            text,
            depth: heading.depth,
            id,
          });
        }
      }
    }
  }
  TocResult { title, toc }
}
#[cfg(test)]
mod tests {
  use super::*;
  use markdown::mdast;

  // Plain text node.
  fn text(value: &str) -> mdast::Node {
    mdast::Node::Text(mdast::Text {
      value: value.to_string(),
      position: None,
    })
  }

  // Inline code node (rendered as `` `value` `` in titles).
  fn inline_code(value: &str) -> mdast::Node {
    mdast::Node::InlineCode(mdast::InlineCode {
      value: value.to_string(),
      position: None,
    })
  }

  // `**strong**` wrapper node.
  fn strong(children: Vec<mdast::Node>) -> mdast::Node {
    mdast::Node::Strong(mdast::Strong {
      children,
      position: None,
    })
  }

  // `*emphasis*` wrapper node.
  fn emphasis(children: Vec<mdast::Node>) -> mdast::Node {
    mdast::Node::Emphasis(mdast::Emphasis {
      children,
      position: None,
    })
  }

  // `~~delete~~` wrapper node.
  fn delete(children: Vec<mdast::Node>) -> mdast::Node {
    mdast::Node::Delete(mdast::Delete {
      children,
      position: None,
    })
  }

  // Heading of the given depth.
  fn heading(depth: u8, children: Vec<mdast::Node>) -> mdast::Heading {
    mdast::Heading {
      depth,
      children,
      position: None,
    }
  }

  #[test]
  fn test_collect_title_in_mdast() {
    // Text + inline code.
    let mut h = heading(1, vec![text("Hello"), inline_code("World")]);
    assert_eq!(
      collect_title_in_mdast(&mut h),
      ("Hello`World`".to_string(), "".to_string())
    );

    // Strong and emphasis keep their markdown markers.
    let mut h = heading(
      1,
      vec![strong(vec![text("Hello")]), emphasis(vec![text("World")])],
    );
    assert_eq!(
      collect_title_in_mdast(&mut h),
      ("**Hello***World*".to_string(), "".to_string())
    );

    // Emphasis wrapping strong renders as `***text***`.
    let mut h = heading(1, vec![emphasis(vec![strong(vec![text("Hello World")])])]);
    assert_eq!(
      collect_title_in_mdast(&mut h),
      ("***Hello World***".to_string(), "".to_string())
    );

    // JSX flow elements are skipped; trailing whitespace is trimmed.
    let mut h = heading(
      1,
      vec![
        text("Hello World "),
        mdast::Node::MdxJsxFlowElement(mdast::MdxJsxFlowElement {
          name: Some("foo".to_string()),
          attributes: vec![],
          children: vec![text("bar")],
          position: None,
        }),
        text(" "),
      ],
    );
    assert_eq!(
      collect_title_in_mdast(&mut h),
      ("Hello World".to_string(), "".to_string())
    );

    // Strikethrough keeps its `~~` markers.
    let mut h = heading(1, vec![text("Hello"), delete(vec![text("World")])]);
    assert_eq!(
      collect_title_in_mdast(&mut h),
      ("Hello~~World~~".to_string(), "".to_string())
    );
  }

  #[test]
  fn test_collect_title_in_mdast_with_custom_id() {
    let mut h = heading(
      1,
      vec![
        text("Hello"),
        inline_code("World"),
        text(" 123 {#custom-id}"),
      ],
    );
    // The `{#custom-id}` suffix is returned as the id and stripped from the
    // rendered title...
    assert_eq!(
      collect_title_in_mdast(&mut h),
      ("Hello`World` 123".to_string(), "custom-id".to_string())
    );
    // ...but the heading's own text node is left untouched.
    assert_eq!(h.children[2], text(" 123 {#custom-id}"));
  }

  #[test]
  fn test_mdx_plugin_toc() {
    // One heading per depth 1..=6, plus an extra h4 with strong text.
    // Only h2–h4 are collected; the h1 becomes the title.
    let h1 = heading(1, vec![text("Hello"), inline_code("World")]);
    let h2 = heading(2, vec![text("Hello"), inline_code("World")]);
    let h3 = heading(
      3,
      vec![
        text("Hello"),
        inline_code("World"),
        mdast::Node::Link(mdast::Link {
          url: "https://github.com".to_string(),
          title: None,
          children: vec![text("Github")],
          position: None,
        }),
      ],
    );
    let h4 = heading(4, vec![text("Hello"), inline_code("World")]);
    let h5 = heading(5, vec![text("Hello"), inline_code("World")]);
    let h6 = heading(6, vec![text("Hello"), inline_code("World")]);
    let h7 = heading(
      4,
      vec![
        text("Hello"),
        strong(vec![text("abc")]),
        inline_code("World"),
      ],
    );
    let mut root = mdast::Node::Root(mdast::Root {
      children: vec![
        mdast::Node::Heading(h1),
        mdast::Node::Heading(h2),
        mdast::Node::Heading(h3),
        mdast::Node::Heading(h4),
        mdast::Node::Heading(h5),
        mdast::Node::Heading(h6),
        mdast::Node::Heading(h7),
      ],
      position: None,
    });
    let result = mdx_plugin_toc(&mut root);
    assert_eq!(result.title, "Hello`World`");
    assert_eq!(result.toc.len(), 4);
    assert_eq!(result.toc[1].text, "Hello`World`Github");
    assert_eq!(result.toc[3].text, "Hello**abc**`World`");
  }
}
| rust | MIT | 04633f3cb2d8062e2578ea02213db42c2e4a5952 | 2026-01-04T20:17:17.706328Z | false |
web-infra-dev/mdx-rs | https://github.com/web-infra-dev/mdx-rs/blob/04633f3cb2d8062e2578ea02213db42c2e4a5952/crates/mdx_rs/src/swc_util_build_jsx.rs | crates/mdx_rs/src/swc_util_build_jsx.rs | //! Turn JSX into function calls.
use crate::hast_util_to_swc::Program;
use crate::mdx_plugin_recma_document::JsxRuntime;
use crate::swc_utils::{
bytepos_to_point, create_bool_expression, create_call_expression, create_ident,
create_ident_expression, create_member_expression_from_str, create_null_expression,
create_num_expression, create_object_expression, create_prop_name, create_str,
create_str_expression, jsx_attribute_name_to_prop_name, jsx_element_name_to_expression,
prefix_error_with_point, span_to_position,
};
use core::str;
use markdown::Location;
use swc_core::common::{
comments::{Comment, CommentKind},
util::take::Take,
};
use swc_core::ecma::ast::{
ArrayLit, CallExpr, Callee, Expr, ExprOrSpread, ImportDecl, ImportNamedSpecifier,
ImportSpecifier, JSXAttrName, JSXAttrOrSpread, JSXAttrValue, JSXElement, JSXElementChild,
JSXExpr, JSXFragment, KeyValueProp, Lit, ModuleDecl, ModuleExportName, ModuleItem, Prop,
PropName, PropOrSpread, ThisExpr,
};
use swc_core::ecma::visit::{noop_visit_mut_type, VisitMut, VisitMutWith};
/// Configuration.
#[derive(Debug, Default, Clone)]
pub struct Options {
  /// Whether to add extra information to error messages in generated code.
  ///
  /// When set, the `jsxDEV` dev runtime is imported and calls carry file
  /// name plus line/column metadata (see `jsx_expressions_to_call`).
  pub development: bool,
}
/// Compile JSX away to function calls.
#[allow(unused)]
pub fn swc_util_build_jsx(
program: &mut Program,
options: &Options,
location: Option<&Location>,
) -> Result<(), String> {
let directives = find_directives(&program.comments, location)?;
let mut state = State {
development: options.development,
filepath: program.path.clone(),
location,
automatic: !matches!(directives.runtime, Some(JsxRuntime::Classic)),
import_fragment: false,
import_jsx: false,
import_jsxs: false,
import_jsx_dev: false,
create_element_expression: create_member_expression_from_str(
&directives
.pragma
.unwrap_or_else(|| "React.createElement".into()),
),
fragment_expression: create_member_expression_from_str(
&directives
.pragma_frag
.unwrap_or_else(|| "React.Fragment".into()),
),
error: None,
};
// Rewrite JSX and gather specifiers to import.
program.module.visit_mut_with(&mut state);
if let Some(err) = state.error.take() {
return Err(err);
}
let mut specifiers = vec![];
if state.import_fragment {
specifiers.push(ImportSpecifier::Named(ImportNamedSpecifier {
local: create_ident("_Fragment"),
imported: Some(ModuleExportName::Ident(create_ident("Fragment"))),
span: swc_core::common::DUMMY_SP,
is_type_only: false,
}));
}
if state.import_jsx {
specifiers.push(ImportSpecifier::Named(ImportNamedSpecifier {
local: create_ident("_jsx"),
imported: Some(ModuleExportName::Ident(create_ident("jsx"))),
span: swc_core::common::DUMMY_SP,
is_type_only: false,
}));
}
if state.import_jsxs {
specifiers.push(ImportSpecifier::Named(ImportNamedSpecifier {
local: create_ident("_jsxs"),
imported: Some(ModuleExportName::Ident(create_ident("jsxs"))),
span: swc_core::common::DUMMY_SP,
is_type_only: false,
}));
}
if state.import_jsx_dev {
specifiers.push(ImportSpecifier::Named(ImportNamedSpecifier {
local: create_ident("_jsxDEV"),
imported: Some(ModuleExportName::Ident(create_ident("jsxDEV"))),
span: swc_core::common::DUMMY_SP,
is_type_only: false,
}));
}
if !specifiers.is_empty() {
program.module.body.insert(
0,
ModuleItem::ModuleDecl(ModuleDecl::Import(ImportDecl {
specifiers,
src: Box::new(create_str(&format!(
"{}{}",
directives.import_source.unwrap_or_else(|| "react".into()),
if options.development {
"/jsx-dev-runtime"
} else {
"/jsx-runtime"
}
))),
type_only: false,
asserts: None,
span: swc_core::common::DUMMY_SP,
})),
);
}
Ok(())
}
/// Info gathered from comments.
#[derive(Debug, Default, Clone)]
struct Directives {
  /// Inferred JSX runtime.
  ///
  /// Anything other than `Some(Classic)` selects the automatic runtime.
  runtime: Option<JsxRuntime>,
  /// Inferred automatic JSX import source (defaults to `"react"`).
  import_source: Option<String>,
  /// Inferred classic JSX pragma (defaults to `"React.createElement"`).
  pragma: Option<String>,
  /// Inferred classic JSX pragma fragment (defaults to `"React.Fragment"`).
  pragma_frag: Option<String>,
}
/// Context.
///
/// Implements `VisitMut` to rewrite JSX nodes in place while tracking which
/// runtime helpers need importing.
#[derive(Debug, Clone)]
struct State<'a> {
  /// Location info, used to map byte positions to line/column points for
  /// error messages and dev metadata.
  location: Option<&'a Location>,
  /// Whether walking the tree produced an error (checked after the visit).
  error: Option<String>,
  /// Path to file (emitted as `fileName` in development metadata).
  filepath: Option<String>,
  /// Whether the user is in development mode.
  development: bool,
  /// Whether to import `Fragment`.
  import_fragment: bool,
  /// Whether to import `jsx`.
  import_jsx: bool,
  /// Whether to import `jsxs`.
  import_jsxs: bool,
  /// Whether to import `jsxDEV`.
  import_jsx_dev: bool,
  /// Whether we’re building in the automatic or classic runtime.
  automatic: bool,
  /// Expression (ident or member) to use for `createElement` calls in
  /// the classic runtime.
  create_element_expression: Expr,
  /// Expression (ident or member) to use as fragment symbol in the classic
  /// runtime.
  fragment_expression: Expr,
}
impl<'a> State<'a> {
/// Turn an attribute value into an expression.
fn jsx_attribute_value_to_expression(
&mut self,
value: Option<JSXAttrValue>,
) -> Result<Expr, String> {
match value {
// Boolean prop.
None => Ok(create_bool_expression(true)),
Some(JSXAttrValue::JSXExprContainer(expression_container)) => {
match expression_container.expr {
JSXExpr::JSXEmptyExpr(_) => {
unreachable!("Cannot use empty JSX expressions in attribute values");
}
JSXExpr::Expr(expression) => Ok(*expression),
}
}
Some(JSXAttrValue::Lit(mut literal)) => {
// Remove `raw` so we don’t get character references in strings.
if let Lit::Str(string_literal) = &mut literal {
string_literal.raw = None;
}
Ok(Expr::Lit(literal))
}
Some(JSXAttrValue::JSXFragment(fragment)) => self.jsx_fragment_to_expression(fragment),
Some(JSXAttrValue::JSXElement(element)) => self.jsx_element_to_expression(*element),
}
}
/// Turn children of elements or fragments into expressions.
fn jsx_children_to_expressions(
&mut self,
mut children: Vec<JSXElementChild>,
) -> Result<Vec<Expr>, String> {
let mut result = vec![];
children.reverse();
while let Some(child) = children.pop() {
match child {
JSXElementChild::JSXSpreadChild(child) => {
let lo = child.span.lo;
let start = bytepos_to_point(lo, self.location);
let reason = prefix_error_with_point(
"Unexpected spread child, which is not supported in Babel, SWC, or React",
start.as_ref(),
);
return Err(reason);
}
JSXElementChild::JSXExprContainer(container) => {
if let JSXExpr::Expr(expression) = container.expr {
result.push(*expression);
}
}
JSXElementChild::JSXText(text) => {
let value = jsx_text_to_value(text.value.as_ref());
if !value.is_empty() {
result.push(create_str_expression(&value));
}
}
JSXElementChild::JSXElement(element) => {
result.push(self.jsx_element_to_expression(*element)?);
}
JSXElementChild::JSXFragment(fragment) => {
result.push(self.jsx_fragment_to_expression(fragment)?);
}
}
}
Ok(result)
}
  /// Turn optional attributes, and perhaps children (when automatic), into props.
  ///
  /// Returns `(props, key)`:
  /// * `props` — `None` when there is nothing to pass; a single object
  ///   expression when there are no spreads; otherwise an `Object.assign`
  ///   call merging the spread objects and literal fields in source order.
  /// * `key` — the extracted `key` attribute value (automatic runtime only),
  ///   removed from the props so the caller can pass it separately.
  ///
  /// Errors if `key` appears after a spread attribute.
  fn jsx_attributes_to_expressions(
    &mut self,
    attributes: Option<Vec<JSXAttrOrSpread>>,
    children: Option<Vec<Expr>>,
  ) -> Result<(Option<Expr>, Option<Expr>), String> {
    // `objects` holds completed object expressions (literal field groups and
    // spread expressions, in order); `fields` accumulates the current group.
    let mut objects = vec![];
    let mut fields = vec![];
    let mut spread = false;
    let mut key = None;
    if let Some(mut attributes) = attributes {
      attributes.reverse();
      // Place props in the right order, because we might have duplicates
      // in them and what’s spread in.
      while let Some(attribute) = attributes.pop() {
        match attribute {
          JSXAttrOrSpread::SpreadElement(spread_element) => {
            // Flush the pending literal fields before the spread so their
            // relative order is preserved in the Object.assign arguments.
            if !fields.is_empty() {
              objects.push(create_object_expression(fields));
              fields = vec![];
            }
            objects.push(*spread_element.expr);
            spread = true;
          }
          JSXAttrOrSpread::JSXAttr(jsx_attribute) => {
            let value = self.jsx_attribute_value_to_expression(jsx_attribute.value)?;
            let mut value = Some(value);
            if let JSXAttrName::Ident(ident) = &jsx_attribute.name {
              if self.automatic && &ident.sym == "key" {
                // `key` after a spread is ambiguous (the spread could also
                // carry a key), so reject it.
                if spread {
                  let lo = jsx_attribute.span.lo;
                  let start = bytepos_to_point(lo, self.location);
                  let reason = prefix_error_with_point(
                    "Expected `key` to come before any spread expressions",
                    start.as_ref(),
                  );
                  return Err(reason);
                }
                // Take the value out, so we don’t add it as a prop.
                key = value.take();
              }
            }
            if let Some(value) = value {
              fields.push(PropOrSpread::Prop(Box::new(Prop::KeyValue(KeyValueProp {
                key: jsx_attribute_name_to_prop_name(jsx_attribute.name),
                value: Box::new(value),
              }))));
            }
          }
        }
      }
    }
    // In the automatic runtime, add children as a prop: a single child is
    // passed as-is, multiple children become an array literal.
    if let Some(mut children) = children {
      let value = if children.is_empty() {
        None
      } else if children.len() == 1 {
        Some(children.pop().unwrap())
      } else {
        let mut elements = vec![];
        children.reverse();
        while let Some(child) = children.pop() {
          elements.push(Some(ExprOrSpread {
            spread: None,
            expr: Box::new(child),
          }));
        }
        let lit = ArrayLit {
          elems: elements,
          span: swc_core::common::DUMMY_SP,
        };
        Some(Expr::Array(lit))
      };
      if let Some(value) = value {
        fields.push(PropOrSpread::Prop(Box::new(Prop::KeyValue(KeyValueProp {
          key: create_prop_name("children"),
          value: Box::new(value),
        }))));
      }
    }
    // Add remaining fields.
    if !fields.is_empty() {
      objects.push(create_object_expression(fields));
    }
    let props = if objects.is_empty() {
      None
    } else if objects.len() == 1 {
      // Exactly one group: pass the object directly, no merge needed.
      Some(objects.pop().unwrap())
    } else {
      let mut args = vec![];
      objects.reverse();
      // Don’t mutate the first object, shallow clone into a new
      // object instead.
      if !matches!(objects.last(), Some(Expr::Object(_))) {
        objects.push(create_object_expression(vec![]));
      }
      while let Some(object) = objects.pop() {
        args.push(ExprOrSpread {
          spread: None,
          expr: Box::new(object),
        });
      }
      let callee = Callee::Expr(Box::new(create_member_expression_from_str("Object.assign")));
      Some(create_call_expression(callee, args))
    };
    Ok((props, key))
  }
/// Turn the parsed parts from fragments or elements into a call.
///
/// With the automatic runtime this produces `_jsx` / `_jsxs` / `_jsxDEV`
/// calls (marking the matching import flag on `self`); with the classic
/// runtime it produces calls to `self.create_element_expression`.
fn jsx_expressions_to_call(
    &mut self,
    span: &swc_core::common::Span,
    name: Expr,
    attributes: Option<Vec<JSXAttrOrSpread>>,
    mut children: Vec<Expr>,
) -> Result<Expr, String> {
    let (callee, parameters) = if self.automatic {
        // More than one child means "static children" (`jsxs`).
        let is_static_children = children.len() > 1;
        // Automatic runtime: children are folded into the props object.
        let (props, key) = self.jsx_attributes_to_expressions(attributes, Some(children))?;
        let mut parameters = vec![
            // Component name.
            //
            // ```javascript
            // Component
            // ```
            ExprOrSpread {
                spread: None,
                expr: Box::new(name),
            },
            // Props (including children) or empty object.
            //
            // ```javascript
            // Object.assign({x: true, y: 'z'}, {children: […]})
            // {x: true, y: 'z'}
            // {}
            // ```
            ExprOrSpread {
                spread: None,
                expr: Box::new(props.unwrap_or_else(|| create_object_expression(vec![]))),
            },
        ];
        // Key or, in development, undefined.
        //
        // ```javascript
        // "xyz"
        // ```
        if let Some(key) = key {
            parameters.push(ExprOrSpread {
                spread: None,
                expr: Box::new(key),
            });
        } else if self.development {
            // `jsxDEV` has a fixed parameter order, so a missing key must
            // still occupy its slot.
            parameters.push(ExprOrSpread {
                spread: None,
                expr: Box::new(create_ident_expression("undefined")),
            });
        }
        if self.development {
            // Static children (or not).
            //
            // ```javascript
            // true
            // ```
            parameters.push(ExprOrSpread {
                spread: None,
                expr: Box::new(create_bool_expression(is_static_children)),
            });
            // Fall back to a placeholder when no file path is known.
            let filename = if let Some(value) = &self.filepath {
                create_str_expression(value)
            } else {
                create_str_expression("<source.js>")
            };
            let prop = PropOrSpread::Prop(Box::new(Prop::KeyValue(KeyValueProp {
                key: PropName::Ident(create_ident("fileName")),
                value: Box::new(filename),
            })));
            let mut meta_fields = vec![prop];
            // Positions are only emitted when the span maps back to the
            // source document.
            if let Some(position) = span_to_position(span, self.location) {
                meta_fields.push(PropOrSpread::Prop(Box::new(Prop::KeyValue(KeyValueProp {
                    key: create_prop_name("lineNumber"),
                    value: Box::new(create_num_expression(position.start.line as f64)),
                }))));
                meta_fields.push(PropOrSpread::Prop(Box::new(Prop::KeyValue(KeyValueProp {
                    key: create_prop_name("columnNumber"),
                    value: Box::new(create_num_expression(position.start.column as f64)),
                }))));
            }
            // File name and positional info.
            //
            // ```javascript
            // {
            //   fileName: "example.jsx",
            //   lineNumber: 1,
            //   columnNumber: 3
            // }
            // ```
            parameters.push(ExprOrSpread {
                spread: None,
                expr: Box::new(create_object_expression(meta_fields)),
            });
            // Context object.
            //
            // ```javascript
            // this
            // ```
            let this_expression = ThisExpr {
                span: swc_core::common::DUMMY_SP,
            };
            parameters.push(ExprOrSpread {
                spread: None,
                expr: Box::new(Expr::This(this_expression)),
            });
        }
        // Record which helper has to be imported from the runtime.
        let callee = if self.development {
            self.import_jsx_dev = true;
            "_jsxDEV"
        } else if is_static_children {
            self.import_jsxs = true;
            "_jsxs"
        } else {
            self.import_jsx = true;
            "_jsx"
        };
        (create_ident_expression(callee), parameters)
    } else {
        // Classic runtime.
        // Children are passed as trailing call arguments, not as a prop,
        // so no key is ever extracted here.
        let (props, key) = self.jsx_attributes_to_expressions(attributes, None)?;
        debug_assert!(key.is_none(), "key should not be extracted");
        let mut parameters = vec![
            // Component name.
            //
            // ```javascript
            // Component
            // ```
            ExprOrSpread {
                spread: None,
                expr: Box::new(name),
            },
        ];
        // Props or, if with children, null.
        //
        // ```javascript
        // {x: true, y: 'z'}
        // ```
        if let Some(props) = props {
            parameters.push(ExprOrSpread {
                spread: None,
                expr: Box::new(props),
            });
        } else if !children.is_empty() {
            // A `null` placeholder keeps children out of the props slot.
            parameters.push(ExprOrSpread {
                spread: None,
                expr: Box::new(create_null_expression()),
            });
        }
        // Each child as a parameter.
        children.reverse();
        while let Some(child) = children.pop() {
            parameters.push(ExprOrSpread {
                spread: None,
                expr: Box::new(child),
            });
        }
        (self.create_element_expression.clone(), parameters)
    };
    let call_expression = CallExpr {
        callee: Callee::Expr(Box::new(callee)),
        args: parameters,
        type_args: None,
        span: *span,
    };
    Ok(Expr::Call(call_expression))
}
/// Turn a JSX element into an expression.
fn jsx_element_to_expression(&mut self, element: JSXElement) -> Result<Expr, String> {
let children = self.jsx_children_to_expressions(element.children)?;
let mut name = jsx_element_name_to_expression(element.opening.name);
// If the name could be an identifier, but start with a lowercase letter,
// it’s not a component.
if let Expr::Ident(ident) = &name {
let head = ident.as_ref().as_bytes();
if matches!(head.first(), Some(b'a'..=b'z')) {
name = create_str_expression(&ident.sym);
}
}
self.jsx_expressions_to_call(&element.span, name, Some(element.opening.attrs), children)
}
/// Turn a JSX fragment into an expression.
fn jsx_fragment_to_expression(&mut self, fragment: JSXFragment) -> Result<Expr, String> {
    let fragment_name;
    if self.automatic {
        // Automatic runtime: `_Fragment` must be imported.
        self.import_fragment = true;
        fragment_name = create_ident_expression("_Fragment");
    } else {
        // Classic runtime: use the configured fragment expression.
        fragment_name = self.fragment_expression.clone();
    }
    let child_expressions = self.jsx_children_to_expressions(fragment.children)?;
    self.jsx_expressions_to_call(&fragment.span, fragment_name, None, child_expressions)
}
}
impl<'a> VisitMut for State<'a> {
    noop_visit_mut_type!();

    /// Visit expressions, rewriting JSX, and walking deeper.
    fn visit_mut_expr(&mut self, expr: &mut Expr) {
        // Swap JSX nodes out (leaving a dummy behind), turn them into calls,
        // and walk into the replacement; everything else gets the default walk.
        let rewritten = match expr {
            Expr::JSXElement(element) => self.jsx_element_to_expression(*element.take()),
            Expr::JSXFragment(fragment) => self.jsx_fragment_to_expression(fragment.take()),
            _ => {
                expr.visit_mut_children_with(self);
                return;
            }
        };
        match rewritten {
            Ok(replacement) => {
                *expr = replacement;
                expr.visit_mut_children_with(self);
            }
            // Stash the error; the caller reports it.
            Err(message) => self.error = Some(message),
        }
    }
}
/// Find directives in comments.
///
/// This looks for block comments (`/* */`) and checks each line that starts
/// with `@jsx`.
/// Then it looks for key/value pairs (each words split by whitespace).
/// Known keys are used for directives.
#[allow(unused)]
fn find_directives(
    comments: &Vec<Comment>,
    location: Option<&Location>,
) -> Result<Directives, String> {
    let mut directives = Directives::default();
    for comment in comments {
        // Only block comments can carry directives.
        if comment.kind != CommentKind::Block {
            continue;
        }
        let lines = comment.text.lines();
        for line in lines {
            let bytes = line.as_bytes();
            let mut index = 0;
            // Skip initial whitespace.
            while index < bytes.len() && matches!(bytes[index], b' ' | b'\t') {
                index += 1;
            }
            // Skip star.
            if index < bytes.len() && bytes[index] == b'*' {
                index += 1;
                // Skip more whitespace.
                while index < bytes.len() && matches!(bytes[index], b' ' | b'\t') {
                    index += 1;
                }
            }
            // Peek if this looks like a JSX directive.
            // NOTE(review): `index + 4 < bytes.len()` requires at least one
            // byte after `@jsx`, so a line that is exactly `@jsx` is skipped
            // — presumably intentional; confirm against mdxjs behavior.
            if !(index + 4 < bytes.len()
                && bytes[index] == b'@'
                && bytes[index + 1] == b'j'
                && bytes[index + 2] == b's'
                && bytes[index + 3] == b'x')
            {
                // Exit if not.
                continue;
            }
            // Scan whitespace-separated `key value` pairs until the line is
            // exhausted (an empty key ends the scan).
            loop {
                let mut key_range = (index, index);
                while index < bytes.len() && !matches!(bytes[index], b' ' | b'\t') {
                    index += 1;
                }
                key_range.1 = index;
                // Skip whitespace.
                while index < bytes.len() && matches!(bytes[index], b' ' | b'\t') {
                    index += 1;
                }
                let mut value_range = (index, index);
                while index < bytes.len() && !matches!(bytes[index], b' ' | b'\t') {
                    index += 1;
                }
                value_range.1 = index;
                let key = String::from_utf8_lossy(&bytes[key_range.0..key_range.1]);
                let value = String::from_utf8_lossy(&bytes[value_range.0..value_range.1]);
                // Handle the key/value.
                match key.as_ref() {
                    "@jsxRuntime" => match value.as_ref() {
                        "automatic" => directives.runtime = Some(JsxRuntime::Automatic),
                        "classic" => directives.runtime = Some(JsxRuntime::Classic),
                        "" => {}
                        value => {
                            // Unknown runtime: report at the position of the
                            // enclosing comment.
                            let start = bytepos_to_point(comment.span.lo, location);
                            return Err(prefix_error_with_point(
                                &format!(
                                    "Runtime must be either `automatic` or `classic`, not {}",
                                    value
                                ),
                                start.as_ref(),
                            ));
                        }
                    },
                    "@jsxImportSource" => {
                        match value.as_ref() {
                            "" => {}
                            value => {
                                // SWC sets runtime too, not sure if that’s great.
                                directives.runtime = Some(JsxRuntime::Automatic);
                                directives.import_source = Some(value.into());
                            }
                        }
                    }
                    "@jsxFrag" => match value.as_ref() {
                        "" => {}
                        value => directives.pragma_frag = Some(value.into()),
                    },
                    "@jsx" => match value.as_ref() {
                        "" => {}
                        value => directives.pragma = Some(value.into()),
                    },
                    "" => {
                        // No directive, stop looking for key/value pairs
                        // on this line.
                        break;
                    }
                    _ => {}
                }
                // Skip more whitespace.
                while index < bytes.len() && matches!(bytes[index], b' ' | b'\t') {
                    index += 1;
                }
            }
        }
    }
    Ok(directives)
}
/// Turn JSX text into a string.
///
/// Tabs become spaces; around every line ending, surrounding spaces are
/// dropped and the remaining pieces are joined with a single space.
/// Leading spaces of the first piece and trailing spaces of the last piece
/// are preserved.  Text without any line ending that is whitespace-only is
/// dropped entirely.
fn jsx_text_to_value(value: &str) -> String {
    // Tabs count as spaces for JSX whitespace handling.
    let value = value.replace('\t', " ");
    // Fast path: no line endings at all.
    if !value.contains(|c| c == '\r' || c == '\n') {
        return if value.bytes().all(|byte| byte == b' ') {
            // Whitespace-only text without a line ending is dropped.
            String::new()
        } else {
            // Otherwise the text is kept verbatim, surrounding spaces included.
            value
        };
    }
    let mut result = String::with_capacity(value.len());
    // Each `\r` and `\n` is its own separator (so `\r\n` yields an empty
    // middle piece, which is skipped below).
    let pieces: Vec<&str> = value.split(|c: char| c == '\r' || c == '\n').collect();
    let last = pieces.len() - 1;
    for (position, piece) in pieces.iter().enumerate() {
        let mut trimmed = *piece;
        // Spaces after a line ending are dropped…
        if position > 0 {
            trimmed = trimmed.trim_start_matches(' ');
        }
        // …as are spaces before one.
        if position < last {
            trimmed = trimmed.trim_end_matches(' ');
        }
        if trimmed.is_empty() {
            continue;
        }
        if !result.is_empty() {
            result.push(' ');
        }
        result.push_str(trimmed);
    }
    result
}
#[cfg(test)]
mod tests {
use super::*;
use crate::hast_util_to_swc::Program;
use crate::swc::{flat_comments, serialize};
use pretty_assertions::assert_eq;
use swc_core::common::{
comments::SingleThreadedComments, source_map::Pos, BytePos, FileName, SourceFile,
};
use swc_core::ecma::ast::{
EsVersion, ExprStmt, JSXClosingElement, JSXElementName, JSXOpeningElement, JSXSpreadChild,
Module, Stmt,
};
use swc_core::ecma::parser::{parse_file_as_module, EsConfig, Syntax};
fn compile(value: &str, options: &Options) -> Result<String, String> {
let location = Location::new(value.as_bytes());
let mut errors = vec![];
let comments = SingleThreadedComments::default();
let result = parse_file_as_module(
&SourceFile::new(
FileName::Anon,
false,
FileName::Anon,
value.into(),
BytePos::from_usize(1),
),
Syntax::Es(EsConfig {
jsx: true,
..EsConfig::default()
}),
EsVersion::Es2022,
Some(&comments),
&mut errors,
);
match result {
Err(error) => Err(error.kind().msg().into()),
Ok(module) => {
let mut program = Program {
path: Some("example.jsx".into()),
module,
comments: flat_comments(comments),
};
swc_util_build_jsx(&mut program, options, Some(&location))?;
Ok(serialize(&mut program.module, Some(&program.comments)))
}
}
}
#[test]
fn small_default() -> Result<(), String> {
assert_eq!(
compile("let a = <b />", &Options::default())?,
"import { jsx as _jsx } from \"react/jsx-runtime\";\nlet a = _jsx(\"b\", {});\n",
"should compile JSX away"
);
Ok(())
}
#[test]
fn directive_runtime_automatic() -> Result<(), String> {
assert_eq!(
compile(
"/* @jsxRuntime automatic */\nlet a = <b />",
&Options::default()
)?,
"import { jsx as _jsx } from \"react/jsx-runtime\";\nlet a = _jsx(\"b\", {});\n",
"should support a `@jsxRuntime automatic` directive"
);
Ok(())
}
#[test]
fn directive_runtime_classic() -> Result<(), String> {
assert_eq!(
compile(
"/* @jsxRuntime classic */\nlet a = <b />",
&Options::default()
)?,
"let a = React.createElement(\"b\");\n",
"should support a `@jsxRuntime classic` directive"
);
Ok(())
}
#[test]
fn directive_runtime_empty() -> Result<(), String> {
assert_eq!(
compile("/* @jsxRuntime */\nlet a = <b />", &Options::default())?,
"import { jsx as _jsx } from \"react/jsx-runtime\";\nlet a = _jsx(\"b\", {});\n",
"should support an empty `@jsxRuntime` directive"
);
Ok(())
}
#[test]
fn directive_runtime_invalid() {
assert_eq!(
compile(
"/* @jsxRuntime unknown */\nlet a = <b />",
&Options::default()
)
.err()
.unwrap(),
"1:1: Runtime must be either `automatic` or `classic`, not unknown",
"should crash on a non-automatic, non-classic `@jsxRuntime` directive"
);
}
#[test]
fn directive_import_source() -> Result<(), String> {
assert_eq!(
compile(
"/* @jsxImportSource aaa */\nlet a = <b />",
&Options::default()
)?,
"import { jsx as _jsx } from \"aaa/jsx-runtime\";\nlet a = _jsx(\"b\", {});\n",
"should support a `@jsxImportSource` directive"
);
Ok(())
}
#[test]
fn directive_jsx() -> Result<(), String> {
assert_eq!(
compile(
"/* @jsxRuntime classic @jsx a */\nlet b = <c />",
&Options::default()
)?,
"let b = a(\"c\");\n",
"should support a `@jsx` directive"
);
Ok(())
}
#[test]
fn directive_jsx_empty() -> Result<(), String> {
assert_eq!(
compile(
"/* @jsxRuntime classic @jsx */\nlet a = <b />",
&Options::default()
)?,
"let a = React.createElement(\"b\");\n",
"should support an empty `@jsx` directive"
);
Ok(())
}
#[test]
fn directive_jsx_non_identifier() -> Result<(), String> {
assert_eq!(
compile(
"/* @jsxRuntime classic @jsx a.b-c.d! */\n<x />",
&Options::default()
)?,
"a[\"b-c\"][\"d!\"](\"x\");\n",
"should support an `@jsx` directive set to an invalid identifier"
);
Ok(())
}
#[test]
fn directive_jsx_frag() -> Result<(), String> {
assert_eq!(
compile(
"/* @jsxRuntime classic @jsxFrag a */\nlet b = <></>",
&Options::default()
)?,
"let b = React.createElement(a);\n",
"should support a `@jsxFrag` directive"
);
Ok(())
}
#[test]
fn directive_jsx_frag_empty() -> Result<(), String> {
assert_eq!(
compile(
"/* @jsxRuntime classic @jsxFrag */\nlet a = <></>",
&Options::default()
)?,
"let a = React.createElement(React.Fragment);\n",
"should support an empty `@jsxFrag` directive"
);
Ok(())
}
#[test]
fn directive_non_first_line() -> Result<(), String> {
assert_eq!(
compile(
"/*\n first line\n @jsxRuntime classic\n */\n<b />",
&Options::default()
)?,
"React.createElement(\"b\");\n",
"should support a directive on a non-first line"
);
Ok(())
}
#[test]
fn directive_asterisked_line() -> Result<(), String> {
assert_eq!(
compile(
"/*\n * first line\n * @jsxRuntime classic\n */\n<b />",
&Options::default()
)?,
"React.createElement(\"b\");\n",
"should support a directive on an asterisk’ed line"
);
Ok(())
}
#[test]
fn jsx_element_self_closing() -> Result<(), String> {
assert_eq!(
compile("<a />", &Options::default())?,
"import { jsx as _jsx } from \"react/jsx-runtime\";\n_jsx(\"a\", {});\n",
"should support a self-closing element"
);
Ok(())
}
#[test]
fn jsx_element_self_closing_classic() -> Result<(), String> {
assert_eq!(
compile("/* @jsxRuntime classic */\n<a />", &Options::default())?,
"React.createElement(\"a\");\n",
"should support a self-closing element (classic)"
);
Ok(())
}
#[test]
fn jsx_element_closed() -> Result<(), String> {
assert_eq!(
compile(
"<a>b</a>",
&Options::default()
)?,
"import { jsx as _jsx } from \"react/jsx-runtime\";\n_jsx(\"a\", {\n children: \"b\"\n});\n",
"should support a closed element"
);
Ok(())
}
#[test]
fn jsx_element_member_name() -> Result<(), String> {
assert_eq!(
compile("<a.b.c />", &Options::default())?,
"import { jsx as _jsx } from \"react/jsx-runtime\";\n_jsx(a.b.c, {});\n",
"should support an element with a member name"
);
Ok(())
}
#[test]
fn jsx_element_member_name_dashes() -> Result<(), String> {
assert_eq!(
compile("<a.b-c />", &Options::default())?,
"import { jsx as _jsx } from \"react/jsx-runtime\";\n_jsx(a[\"b-c\"], {});\n",
"should support an element with a member name and dashes"
);
Ok(())
}
#[test]
fn jsx_element_member_name_many() -> Result<(), String> {
assert_eq!(
compile("<a.b.c.d />", &Options::default())?,
"import { jsx as _jsx } from \"react/jsx-runtime\";\n_jsx(a.b.c.d, {});\n",
"should support an element with a member name of lots of names"
);
Ok(())
}
#[test]
fn jsx_element_namespace_name() -> Result<(), String> {
assert_eq!(
compile("<a:b />", &Options::default())?,
"import { jsx as _jsx } from \"react/jsx-runtime\";\n_jsx(\"a:b\", {});\n",
"should support an element with a namespace name"
);
Ok(())
}
#[test]
fn jsx_element_name_dashes() -> Result<(), String> {
assert_eq!(
compile("<a-b />", &Options::default())?,
"import { jsx as _jsx } from \"react/jsx-runtime\";\n_jsx(\"a-b\", {});\n",
"should support an element with a dash in the name"
);
Ok(())
}
#[test]
fn jsx_element_name_capital() -> Result<(), String> {
assert_eq!(
compile("<Abc />", &Options::default())?,
"import { jsx as _jsx } from \"react/jsx-runtime\";\n_jsx(Abc, {});\n",
"should support an element with a non-lowercase first character in the name"
);
Ok(())
}
#[test]
fn jsx_element_attribute_boolean() -> Result<(), String> {
assert_eq!(
compile("<a b />", &Options::default())?,
"import { jsx as _jsx } from \"react/jsx-runtime\";\n_jsx(\"a\", {\n b: true\n});\n",
"should support an element with a boolean attribute"
);
Ok(())
}
#[test]
fn jsx_element_attribute_boolean_classic() -> Result<(), String> {
assert_eq!(
compile("/* @jsxRuntime classic */\n<a b />", &Options::default())?,
"React.createElement(\"a\", {\n b: true\n});\n",
"should support an element with a boolean attribute (classic"
);
Ok(())
}
#[test]
fn jsx_element_attribute_name_namespace() -> Result<(), String> {
assert_eq!(
compile("<a b:c />", &Options::default())?,
"import { jsx as _jsx } from \"react/jsx-runtime\";\n_jsx(\"a\", {\n \"b:c\": true\n});\n",
"should support an element with colons in an attribute name"
);
Ok(())
}
#[test]
fn jsx_element_attribute_name_non_identifier() -> Result<(), String> {
assert_eq!(
compile("<a b-c />", &Options::default())?,
| rust | MIT | 04633f3cb2d8062e2578ea02213db42c2e4a5952 | 2026-01-04T20:17:17.706328Z | true |
web-infra-dev/mdx-rs | https://github.com/web-infra-dev/mdx-rs/blob/04633f3cb2d8062e2578ea02213db42c2e4a5952/crates/mdx_rs/src/swc.rs | crates/mdx_rs/src/swc.rs | //! Bridge between `markdown-rs` and SWC.
extern crate markdown;
use crate::swc_utils::{
create_span, prefix_error_with_point, DropContext, RewritePrefixContext, RewriteStopsContext,
};
use markdown::{mdast::Stop, Location, MdxExpressionKind, MdxSignal};
use std::rc::Rc;
use swc_core::common::{
comments::{Comment, Comments, SingleThreadedComments, SingleThreadedCommentsMap},
source_map::Pos,
sync::Lrc,
BytePos, FileName, FilePathMapping, SourceFile, SourceMap, Span, Spanned,
};
use swc_core::ecma::ast::{EsVersion, Expr, Module, PropOrSpread};
use swc_core::ecma::codegen::{text_writer::JsWriter, Emitter};
use swc_core::ecma::parser::{
error::Error as SwcError, parse_file_as_expr, parse_file_as_module, EsConfig, Syntax,
};
use swc_core::ecma::visit::VisitMutWith;
/// Lex ESM in MDX with SWC.
///
/// Only checks that the value parses; the tree is discarded.
pub fn parse_esm(value: &str) -> MdxSignal {
    match parse_esm_core(value) {
        Ok(_) => MdxSignal::Ok,
        Err((span, message)) => swc_error_to_signal(span, &message, value.len()),
    }
}
/// Parse ESM in MDX with SWC, rewriting positions onto the source document.
pub fn parse_esm_to_tree(
    value: &str,
    stops: &[Stop],
    location: Option<&Location>,
) -> Result<Module, String> {
    let mut rewrite_context = RewriteStopsContext { stops, location };
    match parse_esm_core(value) {
        Ok(mut module) => {
            // Map SWC's positions back onto stops in the document.
            module.visit_mut_with(&mut rewrite_context);
            Ok(module)
        }
        Err((span, reason)) => Err(swc_error_to_error(span, &reason, &rewrite_context)),
    }
}
/// Core to parse ESM.
fn parse_esm_core(value: &str) -> Result<Module, (Span, String)> {
let (file, syntax, version) = create_config(value.into());
let mut errors = vec![];
let result = parse_file_as_module(&file, syntax, version, None, &mut errors);
match result {
Err(error) => Err((
fix_span(error.span(), 1),
format!(
"Could not parse esm with swc: {}",
swc_error_to_string(&error)
),
)),
Ok(module) => {
if errors.is_empty() {
let mut index = 0;
while index < module.body.len() {
let node = &module.body[index];
if !node.is_module_decl() {
return Err((
fix_span(node.span(), 1),
"Unexpected statement in code: only import/exports are supported".into(),
));
}
index += 1;
}
Ok(module)
} else {
Err((
fix_span(errors[0].span(), 1),
format!(
"Could not parse esm with swc: {}",
swc_error_to_string(&errors[0])
),
))
}
}
}
}
/// Core to parse an expression (or attribute-expression spread) with SWC.
///
/// Returns `Ok(None)` for an empty plain expression, `Ok(Some(expr))`
/// otherwise, or a span + reason on failure.  For attribute expressions the
/// input is wrapped in `({` … `})` so a lone `...spread` parses, and the
/// wrapper is unwrapped again afterwards.
fn parse_expression_core(
    value: &str,
    kind: &MdxExpressionKind,
) -> Result<Option<Box<Expr>>, (Span, String)> {
    // Empty expressions are OK.
    if matches!(kind, MdxExpressionKind::Expression) && whitespace_and_comments(0, value).is_ok() {
        return Ok(None);
    }
    // For attribute expression, a spread is needed, for which we have to prefix
    // and suffix the input.
    // See `check_expression_ast` for how the AST is verified.
    let (prefix, suffix) = if matches!(kind, MdxExpressionKind::AttributeExpression) {
        ("({", "})")
    } else {
        ("", "")
    };
    let (file, syntax, version) = create_config(format!("{}{}{}", prefix, value, suffix));
    let mut errors = vec![];
    let result = parse_file_as_expr(&file, syntax, version, None, &mut errors);
    match result {
        Err(error) => Err((
            // Undo both the injected prefix and the 1-based file start.
            fix_span(error.span(), prefix.len() + 1),
            format!(
                "Could not parse expression with swc: {}",
                swc_error_to_string(&error)
            ),
        )),
        Ok(mut expr) => {
            if errors.is_empty() {
                // The parser stops after one valid expression; make sure
                // only whitespace/comments follow it.
                let expression_end = expr.span().hi.to_usize() - 1;
                if let Err((span, reason)) = whitespace_and_comments(expression_end, value) {
                    return Err((span, reason));
                }
                expr.visit_mut_with(&mut RewritePrefixContext {
                    prefix_len: prefix.len() as u32,
                });
                if matches!(kind, MdxExpressionKind::AttributeExpression) {
                    let expr_span = expr.span();
                    // Unwrap the `({ … })` wrapper added above and demand
                    // exactly one spread property.
                    if let Expr::Paren(d) = *expr {
                        if let Expr::Object(mut obj) = *d.expr {
                            if obj.props.len() > 1 {
                                return Err((obj.span, "Unexpected extra content in spread (such as `{...x,y}`): only a single spread is supported (such as `{...x}`)".into()));
                            }
                            if let Some(PropOrSpread::Spread(d)) = obj.props.pop() {
                                return Ok(Some(d.expr));
                            }
                        }
                    };
                    return Err((
                        expr_span,
                        "Unexpected prop in spread (such as `{x}`): only a spread is supported (such as `{...x}`)".into(),
                    ));
                }
                Ok(Some(expr))
            } else {
                // Recoverable parse errors are still errors for us.
                Err((
                    fix_span(errors[0].span(), prefix.len() + 1),
                    format!(
                        "Could not parse expression with swc: {}",
                        swc_error_to_string(&errors[0])
                    ),
                ))
            }
        }
    }
}
/// Lex expressions in MDX with SWC.
///
/// Only checks that the value parses; the tree is discarded.
#[allow(unused)]
pub fn parse_expression(value: &str, kind: &MdxExpressionKind) -> MdxSignal {
    match parse_expression_core(value, kind) {
        Ok(_) => MdxSignal::Ok,
        Err((span, message)) => swc_error_to_signal(span, &message, value.len()),
    }
}
/// Parse an expression in MDX with SWC, rewriting positions onto the source
/// document.
pub fn parse_expression_to_tree(
    value: &str,
    kind: &MdxExpressionKind,
    stops: &[Stop],
    location: Option<&Location>,
) -> Result<Option<Box<Expr>>, String> {
    let mut rewrite_context = RewriteStopsContext { stops, location };
    match parse_expression_core(value, kind) {
        Err((span, reason)) => Err(swc_error_to_error(span, &reason, &rewrite_context)),
        // Empty expressions stay `None`; otherwise map positions back onto
        // stops in the document.
        Ok(expression) => Ok(expression.map(|mut expr| {
            expr.visit_mut_with(&mut rewrite_context);
            expr
        })),
    }
}
/// Serialize an SWC module to JavaScript source.
///
/// The optional flat comment list is re-attached so the emitter can print
/// the comments alongside the code.
pub fn serialize(module: &mut Module, comments: Option<&Vec<Comment>>) -> String {
    let comment_store = SingleThreadedComments::default();
    for comment in comments.into_iter().flatten() {
        comment_store.add_leading(comment.span.lo, comment.clone());
    }
    module.visit_mut_with(&mut DropContext {});
    let source_map = Lrc::new(SourceMap::new(FilePathMapping::empty()));
    let mut output = vec![];
    {
        let writer = JsWriter::new(source_map.clone(), "\n", &mut output, None);
        let mut emitter = Emitter {
            cfg: swc_core::ecma::codegen::Config {
                ..Default::default()
            },
            cm: source_map,
            comments: Some(&comment_store),
            wr: writer,
        };
        emitter.emit_module(module).unwrap();
    }
    String::from_utf8_lossy(&output).into()
}
// To do: remove this attribute, use it somewhere.
#[allow(dead_code)]
/// Turn SWC comments into a flat vec (leading first, then trailing).
pub fn flat_comments(single_threaded_comments: SingleThreadedComments) -> Vec<Comment> {
    let (leading, trailing) = single_threaded_comments.take_all();
    // Unwrap the `Rc<RefCell<…>>` store and flatten its per-position lists.
    let unwrap_map = |map: SingleThreadedCommentsMap| -> Vec<Comment> {
        Rc::try_unwrap(map)
            .unwrap()
            .into_inner()
            .into_values()
            .flatten()
            .collect()
    };
    let mut all = unwrap_map(leading);
    all.extend(unwrap_map(trailing));
    all
}
/// Turn an SWC error into an `MdxSignal`.
///
/// * If the error happens at `value_len`, yields `MdxSignal::Eof`
/// * Else, yields `MdxSignal::Error`.
fn swc_error_to_signal(span: Span, reason: &str, value_len: usize) -> MdxSignal {
let error_end = span.hi.to_usize();
if error_end >= value_len {
MdxSignal::Eof(reason.into())
} else {
MdxSignal::Error(reason.into(), span.lo.to_usize())
}
}
/// Turn an SWC error into a flat error string, prefixed with the document
/// point when it can be resolved.
fn swc_error_to_error(span: Span, reason: &str, context: &RewriteStopsContext) -> String {
    let offset = span.lo.to_usize();
    let point = context
        .location
        .and_then(|location| location.relative_to_point(context.stops, offset));
    prefix_error_with_point(reason, point.as_ref())
}
/// Turn an SWC error into a string.
fn swc_error_to_string(error: &SwcError) -> String {
    let message = error.kind().msg();
    message.into()
}
/// Move past JavaScript whitespace (well, actually ASCII whitespace) and
/// comments.
///
/// This is needed because for expressions, we use an API that parses up to
/// a valid expression, but there may be more expressions after it, which we
/// don’t allow.
///
/// Returns `Ok(())` when only whitespace and complete comments remain from
/// `index` to the end of `value`; otherwise an error span + reason.
fn whitespace_and_comments(mut index: usize, value: &str) -> Result<(), (Span, String)> {
    let bytes = value.as_bytes();
    let len = bytes.len();
    // Scanner state: inside `/* … */` or `// …` (never both at once).
    let mut in_multiline = false;
    let mut in_line = false;
    while index < len {
        // In a multiline comment: `/* a */`.
        if in_multiline {
            if index + 1 < len && bytes[index] == b'*' && bytes[index + 1] == b'/' {
                index += 1;
                in_multiline = false;
            }
        }
        // In a line comment: `// a`.
        else if in_line {
            if bytes[index] == b'\r' || bytes[index] == b'\n' {
                in_line = false;
            }
        }
        // Not in a comment, opening a multiline comment: `/* a */`.
        else if index + 1 < len && bytes[index] == b'/' && bytes[index + 1] == b'*' {
            index += 1;
            in_multiline = true;
        }
        // Not in a comment, opening a line comment: `// a`.
        else if index + 1 < len && bytes[index] == b'/' && bytes[index + 1] == b'/' {
            index += 1;
            in_line = true;
        }
        // Outside comment, whitespace.
        else if bytes[index].is_ascii_whitespace() {
            // Fine!
        }
        // Outside comment, not whitespace.
        else {
            return Err((
                create_span(index as u32, value.len() as u32),
                "Could not parse expression with swc: Unexpected content after expression".into(),
            ));
        }
        index += 1;
    }
    // A comment left open at EOF is an error in both cases.
    if in_multiline {
        return Err((
            create_span(index as u32, value.len() as u32), "Could not parse expression with swc: Unexpected unclosed multiline comment, expected closing: `*/`".into()));
    }
    if in_line {
        // EOF instead of EOL is specifically not allowed, because that would
        // mean the closing brace is on the commented-out line
        return Err((create_span(index as u32, value.len() as u32), "Could not parse expression with swc: Unexpected unclosed line comment, expected line ending: `\\n`".into()));
    }
    Ok(())
}
}
/// Create configuration for SWC, shared between ESM and expressions.
///
/// This enables modern JavaScript (ES2022) + JSX.
fn create_config(source: String) -> (SourceFile, Syntax, EsVersion) {
(
// File.
SourceFile::new(
FileName::Anon,
false,
FileName::Anon,
source,
BytePos::from_usize(1),
),
// Syntax.
Syntax::Es(EsConfig {
jsx: true,
..EsConfig::default()
}),
// Version.
EsVersion::Es2022,
)
}
/// Shift a span left by `offset` bytes (undoing artificial prefixes and the
/// 1-based file start from `create_config`).
fn fix_span(mut span: Span, offset: usize) -> Span {
    let lo = span.lo.to_usize() - offset;
    let hi = span.hi.to_usize() - offset;
    span.lo = BytePos::from_usize(lo);
    span.hi = BytePos::from_usize(hi);
    span
}
| rust | MIT | 04633f3cb2d8062e2578ea02213db42c2e4a5952 | 2026-01-04T20:17:17.706328Z | false |
web-infra-dev/mdx-rs | https://github.com/web-infra-dev/mdx-rs/blob/04633f3cb2d8062e2578ea02213db42c2e4a5952/crates/mdx_rs/src/lib.rs | crates/mdx_rs/src/lib.rs | //! Public API of `mdxjs-rs`.
//!
//! This module exposes primarily [`compile()`][].
//!
//! * [`compile()`][]
//! — turn MDX into JavaScript
#![deny(clippy::pedantic)]
#![allow(clippy::uninlined_format_args)]
#![allow(clippy::missing_panics_doc)]
#![allow(clippy::must_use_candidate)]
#![allow(clippy::too_many_lines)]
#![allow(clippy::struct_excessive_bools)]
#![allow(clippy::cast_possible_truncation)]
#![allow(clippy::cast_precision_loss)]
extern crate markdown;
mod configuration;
mod hast_util_to_swc;
mod mdast_util_to_hast;
mod mdx_plugin_recma_document;
mod mdx_plugin_recma_jsx_rewrite;
mod swc;
mod swc_util_build_jsx;
mod swc_utils;
use crate::{
hast_util_to_swc::hast_util_to_swc,
mdast_util_to_hast::mdast_util_to_hast,
mdx_plugin_recma_document::{mdx_plugin_recma_document, Options as DocumentOptions},
mdx_plugin_recma_jsx_rewrite::{mdx_plugin_recma_jsx_rewrite, Options as RewriteOptions},
swc::{parse_esm, serialize},
};
#[allow(clippy::single_component_path_imports)]
use hast;
use markdown::{to_mdast, Constructs, Location, ParseOptions};
use mdx_plugin_container::mdx_plugin_container;
use mdx_plugin_external_link::mdx_plugin_external_link;
use mdx_plugin_frontmatter::mdx_plugin_frontmatter;
use mdx_plugin_header_anchor::mdx_plugin_header_anchor;
use mdx_plugin_highlighter::mdx_plugin_highlighter;
use mdx_plugin_html::mdx_plugin_html;
use mdx_plugin_normalize_link::mdx_plugin_normalize_link;
use mdx_plugin_toc::{mdx_plugin_toc, TocItem, TocResult};
pub use crate::configuration::{MdxConstructs, MdxParseOptions, Options};
pub use crate::mdx_plugin_recma_document::JsxRuntime;
/// Everything `compile` produces for one source file.
pub struct CompileResult {
    /// Serialized JavaScript module.
    pub code: String,
    /// Links collected by the link-normalization plugin.
    pub links: Vec<String>,
    /// HTML rendered from the document.
    pub html: String,
    /// Title collected by the TOC plugin.
    pub title: String,
    /// Table-of-contents entries collected by the TOC plugin.
    pub toc: Vec<TocItem>,
    /// Code-fence languages collected by the highlighter plugin.
    pub languages: Vec<String>,
    /// Frontmatter collected from the document (string form).
    pub frontmatter: String,
}
#[allow(clippy::case_sensitive_file_extension_comparisons)]
/// Compile MDX or plain markdown (chosen by the `.mdx` file extension) into
/// JavaScript plus metadata (TOC, title, links, HTML, languages, frontmatter).
///
/// * `value` — source text.
/// * `filepath` — selects the grammar and feeds error messages and link
///   normalization.
/// * `development` — forwarded to the JSX rewrite options.
/// * `root` — forwarded to link normalization.
///
/// Parse/transform failures are printed to stderr and the pipeline falls
/// back to an empty document instead of returning an error.
pub fn compile(value: &str, filepath: &str, development: bool, root: &str) -> CompileResult {
    let is_mdx = filepath.ends_with(".mdx");
    let parse_options = ParseOptions {
        constructs: Constructs {
            frontmatter: true,
            // Enable GFM Grammar
            gfm_autolink_literal: true,
            gfm_label_start_footnote: true,
            gfm_footnote_definition: true,
            gfm_strikethrough: true,
            gfm_table: true,
            gfm_task_list_item: true,
            // If is_mdx is true, use mdx constructs, or use markdown constructs
            ..if is_mdx {
                Constructs::mdx()
            } else {
                Constructs::default()
            }
        },
        gfm_strikethrough_single_tilde: false,
        math_text_single_dollar: false,
        mdx_esm_parse: Some(Box::new(parse_esm)),
        mdx_expression_parse: None,
    };
    let document_options = DocumentOptions {
        pragma: Some("React.createElement".to_string()),
        pragma_frag: Some("React.Fragment".to_string()),
        pragma_import_source: Some("react".to_string()),
        jsx_import_source: Some("react".to_string()),
        jsx_runtime: Some(JsxRuntime::Automatic),
    };
    let rewrite_options = RewriteOptions {
        development,
        provider_import_source: Some("@mdx-js/react".to_string()),
    };
    let location = Location::new(value.as_bytes());
    // Source -> mdast; fall back to an empty tree on parse errors.
    let mut mdast = to_mdast(value, &parse_options).unwrap_or_else(|error| {
        eprintln!("File: {:?}\nError: {:?}", filepath, error);
        // Provide a default value or handle the error here
        to_mdast("", &parse_options).unwrap()
    });
    // mdast-level plugins: TOC/title, frontmatter, highlight languages.
    let TocResult { toc, title } = mdx_plugin_toc(&mut mdast);
    let frontmatter = mdx_plugin_frontmatter(&mut mdast);
    let languages = mdx_plugin_highlighter(&mdast);
    // mdast -> hast, then hast-level plugins.
    let mut hast = mdast_util_to_hast(&mdast);
    mdx_plugin_header_anchor(&mut hast);
    mdx_plugin_container(&mut hast);
    mdx_plugin_external_link(&mut hast);
    let links = mdx_plugin_normalize_link(&mut hast, root, filepath);
    let html = mdx_plugin_html(&hast);
    // hast -> SWC program; fall back to an empty root on failure.
    let mut program = hast_util_to_swc(&hast, Some(filepath.to_string()), Some(&location))
        .unwrap_or_else(|error| {
            eprintln!("File: {:?}\nError: {:?}", filepath, error);
            hast_util_to_swc(
                &hast::Node::Root(hast::Root {
                    children: vec![],
                    position: None,
                }),
                Some(filepath.to_string()),
                Some(&location),
            )
            .unwrap()
        });
    mdx_plugin_recma_document(&mut program, &document_options, Some(&location)).unwrap_or_else(
        |_| {
            eprintln!("Failed to process file: {}", filepath);
        },
    );
    mdx_plugin_recma_jsx_rewrite(&mut program, &rewrite_options, Some(&location));
    // We keep the origin jsx here.
    // swc_util_build_jsx(&mut program, &build_options, Some(&location)).unwrap();
    let code = serialize(&mut program.module, Some(&program.comments));
    CompileResult {
        code,
        links,
        html,
        title,
        toc,
        languages,
        frontmatter,
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_collect_title_in_mdast() {
        // Smoke test: a heading with a custom anchor id must compile
        // without panicking.
        compile("## Container Title {#custom-title}", "", true, "");
    }
}
| rust | MIT | 04633f3cb2d8062e2578ea02213db42c2e4a5952 | 2026-01-04T20:17:17.706328Z | false |
web-infra-dev/mdx-rs | https://github.com/web-infra-dev/mdx-rs/blob/04633f3cb2d8062e2578ea02213db42c2e4a5952/crates/mdx_rs/src/configuration.rs | crates/mdx_rs/src/configuration.rs | //! Configuration.
use crate::mdx_plugin_recma_document::JsxRuntime;
/// Like `Constructs` from `markdown-rs`.
///
/// You can’t use:
///
/// * `autolink`
/// * `code_indented`
/// * `html_flow`
/// * `html_text`
/// * `mdx_esm`
/// * `mdx_expression_flow`
/// * `mdx_expression_text`
/// * `mdx_jsx_flow`
/// * `mdx_jsx_text`
///
// To do: link all docs when `markdown-rs` is stable.
#[derive(Clone, Debug, Eq, PartialEq)]
#[cfg_attr(feature = "serializable", derive(serde::Serialize, serde::Deserialize))]
#[cfg_attr(feature = "serializable", serde(rename_all = "camelCase", default))]
pub struct MdxConstructs {
    // Core CommonMark constructs.
    pub attention: bool,
    pub block_quote: bool,
    pub character_escape: bool,
    pub character_reference: bool,
    pub code_fenced: bool,
    pub code_text: bool,
    pub definition: bool,
    // Frontmatter extension.
    pub frontmatter: bool,
    // GFM extensions.
    pub gfm_autolink_literal: bool,
    pub gfm_footnote_definition: bool,
    pub gfm_label_start_footnote: bool,
    pub gfm_strikethrough: bool,
    pub gfm_table: bool,
    pub gfm_task_list_item: bool,
    // More CommonMark constructs.
    pub hard_break_escape: bool,
    pub hard_break_trailing: bool,
    pub heading_atx: bool,
    pub heading_setext: bool,
    pub label_start_image: bool,
    pub label_start_link: bool,
    pub label_end: bool,
    pub list_item: bool,
    // Math extensions.
    pub math_flow: bool,
    pub math_text: bool,
    pub thematic_break: bool,
}
impl Default for MdxConstructs {
  /// MDX with `CommonMark`.
  ///
  /// `CommonMark` is a relatively strong specification of how markdown
  /// works.
  /// Most markdown parsers try to follow it.
  ///
  /// For more information, see the `CommonMark` specification:
  /// <https://spec.commonmark.org>.
  fn default() -> Self {
    Self {
      // Core `CommonMark` constructs: on.
      attention: true,
      block_quote: true,
      character_escape: true,
      character_reference: true,
      code_fenced: true,
      code_text: true,
      definition: true,
      // Extensions (frontmatter, GFM) are opt-in; see `Self::gfm`.
      frontmatter: false,
      gfm_autolink_literal: false,
      gfm_label_start_footnote: false,
      gfm_footnote_definition: false,
      gfm_strikethrough: false,
      gfm_table: false,
      gfm_task_list_item: false,
      // Remaining core `CommonMark` constructs: on.
      hard_break_escape: true,
      hard_break_trailing: true,
      heading_atx: true,
      heading_setext: true,
      label_start_image: true,
      label_start_link: true,
      label_end: true,
      list_item: true,
      // Math extension: off by default.
      math_flow: false,
      math_text: false,
      thematic_break: true,
    }
  }
}
impl MdxConstructs {
  /// MDX with GFM.
  ///
  /// GFM stands for **GitHub flavored markdown**.
  /// GFM extends `CommonMark` and adds support for autolink literals,
  /// footnotes, strikethrough, tables, and tasklists.
  ///
  /// For more information, see the GFM specification:
  /// <https://github.github.com/gfm/>.
  pub fn gfm() -> Self {
    // Start from the `CommonMark` defaults, then switch every GFM
    // construct on.
    let mut constructs = Self::default();
    constructs.gfm_autolink_literal = true;
    constructs.gfm_footnote_definition = true;
    constructs.gfm_label_start_footnote = true;
    constructs.gfm_strikethrough = true;
    constructs.gfm_table = true;
    constructs.gfm_task_list_item = true;
    constructs
  }
}
// To do: link all docs when `markdown-rs` is stable.
/// Like `ParseOptions` from `markdown-rs`.
///
/// The constructs you can pass are limited.
///
/// Additionally, you can’t use:
///
/// * `mdx_expression_parse`
/// * `mdx_esm_parse`
#[derive(Clone, Debug, Eq, PartialEq)]
#[cfg_attr(feature = "serializable", derive(serde::Serialize, serde::Deserialize))]
#[cfg_attr(feature = "serializable", serde(rename_all = "camelCase", default))]
pub struct MdxParseOptions {
  /// Which markdown constructs to enable while parsing.
  pub constructs: MdxConstructs,
  /// Mirrors the same-named option in `markdown-rs`: whether a single tilde
  /// (`~a~`) is accepted for GFM strikethrough in addition to the double
  /// form (`~~a~~`).
  pub gfm_strikethrough_single_tilde: bool,
  /// Mirrors the same-named option in `markdown-rs`: whether a single
  /// dollar (`$a$`) is accepted for text math in addition to the double
  /// form (`$$a$$`).
  pub math_text_single_dollar: bool,
}
impl Default for MdxParseOptions {
  /// MDX with `CommonMark` defaults.
  fn default() -> Self {
    // `CommonMark` constructs plus the permissive single-character forms
    // for strikethrough tildes and math dollars.
    let constructs = MdxConstructs::default();
    Self {
      constructs,
      gfm_strikethrough_single_tilde: true,
      math_text_single_dollar: true,
    }
  }
}
impl MdxParseOptions {
  /// MDX with GFM.
  ///
  /// GFM stands for GitHub flavored markdown.
  /// GFM extends `CommonMark` and adds support for autolink literals,
  /// footnotes, strikethrough, tables, and tasklists.
  ///
  /// For more information, see the GFM specification:
  /// <https://github.github.com/gfm/>
  pub fn gfm() -> Self {
    // Same as the defaults, except the construct set enables GFM.
    let mut options = Self::default();
    options.constructs = MdxConstructs::gfm();
    options
  }
}
/// Configuration (optional).
///
/// Groups the markdown parsing options with the JSX/compilation options.
#[derive(Clone, Debug)]
#[cfg_attr(feature = "serializable", derive(serde::Serialize, serde::Deserialize))]
#[cfg_attr(feature = "serializable", serde(rename_all = "camelCase", default))]
pub struct Options {
  /// Configuration that describes how to parse from markdown.
  pub parse: MdxParseOptions,

  /// Whether to add extra information to error messages in generated code
  /// (default: `false`).
  ///
  /// When in the automatic JSX runtime, this also enables its development
  /// functionality.
  pub development: bool,

  // To do: some alternative to generate source maps.
  // SourceMapGenerator

  /// Place to import a provider from (default: `None`, example:
  /// `Some("@mdx-js/react".into())`).
  ///
  /// Useful for runtimes that support context (React, Preact).
  /// The provider must export a `useMDXComponents`, which is called to
  /// access an object of components.
  pub provider_import_source: Option<String>,

  /// Whether to keep JSX (default: `false`).
  ///
  /// The default is to compile JSX away so that the resulting file is
  /// immediately runnable.
  pub jsx: bool,

  /// JSX runtime to use (default: `Some(JsxRuntime::Automatic)`).
  ///
  /// The classic runtime compiles to calls such as `h('p')`, the automatic
  /// runtime compiles to
  /// `import _jsx from '$importSource/jsx-runtime'\n_jsx('p')`.
  pub jsx_runtime: Option<JsxRuntime>,

  /// Place to import automatic JSX runtimes from (`Option<String>`, default:
  /// `Some("react".into())`).
  ///
  /// When in the automatic runtime, this is used to define an import for
  /// `_Fragment`, `_jsx`, and `_jsxs`.
  pub jsx_import_source: Option<String>,

  /// Pragma for JSX (default: `Some("React.createElement".into())`).
  ///
  /// When in the classic runtime, this is used as an identifier for function
  /// calls: `<x />` to `React.createElement('x')`.
  ///
  /// You should most probably define `pragma_frag` and `pragma_import_source`
  /// too when changing this.
  pub pragma: Option<String>,

  /// Pragma for JSX fragments (default: `Some("React.Fragment".into())`).
  ///
  /// When in the classic runtime, this is used as an identifier for
  /// fragments: `<>` to `React.createElement(React.Fragment)`.
  ///
  /// You should most probably define `pragma` and `pragma_import_source`
  /// too when changing this.
  pub pragma_frag: Option<String>,

  /// Where to import the identifier of `pragma` from (default:
  /// `Some("react".into())`).
  ///
  /// When in the classic runtime, this is used to import the `pragma`
  /// function.
  /// To illustrate with an example: when `pragma` is `"a.b"` and
  /// `pragma_import_source` is `"c"`, the following will be generated:
  /// `import a from 'c'`.
  pub pragma_import_source: Option<String>,

  // New:
  /// File path to the source file (example:
  /// `Some("path/to/example.mdx".into())`).
  ///
  /// Used when `development: true` to improve error messages.
  pub filepath: Option<String>,
}
impl Default for Options {
  /// Default options to use the automatic JSX runtime with React
  /// and handle MDX according to `CommonMark`.
  fn default() -> Self {
    // `CommonMark` parsing, automatic runtime, JSX compiled away, and no
    // overrides for any of the optional sources or pragmas.
    Self {
      parse: MdxParseOptions::default(),
      jsx_runtime: Some(JsxRuntime::default()),
      development: false,
      jsx: false,
      provider_import_source: None,
      jsx_import_source: None,
      pragma: None,
      pragma_frag: None,
      pragma_import_source: None,
      filepath: None,
    }
  }
}
impl Options {
  /// MDX with GFM.
  ///
  /// GFM stands for GitHub flavored markdown.
  /// GFM extends `CommonMark` and adds support for autolink literals,
  /// footnotes, strikethrough, tables, and tasklists.
  /// On the compilation side, GFM turns on the GFM tag filter.
  /// The tagfilter is useless, but it’s included here for consistency.
  ///
  /// For more information, see the GFM specification:
  /// <https://github.github.com/gfm/>
  pub fn gfm() -> Self {
    // Identical to the defaults, except parsing enables the GFM constructs.
    let mut options = Self::default();
    options.parse = MdxParseOptions::gfm();
    options
  }
}
#[cfg(test)]
mod tests {
  use super::*;

  #[test]
  fn test_constructs() {
    // The plain default is `CommonMark`: core constructs on, extensions off.
    let defaults = MdxConstructs::default();
    assert!(defaults.attention, "should default to `CommonMark` (1)");
    assert!(
      !defaults.gfm_autolink_literal,
      "should default to `CommonMark` (2)"
    );
    assert!(!defaults.frontmatter, "should default to `CommonMark` (3)");

    // The GFM shortcut keeps the core, adds GFM, and leaves frontmatter off.
    let gfm = MdxConstructs::gfm();
    assert!(gfm.attention, "should support `gfm` shortcut (1)");
    assert!(gfm.gfm_autolink_literal, "should support `gfm` shortcut (2)");
    assert!(!gfm.frontmatter, "should support `gfm` shortcut (3)");
  }

  #[test]
  fn test_parse_options() {
    // Defaults carry the `CommonMark` construct set.
    let defaults = MdxParseOptions::default();
    assert!(
      defaults.constructs.attention,
      "should default to `CommonMark` (1)"
    );
    assert!(
      !defaults.constructs.gfm_autolink_literal,
      "should default to `CommonMark` (2)"
    );
    assert!(
      !defaults.constructs.frontmatter,
      "should default to `CommonMark` (3)"
    );

    // The GFM shortcut swaps in the GFM construct set.
    let gfm = MdxParseOptions::gfm();
    assert!(gfm.constructs.attention, "should support `gfm` shortcut (1)");
    assert!(
      gfm.constructs.gfm_autolink_literal,
      "should support `gfm` shortcut (2)"
    );
    assert!(
      !gfm.constructs.frontmatter,
      "should support `gfm` shortcut (3)"
    );
  }

  #[test]
  fn test_options() {
    // Top-level defaults expose the `CommonMark` parse options.
    let defaults = Options::default();
    assert!(
      defaults.parse.constructs.attention,
      "should default to `CommonMark` (1)"
    );
    assert!(
      !defaults.parse.constructs.gfm_autolink_literal,
      "should default to `CommonMark` (2)"
    );
    assert!(
      !defaults.parse.constructs.frontmatter,
      "should default to `CommonMark` (3)"
    );

    // The GFM shortcut threads through to the nested parse options.
    let gfm = Options::gfm();
    assert!(
      gfm.parse.constructs.attention,
      "should support `gfm` shortcut (1)"
    );
    assert!(
      gfm.parse.constructs.gfm_autolink_literal,
      "should support `gfm` shortcut (2)"
    );
    assert!(
      !gfm.parse.constructs.frontmatter,
      "should support `gfm` shortcut (3)"
    );
  }
}
| rust | MIT | 04633f3cb2d8062e2578ea02213db42c2e4a5952 | 2026-01-04T20:17:17.706328Z | false |
web-infra-dev/mdx-rs | https://github.com/web-infra-dev/mdx-rs/blob/04633f3cb2d8062e2578ea02213db42c2e4a5952/crates/mdx_rs/src/mdx_plugin_recma_document.rs | crates/mdx_rs/src/mdx_plugin_recma_document.rs | //! Turn a JavaScript AST, coming from MD(X), into a component.
//!
//! Port of <https://github.com/mdx-js/mdx/blob/main/packages/mdx/lib/plugin/recma-document.js>,
//! by the same author.
use crate::hast_util_to_swc::Program;
use crate::swc_utils::{
bytepos_to_point, create_call_expression, create_ident, create_ident_expression,
create_null_expression, create_object_expression, create_str, position_opt_to_string,
prefix_error_with_point, span_to_position,
};
use markdown::{
unist::{Point, Position},
Location,
};
use swc_core::ecma::ast::{
AssignPat, BindingIdent, BlockStmt, Callee, CondExpr, Decl, DefaultDecl, ExportDefaultExpr,
ExportSpecifier, Expr, ExprOrSpread, FnDecl, Function, ImportDecl, ImportDefaultSpecifier,
ImportNamedSpecifier, ImportSpecifier, JSXAttrOrSpread, JSXClosingElement, JSXElement,
JSXElementChild, JSXElementName, JSXOpeningElement, ModuleDecl, ModuleExportName, ModuleItem,
Param, Pat, ReturnStmt, SpreadElement, Stmt, VarDecl, VarDeclKind, VarDeclarator,
};
/// JSX runtimes (default: `JsxRuntime::Automatic`).
///
/// With the `serializable` feature, variants round-trip through serde in
/// camel case (`"automatic"` / `"classic"`).
#[derive(Debug, Default, Clone, Copy, PartialEq, Eq)]
#[cfg_attr(feature = "serializable", derive(serde::Serialize, serde::Deserialize))]
#[cfg_attr(feature = "serializable", serde(rename_all = "camelCase"))]
pub enum JsxRuntime {
  /// Automatic runtime.
  ///
  /// With the automatic runtime, some module is expected to exist somewhere.
  /// That module is expected to expose a certain API.
  /// The compiler adds an import of that module and compiles JSX away to
  /// function calls that use that API.
  #[default]
  Automatic,
  /// Classic runtime.
  ///
  /// With the classic runtime, you define two values yourself in each file,
  /// which are expected to work a certain way.
  /// The compiler compiles JSX away to function calls using those two values.
  Classic,
}
/// Configuration for `mdx_plugin_recma_document`.
///
/// All fields are optional; `None` falls back to the React conventions
/// documented per field.
#[derive(Debug, PartialEq, Eq)]
pub struct Options {
  /// Pragma for JSX (used in classic runtime).
  ///
  /// Default: `React.createElement`.
  pub pragma: Option<String>,
  /// Pragma for JSX fragments (used in classic runtime).
  ///
  /// Default: `React.Fragment`.
  pub pragma_frag: Option<String>,
  /// Where to import the identifier of `pragma` from (used in classic runtime).
  ///
  /// Default: `react`.
  pub pragma_import_source: Option<String>,
  /// Place to import automatic JSX runtimes from (used in automatic runtime).
  ///
  /// Default: `react`.
  pub jsx_import_source: Option<String>,
  /// JSX runtime to use.
  ///
  /// Default: `automatic`.
  pub jsx_runtime: Option<JsxRuntime>,
}
impl Default for Options {
  /// Use the automatic JSX runtime with React.
  fn default() -> Self {
    // Only the runtime choice carries a non-`None` default; every pragma
    // and import source falls back to its React convention at use sites.
    Self {
      jsx_runtime: Some(JsxRuntime::default()),
      pragma: None,
      pragma_frag: None,
      pragma_import_source: None,
      jsx_import_source: None,
    }
  }
}
/// Wrap the SWC ES AST nodes coming from hast into a whole document.
///
/// Concretely, this:
///
/// * injects a `@jsxRuntime …` configuration comment (per `options`),
/// * hoists the author’s `import`/`export` statements,
/// * turns any default export (declaration, expression, or
///   `export {x as default}`) into an `MDXLayout` binding,
/// * moves the top-level JSX into generated `_createMdxContent` /
///   `MDXContent` components, and
/// * appends `export default MDXContent`.
///
/// Errors (as `String`, prefixed with a source point when `location` is
/// given) when more than one layout is declared or when the default export
/// is a TypeScript interface declaration.
pub fn mdx_plugin_recma_document(
  program: &mut Program,
  options: &Options,
  location: Option<&Location>,
) -> Result<(), String> {
  // New body children.
  let mut replacements = vec![];

  // Inject JSX configuration comment.
  if let Some(runtime) = &options.jsx_runtime {
    let mut pragmas = vec![];
    // Fallback values when the corresponding option is `None`.
    let react = &"react".into();
    let create_element = &"React.createElement".into();
    let fragment = &"React.Fragment".into();

    if *runtime == JsxRuntime::Automatic {
      pragmas.push("@jsxRuntime automatic".into());
      pragmas.push(format!(
        "@jsxImportSource {}",
        if let Some(jsx_import_source) = &options.jsx_import_source {
          jsx_import_source
        } else {
          react
        }
      ));
    } else {
      pragmas.push("@jsxRuntime classic".into());
      pragmas.push(format!(
        "@jsx {}",
        if let Some(pragma) = &options.pragma {
          pragma
        } else {
          create_element
        }
      ));
      pragmas.push(format!(
        "@jsxFrag {}",
        if let Some(pragma_frag) = &options.pragma_frag {
          pragma_frag
        } else {
          fragment
        }
      ));
    }

    if !pragmas.is_empty() {
      // Insert at position 0 so the pragma comment precedes all others.
      program.comments.insert(
        0,
        swc_core::common::comments::Comment {
          kind: swc_core::common::comments::CommentKind::Block,
          text: pragmas.join(" ").into(),
          span: swc_core::common::DUMMY_SP,
        },
      );
    }
  }

  // Inject an import in the classic runtime for the pragma (and presumably,
  // fragment).
  if options.jsx_runtime == Some(JsxRuntime::Classic) {
    let pragma = if let Some(pragma) = &options.pragma {
      pragma
    } else {
      "React"
    };
    // Import only the object root: for pragma `a.b`, import `a`.
    let sym = pragma.split('.').next().expect("first item always exists");

    replacements.push(ModuleItem::ModuleDecl(ModuleDecl::Import(ImportDecl {
      specifiers: vec![ImportSpecifier::Default(ImportDefaultSpecifier {
        local: create_ident(sym),
        span: swc_core::common::DUMMY_SP,
      })],
      src: Box::new(create_str(
        if let Some(source) = &options.pragma_import_source {
          source
        } else {
          "react"
        },
      )),
      type_only: false,
      asserts: None,
      span: swc_core::common::DUMMY_SP,
    })));
  }

  // Find the `export default`, the JSX expression, and leave the rest as it
  // is.
  // Take the whole body, then consume it front-to-back via reverse + pop so
  // branches can use `continue` while iterating owned items.
  let mut input = program.module.body.split_off(0);
  input.reverse();
  // Whether a layout (default export) was seen, and where (for errors).
  let mut layout = false;
  let mut layout_position = None;
  // Whether any JSX content was produced.
  let mut content = false;

  while let Some(module_item) = input.pop() {
    match module_item {
      // ```js
      // export default props => <>{props.children}</>
      // ```
      //
      // Treat it as an inline layout declaration.
      //
      // In estree, the below two are the same node (`ExportDefault`).
      ModuleItem::ModuleDecl(ModuleDecl::ExportDefaultDecl(decl)) => {
        err_for_double_layout(
          layout,
          layout_position.as_ref(),
          bytepos_to_point(decl.span.lo, location).as_ref(),
        )?;
        layout = true;
        layout_position = span_to_position(&decl.span, location);
        match decl.decl {
          DefaultDecl::Class(cls) => {
            replacements.push(create_layout_decl(Expr::Class(cls)));
          }
          DefaultDecl::Fn(func) => {
            replacements.push(create_layout_decl(Expr::Fn(func)));
          }
          DefaultDecl::TsInterfaceDecl(_) => {
            return Err(
              prefix_error_with_point(
                "Cannot use TypeScript interface declarations as default export in MDX files. The default export is reserved for a layout, which must be a component",
                bytepos_to_point(decl.span.lo, location).as_ref()
              )
            );
          }
        }
      }
      ModuleItem::ModuleDecl(ModuleDecl::ExportDefaultExpr(expr)) => {
        err_for_double_layout(
          layout,
          layout_position.as_ref(),
          bytepos_to_point(expr.span.lo, location).as_ref(),
        )?;
        layout = true;
        layout_position = span_to_position(&expr.span, location);
        replacements.push(create_layout_decl(*expr.expr));
      }
      // ```js
      // export {a, b as c} from 'd'
      // export {a, b as c}
      // ```
      ModuleItem::ModuleDecl(ModuleDecl::ExportNamed(mut named_export)) => {
        // Scan the specifiers for a `… as default`; when found, remove it
        // and remember its original identifier.
        let mut index = 0;
        let mut id = None;

        while index < named_export.specifiers.len() {
          let mut take = false;
          // Note: the `ExportSpecifier::Default`
          // branch of this looks interesting, but as far as I
          // understand it *is not* valid ES.
          // `export a from 'b'` is a syntax error, even in SWC.
          if let ExportSpecifier::Named(named) = &named_export.specifiers[index] {
            if let Some(ModuleExportName::Ident(ident)) = &named.exported {
              if &ident.sym == "default" {
                // For some reason the AST supports strings
                // instead of identifiers.
                // Looks like some TC39 proposal. Ignore for now
                // and only do things if this is an ID.
                if let ModuleExportName::Ident(ident) = &named.orig {
                  err_for_double_layout(
                    layout,
                    layout_position.as_ref(),
                    bytepos_to_point(ident.span.lo, location).as_ref(),
                  )?;
                  layout = true;
                  layout_position = span_to_position(&ident.span, location);
                  take = true;
                  id = Some(ident.clone());
                }
              }
            }
          }

          if take {
            named_export.specifiers.remove(index);
          } else {
            index += 1;
          }
        }

        if let Some(id) = id {
          let source = named_export.src.clone();

          // If there was just a default export, we can drop the original node.
          if !named_export.specifiers.is_empty() {
            // Pass through.
            replacements.push(ModuleItem::ModuleDecl(ModuleDecl::ExportNamed(
              named_export,
            )));
          }

          // It’s an `export {x} from 'y'`, so generate an import.
          if let Some(source) = source {
            replacements.push(ModuleItem::ModuleDecl(ModuleDecl::Import(ImportDecl {
              specifiers: vec![ImportSpecifier::Named(ImportNamedSpecifier {
                local: create_ident("MDXLayout"),
                imported: Some(ModuleExportName::Ident(id)),
                span: swc_core::common::DUMMY_SP,
                is_type_only: false,
              })],
              src: source,
              type_only: false,
              asserts: None,
              span: swc_core::common::DUMMY_SP,
            })));
          }
          // It’s an `export {x}`, so generate a variable declaration.
          else {
            replacements.push(create_layout_decl(create_ident_expression(&id.sym)));
          }
        } else {
          // Pass through.
          replacements.push(ModuleItem::ModuleDecl(ModuleDecl::ExportNamed(
            named_export,
          )));
        }
      }
      ModuleItem::ModuleDecl(ModuleDecl::Import(x)) => {
        // Pass through.
        replacements.push(ModuleItem::ModuleDecl(ModuleDecl::Import(x)));
      }
      ModuleItem::ModuleDecl(
        ModuleDecl::ExportDecl(_)
        | ModuleDecl::ExportAll(_)
        | ModuleDecl::TsImportEquals(_)
        | ModuleDecl::TsExportAssignment(_)
        | ModuleDecl::TsNamespaceExport(_),
      ) => {
        // Pass through.
        replacements.push(module_item);
      }
      ModuleItem::Stmt(Stmt::Expr(expr_stmt)) => {
        // A bare JSX expression statement is the document’s content.
        match *expr_stmt.expr {
          Expr::JSXElement(elem) => {
            content = true;
            replacements.append(&mut create_mdx_content(
              Some(Expr::JSXElement(elem)),
              layout,
            ));
          }
          Expr::JSXFragment(mut frag) => {
            // Unwrap if possible.
            if frag.children.len() == 1 {
              let item = frag.children.pop().unwrap();

              if let JSXElementChild::JSXElement(elem) = item {
                content = true;
                replacements.append(&mut create_mdx_content(
                  Some(Expr::JSXElement(elem)),
                  layout,
                ));
                continue;
              }

              // Not a lone element: restore the child and keep the fragment.
              frag.children.push(item);
            }

            content = true;
            replacements.append(&mut create_mdx_content(
              Some(Expr::JSXFragment(frag)),
              layout,
            ));
          }
          _ => {
            // Pass through.
            replacements.push(ModuleItem::Stmt(Stmt::Expr(expr_stmt)));
          }
        }
      }
      ModuleItem::Stmt(stmt) => {
        replacements.push(ModuleItem::Stmt(stmt));
      }
    }
  }

  // Generate an empty component.
  if !content {
    replacements.append(&mut create_mdx_content(None, layout));
  }

  // ```jsx
  // export default MDXContent
  // ```
  replacements.push(ModuleItem::ModuleDecl(ModuleDecl::ExportDefaultExpr(
    ExportDefaultExpr {
      expr: Box::new(create_ident_expression("MDXContent")),
      span: swc_core::common::DUMMY_SP,
    },
  )));

  program.module.body = replacements;

  Ok(())
}
/// Create a content component.
///
/// Returns two function declarations, in order:
///
/// * `_createMdxContent(props)` — returns `expr` (or `null` when `expr` is
///   `None`);
/// * `MDXContent(props = {})` — renders `<MDXLayout {...props}>` around a
///   `<_createMdxContent {...props}/>`.
///
/// When `has_internal_layout` is `false` the document may lack a layout, so
/// `MDXContent` guards with `MDXLayout ? … : _createMdxContent(props)`.
fn create_mdx_content(expr: Option<Expr>, has_internal_layout: bool) -> Vec<ModuleItem> {
  // ```jsx
  // <MDXLayout {...props}>xxx</MDXLayout>
  // ```
  let mut result = Expr::JSXElement(Box::new(JSXElement {
    opening: JSXOpeningElement {
      name: JSXElementName::Ident(create_ident("MDXLayout")),
      attrs: vec![JSXAttrOrSpread::SpreadElement(SpreadElement {
        dot3_token: swc_core::common::DUMMY_SP,
        expr: Box::new(create_ident_expression("props")),
      })],
      self_closing: false,
      type_args: None,
      span: swc_core::common::DUMMY_SP,
    },
    closing: Some(JSXClosingElement {
      name: JSXElementName::Ident(create_ident("MDXLayout")),
      span: swc_core::common::DUMMY_SP,
    }),
    // ```jsx
    // <_createMdxContent {...props} />
    // ```
    children: vec![JSXElementChild::JSXElement(Box::new(JSXElement {
      opening: JSXOpeningElement {
        name: JSXElementName::Ident(create_ident("_createMdxContent")),
        attrs: vec![JSXAttrOrSpread::SpreadElement(SpreadElement {
          dot3_token: swc_core::common::DUMMY_SP,
          expr: Box::new(create_ident_expression("props")),
        })],
        self_closing: true,
        type_args: None,
        span: swc_core::common::DUMMY_SP,
      },
      closing: None,
      children: vec![],
      span: swc_core::common::DUMMY_SP,
    }))],
    span: swc_core::common::DUMMY_SP,
  }));

  if !has_internal_layout {
    // ```jsx
    // MDXLayout ? <MDXLayout>xxx</MDXLayout> : _createMdxContent(props)
    // ```
    result = Expr::Cond(CondExpr {
      test: Box::new(create_ident_expression("MDXLayout")),
      cons: Box::new(result),
      alt: Box::new(create_call_expression(
        Callee::Expr(Box::new(create_ident_expression("_createMdxContent"))),
        vec![ExprOrSpread {
          spread: None,
          expr: Box::new(create_ident_expression("props")),
        }],
      )),
      span: swc_core::common::DUMMY_SP,
    });
  }

  // ```jsx
  // function _createMdxContent(props) {
  //   return xxx
  // }
  // ```
  let create_mdx_content = ModuleItem::Stmt(Stmt::Decl(Decl::Fn(FnDecl {
    ident: create_ident("_createMdxContent"),
    declare: false,
    function: Box::new(Function {
      params: vec![Param {
        pat: Pat::Ident(BindingIdent {
          id: create_ident("props"),
          type_ann: None,
        }),
        decorators: vec![],
        span: swc_core::common::DUMMY_SP,
      }],
      decorators: vec![],
      body: Some(BlockStmt {
        stmts: vec![Stmt::Return(ReturnStmt {
          // `None` content serializes as `return null`.
          arg: Some(Box::new(expr.unwrap_or_else(create_null_expression))),
          span: swc_core::common::DUMMY_SP,
        })],
        span: swc_core::common::DUMMY_SP,
      }),
      is_generator: false,
      is_async: false,
      type_params: None,
      return_type: None,
      span: swc_core::common::DUMMY_SP,
    }),
  })));

  // ```jsx
  // function MDXContent(props = {}) {
  //   return <MDXLayout>xxx</MDXLayout>
  // }
  // ```
  let mdx_content = ModuleItem::Stmt(Stmt::Decl(Decl::Fn(FnDecl {
    ident: create_ident("MDXContent"),
    declare: false,
    function: Box::new(Function {
      params: vec![Param {
        // `props = {}` default parameter.
        pat: Pat::Assign(AssignPat {
          left: Box::new(Pat::Ident(BindingIdent {
            id: create_ident("props"),
            type_ann: None,
          })),
          right: Box::new(create_object_expression(vec![])),
          span: swc_core::common::DUMMY_SP,
        }),
        decorators: vec![],
        span: swc_core::common::DUMMY_SP,
      }],
      decorators: vec![],
      body: Some(BlockStmt {
        stmts: vec![Stmt::Return(ReturnStmt {
          arg: Some(Box::new(result)),
          span: swc_core::common::DUMMY_SP,
        })],
        span: swc_core::common::DUMMY_SP,
      }),
      is_generator: false,
      is_async: false,
      type_params: None,
      return_type: None,
      span: swc_core::common::DUMMY_SP,
    }),
  })));

  vec![create_mdx_content, mdx_content]
}
/// Create a layout, inside the document.
///
/// Produces the statement `const MDXLayout = <expr>;`.
fn create_layout_decl(expr: Expr) -> ModuleItem {
  // ```jsx
  // const MDXLayout = xxx
  // ```
  let declarator = VarDeclarator {
    name: Pat::Ident(BindingIdent {
      id: create_ident("MDXLayout"),
      type_ann: None,
    }),
    init: Some(Box::new(expr)),
    span: swc_core::common::DUMMY_SP,
    definite: false,
  };
  let declaration = VarDecl {
    kind: VarDeclKind::Const,
    declare: false,
    decls: vec![declarator],
    span: swc_core::common::DUMMY_SP,
  };
  ModuleItem::Stmt(Stmt::Decl(Decl::Var(Box::new(declaration))))
}
/// Create an error about multiple layouts.
///
/// Returns `Ok(())` when no layout was seen yet; otherwise an error message
/// pointing at `at` and naming the `previous` layout’s position.
fn err_for_double_layout(
  layout: bool,
  previous: Option<&Position>,
  at: Option<&Point>,
) -> Result<(), String> {
  // First layout: nothing to report.
  if !layout {
    return Ok(());
  }

  let reason = format!(
    "Cannot specify multiple layouts (previous: {})",
    position_opt_to_string(previous)
  );
  Err(prefix_error_with_point(&reason, at))
}
#[cfg(test)]
mod tests {
use super::*;
use crate::hast_util_to_swc::hast_util_to_swc;
use crate::mdast_util_to_hast::mdast_util_to_hast;
use crate::mdx_plugin_recma_document::{mdx_plugin_recma_document, Options as DocumentOptions};
use crate::swc::{parse_esm, parse_expression, serialize};
use crate::swc_utils::create_bool_expression;
use markdown::{to_mdast, ParseOptions};
use pretty_assertions::assert_eq;
use swc_core::ecma::ast::{
EmptyStmt, ExportDefaultDecl, ExprStmt, JSXClosingFragment, JSXFragment, JSXOpeningFragment,
JSXText, Module, TsInterfaceBody, TsInterfaceDecl, WhileStmt,
};
fn compile(value: &str) -> Result<String, String> {
let location = Location::new(value.as_bytes());
let mdast = to_mdast(
value,
&ParseOptions {
mdx_esm_parse: Some(Box::new(parse_esm)),
mdx_expression_parse: Some(Box::new(parse_expression)),
..ParseOptions::mdx()
},
)?;
let hast = mdast_util_to_hast(&mdast);
let mut program = hast_util_to_swc(&hast, None, Some(&location))?;
mdx_plugin_recma_document(&mut program, &DocumentOptions::default(), Some(&location))?;
Ok(serialize(&mut program.module, Some(&program.comments)))
}
#[test]
fn small() -> Result<(), String> {
assert_eq!(
compile("# hi\n\nAlpha *bravo* **charlie**.")?,
"function _createMdxContent(props) {
return <><h1>{\"hi\"}</h1>{\"\\n\"}<p>{\"Alpha \"}<em>{\"bravo\"}</em>{\" \"}<strong>{\"charlie\"}</strong>{\".\"}</p></>;
}
function MDXContent(props = {}) {
return MDXLayout ? <MDXLayout {...props}><_createMdxContent {...props}/></MDXLayout> : _createMdxContent(props);
}
export default MDXContent;
",
"should support a small program",
);
Ok(())
}
#[test]
fn import() -> Result<(), String> {
assert_eq!(
compile("import a from 'b'\n\n# {a}")?,
"import a from 'b';
function _createMdxContent(props) {
return <h1>{a}</h1>;
}
function MDXContent(props = {}) {
return MDXLayout ? <MDXLayout {...props}><_createMdxContent {...props}/></MDXLayout> : _createMdxContent(props);
}
export default MDXContent;
",
"should support an import",
);
Ok(())
}
#[test]
fn export() -> Result<(), String> {
assert_eq!(
compile("export * from 'a'\n\n# b")?,
"export * from 'a';
function _createMdxContent(props) {
return <h1>{\"b\"}</h1>;
}
function MDXContent(props = {}) {
return MDXLayout ? <MDXLayout {...props}><_createMdxContent {...props}/></MDXLayout> : _createMdxContent(props);
}
export default MDXContent;
",
"should support an export all",
);
assert_eq!(
compile("export function a() {}")?,
"export function a() {}
function _createMdxContent(props) {
return <></>;
}
function MDXContent(props = {}) {
return MDXLayout ? <MDXLayout {...props}><_createMdxContent {...props}/></MDXLayout> : _createMdxContent(props);
}
export default MDXContent;
",
"should support an export declaration",
);
assert_eq!(
compile("export class A {}")?,
"export class A {
}
function _createMdxContent(props) {
return <></>;
}
function MDXContent(props = {}) {
return MDXLayout ? <MDXLayout {...props}><_createMdxContent {...props}/></MDXLayout> : _createMdxContent(props);
}
export default MDXContent;
",
"should support an export class",
);
Ok(())
}
#[test]
fn export_default() -> Result<(), String> {
assert_eq!(
compile("export default a")?,
"const MDXLayout = a;
function _createMdxContent(props) {
return <></>;
}
function MDXContent(props = {}) {
return <MDXLayout {...props}><_createMdxContent {...props}/></MDXLayout>;
}
export default MDXContent;
",
"should support an export default expression",
);
assert_eq!(
compile("export default function () {}")?,
"const MDXLayout = function() {};
function _createMdxContent(props) {
return <></>;
}
function MDXContent(props = {}) {
return <MDXLayout {...props}><_createMdxContent {...props}/></MDXLayout>;
}
export default MDXContent;
",
"should support an export default declaration",
);
assert_eq!(
compile("export default class A {}")?,
"const MDXLayout = class A {
};
function _createMdxContent(props) {
return <></>;
}
function MDXContent(props = {}) {
return <MDXLayout {...props}><_createMdxContent {...props}/></MDXLayout>;
}
export default MDXContent;
",
"should support an export default class",
);
Ok(())
}
#[test]
fn named_exports() -> Result<(), String> {
assert_eq!(
compile("export {a, b as default}")?,
"export { a };
const MDXLayout = b;
function _createMdxContent(props) {
return <></>;
}
function MDXContent(props = {}) {
return <MDXLayout {...props}><_createMdxContent {...props}/></MDXLayout>;
}
export default MDXContent;
",
"should support a named export w/o source, w/ a default specifier",
);
assert_eq!(
compile("export {a}")?,
"export { a };
function _createMdxContent(props) {
return <></>;
}
function MDXContent(props = {}) {
return MDXLayout ? <MDXLayout {...props}><_createMdxContent {...props}/></MDXLayout> : _createMdxContent(props);
}
export default MDXContent;
",
"should support a named export w/o source, w/o a default specifier",
);
assert_eq!(
compile("export {}")?,
"export { };
function _createMdxContent(props) {
return <></>;
}
function MDXContent(props = {}) {
return MDXLayout ? <MDXLayout {...props}><_createMdxContent {...props}/></MDXLayout> : _createMdxContent(props);
}
export default MDXContent;
",
"should support a named export w/o source, w/o a specifiers",
);
assert_eq!(
compile("export {a, b as default} from 'c'")?,
"export { a } from 'c';
import { b as MDXLayout } from 'c';
function _createMdxContent(props) {
return <></>;
}
function MDXContent(props = {}) {
return <MDXLayout {...props}><_createMdxContent {...props}/></MDXLayout>;
}
export default MDXContent;
",
"should support a named export w/ source, w/ a default specifier",
);
assert_eq!(
compile("export {a} from 'b'")?,
"export { a } from 'b';
function _createMdxContent(props) {
return <></>;
}
function MDXContent(props = {}) {
return MDXLayout ? <MDXLayout {...props}><_createMdxContent {...props}/></MDXLayout> : _createMdxContent(props);
}
export default MDXContent;
",
"should support a named export w/ source, w/o a default specifier",
);
assert_eq!(
compile("export {} from 'a'")?,
"export { } from 'a';
function _createMdxContent(props) {
return <></>;
}
function MDXContent(props = {}) {
return MDXLayout ? <MDXLayout {...props}><_createMdxContent {...props}/></MDXLayout> : _createMdxContent(props);
}
export default MDXContent;
",
"should support a named export w/ source, w/o a specifiers",
);
Ok(())
}
#[test]
fn multiple_layouts() {
assert_eq!(
compile("export default a = 1\n\nexport default b = 2")
.err()
.unwrap(),
"3:1: Cannot specify multiple layouts (previous: 1:1-1:21)",
"should crash on multiple layouts"
);
}
#[test]
fn ts_default_interface_declaration() {
assert_eq!(
mdx_plugin_recma_document(
&mut Program {
path: None,
comments: vec![],
module: Module {
span: swc_core::common::DUMMY_SP,
shebang: None,
body: vec![ModuleItem::ModuleDecl(
ModuleDecl::ExportDefaultDecl(
ExportDefaultDecl {
span: swc_core::common::DUMMY_SP,
decl: DefaultDecl::TsInterfaceDecl(Box::new(
TsInterfaceDecl {
span: swc_core::common::DUMMY_SP,
id: create_ident("a"),
declare: true,
type_params: None,
extends: vec![],
body: TsInterfaceBody {
span: swc_core::common::DUMMY_SP,
body: vec![]
}
}
))
}
)
)]
}
},
&Options::default(),
None
)
.err()
.unwrap(),
"0:0: Cannot use TypeScript interface declarations as default export in MDX files. The default export is reserved for a layout, which must be a component",
"should crash on a TypeScript default interface declaration"
);
}
#[test]
fn statement_pass_through() -> Result<(), String> {
let mut program = Program {
path: None,
comments: vec![],
module: Module {
span: swc_core::common::DUMMY_SP,
shebang: None,
body: vec![ModuleItem::Stmt(Stmt::While(WhileStmt {
span: swc_core::common::DUMMY_SP,
test: Box::new(create_bool_expression(true)),
body: Box::new(Stmt::Empty(EmptyStmt {
span: swc_core::common::DUMMY_SP,
})),
}))],
},
};
mdx_plugin_recma_document(&mut program, &Options::default(), None)?;
assert_eq!(
serialize(&mut program.module, None),
"while(true);
function _createMdxContent(props) {
return null;
}
function MDXContent(props = {}) {
return MDXLayout ? <MDXLayout {...props}><_createMdxContent {...props}/></MDXLayout> : _createMdxContent(props);
}
export default MDXContent;
",
"should pass statements through"
);
Ok(())
}
#[test]
fn expression_pass_through() -> Result<(), String> {
let mut program = Program {
path: None,
comments: vec![],
module: Module {
span: swc_core::common::DUMMY_SP,
shebang: None,
body: vec![ModuleItem::Stmt(Stmt::Expr(ExprStmt {
span: swc_core::common::DUMMY_SP,
expr: Box::new(create_bool_expression(true)),
}))],
},
};
mdx_plugin_recma_document(&mut program, &Options::default(), None)?;
assert_eq!(
serialize(&mut program.module, None),
"true;
function _createMdxContent(props) {
return null;
}
function MDXContent(props = {}) {
return MDXLayout ? <MDXLayout {...props}><_createMdxContent {...props}/></MDXLayout> : _createMdxContent(props);
}
export default MDXContent;
",
"should pass expressions through"
);
Ok(())
}
#[test]
fn fragment_non_element_single_child() -> Result<(), String> {
  // A fragment (`<>a</>`) whose single child is text, not an element.
  let mut program = Program {
    path: None,
    comments: vec![],
    module: Module {
      span: swc_core::common::DUMMY_SP,
      shebang: None,
      body: vec![ModuleItem::Stmt(Stmt::Expr(ExprStmt {
        span: swc_core::common::DUMMY_SP,
        expr: Box::new(Expr::JSXFragment(JSXFragment {
          span: swc_core::common::DUMMY_SP,
          opening: JSXOpeningFragment {
            span: swc_core::common::DUMMY_SP,
          },
          closing: JSXClosingFragment {
            span: swc_core::common::DUMMY_SP,
          },
          children: vec![JSXElementChild::JSXText(JSXText {
            value: "a".into(),
            span: swc_core::common::DUMMY_SP,
            raw: "a".into(),
          })],
        })),
      }))],
    },
  };
  mdx_plugin_recma_document(&mut program, &Options::default(), None)?;
  // The fragment becomes the return value of `_createMdxContent`.
  assert_eq!(
    serialize(&mut program.module, None),
    "function _createMdxContent(props) {
return <>a</>;
}
function MDXContent(props = {}) {
return MDXLayout ? <MDXLayout {...props}><_createMdxContent {...props}/></MDXLayout> : _createMdxContent(props);
}
export default MDXContent;
",
    "should pass a fragment with a single child that isn’t an element through"
  );
  Ok(())
}
#[test]
fn element() -> Result<(), String> {
  // A document whose only content is a JSX element with a text child.
  let mut program = Program {
    path: None,
    comments: vec![],
    module: Module {
      span: swc_core::common::DUMMY_SP,
      shebang: None,
      body: vec![ModuleItem::Stmt(Stmt::Expr(ExprStmt {
        span: swc_core::common::DUMMY_SP,
        expr: Box::new(Expr::JSXElement(Box::new(JSXElement {
          span: swc_core::common::DUMMY_SP,
          opening: JSXOpeningElement {
            name: JSXElementName::Ident(create_ident("a")),
            attrs: vec![],
            self_closing: false,
            type_args: None,
            span: swc_core::common::DUMMY_SP,
          },
          closing: Some(JSXClosingElement {
            name: JSXElementName::Ident(create_ident("a")),
            span: swc_core::common::DUMMY_SP,
          }),
          children: vec![JSXElementChild::JSXText(JSXText {
            value: "b".into(),
            span: swc_core::common::DUMMY_SP,
            raw: "b".into(),
          })],
        }))),
      }))],
    },
  };
  mdx_plugin_recma_document(&mut program, &Options::default(), None)?;
  // The element becomes the return value of `_createMdxContent`.
  assert_eq!(
    serialize(&mut program.module, None),
    "function _createMdxContent(props) {
return <a>b</a>;
}
function MDXContent(props = {}) {
return MDXLayout ? <MDXLayout {...props}><_createMdxContent {...props}/></MDXLayout> : _createMdxContent(props);
}
export default MDXContent;
",
    "should pass an element through"
  );
  Ok(())
}
}
| rust | MIT | 04633f3cb2d8062e2578ea02213db42c2e4a5952 | 2026-01-04T20:17:17.706328Z | false |
web-infra-dev/mdx-rs | https://github.com/web-infra-dev/mdx-rs/blob/04633f3cb2d8062e2578ea02213db42c2e4a5952/crates/mdx_rs/src/hast_util_to_swc.rs | crates/mdx_rs/src/hast_util_to_swc.rs | //! Turn an HTML AST into a JavaScript AST.
//!
//! Port of <https://github.com/syntax-tree/hast-util-to-estree>, by the same
//! author:
//!
//! (The MIT License)
//!
//! Copyright (c) 2016 Titus Wormer <tituswormer@gmail.com>
//!
//! Permission is hereby granted, free of charge, to any person obtaining
//! a copy of this software and associated documentation files (the
//! 'Software'), to deal in the Software without restriction, including
//! without limitation the rights to use, copy, modify, merge, publish,
//! distribute, sublicense, and/or sell copies of the Software, and to
//! permit persons to whom the Software is furnished to do so, subject to
//! the following conditions:
//!
//! The above copyright notice and this permission notice shall be
//! included in all copies or substantial portions of the Software.
//!
//! THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
//! EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
//! MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
//! IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
//! CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
//! TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
//! SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
use crate::hast;
use crate::swc::{parse_esm_to_tree, parse_expression_to_tree};
use crate::swc_utils::{
create_jsx_attr_name_from_str, create_jsx_name_from_str, inter_element_whitespace,
position_to_span,
};
use core::str;
use markdown::{Location, MdxExpressionKind};
use swc_core::ecma::ast::{
Expr, ExprStmt, JSXAttr, JSXAttrOrSpread, JSXAttrValue, JSXClosingElement, JSXClosingFragment,
JSXElement, JSXElementChild, JSXEmptyExpr, JSXExpr, JSXExprContainer, JSXFragment,
JSXOpeningElement, JSXOpeningFragment, Lit, Module, ModuleItem, SpreadElement, Stmt, Str,
};
pub const MAGIC_EXPLICIT_MARKER: u32 = 1337;
/// Result of compiling hast into an SWC program: the JS AST plus the
/// comments, which cannot live inside the SWC tree itself.
#[derive(Debug, PartialEq, Eq)]
pub struct Program {
  /// File path.
  pub path: Option<String>,
  /// JS AST.
  pub module: Module,
  /// Comments relating to AST.
  pub comments: Vec<swc_core::common::comments::Comment>,
}
/// Whether we’re in HTML or SVG.
///
/// Entering an `<svg>` element switches to the SVG space; the previous
/// space is restored after its children (see `transform_element`).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum Space {
  /// The HTML space.
  Html,
  /// The SVG space.
  Svg,
}
/// Context used to compile hast into SWC’s ES AST.
#[derive(Debug)]
struct Context<'a> {
  /// Whether we’re in HTML or SVG.
  ///
  /// Not used yet, likely useful in the future.
  space: Space,
  /// Comments we gather while walking the tree.
  comments: Vec<swc_core::common::comments::Comment>,
  /// Declarations and stuff: top-level ESM items hoisted from the tree.
  esm: Vec<ModuleItem>,
  /// Optional way to turn relative positions into points.
  location: Option<&'a Location>,
}
/// Compile hast into SWC’s ES AST.
///
/// Returns a [`Program`]: the module (ESM items gathered from the tree,
/// then the JSX expression for the content, if any) plus any comments.
pub fn hast_util_to_swc(
  tree: &hast::Node,
  path: Option<String>,
  location: Option<&Location>,
) -> Result<Program, String> {
  let mut context = Context {
    space: Space::Html,
    comments: vec![],
    esm: vec![],
    location,
  };
  // Turn the tree into a single JSX expression: elements and fragments
  // pass through as-is, anything else is wrapped in a fragment.
  let expr = one(&mut context, tree)?.map(|child| match child {
    JSXElementChild::JSXFragment(fragment) => Expr::JSXFragment(fragment),
    JSXElementChild::JSXElement(element) => Expr::JSXElement(element),
    other => Expr::JSXFragment(create_fragment(vec![other], tree)),
  });
  // Start the module with the gathered ESM.
  let mut module = Module {
    shebang: None,
    body: context.esm,
    span: position_to_span(tree.position()),
  };
  // We have some content, wrap it in an expression statement.
  if let Some(expr) = expr {
    module.body.push(ModuleItem::Stmt(Stmt::Expr(ExprStmt {
      expr: Box::new(expr),
      span: swc_core::common::DUMMY_SP,
    })));
  }
  Ok(Program {
    path,
    module,
    comments: context.comments,
  })
}
/// Transform one hast node into at most one JSX child.
fn one(context: &mut Context, node: &hast::Node) -> Result<Option<JSXElementChild>, String> {
  match node {
    hast::Node::Comment(comment) => Ok(Some(transform_comment(context, node, comment))),
    hast::Node::Element(element) => transform_element(context, node, element),
    hast::Node::MdxJsxElement(element) => transform_mdx_jsx_element(context, node, element),
    hast::Node::MdxExpression(expression) => transform_mdx_expression(context, node, expression),
    hast::Node::MdxjsEsm(esm) => transform_mdxjs_esm(context, node, esm),
    hast::Node::Root(root) => transform_root(context, node, root),
    hast::Node::Text(text) => Ok(transform_text(context, node, text)),
    // Ignore: doctypes produce nothing.
    hast::Node::Doctype(_) => Ok(None),
  }
}
/// Transform the children of `parent`, dropping children that produce
/// nothing (doctypes, empty text).
fn all(context: &mut Context, parent: &hast::Node) -> Result<Vec<JSXElementChild>, String> {
  let mut result = vec![];
  if let Some(children) = parent.children() {
    // To do: remove line endings between table elements?
    // <https://github.com/syntax-tree/hast-util-to-estree/blob/6c45f166d106ea3a165c14ec50c35ed190055e65/lib/index.js>
    for child in children {
      if let Some(child) = one(context, child)? {
        result.push(child);
      }
    }
  }
  Ok(result)
}
/// [`Comment`][hast::Comment].
///
/// Records the comment on the side (in `context.comments`) and leaves an
/// empty expression container in the tree.
fn transform_comment(
  context: &mut Context,
  node: &hast::Node,
  comment: &hast::Comment,
) -> JSXElementChild {
  let span = position_to_span(node.position());
  context.comments.push(swc_core::common::comments::Comment {
    kind: swc_core::common::comments::CommentKind::Block,
    text: comment.value.clone().into(),
    span,
  });
  // Might be useless.
  // Might be useful when transforming to acorn/babel later.
  // This is done in the JS version too:
  // <https://github.com/syntax-tree/hast-util-to-estree/blob/6c45f166d106ea3a165c14ec50c35ed190055e65/lib/index.js#L168>
  JSXElementChild::JSXExprContainer(JSXExprContainer {
    expr: JSXExpr::JSXEmptyExpr(JSXEmptyExpr { span }),
    span,
  })
}
/// [`Element`][hast::Element].
///
/// Turns a regular element into a JSX element, mapping hast properties to
/// JSX attributes.
fn transform_element(
  context: &mut Context,
  node: &hast::Node,
  element: &hast::Element,
) -> Result<Option<JSXElementChild>, String> {
  // Entering `<svg>` switches the space; restore it after the children.
  let previous_space = context.space;
  if previous_space == Space::Html && element.tag_name == "svg" {
    context.space = Space::Svg;
  }
  let children = all(context, node)?;
  context.space = previous_space;
  let mut attrs = vec![];
  // To do: turn style props into objects.
  for (prop_name, prop_value) in &element.properties {
    let value = match prop_value {
      // No value is same as `{true}`.
      hast::PropertyValue::Boolean(true) => None,
      // Ignore `false`: drop the attribute entirely.
      hast::PropertyValue::Boolean(false) => continue,
      hast::PropertyValue::String(x) => Some(Lit::Str(Str {
        value: x.clone().into(),
        span: swc_core::common::DUMMY_SP,
        raw: None,
      })),
      hast::PropertyValue::CommaSeparated(x) => Some(Lit::Str(Str {
        value: x.join(", ").into(),
        span: swc_core::common::DUMMY_SP,
        raw: None,
      })),
      hast::PropertyValue::SpaceSeparated(x) => Some(Lit::Str(Str {
        value: x.join(" ").into(),
        span: swc_core::common::DUMMY_SP,
        raw: None,
      })),
    };
    // Turn property case into either React-specific case, or HTML
    // attribute case.
    // To do: create a spread if this is an invalid attr name.
    let attr_name = prop_to_attr_name(prop_name);
    attrs.push(JSXAttrOrSpread::JSXAttr(JSXAttr {
      name: create_jsx_attr_name_from_str(&attr_name),
      value: value.map(JSXAttrValue::Lit),
      span: swc_core::common::DUMMY_SP,
    }));
  }
  Ok(Some(JSXElementChild::JSXElement(create_element(
    &element.tag_name,
    attrs,
    children,
    node,
    false,
  ))))
}
/// [`MdxJsxElement`][hast::MdxJsxElement].
///
/// Turns an MDX JSX element (`<a b="c">…</a>` or `<>…</>`) into a JSX
/// element or fragment, reparsing attribute expressions along the way.
fn transform_mdx_jsx_element(
  context: &mut Context,
  node: &hast::Node,
  element: &hast::MdxJsxElement,
) -> Result<Option<JSXElementChild>, String> {
  // Entering `<svg>` switches the space; restore it after the children.
  let space = context.space;
  if let Some(name) = &element.name {
    if space == Space::Html && name == "svg" {
      context.space = Space::Svg;
    }
  }
  let children = all(context, node)?;
  context.space = space;
  let mut attrs = vec![];
  let mut index = 0;
  while index < element.attributes.len() {
    let attr = match &element.attributes[index] {
      // Named attribute: `b`, `b="c"`, or `b={c}`.
      hast::AttributeContent::Property(prop) => {
        let value = match prop.value.as_ref() {
          Some(hast::AttributeValue::Literal(x)) => Some(JSXAttrValue::Lit(Lit::Str(Str {
            value: x.clone().into(),
            span: swc_core::common::DUMMY_SP,
            raw: None,
          }))),
          // Value expression (`b={c}`): reparse its source into an AST.
          Some(hast::AttributeValue::Expression(expression)) => {
            Some(JSXAttrValue::JSXExprContainer(JSXExprContainer {
              expr: JSXExpr::Expr(
                parse_expression_to_tree(
                  &expression.value,
                  &MdxExpressionKind::AttributeValueExpression,
                  &expression.stops,
                  context.location,
                )?
                .unwrap(),
              ),
              span: swc_core::common::DUMMY_SP,
            }))
          }
          // Attribute without a value (`b`).
          None => None,
        };
        JSXAttrOrSpread::JSXAttr(JSXAttr {
          span: swc_core::common::DUMMY_SP,
          name: create_jsx_attr_name_from_str(&prop.name),
          value,
        })
      }
      // Spread attribute: `{...x}`.
      hast::AttributeContent::Expression { value, stops } => {
        let expr = parse_expression_to_tree(
          value,
          &MdxExpressionKind::AttributeExpression,
          stops,
          context.location,
        )?;
        JSXAttrOrSpread::SpreadElement(SpreadElement {
          dot3_token: swc_core::common::DUMMY_SP,
          expr: expr.unwrap(),
        })
      }
    };
    attrs.push(attr);
    index += 1;
  }
  // Named elements become JSX elements; nameless ones become fragments.
  Ok(Some(if let Some(name) = &element.name {
    JSXElementChild::JSXElement(create_element(name, attrs, children, node, true))
  } else {
    JSXElementChild::JSXFragment(create_fragment(children, node))
  }))
}
/// [`MdxExpression`][hast::MdxExpression].
///
/// Reparses the expression source and wraps it in an expression container;
/// when nothing was parsed the container is left empty.
fn transform_mdx_expression(
  context: &mut Context,
  node: &hast::Node,
  expression: &hast::MdxExpression,
) -> Result<Option<JSXElementChild>, String> {
  let expr = parse_expression_to_tree(
    &expression.value,
    &MdxExpressionKind::Expression,
    &expression.stops,
    context.location,
  )?;
  let span = position_to_span(node.position());
  let child = expr.map_or(JSXExpr::JSXEmptyExpr(JSXEmptyExpr { span }), JSXExpr::Expr);
  Ok(Some(JSXElementChild::JSXExprContainer(JSXExprContainer {
    expr: child,
    span,
  })))
}
/// [`MdxjsEsm`][hast::MdxjsEsm].
///
/// Parses the embedded ESM source and hoists its items into the module
/// scope; contributes no JSX child itself.
fn transform_mdxjs_esm(
  context: &mut Context,
  _node: &hast::Node,
  esm: &hast::MdxjsEsm,
) -> Result<Option<JSXElementChild>, String> {
  let module = parse_esm_to_tree(&esm.value, &esm.stops, context.location)?;
  context.esm.extend(module.body);
  Ok(None)
}
/// [`Root`][hast::Root].
///
/// Transforms the children and wraps them in a fragment, dropping
/// inter-element whitespace at the start and end of the document.
fn transform_root(
  context: &mut Context,
  node: &hast::Node,
  _root: &hast::Root,
) -> Result<Option<JSXElementChild>, String> {
  let mut children = all(context, node)?;
  // Whitespace children collected since the last substantial child.
  let mut queue = vec![];
  // Children to keep.
  let mut nodes = vec![];
  // Whether a non-whitespace child was seen yet.
  let mut seen = false;
  children.reverse();
  // Remove initial/final whitespace.
  while let Some(child) = children.pop() {
    let mut stash = false;
    // A string literal in an expression container that consists only of
    // inter-element whitespace.
    if let JSXElementChild::JSXExprContainer(container) = &child {
      if let JSXExpr::Expr(expr) = &container.expr {
        if let Expr::Lit(Lit::Str(str)) = (*expr).as_ref() {
          if inter_element_whitespace(str.value.as_ref()) {
            stash = true;
          }
        }
      }
    }
    if stash {
      // Queue whitespace only after something substantial was seen, so
      // leading whitespace is dropped; trailing whitespace stays queued
      // and is never flushed.
      if seen {
        queue.push(child);
      }
    } else {
      // Flush queued (internal) whitespace, then keep this child.
      if !queue.is_empty() {
        nodes.append(&mut queue);
      }
      nodes.push(child);
      seen = true;
    }
  }
  Ok(Some(JSXElementChild::JSXFragment(create_fragment(
    nodes, node,
  ))))
}
/// [`Text`][hast::Text].
///
/// Turns non-empty text into a string literal in an expression container;
/// empty text produces nothing.
fn transform_text(
  _context: &mut Context,
  node: &hast::Node,
  text: &hast::Text,
) -> Option<JSXElementChild> {
  if text.value.is_empty() {
    return None;
  }
  let span = position_to_span(node.position());
  Some(JSXElementChild::JSXExprContainer(JSXExprContainer {
    expr: JSXExpr::Expr(Box::new(Expr::Lit(Lit::Str(Str {
      value: text.value.clone().into(),
      span,
      raw: None,
    })))),
    span,
  }))
}
/// Create an element.
///
/// Creates a void (self-closing) one if there are no children.
#[allow(clippy::unnecessary_box_returns)]
fn create_element(
  name: &str,
  attrs: Vec<JSXAttrOrSpread>,
  children: Vec<JSXElementChild>,
  node: &hast::Node,
  explicit: bool,
) -> Box<JSXElement> {
  let self_closing = children.is_empty();
  // Mark explicitly written elements with the magic `SyntaxContext`.
  let mut span = position_to_span(node.position());
  span.ctxt = if explicit {
    swc_core::common::SyntaxContext::from_u32(MAGIC_EXPLICIT_MARKER)
  } else {
    swc_core::common::SyntaxContext::empty()
  };
  let closing = if self_closing {
    None
  } else {
    Some(JSXClosingElement {
      name: create_jsx_name_from_str(name),
      span: swc_core::common::DUMMY_SP,
    })
  };
  Box::new(JSXElement {
    opening: JSXOpeningElement {
      name: create_jsx_name_from_str(name),
      attrs,
      self_closing,
      type_args: None,
      span: swc_core::common::DUMMY_SP,
    },
    closing,
    children,
    span,
  })
}
/// Create a fragment (`<>…</>`) wrapping `children`, positioned at `node`.
fn create_fragment(children: Vec<JSXElementChild>, node: &hast::Node) -> JSXFragment {
  let span = position_to_span(node.position());
  JSXFragment {
    opening: JSXOpeningFragment {
      span: swc_core::common::DUMMY_SP,
    },
    closing: JSXClosingFragment {
      span: swc_core::common::DUMMY_SP,
    },
    children,
    span,
  }
}
/// Turn a hast property into something that particularly React understands.
///
/// `data*` props are turned back into kebab-cased `data-*` attributes;
/// other props are looked up in the React and shared ARIA exception tables
/// and passed through unchanged when unknown.
fn prop_to_attr_name(prop: &str) -> String {
  // Arbitrary data props, kebab case them.
  if prop.len() > 4 && prop.starts_with("data") {
    // Assume like two dashes maybe?
    let mut result = String::with_capacity(prop.len() + 2);
    let bytes = prop.as_bytes();
    // Scan starts right after the `data` prefix.
    let mut index = 4;
    // Start of the run of bytes not yet copied into `result`.
    let mut start = index;
    let mut valid = true;
    result.push_str("data");
    while index < bytes.len() {
      let byte = bytes[index];
      // The first byte after `data` always opens a new kebab segment.
      let mut dash = index == 4;
      match byte {
        // An uppercase letter opens a new kebab segment.
        b'A'..=b'Z' => dash = true,
        b'-' | b'.' | b':' | b'0'..=b'9' | b'a'..=b'z' => {}
        _ => {
          // Anything else: not a valid data attribute; fall through to
          // the lookup below (which passes it through unchanged).
          valid = false;
          break;
        }
      }
      if dash {
        result.push_str(&prop[start..index]);
        // Avoid doubling the dash when the byte itself already is one.
        if byte != b'-' {
          result.push('-');
        }
        result.push(byte.to_ascii_lowercase().into());
        start = index + 1;
      }
      index += 1;
    }
    if valid {
      result.push_str(&prop[start..]);
      return result;
    }
  }
  // Look up if prop differs from attribute case.
  // Unknown things are passed through.
  PROP_TO_REACT_PROP
    .iter()
    .find(|d| d.0 == prop)
    .or_else(|| PROP_TO_ATTR_EXCEPTIONS_SHARED.iter().find(|d| d.0 == prop))
    .map_or_else(|| prop.into(), |d| d.1.into())
}
// Below data is generated with:
//
// Note: there are currently no HTML and SVG specific exceptions.
// If those would start appearing, the logic that uses these lists needs
// To support spaces.
//
// ```js
// import * as x from "property-information";
//
// /** @type {Record<string, string>} */
// let shared = {};
// /** @type {Record<string, string>} */
// let html = {};
// /** @type {Record<string, string>} */
// let svg = {};
//
// Object.keys(x.html.property).forEach((prop) => {
// let attr = x.html.property[prop].attribute;
// if (!x.html.property[prop].space && prop !== attr) {
// html[prop] = attr;
// }
// });
//
// Object.keys(x.svg.property).forEach((prop) => {
// let attr = x.svg.property[prop].attribute;
// if (!x.svg.property[prop].space && prop !== attr) {
// // Shared.
// if (prop in html && html[prop] === attr) {
// shared[prop] = attr;
// delete html[prop];
// } else {
// svg[prop] = attr;
// }
// }
// });
//
// /** @type {Array<[string, Array<[string, string]>]>} */
// const all = [
// ["PROP_TO_REACT_PROP", Object.entries(x.hastToReact)],
// ["PROP_TO_ATTR_EXCEPTIONS", Object.entries(shared)],
// ["PROP_TO_ATTR_EXCEPTIONS_HTML", Object.entries(html)],
// ["PROP_TO_ATTR_EXCEPTIONS_SVG", Object.entries(svg)],
// ];
//
// console.log(
// all
// .map((d) => {
// return `const ${d[0]}: [(&str, &str); ${d[1].length}] = [
// ${d[1].map((d) => ` ("${d[0]}", "${d[1]}")`).join(",\n")}
// ];`;
// })
// .join("\n\n")
// );
// ```
/// hast property names to React property names, if they differ.
///
/// Generated data (see the script in the comment above); regenerate rather
/// than editing by hand.
const PROP_TO_REACT_PROP: [(&str, &str); 17] = [
  ("classId", "classID"),
  ("dataType", "datatype"),
  ("itemId", "itemID"),
  ("strokeDashArray", "strokeDasharray"),
  ("strokeDashOffset", "strokeDashoffset"),
  ("strokeLineCap", "strokeLinecap"),
  ("strokeLineJoin", "strokeLinejoin"),
  ("strokeMiterLimit", "strokeMiterlimit"),
  ("typeOf", "typeof"),
  ("xLinkActuate", "xlinkActuate"),
  ("xLinkArcRole", "xlinkArcrole"),
  ("xLinkHref", "xlinkHref"),
  ("xLinkRole", "xlinkRole"),
  ("xLinkShow", "xlinkShow"),
  ("xLinkTitle", "xlinkTitle"),
  ("xLinkType", "xlinkType"),
  ("xmlnsXLink", "xmlnsXlink"),
];
/// hast property names to HTML attribute names, if they differ.
///
/// Generated data (see the script in the comment above); these are the
/// exceptions shared between the HTML and SVG spaces (all ARIA).
const PROP_TO_ATTR_EXCEPTIONS_SHARED: [(&str, &str); 48] = [
  ("ariaActiveDescendant", "aria-activedescendant"),
  ("ariaAtomic", "aria-atomic"),
  ("ariaAutoComplete", "aria-autocomplete"),
  ("ariaBusy", "aria-busy"),
  ("ariaChecked", "aria-checked"),
  ("ariaColCount", "aria-colcount"),
  ("ariaColIndex", "aria-colindex"),
  ("ariaColSpan", "aria-colspan"),
  ("ariaControls", "aria-controls"),
  ("ariaCurrent", "aria-current"),
  ("ariaDescribedBy", "aria-describedby"),
  ("ariaDetails", "aria-details"),
  ("ariaDisabled", "aria-disabled"),
  ("ariaDropEffect", "aria-dropeffect"),
  ("ariaErrorMessage", "aria-errormessage"),
  ("ariaExpanded", "aria-expanded"),
  ("ariaFlowTo", "aria-flowto"),
  ("ariaGrabbed", "aria-grabbed"),
  ("ariaHasPopup", "aria-haspopup"),
  ("ariaHidden", "aria-hidden"),
  ("ariaInvalid", "aria-invalid"),
  ("ariaKeyShortcuts", "aria-keyshortcuts"),
  ("ariaLabel", "aria-label"),
  ("ariaLabelledBy", "aria-labelledby"),
  ("ariaLevel", "aria-level"),
  ("ariaLive", "aria-live"),
  ("ariaModal", "aria-modal"),
  ("ariaMultiLine", "aria-multiline"),
  ("ariaMultiSelectable", "aria-multiselectable"),
  ("ariaOrientation", "aria-orientation"),
  ("ariaOwns", "aria-owns"),
  ("ariaPlaceholder", "aria-placeholder"),
  ("ariaPosInSet", "aria-posinset"),
  ("ariaPressed", "aria-pressed"),
  ("ariaReadOnly", "aria-readonly"),
  ("ariaRelevant", "aria-relevant"),
  ("ariaRequired", "aria-required"),
  ("ariaRoleDescription", "aria-roledescription"),
  ("ariaRowCount", "aria-rowcount"),
  ("ariaRowIndex", "aria-rowindex"),
  ("ariaRowSpan", "aria-rowspan"),
  ("ariaSelected", "aria-selected"),
  ("ariaSetSize", "aria-setsize"),
  ("ariaSort", "aria-sort"),
  ("ariaValueMax", "aria-valuemax"),
  ("ariaValueMin", "aria-valuemin"),
  ("ariaValueNow", "aria-valuenow"),
  ("ariaValueText", "aria-valuetext"),
];
#[cfg(test)]
mod tests {
use super::*;
use crate::hast;
use crate::hast_util_to_swc::{hast_util_to_swc, Program};
use crate::markdown::mdast;
use crate::swc::serialize;
use pretty_assertions::assert_eq;
use swc_core::ecma::ast::{
Ident, ImportDecl, ImportDefaultSpecifier, ImportSpecifier, JSXAttrName, JSXElementName,
ModuleDecl,
};
#[test]
fn comments() -> Result<(), String> {
  let mut comment_ast = hast_util_to_swc(
    &hast::Node::Comment(hast::Comment {
      value: "a".into(),
      position: None,
    }),
    None,
    None,
  )?;
  // A comment is recorded on the side and leaves an empty expression
  // container in the tree.
  assert_eq!(
    comment_ast,
    Program {
      path: None,
      module: Module {
        shebang: None,
        body: vec![ModuleItem::Stmt(Stmt::Expr(ExprStmt {
          expr: Box::new(Expr::JSXFragment(JSXFragment {
            opening: JSXOpeningFragment {
              span: swc_core::common::DUMMY_SP,
            },
            closing: JSXClosingFragment {
              span: swc_core::common::DUMMY_SP,
            },
            children: vec![JSXElementChild::JSXExprContainer(JSXExprContainer {
              expr: JSXExpr::JSXEmptyExpr(JSXEmptyExpr {
                span: swc_core::common::DUMMY_SP,
              }),
              span: swc_core::common::DUMMY_SP,
            },)],
            span: swc_core::common::DUMMY_SP,
          })),
          span: swc_core::common::DUMMY_SP,
        },))],
        span: swc_core::common::DUMMY_SP,
      },
      comments: vec![swc_core::common::comments::Comment {
        kind: swc_core::common::comments::CommentKind::Block,
        text: "a".into(),
        span: swc_core::common::DUMMY_SP,
      }],
    },
    "should support a `Comment`",
  );
  assert_eq!(
    serialize(&mut comment_ast.module, Some(&comment_ast.comments)),
    // To do: comment should be in this.
    "<>{}</>;\n",
    "should support a `Comment` (serialize)",
  );
  Ok(())
}
#[test]
fn elements() -> Result<(), String> {
  let mut element_ast = hast_util_to_swc(
    &hast::Node::Element(hast::Element {
      tag_name: "a".into(),
      properties: vec![(
        "className".into(),
        hast::PropertyValue::SpaceSeparated(vec!["b".into()]),
      )],
      children: vec![],
      position: None,
    }),
    None,
    None,
  )?;
  // Childless elements become self-closing JSX elements.
  assert_eq!(
    element_ast,
    Program {
      path: None,
      module: Module {
        shebang: None,
        body: vec![ModuleItem::Stmt(Stmt::Expr(ExprStmt {
          expr: Box::new(Expr::JSXElement(Box::new(JSXElement {
            opening: JSXOpeningElement {
              name: JSXElementName::Ident(Ident {
                span: swc_core::common::DUMMY_SP,
                sym: "a".into(),
                optional: false,
              }),
              attrs: vec![JSXAttrOrSpread::JSXAttr(JSXAttr {
                name: JSXAttrName::Ident(Ident {
                  sym: "className".into(),
                  span: swc_core::common::DUMMY_SP,
                  optional: false,
                }),
                value: Some(JSXAttrValue::Lit(Lit::Str(Str {
                  value: "b".into(),
                  span: swc_core::common::DUMMY_SP,
                  raw: None,
                }))),
                span: swc_core::common::DUMMY_SP,
              },)],
              self_closing: true,
              type_args: None,
              span: swc_core::common::DUMMY_SP,
            },
            closing: None,
            children: vec![],
            span: swc_core::common::DUMMY_SP,
          }))),
          span: swc_core::common::DUMMY_SP,
        },))],
        span: swc_core::common::DUMMY_SP,
      },
      comments: vec![],
    },
    "should support an `Element`",
  );
  assert_eq!(
    serialize(&mut element_ast.module, Some(&element_ast.comments)),
    "<a className=\"b\"/>;\n",
    "should support an `Element` (serialize)",
  );
  // Text children end up as string-literal expression containers.
  assert_eq!(
    serialize(
      &mut hast_util_to_swc(
        &hast::Node::Element(hast::Element {
          tag_name: "a".into(),
          properties: vec![],
          children: vec![hast::Node::Text(hast::Text {
            value: "a".into(),
            position: None,
          })],
          position: None,
        }),
        None,
        None
      )?
      .module,
      None
    ),
    "<a>{\"a\"}</a>;\n",
    "should support an `Element` w/ children",
  );
  assert_eq!(
    serialize(
      &mut hast_util_to_swc(
        &hast::Node::Element(hast::Element {
          tag_name: "svg".into(),
          properties: vec![],
          children: vec![],
          position: None,
        }),
        None,
        None
      )?
      .module,
      None
    ),
    "<svg/>;\n",
    "should support an `Element` in the SVG space",
  );
  Ok(())
}
#[test]
fn element_attributes() -> Result<(), String> {
  assert_eq!(
    serialize(
      &mut hast_util_to_swc(
        &hast::Node::Element(hast::Element {
          tag_name: "a".into(),
          properties: vec![("b".into(), hast::PropertyValue::String("c".into()),)],
          children: vec![],
          position: None,
        }),
        None,
        None
      )?
      .module,
      None
    ),
    "<a b=\"c\"/>;\n",
    "should support an `Element` w/ a string attribute",
  );
  // `true` serializes as a valueless attribute…
  assert_eq!(
    serialize(
      &mut hast_util_to_swc(
        &hast::Node::Element(hast::Element {
          tag_name: "a".into(),
          properties: vec![("b".into(), hast::PropertyValue::Boolean(true),)],
          children: vec![],
          position: None,
        }),
        None,
        None
      )?
      .module,
      None
    ),
    "<a b/>;\n",
    "should support an `Element` w/ a boolean (true) attribute",
  );
  // …while `false` drops the attribute entirely.
  assert_eq!(
    serialize(
      &mut hast_util_to_swc(
        &hast::Node::Element(hast::Element {
          tag_name: "a".into(),
          properties: vec![("b".into(), hast::PropertyValue::Boolean(false),)],
          children: vec![],
          position: None,
        }),
        None,
        None
      )?
      .module,
      None
    ),
    "<a/>;\n",
    "should support an `Element` w/ a boolean (false) attribute",
  );
  assert_eq!(
    serialize(
      &mut hast_util_to_swc(
        &hast::Node::Element(hast::Element {
          tag_name: "a".into(),
          properties: vec![(
            "b".into(),
            hast::PropertyValue::CommaSeparated(vec!["c".into(), "d".into()]),
          )],
          children: vec![],
          position: None,
        }),
        None,
        None
      )?
      .module,
      None
    ),
    "<a b=\"c, d\"/>;\n",
    "should support an `Element` w/ a comma-separated attribute",
  );
  // `data*` props are kebab-cased (see `prop_to_attr_name`); invalid ones
  // pass through unchanged.
  assert_eq!(
    serialize(
      &mut hast_util_to_swc(
        &hast::Node::Element(hast::Element {
          tag_name: "a".into(),
          properties: vec![
            ("data123".into(), hast::PropertyValue::Boolean(true)),
            ("dataFoo".into(), hast::PropertyValue::Boolean(true)),
            ("dataBAR".into(), hast::PropertyValue::Boolean(true)),
            ("data+invalid".into(), hast::PropertyValue::Boolean(true)),
            ("data--x".into(), hast::PropertyValue::Boolean(true))
          ],
          children: vec![],
          position: None,
        }),
        None,
        None
      )?
      .module,
      None
    ),
    "<a data-123 data-foo data-b-a-r data+invalid data--x/>;\n",
    "should support an `Element` w/ data attributes",
  );
  assert_eq!(
    serialize(
      &mut hast_util_to_swc(
        &hast::Node::Element(hast::Element {
          tag_name: "a".into(),
          properties: vec![
            ("role".into(), hast::PropertyValue::Boolean(true),),
            ("ariaValueNow".into(), hast::PropertyValue::Boolean(true),),
            ("ariaDescribedBy".into(), hast::PropertyValue::Boolean(true),)
          ],
          children: vec![],
          position: None,
        }),
        None,
        None
      )?
      .module,
      None
    ),
    "<a role aria-valuenow aria-describedby/>;\n",
    "should support an `Element` w/ aria attributes",
  );
  Ok(())
}
#[test]
fn mdx_element() -> Result<(), String> {
  let mut mdx_element_ast = hast_util_to_swc(
    &hast::Node::MdxJsxElement(hast::MdxJsxElement {
      name: None,
      attributes: vec![],
      children: vec![],
      position: None,
    }),
    None,
    None,
  )?;
  // A nameless MDX JSX element becomes a fragment.
  assert_eq!(
    mdx_element_ast,
    Program {
      path: None,
      module: Module {
        shebang: None,
        body: vec![ModuleItem::Stmt(Stmt::Expr(ExprStmt {
          expr: Box::new(Expr::JSXFragment(JSXFragment {
            opening: JSXOpeningFragment {
              span: swc_core::common::DUMMY_SP,
            },
            closing: JSXClosingFragment {
              span: swc_core::common::DUMMY_SP,
            },
            children: vec![],
            span: swc_core::common::DUMMY_SP,
          })),
          span: swc_core::common::DUMMY_SP,
        },))],
        span: swc_core::common::DUMMY_SP,
      },
      comments: vec![],
    },
    "should support an `MdxElement` (fragment)",
  );
  assert_eq!(
    serialize(&mut mdx_element_ast.module, Some(&mdx_element_ast.comments)),
    "<></>;\n",
    "should support an `MdxElement` (fragment, serialize)",
  );
  assert_eq!(
    serialize(
      &mut hast_util_to_swc(
        &hast::Node::MdxJsxElement(hast::MdxJsxElement {
          name: Some("a".into()),
          attributes: vec![],
          children: vec![],
          position: None,
        }),
        None,
        None
      )?
      .module,
      None
    ),
    "<a/>;\n",
    "should support an `MdxElement` (element, no children)",
  );
  assert_eq!(
    serialize(
      &mut hast_util_to_swc(
        &hast::Node::MdxJsxElement(hast::MdxJsxElement {
          name: Some("a".into()),
          attributes: vec![],
          children: vec![hast::Node::Text(hast::Text {
            value: "b".into(),
            position: None,
          })],
          position: None,
        }),
        None,
        None
      )?
      .module,
      None
    ),
    "<a>{\"b\"}</a>;\n",
    "should support an `MdxElement` (element, children)",
  );
  Ok(())
}
#[test]
fn mdx_element_name() -> Result<(), String> {
  // Namespaced names (`a:b`) survive the round trip.
  assert_eq!(
    serialize(
      &mut hast_util_to_swc(
        &hast::Node::MdxJsxElement(hast::MdxJsxElement {
          name: Some("a:b".into()),
          attributes: vec![],
          children: vec![],
          position: None,
        }),
        None,
        None
      )?
      .module,
      None
    ),
    "<a:b/>;\n",
    "should support an `MdxElement` (element, namespace id)",
  );
  // Dotted names become member expressions (`a.b.c`).
  assert_eq!(
    serialize(
      &mut hast_util_to_swc(
        &hast::Node::MdxJsxElement(hast::MdxJsxElement {
          name: Some("a.b.c".into()),
          attributes: vec![],
          children: vec![],
          position: None,
        }),
        None,
        None
      )?
      .module,
      None
    ),
    "<a.b.c/>;\n",
    "should support an `MdxElement` (element, member expression)",
  );
  assert_eq!(
    serialize(
      &mut hast_util_to_swc(
        &hast::Node::MdxJsxElement(hast::MdxJsxElement {
          name: Some("svg".into()),
          attributes: vec![],
          children: vec![],
          position: None,
        }),
        None,
        None
      )?
      .module,
      None
    ),
    "<svg/>;\n",
    "should support an `MdxElement` (element, `<svg>`)",
  );
  Ok(())
}
#[test]
fn mdx_element_attributes() -> Result<(), String> {
assert_eq!(
serialize(
&mut hast_util_to_swc(
&hast::Node::MdxJsxElement(hast::MdxJsxElement {
name: Some("a".into()),
attributes: vec![hast::AttributeContent::Property(hast::MdxJsxAttribute {
name: "b:c".into(),
value: None
})],
children: vec![],
position: None,
}),
None,
None
)?
.module,
None
),
"<a b:c/>;\n",
"should support an `MdxElement` (element, namespace attribute name)",
);
assert_eq!(
serialize(
&mut hast_util_to_swc(
&hast::Node::MdxJsxElement(hast::MdxJsxElement {
name: Some("a".into()),
attributes: vec![hast::AttributeContent::Property(hast::MdxJsxAttribute {
name: "b".into(),
value: None
})],
children: vec![],
position: None,
}),
None,
None
)?
.module,
None
),
"<a b/>;\n",
"should support an `MdxElement` (element, boolean attribute)",
);
assert_eq!(
serialize(
| rust | MIT | 04633f3cb2d8062e2578ea02213db42c2e4a5952 | 2026-01-04T20:17:17.706328Z | true |
web-infra-dev/mdx-rs | https://github.com/web-infra-dev/mdx-rs/blob/04633f3cb2d8062e2578ea02213db42c2e4a5952/crates/mdx_rs/src/swc_utils.rs | crates/mdx_rs/src/swc_utils.rs | //! Lots of helpers for dealing with SWC, particularly from unist, and for
//! building its ES AST.
use markdown::{
id_cont, id_start,
mdast::Stop,
unist::{Point, Position},
Location,
};
use swc_core::common::{BytePos, Span, SyntaxContext, DUMMY_SP};
use swc_core::ecma::ast::{
BinExpr, BinaryOp, Bool, CallExpr, Callee, ComputedPropName, Expr, ExprOrSpread, Ident,
JSXAttrName, JSXElementName, JSXMemberExpr, JSXNamespacedName, JSXObject, Lit, MemberExpr,
MemberProp, Null, Number, ObjectLit, PropName, PropOrSpread, Str,
};
use swc_core::ecma::visit::{noop_visit_mut_type, VisitMut};
/// Turn a unist position, into an SWC span, of two byte positions.
///
/// A missing position becomes `DUMMY_SP`.
///
/// > 👉 **Note**: SWC byte positions are offset by one: they are `0` when they
/// > are missing or incremented by `1` when valid.
pub fn position_to_span(position: Option<&Position>) -> Span {
  match position {
    None => DUMMY_SP,
    Some(position) => Span {
      lo: point_to_bytepos(&position.start),
      hi: point_to_bytepos(&position.end),
      ctxt: SyntaxContext::empty(),
    },
  }
}
/// Turn an SWC span, of two byte positions, into a unist position.
///
/// This assumes the span comes from a fixed tree, or is a dummy.
/// Returns `None` for dummy spans, a missing `location`, or positions the
/// location cannot map.
///
/// > 👉 **Note**: SWC byte positions are offset by one: they are `0` when they
/// > are missing or incremented by `1` when valid.
pub fn span_to_position(span: &Span, location: Option<&Location>) -> Option<Position> {
  let lo = span.lo.0 as usize;
  let hi = span.hi.0 as usize;
  // `0` marks a missing position.
  if lo == 0 || hi == 0 {
    return None;
  }
  let location = location?;
  let start = location.to_point(lo - 1)?;
  let end = location.to_point(hi - 1)?;
  Some(Position { start, end })
}
/// Turn a unist point into an SWC byte position.
///
/// > 👉 **Note**: SWC byte positions are offset by one: they are `0` when they
/// > are missing or incremented by `1` when valid.
pub fn point_to_bytepos(point: &Point) -> BytePos {
  // Add the `1` offset back; `0` is reserved for “missing”.
  let offset = point.offset as u32;
  BytePos(offset + 1)
}
/// Turn an SWC byte position into a unist point.
///
/// This assumes the byte position comes from a fixed tree, or is a dummy.
///
/// > 👉 **Note**: SWC byte positions are offset by one: they are `0` when they
/// > are missing or incremented by `1` when valid.
pub fn bytepos_to_point(bytepos: BytePos, location: Option<&Location>) -> Option<Point> {
  let pos = bytepos.0 as usize;
  // `0` marks a missing position.
  if pos == 0 {
    return None;
  }
  location.and_then(|location| location.to_point(pos - 1))
}
/// Prefix an error message with an optional point (`line:column: reason`).
pub fn prefix_error_with_point(reason: &str, point: Option<&Point>) -> String {
  let place = point_opt_to_string(point);
  format!("{}: {}", place, reason)
}
/// Serialize a unist position for humans.
pub fn position_opt_to_string(position: Option<&Position>) -> String {
if let Some(position) = position {
position_to_string(position)
} else {
"0:0".into()
}
}
/// Serialize a unist position for humans (`start-end`).
pub fn position_to_string(position: &Position) -> String {
  let start = point_to_string(&position.start);
  let end = point_to_string(&position.end);
  format!("{}-{}", start, end)
}
/// Serialize a unist point for humans.
pub fn point_opt_to_string(point: Option<&Point>) -> String {
if let Some(point) = point {
point_to_string(point)
} else {
"0:0".into()
}
}
/// Serialize a unist point for humans.
pub fn point_to_string(point: &Point) -> String {
format!("{}:{}", point.line, point.column)
}
/// Visitor to fix SWC byte positions.
///
/// This assumes the byte position comes from an **unfixed** tree.
///
/// > 👉 **Note**: SWC byte positions are offset by one: they are `0` when they
/// > are missing or incremented by `1` when valid.
#[derive(Debug, Default, Clone)]
pub struct RewriteStopsContext<'a> {
  /// Stops in the original source.
  pub stops: &'a [Stop],
  /// Location info.
  pub location: Option<&'a Location>,
}
impl<'a> VisitMut for RewriteStopsContext<'a> {
  noop_visit_mut_type!();
  /// Rewrite spans from positions relative to the embedded source into
  /// absolute positions, via the stops.
  ///
  /// Spans that are missing (`0`) or that cannot be mapped become `DUMMY_SP`.
  fn visit_mut_span(&mut self, span: &mut Span) {
    let mut result = DUMMY_SP;
    let lo_rel = span.lo.0 as usize;
    let hi_rel = span.hi.0 as usize;
    // Guard dummy (`0`) positions: the `- 1` below would underflow.
    if lo_rel > 0 && hi_rel > 0 {
      let lo_clean = Location::relative_to_absolute(self.stops, lo_rel - 1);
      let hi_clean = Location::relative_to_absolute(self.stops, hi_rel - 1);
      if let (Some(lo_abs), Some(hi_abs)) = (lo_clean, hi_clean) {
        result = create_span(lo_abs as u32 + 1, hi_abs as u32 + 1);
      }
    }
    *span = result;
  }
}
/// Visitor to fix SWC byte positions by removing a prefix.
///
/// > 👉 **Note**: SWC byte positions are offset by one: they are `0` when they
/// > are missing or incremented by `1` when valid.
#[derive(Debug, Default, Clone)]
pub struct RewritePrefixContext {
  /// Size of prefix considered outside this tree.
  pub prefix_len: u32,
}

impl VisitMut for RewritePrefixContext {
  noop_visit_mut_type!();

  /// Shift spans left by the prefix; spans inside the prefix become dummies.
  fn visit_mut_span(&mut self, span: &mut Span) {
    *span = if span.lo.0 > self.prefix_len && span.hi.0 > self.prefix_len {
      create_span(span.lo.0 - self.prefix_len, span.hi.0 - self.prefix_len)
    } else {
      DUMMY_SP
    };
  }
}

/// Visitor to drop SWC spans.
#[derive(Debug, Default, Clone)]
pub struct DropContext {}

impl VisitMut for DropContext {
  noop_visit_mut_type!();

  /// Replace every span with a dummy.
  fn visit_mut_span(&mut self, span: &mut Span) {
    *span = DUMMY_SP;
  }
}
/// Generate a span from raw byte positions.
pub fn create_span(lo: u32, hi: u32) -> Span {
  Span {
    ctxt: SyntaxContext::default(),
    lo: BytePos(lo),
    hi: BytePos(hi),
  }
}

/// Generate an ident with a dummy span.
///
/// ```js
/// a
/// ```
pub fn create_ident(sym: &str) -> Ident {
  Ident {
    span: DUMMY_SP,
    sym: sym.into(),
    optional: false,
  }
}

/// Generate an ident expression.
///
/// ```js
/// a
/// ```
pub fn create_ident_expression(sym: &str) -> Expr {
  Expr::Ident(create_ident(sym))
}
/// Generate a null.
pub fn create_null() -> Null {
Null {
span: swc_core::common::DUMMY_SP,
}
}
/// Generate a null.
pub fn create_null_lit() -> Lit {
Lit::Null(create_null())
}
/// Generate a null.
pub fn create_null_expression() -> Expr {
Expr::Lit(create_null_lit())
}
/// Generate a str.
// The old doc comment said "Generate a null." — a copy-paste error from the
// helpers above.
pub fn create_str(value: &str) -> Str {
  value.into()
}
/// Generate a str literal.
pub fn create_str_lit(value: &str) -> Lit {
  Lit::Str(create_str(value))
}
/// Generate a str literal expression.
pub fn create_str_expression(value: &str) -> Expr {
  Expr::Lit(create_str_lit(value))
}
/// Generate a bool.
pub fn create_bool(value: bool) -> Bool {
value.into()
}
/// Generate a bool.
pub fn create_bool_lit(value: bool) -> Lit {
Lit::Bool(create_bool(value))
}
/// Generate a bool.
pub fn create_bool_expression(value: bool) -> Expr {
Expr::Lit(create_bool_lit(value))
}
/// Generate a number.
pub fn create_num(value: f64) -> Number {
value.into()
}
/// Generate a num.
pub fn create_num_lit(value: f64) -> Lit {
Lit::Num(create_num(value))
}
/// Generate a num.
pub fn create_num_expression(value: f64) -> Expr {
Expr::Lit(create_num_lit(value))
}
/// Generate an object literal from its properties.
pub fn create_object_lit(value: Vec<PropOrSpread>) -> ObjectLit {
  ObjectLit {
    span: DUMMY_SP,
    props: value,
  }
}

/// Generate an object literal expression.
pub fn create_object_expression(value: Vec<PropOrSpread>) -> Expr {
  Expr::Object(create_object_lit(value))
}

/// Generate a call from a callee and arguments.
pub fn create_call(callee: Callee, args: Vec<ExprOrSpread>) -> CallExpr {
  CallExpr {
    span: DUMMY_SP,
    type_args: None,
    callee,
    args,
  }
}

/// Generate a call expression.
pub fn create_call_expression(callee: Callee, args: Vec<ExprOrSpread>) -> Expr {
  Expr::Call(create_call(callee, args))
}
/// Generate a binary expression.
///
/// Folds the expressions left to right into a left-leaning tree:
///
/// ```js
/// a + b + c
/// a || b
/// ```
///
/// Panics when `exprs` is empty.
pub fn create_binary_expression(exprs: Vec<Expr>, op: BinaryOp) -> Expr {
  exprs
    .into_iter()
    .reduce(|left, right| {
      Expr::Bin(BinExpr {
        op,
        span: DUMMY_SP,
        left: Box::new(left),
        right: Box::new(right),
      })
    })
    .expect("expected one or more expressions")
}
/// Generate a member expression from a string.
///
/// ```js
/// a.b
/// a
/// ```
pub fn create_member_expression_from_str(name: &str) -> Expr {
  match parse_js_name(name) {
    // `a`
    JsName::Normal(name) => create_ident_expression(name),
    // `a.b.c`: seed with the first two parts, then fold the rest on.
    JsName::Member(parts) => {
      let base = create_member(
        create_ident_expression(parts[0]),
        create_member_prop_from_str(parts[1]),
      );
      let member = parts[2..].iter().fold(base, |member, part| {
        create_member(Expr::Member(member), create_member_prop_from_str(part))
      });
      Expr::Member(member)
    }
  }
}
/// Generate a member expression from an object and prop.
pub fn create_member(obj: Expr, prop: MemberProp) -> MemberExpr {
  MemberExpr {
    span: DUMMY_SP,
    obj: Box::new(obj),
    prop,
  }
}

/// Create a member prop from a string.
///
/// Valid identifier names become `a.b`; anything else becomes computed
/// access (`a["b-c"]`).
pub fn create_member_prop_from_str(name: &str) -> MemberProp {
  if !is_identifier_name(name) {
    return MemberProp::Computed(ComputedPropName {
      span: DUMMY_SP,
      expr: Box::new(create_str_expression(name)),
    });
  }
  MemberProp::Ident(create_ident(name))
}
/// Generate a JSX element name from a string.
///
/// ```js
/// a.b-c
/// a
/// ```
pub fn create_jsx_name_from_str(name: &str) -> JSXElementName {
  match parse_jsx_name(name) {
    // `a`
    JsxName::Normal(name) => JSXElementName::Ident(create_ident(name)),
    // `a:b`
    JsxName::Namespace(ns, name) => JSXElementName::JSXNamespacedName(JSXNamespacedName {
      ns: create_ident(ns),
      name: create_ident(name),
    }),
    // `a.b.c`: seed with the first two parts, then fold the rest on.
    JsxName::Member(parts) => {
      let base = create_jsx_member(
        JSXObject::Ident(create_ident(parts[0])),
        create_ident(parts[1]),
      );
      let member = parts[2..].iter().fold(base, |member, part| {
        create_jsx_member(JSXObject::JSXMemberExpr(Box::new(member)), create_ident(part))
      });
      JSXElementName::JSXMemberExpr(member)
    }
  }
}
/// Generate a JSX member expression from an object and prop.
pub fn create_jsx_member(obj: JSXObject, prop: Ident) -> JSXMemberExpr {
  JSXMemberExpr { obj, prop }
}

/// Turn a JSX element name into an expression.
pub fn jsx_element_name_to_expression(node: JSXElementName) -> Expr {
  match node {
    JSXElementName::Ident(ident) => create_ident_or_literal(&ident),
    JSXElementName::JSXNamespacedName(namespace_name) => {
      // Namespaced names are not valid JS identifiers, so use a string.
      let name = format!("{}:{}", namespace_name.ns.sym, namespace_name.name.sym);
      create_str_expression(&name)
    }
    JSXElementName::JSXMemberExpr(member_expr) => jsx_member_expression_to_expression(member_expr),
  }
}

/// Create a JSX attribute name.
///
/// Panics on member expressions, which are not valid attribute names.
pub fn create_jsx_attr_name_from_str(name: &str) -> JSXAttrName {
  match parse_jsx_name(name) {
    // `<a b />`
    JsxName::Normal(name) => JSXAttrName::Ident(create_ident(name)),
    // `<a b:c />`
    JsxName::Namespace(ns, name) => JSXAttrName::JSXNamespacedName(JSXNamespacedName {
      ns: create_ident(ns),
      name: create_ident(name),
    }),
    JsxName::Member(_) => {
      unreachable!("member expressions in attribute names are not supported")
    }
  }
}
/// Turn a JSX member expression name into a member expression.
pub fn jsx_member_expression_to_expression(node: JSXMemberExpr) -> Expr {
  let obj = jsx_object_to_expression(node.obj);
  let prop = ident_to_member_prop(&node.prop);
  Expr::Member(create_member(obj, prop))
}

/// Turn an ident into a member prop.
///
/// Invalid identifier names become computed (string) props.
pub fn ident_to_member_prop(node: &Ident) -> MemberProp {
  if !is_identifier_name(node.as_ref()) {
    return MemberProp::Computed(ComputedPropName {
      expr: Box::new(create_str_expression(&node.sym)),
      span: node.span,
    });
  }
  MemberProp::Ident(Ident {
    span: node.span,
    sym: node.sym.clone(),
    optional: false,
  })
}

/// Turn a JSX attribute name into a prop name.
pub fn jsx_attribute_name_to_prop_name(node: JSXAttrName) -> PropName {
  let name = match node {
    JSXAttrName::Ident(ident) => ident.sym.to_string(),
    JSXAttrName::JSXNamespacedName(namespace_name) => {
      format!("{}:{}", namespace_name.ns.sym, namespace_name.name.sym)
    }
  };
  create_prop_name(&name)
}
/// Turn a JSX object into an expression.
pub fn jsx_object_to_expression(node: JSXObject) -> Expr {
  match node {
    JSXObject::JSXMemberExpr(member_expr) => jsx_member_expression_to_expression(*member_expr),
    JSXObject::Ident(ident) => create_ident_or_literal(&ident),
  }
}

/// Create either an ident expression or a literal expression.
pub fn create_ident_or_literal(node: &Ident) -> Expr {
  let sym = node.sym.as_ref();
  if is_identifier_name(sym) {
    create_ident_expression(sym)
  } else {
    create_str_expression(sym)
  }
}

/// Create a prop name: an ident when valid, otherwise a string.
pub fn create_prop_name(name: &str) -> PropName {
  match is_identifier_name(name) {
    true => PropName::Ident(create_ident(name)),
    false => PropName::Str(create_str(name)),
  }
}

/// Check if a name is a literal tag name or an identifier to a component.
///
/// Lowercase-ASCII-initial names and names that are not valid identifiers
/// count as literal.
pub fn is_literal_name(name: &str) -> bool {
  name.starts_with(|c: char| c.is_ascii_lowercase()) || !is_identifier_name(name)
}
/// Check if a name is a valid identifier name.
///
/// The first character must be an ID-start character; every further
/// character must be an ID-continue character.
/// An empty string yields `true`.
pub fn is_identifier_name(name: &str) -> bool {
  name
    .chars()
    .enumerate()
    .all(|(index, char)| if index == 0 { id_start(char) } else { id_cont(char, false) })
}
/// Different kinds of JS names.
pub enum JsName<'a> {
  /// Member: `a.b.c`
  Member(Vec<&'a str>),
  /// Name: `a`
  Normal(&'a str),
}

/// Different kinds of JSX names.
pub enum JsxName<'a> {
  /// Member: `a.b.c`
  Member(Vec<&'a str>),
  /// Namespace: `a:b`
  Namespace(&'a str, &'a str),
  /// Name: `a`
  Normal(&'a str),
}

/// Parse a JavaScript member expression or name.
///
/// A name without dots is `Normal`; otherwise every dot-separated segment
/// (including empty ones) becomes part of `Member`.
pub fn parse_js_name(name: &str) -> JsName {
  if name.contains('.') {
    // `a.b.c`
    JsName::Member(name.split('.').collect())
  } else {
    // `a`
    JsName::Normal(name)
  }
}

/// Parse a JSX name from a string.
pub fn parse_jsx_name(name: &str) -> JsxName {
  match parse_js_name(name) {
    // `<a.b.c />`
    JsName::Member(parts) => JsxName::Member(parts),
    JsName::Normal(name) => match name.find(':') {
      // `<a:b />`
      Some(colon) => JsxName::Namespace(&name[..colon], &name[(colon + 1)..]),
      // `<a />`
      None => JsxName::Normal(name),
    },
  }
}
/// Get the identifiers used in a JSX member expression.
///
/// `Foo.Bar` -> `vec!["Foo", "Bar"]`
pub fn jsx_member_to_parts(node: &JSXMemberExpr) -> Vec<&str> {
  let mut parts = vec![];
  let mut current = node;
  // Walk from the outermost prop down to the root ident, then reverse.
  loop {
    parts.push(current.prop.sym.as_ref());
    match &current.obj {
      JSXObject::JSXMemberExpr(inner) => current = inner,
      JSXObject::Ident(ident) => {
        parts.push(ident.sym.as_ref());
        break;
      }
    }
  }
  parts.reverse();
  parts
}
/// Check if a text value is inter-element whitespace.
///
/// Only tab, form feed, carriage return, line feed, and space count;
/// an empty string is whitespace.
///
/// See: <https://github.com/syntax-tree/hast-util-whitespace>.
pub fn inter_element_whitespace(value: &str) -> bool {
  value
    .bytes()
    .all(|byte| matches!(byte, b'\t' | 0x0C | b'\r' | b'\n' | b' '))
}
// Unit tests for the helpers above.
#[cfg(test)]
mod tests {
  use super::*;
  use pretty_assertions::assert_eq;
  // Without a `Location` there is nothing to resolve against, so the
  // conversion must yield `None`.
  #[test]
  fn bytepos_to_point_test() {
    assert_eq!(
      bytepos_to_point(BytePos(123), None),
      None,
      "should support no location"
    );
  }
  // A missing point falls back to the `0:0` placeholder.
  #[test]
  fn prefix_error_with_point_test() {
    assert_eq!(
      prefix_error_with_point("aaa", None),
      "0:0: aaa",
      "should support no point"
    );
  }
  #[test]
  fn position_opt_to_string_test() {
    assert_eq!(
      position_opt_to_string(None),
      "0:0",
      "should support no position"
    );
  }
  #[test]
  fn point_opt_to_string_test() {
    assert_eq!(point_opt_to_string(None), "0:0", "should support no point");
  }
  // The member chain is flattened root-first.
  #[test]
  fn jsx_member_to_parts_test() {
    assert_eq!(
      jsx_member_to_parts(&JSXMemberExpr {
        prop: create_ident("a"),
        obj: JSXObject::Ident(create_ident("b"))
      }),
      vec!["b", "a"],
      "should support a member with 2 items"
    );
    assert_eq!(
      jsx_member_to_parts(&JSXMemberExpr {
        prop: create_ident("a"),
        obj: JSXObject::JSXMemberExpr(Box::new(JSXMemberExpr {
          prop: create_ident("b"),
          obj: JSXObject::JSXMemberExpr(Box::new(JSXMemberExpr {
            prop: create_ident("c"),
            obj: JSXObject::Ident(create_ident("d"))
          }))
        }))
      }),
      vec!["d", "c", "b", "a"],
      "should support a member with 4 items"
    );
  }
}
| rust | MIT | 04633f3cb2d8062e2578ea02213db42c2e4a5952 | 2026-01-04T20:17:17.706328Z | false |
web-infra-dev/mdx-rs | https://github.com/web-infra-dev/mdx-rs/blob/04633f3cb2d8062e2578ea02213db42c2e4a5952/crates/mdx_rs/src/mdx_plugin_recma_jsx_rewrite.rs | crates/mdx_rs/src/mdx_plugin_recma_jsx_rewrite.rs | //! Rewrite JSX tags to accept them from props and an optional provider.
//!
//! Port of <https://github.com/mdx-js/mdx/blob/main/packages/mdx/lib/plugin/recma-jsx-rewrite.js>,
//! by the same author.
use crate::hast_util_to_swc::{Program, MAGIC_EXPLICIT_MARKER};
use crate::swc_utils::{
create_binary_expression, create_bool_expression, create_call_expression, create_ident,
create_ident_expression, create_jsx_name_from_str, create_member,
create_member_expression_from_str, create_member_prop_from_str, create_object_expression,
create_prop_name, create_str, create_str_expression, is_identifier_name, is_literal_name,
jsx_member_to_parts, position_to_string, span_to_position,
};
use markdown::{unist::Position, Location};
use swc_core::common::{util::take::Take, Span, DUMMY_SP};
use swc_core::ecma::ast::{
ArrowExpr, AssignPatProp, BinaryOp, BindingIdent, BlockStmt, BlockStmtOrExpr, Callee,
CatchClause, ClassDecl, CondExpr, Decl, DoWhileStmt, Expr, ExprOrSpread, ExprStmt, FnDecl,
FnExpr, ForInStmt, ForOfStmt, ForStmt, Function, IfStmt, ImportDecl, ImportNamedSpecifier,
ImportSpecifier, JSXElement, JSXElementName, KeyValuePatProp, KeyValueProp, MemberExpr,
MemberProp, ModuleDecl, ModuleExportName, ModuleItem, NewExpr, ObjectPat, ObjectPatProp, Param,
ParenExpr, Pat, Prop, PropOrSpread, ReturnStmt, Stmt, ThrowStmt, UnaryExpr, UnaryOp, VarDecl,
VarDeclKind, VarDeclarator, WhileStmt,
};
use swc_core::ecma::visit::{noop_visit_mut_type, VisitMut, VisitMutWith};
/// Configuration.
#[derive(Debug, Default, Clone)]
pub struct Options {
  /// Place to import a provider from.
  ///
  /// See [MDX provider](https://mdxjs.com/docs/using-mdx/#mdx-provider)
  /// on the MDX website for more info.
  // When set, generated components import `useMDXComponents` from this module.
  pub provider_import_source: Option<String>,
  /// Whether to add extra information to error messages in generated code.
  // When on, missing-component errors include the source position.
  pub development: bool,
}
/// Rewrite JSX in an MDX file so that components can be passed in and provided.
///
/// Walks the program, rewriting component references per function scope,
/// then prepends the provider import and appends the missing-component
/// helper when either was referenced.
pub fn mdx_plugin_recma_jsx_rewrite(
  program: &mut Program,
  options: &Options,
  location: Option<&Location>,
) {
  let mut state = State {
    location,
    path: program.path.clone(),
    scopes: Vec::new(),
    development: options.development,
    provider: options.provider_import_source.is_some(),
    create_provider_import: false,
    create_error_helper: false,
  };
  // Open the global scope before visiting.
  state.enter(Some(Info::default()));
  program.module.visit_mut_with(&mut state);
  // If a provider is used (and can be used), import it.
  if state.create_provider_import {
    if let Some(source) = &options.provider_import_source {
      program
        .module
        .body
        .insert(0, create_import_provider(source));
    }
  }
  // If potentially missing components are used, add the helper used for
  // errors.
  if state.create_error_helper {
    let helper = create_error_helper(state.development, state.path);
    program.module.body.push(helper);
  }
}
/// Collection of different SWC functions.
#[derive(Debug)]
enum Func<'a> {
  /// Function declaration.
  Decl(&'a mut FnDecl),
  /// Function expression.
  Expr(&'a mut FnExpr),
  /// Arrow function.
  Arrow(&'a mut ArrowExpr),
}
/// Non-literal reference.
#[derive(Debug, Default, Clone)]
struct Dynamic {
  /// Name.
  ///
  /// ```jsx
  /// "a.b.c"
  /// "A"
  /// ```
  name: String,
  /// Component or not (in which case, object).
  component: bool,
  /// Positional info where it was (first) referenced.
  position: Option<Position>,
}
/// Alias.
// Maps a JSX name that is not a valid JS identifier (e.g. `custom-element`)
// to a generated safe identifier (e.g. `_component0`).
#[derive(Debug, Default, Clone)]
struct Alias {
  /// Unsafe.
  original: String,
  /// Safe.
  safe: String,
}
/// Info for a function scope.
#[derive(Debug, Default, Clone)]
struct Info {
  /// Function name.
  // `None` for anonymous functions (such as arrows).
  name: Option<String>,
  /// Used literals (`<a />`).
  literal: Vec<String>,
  /// Non-literal references (components and objects).
  dynamic: Vec<Dynamic>,
  /// List of JSX identifiers of literal that are not valid JS identifiers.
  aliases: Vec<Alias>,
}
/// Scope (block or function/global).
#[derive(Debug, Clone)]
struct Scope {
  /// If this is a function (or global) scope, we track info.
  info: Option<Info>,
  /// Things that are defined in this scope.
  defined: Vec<String>,
}
/// Context.
#[derive(Debug, Default, Clone)]
struct State<'a> {
  /// Location info.
  location: Option<&'a Location>,
  /// Path to file.
  path: Option<String>,
  /// List of current scopes.
  scopes: Vec<Scope>,
  /// Whether the user is in development mode.
  development: bool,
  /// Whether the user uses a provider.
  provider: bool,
  /// Whether a provider is referenced.
  create_provider_import: bool,
  /// Whether a missing component helper is referenced.
  ///
  /// When things are referenced that might not be defined, we reference a
  /// helper function to throw when they are missing.
  create_error_helper: bool,
}
impl<'a> State<'a> {
/// Open a new scope.
fn enter(&mut self, info: Option<Info>) {
self.scopes.push(Scope {
info,
defined: vec![],
});
}
/// Close the current scope.
fn exit(&mut self) -> Scope {
self.scopes.pop().expect("expected scope")
}
  /// Close a function.
  ///
  /// This is where the generated code is injected at the start of the
  /// function body: defaults for literal tags, the `_components` object,
  /// aliases for JSX names that are not valid JS identifiers, and runtime
  /// checks for potentially missing components.
  fn exit_func(&mut self, func: Func) {
    let mut scope = self.exit();
    let mut defaults = vec![];
    let info = scope.info.take().unwrap();
    let mut statements = vec![];
    if !info.literal.is_empty() || !info.dynamic.is_empty() {
      let mut parameters = vec![];
      // Use a provider, if configured.
      //
      // ```jsx
      // _provideComponents()
      // ```
      if self.provider {
        self.create_provider_import = true;
        let call = create_ident_expression("_provideComponents");
        let callee = Callee::Expr(Box::new(call));
        parameters.push(create_call_expression(callee, vec![]));
      }
      // Accept `components` as a prop if this is the `MDXContent` or
      // `_createMdxContent` function.
      //
      // ```jsx
      // props.components
      // ```
      if is_props_receiving_fn(&info.name) {
        let member = MemberExpr {
          obj: Box::new(create_ident_expression("props")),
          prop: MemberProp::Ident(create_ident("components")),
          span: DUMMY_SP,
        };
        parameters.push(Expr::Member(member));
      }
      // Create defaults for literal tags.
      //
      // Literal tags are optional.
      // When they are not passed, they default to their tag name.
      //
      // ```jsx
      // {h1: 'h1'}
      // ```
      let mut index = 0;
      while index < info.literal.len() {
        let name = &info.literal[index];
        defaults.push(PropOrSpread::Prop(Box::new(Prop::KeyValue(KeyValueProp {
          key: create_prop_name(name),
          value: Box::new(create_str_expression(name)),
        }))));
        index += 1;
      }
      // Inject an object at the start, when:
      // - there are defaults,
      // - there are two sources
      //
      // ```jsx
      // (_provideComponents(), props.components)
      // ()
      // ```
      //
      // To:
      //
      // ```jsx
      // ({}, _provideComponents(), props.components)
      // ({h1: 'h1'})
      // ```
      if !defaults.is_empty() || parameters.len() > 1 {
        parameters.insert(0, create_object_expression(defaults));
      }
      // Merge things and prevent errors.
      //
      // ```jsx
      // {}, _provideComponents(), props.components
      // props.components
      // _provideComponents()
      // ```
      //
      // To:
      //
      // ```jsx
      // Object.assign({}, _provideComponents(), props.components)
      // props.components || {}
      // _provideComponents()
      // ```
      let mut components_init = if parameters.len() > 1 {
        let mut args = vec![];
        parameters.reverse();
        while let Some(param) = parameters.pop() {
          args.push(ExprOrSpread {
            spread: None,
            expr: Box::new(param),
          });
        }
        let callee = create_member_expression_from_str("Object.assign");
        create_call_expression(Callee::Expr(Box::new(callee)), args)
      } else {
        // Always one.
        let param = parameters.pop().unwrap();
        // `props.components` may be undefined, so guard with `|| {}`.
        if let Expr::Member(_) = param {
          create_binary_expression(
            vec![param, create_object_expression(vec![])],
            BinaryOp::LogicalOr,
          )
        } else {
          param
        }
      };
      let mut declarators = vec![];
      // If there are tags, they are taken from `_components`, so we need
      // to make it defined.
      if !info.literal.is_empty() {
        let declarator = VarDeclarator {
          span: DUMMY_SP,
          name: Pat::Ident(BindingIdent {
            id: create_ident("_components"),
            type_ann: None,
          }),
          init: Some(Box::new(components_init)),
          definite: false,
        };
        declarators.push(declarator);
        // Later declarators read from the `_components` binding instead.
        components_init = create_ident_expression("_components");
      }
      // For JSX IDs that can’t be represented as JavaScript IDs (as in,
      // those with dashes, such as `custom-element`), we generated a
      // separate variable that is a valid JS ID (such as `_component0`),
      // and here we take it from components:
      // ```js
      // const _component0 = _components['custom-element']
      // ```
      if !info.aliases.is_empty() {
        let mut index = 0;
        while index < info.aliases.len() {
          let alias = &info.aliases[index];
          let declarator = VarDeclarator {
            span: DUMMY_SP,
            name: Pat::Ident(BindingIdent {
              id: create_ident(&alias.safe),
              type_ann: None,
            }),
            init: Some(Box::new(Expr::Member(create_member(
              create_ident_expression("_components"),
              create_member_prop_from_str(&alias.original),
            )))),
            definite: false,
          };
          declarators.push(declarator);
          index += 1;
        }
      }
      // Add components to scope.
      //
      // For `['MyComponent', 'MDXLayout']` this generates:
      //
      // ```js
      // const {MyComponent, wrapper: MDXLayout} = _components
      // ```
      //
      // Note that MDXLayout is special as it’s taken from
      // `_components.wrapper`.
      let mut props = vec![];
      for reference in &info.dynamic {
        let invalid = info.aliases.iter().any(|d| d.original == reference.name);
        // The primary ID of objects and components that are referenced.
        // Ignore if invalid.
        if !reference.name.contains('.') && !invalid {
          // `wrapper: MDXLayout`
          if reference.name == "MDXLayout" {
            let binding = BindingIdent {
              id: create_ident(&reference.name),
              type_ann: None,
            };
            let prop = KeyValuePatProp {
              key: create_prop_name("wrapper"),
              value: Box::new(Pat::Ident(binding)),
            };
            props.push(ObjectPatProp::KeyValue(prop));
          } else {
            // `MyComponent`
            let prop = AssignPatProp {
              key: create_ident(&reference.name),
              value: None,
              span: DUMMY_SP,
            };
            props.push(ObjectPatProp::Assign(prop));
          }
        }
      }
      if !props.is_empty() {
        let pat = ObjectPat {
          props,
          optional: false,
          span: DUMMY_SP,
          type_ann: None,
        };
        let declarator = VarDeclarator {
          name: Pat::Object(pat),
          init: Some(Box::new(components_init)),
          span: DUMMY_SP,
          definite: false,
        };
        declarators.push(declarator);
      };
      // Add the variable declaration.
      let decl = VarDecl {
        kind: VarDeclKind::Const,
        decls: declarators,
        span: DUMMY_SP,
        declare: false,
      };
      let var_decl = Decl::Var(Box::new(decl));
      statements.push(Stmt::Decl(var_decl));
    }
    // Add checks at runtime to verify that object/components are passed.
    //
    // ```js
    // if (!a) _missingMdxReference("a", false);
    // if (!a.b) _missingMdxReference("a.b", true);
    // ```
    for reference in info.dynamic {
      // We use a conditional to check if `MDXLayout` is defined or not
      // in the `MDXContent` component.
      let layout = reference.name == "MDXLayout" && info.name == Some("MDXContent".into());
      if !layout {
        self.create_error_helper = true;
        let mut args = vec![
          ExprOrSpread {
            spread: None,
            expr: Box::new(create_str_expression(&reference.name)),
          },
          ExprOrSpread {
            spread: None,
            expr: Box::new(create_bool_expression(reference.component)),
          },
        ];
        // Add the source location if it exists and if `development` is on.
        if let Some(position) = reference.position.as_ref() {
          if self.development {
            args.push(ExprOrSpread {
              spread: None,
              expr: Box::new(create_str_expression(&position_to_string(position))),
            });
          }
        }
        // Rewrite the primary segment through its safe alias, if any, so the
        // generated check references the declared variable.
        let mut name = reference.name;
        let split = name.split('.');
        let mut path = split.map(String::from).collect::<Vec<_>>();
        let alias = info.aliases.iter().find(|d| d.original == path[0]);
        if let Some(alias) = alias {
          path[0] = alias.safe.clone();
          name = path.join(".");
        }
        let test = UnaryExpr {
          op: UnaryOp::Bang,
          arg: Box::new(create_member_expression_from_str(&name)),
          span: DUMMY_SP,
        };
        let callee = create_ident_expression("_missingMdxReference");
        let call = create_call_expression(Callee::Expr(Box::new(callee)), args);
        let cons = ExprStmt {
          span: DUMMY_SP,
          expr: Box::new(call),
        };
        let statement = IfStmt {
          test: Box::new(Expr::Unary(test)),
          cons: Box::new(Stmt::Expr(cons)),
          alt: None,
          span: DUMMY_SP,
        };
        statements.push(Stmt::If(statement));
      }
    }
    // Add statements to functions.
    if !statements.is_empty() {
      let body: &mut BlockStmt = match func {
        Func::Expr(expr) => {
          // Always exists if we have components in it.
          expr.function.body.as_mut().unwrap()
        }
        Func::Decl(decl) => {
          // Always exists if we have components in it.
          decl.function.body.as_mut().unwrap()
        }
        Func::Arrow(arr) => {
          // Expression-bodied arrows are first converted to block bodies.
          if let BlockStmtOrExpr::Expr(expr) = &mut *arr.body {
            let block = BlockStmt {
              stmts: vec![Stmt::Return(ReturnStmt {
                arg: Some(expr.take()),
                span: DUMMY_SP,
              })],
              span: DUMMY_SP,
            };
            arr.body = Box::new(BlockStmtOrExpr::BlockStmt(block));
          }
          arr.body.as_mut_block_stmt().unwrap()
        }
      };
      // Prepend the generated statements to the existing body.
      statements.append(&mut body.stmts.split_off(0));
      body.stmts = statements;
    }
  }
/// Get the current function scope.
fn current_fn_scope_mut(&mut self) -> &mut Scope {
let mut index = self.scopes.len();
while index > 0 {
index -= 1;
if self.scopes[index].info.is_some() {
return &mut self.scopes[index];
}
}
unreachable!("expected scope")
}
/// Get the current scope.
fn current_scope_mut(&mut self) -> &mut Scope {
self.scopes.last_mut().expect("expected scope")
}
/// Get the top-level scope’s info, mutably.
fn current_top_level_info(&mut self) -> Option<&mut Info> {
if let Some(scope) = self.scopes.get_mut(1) {
scope.info.as_mut()
} else {
None
}
}
/// Check if `id` is in scope.
fn in_scope(&self, id: &String) -> bool {
let mut index = self.scopes.len();
while index > 0 {
index -= 1;
if self.scopes[index].defined.contains(id) {
return true;
}
}
false
}
/// Reference a literal tag name.
fn ref_tag(&mut self, name: &str) {
let scope = self.current_top_level_info().expect("expected scope");
let name = name.to_string();
if !scope.literal.contains(&name) {
scope.literal.push(name);
}
}
/// Reference a component or object name.
fn ref_dynamic(&mut self, path: &[String], component: bool, position: &Option<Position>) {
let scope = self.current_top_level_info().expect("expected scope");
let name = path.join(".");
let existing = scope.dynamic.iter_mut().find(|d| d.name == name);
if let Some(existing) = existing {
if component {
existing.component = component;
}
} else {
let dynamic = Dynamic {
name,
component,
position: position.clone(),
};
scope.dynamic.push(dynamic);
}
}
fn create_alias(&mut self, id: &str) -> String {
let scope = self.current_top_level_info().expect("expected scope");
let existing = scope.aliases.iter().find(|d| d.original == id);
if let Some(alias) = existing {
alias.safe.to_string()
} else {
let name = format!("_component{}", scope.aliases.len());
scope.aliases.push(Alias {
original: id.to_string(),
safe: name.clone(),
});
name
}
}
  /// Reference the dotted parts of a JSX name, returning a replacement
  /// name when the reference must be rewritten.
  fn ref_ids(&mut self, ids: &[String], span: &Span) -> Option<JSXElementName> {
    // If there is a top-level, non-global, scope which is a function:
    if let Some(info) = self.current_top_level_info() {
      // Rewrite only if we can rewrite.
      if is_props_receiving_fn(&info.name) || self.provider {
        debug_assert!(!ids.is_empty(), "expected non-empty ids");
        // The span context marker distinguishes authored JSX from JSX
        // generated out of markdown.
        let explicit_jsx = span.ctxt.as_u32() == MAGIC_EXPLICIT_MARKER;
        let mut path = ids.to_owned();
        let position = span_to_position(span, self.location);
        // A tag name of a literal element (not a component).
        if ids.len() == 1 && is_literal_name(&path[0]) {
          self.ref_tag(&path[0]);
          // The author did not use explicit JSX (`<h1>a</h1>`),
          // but markdown (`# a`), so rewrite.
          if !explicit_jsx {
            path.insert(0, "_components".into());
          }
        } else if !self.in_scope(&path[0]) {
          // Component or object not in scope.
          // Register every prefix (`a`, `a.b`, `a.b.c`) so a runtime check
          // can be generated for each level.
          let mut index = 1;
          while index <= path.len() {
            self.ref_dynamic(&path[0..index], index == ids.len(), &position);
            index += 1;
          }
        }
        // If the primary ID is not a valid JS ID:
        if !is_identifier_name(&path[0]) {
          path[0] = self.create_alias(&path[0]);
        }
        // Only return a replacement when something actually changed.
        if path != ids {
          return Some(create_jsx_name_from_str(&path.join(".")));
        }
      }
    }
    None
  }
/// Define an identifier in a scope.
fn define_id(&mut self, id: String, block: bool) {
let scope = if block {
self.current_scope_mut()
} else {
self.current_fn_scope_mut()
};
scope.defined.push(id);
}
/// Define a pattern in a scope.
fn define_pat(&mut self, pat: &Pat, block: bool) {
// `x`
if let Pat::Ident(d) = pat {
self.define_id(d.id.sym.to_string(), block);
}
// `...x`
if let Pat::Array(d) = pat {
let mut index = 0;
while index < d.elems.len() {
if let Some(d) = &d.elems[index] {
self.define_pat(d, block);
}
index += 1;
}
}
// `...x`
if let Pat::Rest(d) = pat {
self.define_pat(&d.arg, block);
}
// `{x=y}`
if let Pat::Assign(d) = pat {
self.define_pat(&d.left, block);
}
if let Pat::Object(d) = pat {
let mut index = 0;
while index < d.props.len() {
match &d.props[index] {
// `{...x}`
ObjectPatProp::Rest(d) => {
self.define_pat(&d.arg, block);
}
// `{key: value}`
ObjectPatProp::KeyValue(d) => {
self.define_pat(&d.value, block);
}
// `{key}` or `{key = value}`
ObjectPatProp::Assign(d) => {
self.define_id(d.key.sym.to_string(), block);
}
}
index += 1;
}
}
}
}
impl<'a> VisitMut for State<'a> {
noop_visit_mut_type!();
/// Rewrite JSX identifiers.
fn visit_mut_jsx_element(&mut self, node: &mut JSXElement) {
let parts = match &node.opening.name {
// `<x.y>`, `<Foo.Bar>`, `<x.y.z>`.
JSXElementName::JSXMemberExpr(d) => {
let parts = jsx_member_to_parts(d);
parts.into_iter().map(String::from).collect::<Vec<_>>()
}
// `<foo>`, `<Foo>`, `<$>`, `<_bar>`, `<a_b>`.
JSXElementName::Ident(d) => vec![(d.sym).to_string()],
// `<xml:thing>`.
JSXElementName::JSXNamespacedName(d) => {
vec![format!("{}:{}", d.ns.sym, d.name.sym)]
}
};
if let Some(name) = self.ref_ids(&parts, &node.span) {
if let Some(closing) = node.closing.as_mut() {
closing.name = name.clone();
}
node.opening.name = name;
}
node.visit_mut_children_with(self);
}
/// Add specifiers of import declarations.
fn visit_mut_import_decl(&mut self, node: &mut ImportDecl) {
let mut index = 0;
while index < node.specifiers.len() {
let ident = match &node.specifiers[index] {
ImportSpecifier::Default(x) => &x.local.sym,
ImportSpecifier::Namespace(x) => &x.local.sym,
ImportSpecifier::Named(x) => &x.local.sym,
};
self.define_id(ident.to_string(), false);
index += 1;
}
node.visit_mut_children_with(self);
}
/// Add patterns of variable declarations.
fn visit_mut_var_decl(&mut self, node: &mut VarDecl) {
let block = node.kind != VarDeclKind::Var;
let mut index = 0;
while index < node.decls.len() {
self.define_pat(&node.decls[index].name, block);
index += 1;
}
node.visit_mut_children_with(self);
}
/// Add identifier of class declaration.
fn visit_mut_class_decl(&mut self, node: &mut ClassDecl) {
self.define_id(node.ident.sym.to_string(), false);
node.visit_mut_children_with(self);
}
/// On function declarations, add name, create scope, add parameters.
fn visit_mut_fn_decl(&mut self, node: &mut FnDecl) {
let id = node.ident.sym.to_string();
self.define_id(id.clone(), false);
self.enter(Some(Info {
name: Some(id),
..Default::default()
}));
let mut index = 0;
while index < node.function.params.len() {
self.define_pat(&node.function.params[index].pat, false);
index += 1;
}
node.visit_mut_children_with(self);
// Rewrite.
self.exit_func(Func::Decl(node));
}
/// On function expressions, add name, create scope, add parameters.
fn visit_mut_fn_expr(&mut self, node: &mut FnExpr) {
// Note: `periscopic` adds the ID to the newly generated scope, for
// fn expressions.
// That seems wrong?
let name = if let Some(ident) = &node.ident {
let id = ident.sym.to_string();
self.define_id(id.clone(), false);
Some(id)
} else {
None
};
self.enter(Some(Info {
name,
..Default::default()
}));
let mut index = 0;
while index < node.function.params.len() {
self.define_pat(&node.function.params[index].pat, false);
index += 1;
}
node.visit_mut_children_with(self);
self.exit_func(Func::Expr(node));
}
/// On arrow functions, create scope, add parameters.
fn visit_mut_arrow_expr(&mut self, node: &mut ArrowExpr) {
    // Arrow functions are anonymous, so the scope gets no name.
    self.enter(Some(Info::default()));
    for param in &node.params {
        self.define_pat(param, false);
    }
    node.visit_mut_children_with(self);
    self.exit_func(Func::Arrow(node));
}
// Blocks.
// Not sure why `periscopic` only does `For`/`ForIn`/`ForOf`/`Block`.
// I added `While`/`DoWhile` here just to be sure.
// But there are more.
// Each statement kind below opens an anonymous scope (`enter(None)`),
// visits its children, and discards the scope again (`exit()`).
/// On for statements, create scope.
fn visit_mut_for_stmt(&mut self, node: &mut ForStmt) {
self.enter(None);
node.visit_mut_children_with(self);
self.exit();
}
/// On for/in statements, create scope.
fn visit_mut_for_in_stmt(&mut self, node: &mut ForInStmt) {
self.enter(None);
node.visit_mut_children_with(self);
self.exit();
}
/// On for/of statements, create scope.
fn visit_mut_for_of_stmt(&mut self, node: &mut ForOfStmt) {
self.enter(None);
node.visit_mut_children_with(self);
self.exit();
}
/// On while statements, create scope.
fn visit_mut_while_stmt(&mut self, node: &mut WhileStmt) {
self.enter(None);
node.visit_mut_children_with(self);
self.exit();
}
/// On do/while statements, create scope.
fn visit_mut_do_while_stmt(&mut self, node: &mut DoWhileStmt) {
self.enter(None);
node.visit_mut_children_with(self);
self.exit();
}
/// On block statements, create scope.
fn visit_mut_block_stmt(&mut self, node: &mut BlockStmt) {
self.enter(None);
node.visit_mut_children_with(self);
self.exit();
}
/// On catch clauses, create scope, add param.
fn visit_mut_catch_clause(&mut self, node: &mut CatchClause) {
self.enter(None);
// The optional catch binding (`catch (err)`) is defined block-scoped.
if let Some(pat) = &node.param {
self.define_pat(pat, true);
}
node.visit_mut_children_with(self);
self.exit();
}
}
/// Generate an import provider.
///
/// ```js
/// import { useMDXComponents as _provideComponents } from "x"
/// ```
fn create_import_provider(source: &str) -> ModuleItem {
    // `useMDXComponents as _provideComponents`
    let specifier = ImportSpecifier::Named(ImportNamedSpecifier {
        local: create_ident("_provideComponents"),
        imported: Some(ModuleExportName::Ident(create_ident("useMDXComponents"))),
        span: DUMMY_SP,
        is_type_only: false,
    });
    let import = ImportDecl {
        specifiers: vec![specifier],
        src: Box::new(create_str(source)),
        type_only: false,
        asserts: None,
        span: DUMMY_SP,
    };
    ModuleItem::ModuleDecl(ModuleDecl::Import(import))
}
/// Generate an error helper.
///
/// ```js
/// function _missingMdxReference(id, component) {
/// throw new Error("Expected " + (component ? "component" : "object") + " `" + id + "` to be defined: you likely forgot to import, pass, or provide it.");
/// }
/// ```
fn create_error_helper(development: bool, path: Option<String>) -> ModuleItem {
// `id` is the missing name; `component` says whether it was used as a JSX
// component (vs. a plain object).
let mut parameters = vec![
Param {
pat: Pat::Ident(BindingIdent {
id: create_ident("id"),
type_ann: None,
}),
decorators: vec![],
span: DUMMY_SP,
},
Param {
pat: Pat::Ident(BindingIdent {
id: create_ident("component"),
type_ann: None,
}),
decorators: vec![],
span: DUMMY_SP,
},
];
// Accept a source location (which might be undefined).
if development {
parameters.push(Param {
pat: Pat::Ident(BindingIdent {
id: create_ident("place"),
type_ann: None,
}),
decorators: vec![],
span: DUMMY_SP,
});
}
// The message is emitted as a `+`-joined concatenation of these parts.
let mut message = vec![
create_str_expression("Expected "),
// `component ? "component" : "object"`
Expr::Paren(ParenExpr {
expr: Box::new(Expr::Cond(CondExpr {
test: Box::new(create_ident_expression("component")),
cons: Box::new(create_str_expression("component")),
alt: Box::new(create_str_expression("object")),
span: DUMMY_SP,
})),
span: DUMMY_SP,
}),
create_str_expression(" `"),
create_ident_expression("id"),
create_str_expression("` to be defined: you likely forgot to import, pass, or provide it."),
];
// `place ? "\nIt’s referenced in your code at `" + place + "`" : ""`
if development {
message.push(Expr::Paren(ParenExpr {
expr: Box::new(Expr::Cond(CondExpr {
test: Box::new(create_ident_expression("place")),
cons: Box::new(create_binary_expression(
vec![
create_str_expression("\nIt’s referenced in your code at `"),
create_ident_expression("place"),
// When the file path is known, mention it after the place.
if let Some(path) = path {
create_str_expression(&format!("` in `{}`", path))
} else {
create_str_expression("`")
},
],
BinaryOp::Add,
)),
alt: Box::new(create_str_expression("")),
span: DUMMY_SP,
})),
span: DUMMY_SP,
}));
}
// `function _missingMdxReference(…) { throw new Error(<message>); }`
ModuleItem::Stmt(Stmt::Decl(Decl::Fn(FnDecl {
ident: create_ident("_missingMdxReference"),
declare: false,
function: Box::new(Function {
params: parameters,
decorators: vec![],
body: Some(BlockStmt {
stmts: vec![Stmt::Throw(ThrowStmt {
arg: Box::new(Expr::New(NewExpr {
callee: Box::new(create_ident_expression("Error")),
args: Some(vec![ExprOrSpread {
spread: None,
expr: Box::new(create_binary_expression(message, BinaryOp::Add)),
}]),
span: DUMMY_SP,
type_args: None,
})),
span: DUMMY_SP,
})],
span: DUMMY_SP,
}),
is_generator: false,
is_async: false,
type_params: None,
return_type: None,
span: DUMMY_SP,
}),
})))
}
/// Check if this function is a props receiving component: it’s one of ours.
///
/// Only the two functions this compiler generates (`_createMdxContent` and
/// `MDXContent`) receive `props`.
fn is_props_receiving_fn(name: &Option<String>) -> bool {
    matches!(name.as_deref(), Some("_createMdxContent" | "MDXContent"))
}
#[cfg(test)]
mod tests {
use super::*;
use crate::hast_util_to_swc::hast_util_to_swc;
use crate::mdast_util_to_hast::mdast_util_to_hast;
use crate::mdx_plugin_recma_document::{mdx_plugin_recma_document, Options as DocumentOptions};
use crate::swc::{parse_esm, parse_expression, serialize};
use crate::swc_utils::create_jsx_name_from_str;
use markdown::{to_mdast, Location, ParseOptions};
use pretty_assertions::assert_eq;
use swc_core::ecma::ast::{Invalid, JSXOpeningElement, Module};
/// Test helper: parse MDX `value`, run the document plugin and this
/// jsx-rewrite plugin, and serialize the program back to JavaScript.
///
/// `named` controls whether a file path (`example.mdx`) is attached.
fn compile(value: &str, options: &Options, named: bool) -> Result<String, String> {
let location = Location::new(value.as_bytes());
let mdast = to_mdast(
value,
&ParseOptions {
mdx_esm_parse: Some(Box::new(parse_esm)),
mdx_expression_parse: Some(Box::new(parse_expression)),
..ParseOptions::mdx()
},
)?;
let hast = mdast_util_to_hast(&mdast);
let filepath = if named {
Some("example.mdx".into())
} else {
None
};
let mut program = hast_util_to_swc(&hast, filepath, Some(&location))?;
mdx_plugin_recma_document(&mut program, &DocumentOptions::default(), Some(&location))?;
mdx_plugin_recma_jsx_rewrite(&mut program, options, Some(&location));
Ok(serialize(&mut program.module, Some(&program.comments)))
}
// Empty input still produces the content/wrapper scaffolding.
#[test]
fn empty() -> Result<(), String> {
assert_eq!(
compile("", &Options::default(), true)?,
"function _createMdxContent(props) {
return <></>;
}
function MDXContent(props = {}) {
const { wrapper: MDXLayout } = props.components || {};
return MDXLayout ? <MDXLayout {...props}><_createMdxContent {...props}/></MDXLayout> : _createMdxContent(props);
}
export default MDXContent;
",
"should work on an empty file",
);
Ok(())
}
// Literal markdown tags (here `h1`) are routed through `_components`.
#[test]
fn pass_literal() -> Result<(), String> {
assert_eq!(
compile("# hi", &Options::default(), true)?,
"function _createMdxContent(props) {
const _components = Object.assign({
h1: \"h1\"
}, props.components);
return <_components.h1>{\"hi\"}</_components.h1>;
}
function MDXContent(props = {}) {
const { wrapper: MDXLayout } = props.components || {};
return MDXLayout ? <MDXLayout {...props}><_createMdxContent {...props}/></MDXLayout> : _createMdxContent(props);
}
export default MDXContent;
",
"should support passing in a layout (as `wrapper`) and components for literal tags",
);
Ok(())
}
// Namespace names (`a:b`) need bracket access, hence the `_component0` alias.
#[test]
fn pass_namespace() -> Result<(), String> {
assert_eq!(
compile("<a:b />", &Options::default(), true)?,
"function _createMdxContent(props) {
const _components = Object.assign({
\"a:b\": \"a:b\"
}, props.components), _component0 = _components[\"a:b\"];
return <_component0/>;
}
function MDXContent(props = {}) {
const { wrapper: MDXLayout } = props.components || {};
return MDXLayout ? <MDXLayout {...props}><_createMdxContent {...props}/></MDXLayout> : _createMdxContent(props);
}
export default MDXContent;
",
"should support passing in a component for a JSX namespace name (`x:y`)",
);
Ok(())
}
#[test]
fn pass_scope_defined_layout_import_named() -> Result<(), String> {
assert_eq!(
compile(
| rust | MIT | 04633f3cb2d8062e2578ea02213db42c2e4a5952 | 2026-01-04T20:17:17.706328Z | true |
web-infra-dev/mdx-rs | https://github.com/web-infra-dev/mdx-rs/blob/04633f3cb2d8062e2578ea02213db42c2e4a5952/crates/mdx_rs/src/mdast_util_to_hast.rs | crates/mdx_rs/src/mdast_util_to_hast.rs | //! Turn a markdown AST into an HTML AST.
//!
//! Port of <https://github.com/syntax-tree/mdast-util-to-hast>, by the same
//! author:
//!
//! (The MIT License)
//!
//! Copyright (c) 2016 Titus Wormer <tituswormer@gmail.com>
//!
//! Permission is hereby granted, free of charge, to any person obtaining
//! a copy of this software and associated documentation files (the
//! 'Software'), to deal in the Software without restriction, including
//! without limitation the rights to use, copy, modify, merge, publish,
//! distribute, sublicense, and/or sell copies of the Software, and to
//! permit persons to whom the Software is furnished to do so, subject to
//! the following conditions:
//!
//! The above copyright notice and this permission notice shall be
//! included in all copies or substantial portions of the Software.
//!
//! THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
//! EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
//! MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
//! IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
//! CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
//! TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
//! SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
use crate::hast;
use crate::swc_utils::inter_element_whitespace;
use markdown::{mdast, sanitize, unist::Position};
// To do: support these compile options:
// ```
// pub gfm_footnote_label: Option<String>,
// pub gfm_footnote_label_tag_name: Option<String>,
// pub gfm_footnote_label_attributes: Option<String>,
// pub gfm_footnote_back_label: Option<String>,
// pub gfm_footnote_clobber_prefix: Option<String>,
// ```
//
// Maybe also:
// * option to persist `meta`?
// * option to generate a `style` attribute instead of `align`?
// * support `Raw` nodes for HTML?
//
// To do:
// * revert references when undefined?
// <https://github.com/syntax-tree/mdast-util-to-hast/blob/c393d0a/lib/revert.js>
// * when externalizing, move mdx unraveling somewhere else.
/// State needed to turn mdast into hast.
#[derive(Debug)]
struct State {
/// List of gathered definitions.
///
/// The field at `0` is the identifier, `1` the URL, and `2` the title.
definitions: Vec<(String, String, Option<String>)>,
/// List of gathered GFM footnote definitions.
///
/// The field at `0` is the identifier, `1` the node.
footnote_definitions: Vec<(String, Vec<hast::Node>)>,
/// List of gathered GFM footnote calls.
///
/// The field at `0` is the identifier, `1` a counter of how many times
/// it is used.
///
/// The order of first use determines the number rendered for each footnote.
footnote_calls: Vec<(String, usize)>,
}
/// Result of turning something into hast.
#[derive(Debug)]
enum Result {
/// Multiple nodes.
Fragment(Vec<hast::Node>),
/// Single node.
Node(hast::Node),
/// Nothing.
None,
}
/// Turn mdast into hast.
///
/// Returns the single resulting node when possible; otherwise (multiple
/// top-level nodes, or a GFM footnote footer to append) a root is
/// synthesized around the result.
pub fn mdast_util_to_hast(mdast: &mdast::Node) -> hast::Node {
    let mut definitions = vec![];

    // Collect definitions.
    // Calls take info from their definition.
    // Calls can come before definitions.
    // Footnote calls can also come before footnote definitions, but those
    // calls *do not* take info from their definitions, so we don’t care
    // about footnotes here.
    visit(mdast, |node| {
        if let mdast::Node::Definition(definition) = node {
            definitions.push((
                definition.identifier.clone(),
                definition.url.clone(),
                definition.title.clone(),
            ));
        }
    });

    let mut state = State {
        definitions,
        footnote_definitions: vec![],
        footnote_calls: vec![],
    };

    let result = one(&mut state, mdast, None);

    // Fast path: a single node and no footnote footer to generate.
    if state.footnote_calls.is_empty() {
        if let Result::Node(node) = result {
            return node;
        }
    }

    // We either have to generate a footer, or we don’t have a single node.
    // So we need a root.
    let mut root = hast::Root {
        children: vec![],
        position: None,
    };

    match result {
        Result::Fragment(children) => root.children = children,
        Result::Node(node) => {
            if let hast::Node::Root(existing) = node {
                root = existing;
            } else {
                root.children.push(node);
            }
        }
        Result::None => {}
    }

    if !state.footnote_calls.is_empty() {
        let mut items = vec![];
        let mut index = 0;

        while index < state.footnote_calls.len() {
            let (id, count) = &state.footnote_calls[index];
            let safe_id = sanitize(&id.to_lowercase());

            // Find definition: we’ll always find it.
            let mut definition_index = 0;
            while definition_index < state.footnote_definitions.len() {
                if &state.footnote_definitions[definition_index].0 == id {
                    break;
                }
                definition_index += 1;
            }

            debug_assert_ne!(
                definition_index,
                state.footnote_definitions.len(),
                "expected definition"
            );

            // We’ll find each used definition once, so we can split off to
            // take the content.
            let mut content = state.footnote_definitions[definition_index].1.split_off(0);

            // One backreference (`↩`) per call of this footnote; the second
            // and later ones get a superscript counter and an `-n` id suffix.
            let mut reference_index = 0;
            let mut backreferences = vec![];

            while reference_index < *count {
                let mut backref_children = vec![hast::Node::Text(hast::Text {
                    value: "↩".into(),
                    position: None,
                })];

                if reference_index != 0 {
                    backreferences.push(hast::Node::Text(hast::Text {
                        value: " ".into(),
                        position: None,
                    }));

                    backref_children.push(hast::Node::Element(hast::Element {
                        tag_name: "sup".into(),
                        properties: vec![],
                        children: vec![hast::Node::Text(hast::Text {
                            value: (reference_index + 1).to_string(),
                            position: None,
                        })],
                        position: None,
                    }));
                }

                backreferences.push(hast::Node::Element(hast::Element {
                    tag_name: "a".into(),
                    properties: vec![
                        (
                            "href".into(),
                            hast::PropertyValue::String(format!(
                                "#fnref-{}{}",
                                safe_id,
                                if reference_index == 0 {
                                    String::new()
                                } else {
                                    format!("-{}", reference_index + 1)
                                }
                            )),
                        ),
                        (
                            "dataFootnoteBackref".into(),
                            hast::PropertyValue::Boolean(true),
                        ),
                        (
                            "ariaLabel".into(),
                            hast::PropertyValue::String("Back to content".into()),
                        ),
                        (
                            "className".into(),
                            hast::PropertyValue::SpaceSeparated(vec![
                                "data-footnote-backref".into()
                            ]),
                        ),
                    ],
                    children: backref_children,
                    position: None,
                }));

                reference_index += 1;
            }

            // Append the backreferences to a trailing paragraph of the
            // content, when there is one (separated by a space)…
            let mut backreference_opt = Some(backreferences);

            if let Some(hast::Node::Element(tail_element)) = content.last_mut() {
                if tail_element.tag_name == "p" {
                    if let Some(hast::Node::Text(text)) = tail_element.children.last_mut() {
                        text.value.push(' ');
                    } else {
                        tail_element.children.push(hast::Node::Text(hast::Text {
                            value: " ".into(),
                            position: None,
                        }));
                    }

                    tail_element
                        .children
                        .append(&mut backreference_opt.take().unwrap());
                }
            }

            // …no paragraph, just push them.
            if let Some(mut backreference) = backreference_opt {
                content.append(&mut backreference);
            }

            items.push(hast::Node::Element(hast::Element {
                tag_name: "li".into(),
                properties: vec![(
                    "id".into(),
                    // The references generated in `transform_footnote_reference`
                    // link to `#fn-<id>`, so the `id` attribute itself must be
                    // `fn-<id>`: the `#` belongs in the `href` fragment only.
                    hast::PropertyValue::String(format!("fn-{}", safe_id)),
                )],
                children: wrap(content, true),
                position: None,
            }));

            index += 1;
        }

        root.children.push(hast::Node::Text(hast::Text {
            value: "\n".into(),
            position: None,
        }));

        root.children.push(hast::Node::Element(hast::Element {
            tag_name: "section".into(),
            properties: vec![
                ("dataFootnotes".into(), hast::PropertyValue::Boolean(true)),
                (
                    "className".into(),
                    hast::PropertyValue::SpaceSeparated(vec!["footnotes".into()]),
                ),
            ],
            children: vec![
                hast::Node::Element(hast::Element {
                    tag_name: "h2".into(),
                    properties: vec![
                        (
                            "id".into(),
                            hast::PropertyValue::String("footnote-label".into()),
                        ),
                        (
                            "className".into(),
                            hast::PropertyValue::SpaceSeparated(vec!["sr-only".into()]),
                        ),
                    ],
                    children: vec![hast::Node::Text(hast::Text {
                        value: "Footnotes".into(),
                        position: None,
                    })],
                    position: None,
                }),
                hast::Node::Text(hast::Text {
                    value: "\n".into(),
                    position: None,
                }),
                hast::Node::Element(hast::Element {
                    tag_name: "ol".into(),
                    properties: vec![],
                    children: wrap(items, true),
                    position: None,
                }),
                hast::Node::Text(hast::Text {
                    value: "\n".into(),
                    position: None,
                }),
            ],
            position: None,
        }));

        root.children.push(hast::Node::Text(hast::Text {
            value: "\n".into(),
            position: None,
        }));
    }

    hast::Node::Root(root)
}
/// Turn one mdast node into hast.
///
/// Exhaustive dispatch: each mdast variant goes to its `transform_*` handler.
fn one(state: &mut State, node: &mdast::Node, parent: Option<&mdast::Node>) -> Result {
match node {
mdast::Node::BlockQuote(d) => transform_block_quote(state, node, d),
mdast::Node::Break(d) => transform_break(state, node, d),
mdast::Node::Code(d) => transform_code(state, node, d),
mdast::Node::Delete(d) => transform_delete(state, node, d),
mdast::Node::Emphasis(d) => transform_emphasis(state, node, d),
mdast::Node::FootnoteDefinition(d) => transform_footnote_definition(state, node, d),
mdast::Node::FootnoteReference(d) => transform_footnote_reference(state, node, d),
mdast::Node::Heading(d) => transform_heading(state, node, d),
mdast::Node::Image(d) => transform_image(state, node, d),
mdast::Node::ImageReference(d) => transform_image_reference(state, node, d),
mdast::Node::InlineCode(d) => transform_inline_code(state, node, d),
mdast::Node::InlineMath(d) => transform_inline_math(state, node, d),
mdast::Node::Link(d) => transform_link(state, node, d),
mdast::Node::LinkReference(d) => transform_link_reference(state, node, d),
mdast::Node::ListItem(d) => transform_list_item(state, node, parent, d),
mdast::Node::List(d) => transform_list(state, node, d),
mdast::Node::Math(d) => transform_math(state, node, d),
mdast::Node::MdxFlowExpression(_) | mdast::Node::MdxTextExpression(_) => {
transform_mdx_expression(state, node)
}
mdast::Node::MdxJsxFlowElement(_) | mdast::Node::MdxJsxTextElement(_) => {
transform_mdx_jsx_element(state, node)
}
mdast::Node::MdxjsEsm(d) => transform_mdxjs_esm(state, node, d),
mdast::Node::Paragraph(d) => transform_paragraph(state, node, d),
mdast::Node::Root(d) => transform_root(state, node, d),
mdast::Node::Strong(d) => transform_strong(state, node, d),
// Note: this is only called here if there is a single cell passed, not when one is found in a table.
mdast::Node::TableCell(d) => {
transform_table_cell(state, node, false, mdast::AlignKind::None, d)
}
// Note: this is only called here if there is a single row passed, not when one is found in a table.
mdast::Node::TableRow(d) => transform_table_row(state, node, false, None, d),
mdast::Node::Table(d) => transform_table(state, node, d),
mdast::Node::Text(d) => transform_text(state, node, d),
mdast::Node::ThematicBreak(d) => transform_thematic_break(state, node, d),
// Ignore.
mdast::Node::Definition(_)
| mdast::Node::Html(_)
| mdast::Node::Yaml(_)
| mdast::Node::Toml(_) => Result::None,
}
}
/// [`BlockQuote`][mdast::BlockQuote] → `<blockquote>`.
fn transform_block_quote(
    state: &mut State,
    node: &mdast::Node,
    block_quote: &mdast::BlockQuote,
) -> Result {
    let children = wrap(all(state, node), true);
    let element = hast::Element {
        tag_name: "blockquote".into(),
        properties: vec![],
        children,
        position: block_quote.position.clone(),
    };
    Result::Node(hast::Node::Element(element))
}
/// [`Break`][mdast::Break] → `<br>` plus a trailing eol.
fn transform_break(_state: &mut State, _node: &mdast::Node, break_: &mdast::Break) -> Result {
    let br = hast::Node::Element(hast::Element {
        tag_name: "br".into(),
        properties: vec![],
        children: vec![],
        position: break_.position.clone(),
    });
    let eol = hast::Node::Text(hast::Text {
        value: "\n".into(),
        position: None,
    });
    Result::Fragment(vec![br, eol])
}
/// [`Code`][mdast::Code] → `<pre><code>`.
fn transform_code(_state: &mut State, _node: &mdast::Node, code: &mdast::Code) -> Result {
    // The rendered value always ends with an eol.
    let value = format!("{}\n", code.value);
    let mut properties = vec![];
    if let Some(lang) = &code.lang {
        properties.push((
            "className".into(),
            hast::PropertyValue::SpaceSeparated(vec![format!("language-{}", lang)]),
        ));
    }
    if let Some(meta) = &code.meta {
        properties.push((
            "meta".into(),
            hast::PropertyValue::SpaceSeparated(vec![meta.clone()]),
        ));
    }
    let code_element = hast::Node::Element(hast::Element {
        tag_name: "code".into(),
        properties,
        children: vec![hast::Node::Text(hast::Text {
            value,
            position: None,
        })],
        position: code.position.clone(),
    });
    Result::Node(hast::Node::Element(hast::Element {
        tag_name: "pre".into(),
        properties: vec![],
        children: vec![code_element],
        position: code.position.clone(),
    }))
}
/// [`Delete`][mdast::Delete] → `<del>`.
fn transform_delete(state: &mut State, node: &mdast::Node, delete: &mdast::Delete) -> Result {
    let children = all(state, node);
    Result::Node(hast::Node::Element(hast::Element {
        tag_name: "del".into(),
        properties: vec![],
        children,
        position: delete.position.clone(),
    }))
}
/// [`Emphasis`][mdast::Emphasis] → `<em>`.
fn transform_emphasis(state: &mut State, node: &mdast::Node, emphasis: &mdast::Emphasis) -> Result {
    let children = all(state, node);
    Result::Node(hast::Node::Element(hast::Element {
        tag_name: "em".into(),
        properties: vec![],
        children,
        position: emphasis.position.clone(),
    }))
}
/// [`FootnoteDefinition`][mdast::FootnoteDefinition].
///
/// Emits nothing in place: the content is stashed on the state and rendered
/// later in the footnote footer.
fn transform_footnote_definition(
    state: &mut State,
    node: &mdast::Node,
    footnote_definition: &mdast::FootnoteDefinition,
) -> Result {
    let children = all(state, node);
    let identifier = footnote_definition.identifier.clone();
    state.footnote_definitions.push((identifier, children));
    Result::None
}
/// [`FootnoteReference`][mdast::FootnoteReference].
///
/// Yields `<sup><a href="#fn-…" id="fnref-…">n</a></sup>`, where `n` is the
/// 1-based index of this footnote's first use.
fn transform_footnote_reference(
    state: &mut State,
    _node: &mdast::Node,
    footnote_reference: &mdast::FootnoteReference,
) -> Result {
    let safe_id = sanitize(&footnote_reference.identifier.to_lowercase());

    // Find an earlier call of this footnote, or register a new one.
    let call_index = match state
        .footnote_calls
        .iter()
        .position(|(id, _)| *id == footnote_reference.identifier)
    {
        Some(index) => index,
        None => {
            state
                .footnote_calls
                .push((footnote_reference.identifier.clone(), 0));
            state.footnote_calls.len() - 1
        }
    };

    // Count this use; second and later uses get a `-2`, `-3`, … suffix on
    // their `id` so every reference stays unique.
    state.footnote_calls[call_index].1 += 1;
    let reuse_counter = state.footnote_calls[call_index].1;

    Result::Node(hast::Node::Element(hast::Element {
        tag_name: "sup".into(),
        properties: vec![],
        children: vec![hast::Node::Element(hast::Element {
            tag_name: "a".into(),
            properties: vec![
                (
                    "href".into(),
                    hast::PropertyValue::String(format!("#fn-{}", safe_id)),
                ),
                (
                    "id".into(),
                    hast::PropertyValue::String(format!(
                        "fnref-{}{}",
                        safe_id,
                        if reuse_counter > 1 {
                            format!("-{}", reuse_counter)
                        } else {
                            String::new()
                        }
                    )),
                ),
                ("dataFootnoteRef".into(), hast::PropertyValue::Boolean(true)),
                (
                    "ariaDescribedBy".into(),
                    hast::PropertyValue::String("footnote-label".into()),
                ),
            ],
            children: vec![hast::Node::Text(hast::Text {
                value: (call_index + 1).to_string(),
                position: None,
            })],
            position: None,
        })],
        position: footnote_reference.position.clone(),
    }))
}
/// [`Heading`][mdast::Heading] → `<h1>`…`<h6>`, following the mdast depth.
fn transform_heading(state: &mut State, node: &mdast::Node, heading: &mdast::Heading) -> Result {
    let tag_name = format!("h{}", heading.depth);
    let children = all(state, node);
    Result::Node(hast::Node::Element(hast::Element {
        tag_name,
        properties: vec![],
        children,
        position: heading.position.clone(),
    }))
}
/// [`Image`][mdast::Image] → `<img>`.
fn transform_image(_state: &mut State, _node: &mdast::Node, image: &mdast::Image) -> Result {
    // `src` and `alt` are always set; `title` only when present.
    let mut properties = vec![
        (
            "src".into(),
            hast::PropertyValue::String(sanitize(&image.url)),
        ),
        ("alt".into(), hast::PropertyValue::String(image.alt.clone())),
    ];
    if let Some(title) = &image.title {
        properties.push(("title".into(), hast::PropertyValue::String(title.into())));
    }
    Result::Node(hast::Node::Element(hast::Element {
        tag_name: "img".into(),
        properties,
        children: vec![],
        position: image.position.clone(),
    }))
}
/// [`ImageReference`][mdast::ImageReference] → `<img>`, using the matching
/// definition for `src`/`title`.
fn transform_image_reference(
    state: &mut State,
    _node: &mdast::Node,
    image_reference: &mdast::ImageReference,
) -> Result {
    // Definitions were collected up front; a dangling reference panics (see
    // the module-level to-do about reverting undefined references).
    let (_, url, title) = state
        .definitions
        .iter()
        .find(|d| d.0 == image_reference.identifier)
        .expect("expected reference to have a corresponding definition");
    let mut properties = vec![
        ("src".into(), hast::PropertyValue::String(sanitize(url))),
        (
            "alt".into(),
            hast::PropertyValue::String(image_reference.alt.clone()),
        ),
    ];
    if let Some(value) = title {
        properties.push(("title".into(), hast::PropertyValue::String(value.into())));
    }
    Result::Node(hast::Node::Element(hast::Element {
        tag_name: "img".into(),
        properties,
        children: vec![],
        position: image_reference.position.clone(),
    }))
}
/// [`InlineCode`][mdast::InlineCode] → `<code>`.
fn transform_inline_code(
    _state: &mut State,
    _node: &mdast::Node,
    inline_code: &mdast::InlineCode,
) -> Result {
    // Inline code may span source lines; eols are rendered as spaces.
    let text = hast::Node::Text(hast::Text {
        value: replace_eols_with_spaces(&inline_code.value),
        position: None,
    });
    Result::Node(hast::Node::Element(hast::Element {
        tag_name: "code".into(),
        properties: vec![],
        children: vec![text],
        position: inline_code.position.clone(),
    }))
}
/// [`InlineMath`][mdast::InlineMath] → `<code class="language-math math-inline">`.
fn transform_inline_math(
    _state: &mut State,
    _node: &mdast::Node,
    inline_math: &mdast::InlineMath,
) -> Result {
    let class_list = vec!["language-math".into(), "math-inline".into()];
    let text = hast::Node::Text(hast::Text {
        value: replace_eols_with_spaces(&inline_math.value),
        position: None,
    });
    Result::Node(hast::Node::Element(hast::Element {
        tag_name: "code".into(),
        properties: vec![(
            "className".into(),
            hast::PropertyValue::SpaceSeparated(class_list),
        )],
        children: vec![text],
        position: inline_math.position.clone(),
    }))
}
/// [`Link`][mdast::Link] → `<a>`.
fn transform_link(state: &mut State, node: &mdast::Node, link: &mdast::Link) -> Result {
    let mut properties = vec![(
        "href".into(),
        hast::PropertyValue::String(sanitize(&link.url)),
    )];
    if let Some(title) = &link.title {
        properties.push(("title".into(), hast::PropertyValue::String(title.into())));
    }
    let children = all(state, node);
    Result::Node(hast::Node::Element(hast::Element {
        tag_name: "a".into(),
        properties,
        children,
        position: link.position.clone(),
    }))
}
/// [`LinkReference`][mdast::LinkReference] → `<a>`, using the matching
/// definition for `href`/`title`.
fn transform_link_reference(
    state: &mut State,
    node: &mdast::Node,
    link_reference: &mdast::LinkReference,
) -> Result {
    // A dangling reference panics (see module-level to-do about reverting).
    let (_, url, title) = state
        .definitions
        .iter()
        .find(|d| d.0 == link_reference.identifier)
        .expect("expected reference to have a corresponding definition");
    let mut properties = vec![("href".into(), hast::PropertyValue::String(sanitize(url)))];
    if let Some(value) = title {
        properties.push(("title".into(), hast::PropertyValue::String(value.into())));
    }
    let children = all(state, node);
    Result::Node(hast::Node::Element(hast::Element {
        tag_name: "a".into(),
        properties,
        children,
        position: link_reference.position.clone(),
    }))
}
/// [`ListItem`][mdast::ListItem].
///
/// Produces an `<li>`; "loose" items keep their `<p>` wrappers, tight items
/// have them unwrapped (exact looseness rule: `list_item_loose`/`list_loose`).
fn transform_list_item(
state: &mut State,
node: &mdast::Node,
parent: Option<&mdast::Node>,
list_item: &mdast::ListItem,
) -> Result {
let mut children = all(state, node);
// The parent list's looseness, when available, wins over the item's own.
let mut loose = list_item_loose(node);
if let Some(parent) = parent {
if matches!(parent, mdast::Node::List(_)) {
loose = list_loose(parent);
}
};
let mut properties = vec![];
// Inject a checkbox.
if let Some(checked) = list_item.checked {
// According to github-markdown-css, this class hides bullet.
// See: <https://github.com/sindresorhus/github-markdown-css>.
properties.push((
"className".into(),
hast::PropertyValue::SpaceSeparated(vec!["task-list-item".into()]),
));
// The checkbox mirrors `checked` and is always rendered disabled.
let mut input = Some(hast::Node::Element(hast::Element {
tag_name: "input".into(),
properties: vec![
(
"type".into(),
hast::PropertyValue::String("checkbox".into()),
),
("checked".into(), hast::PropertyValue::Boolean(checked)),
("disabled".into(), hast::PropertyValue::Boolean(true)),
],
children: vec![],
position: None,
}));
// Prefer injecting into a leading paragraph; a space separates the box
// from existing paragraph content.
if let Some(hast::Node::Element(x)) = children.first_mut() {
if x.tag_name == "p" {
if !x.children.is_empty() {
x.children.insert(
0,
hast::Node::Text(hast::Text {
value: " ".into(),
position: None,
}),
);
}
x.children.insert(0, input.take().unwrap());
}
}
// If the input wasn't injected yet, inject a paragraph.
if let Some(input) = input {
children.insert(
0,
hast::Node::Element(hast::Element {
tag_name: "p".into(),
properties: vec![],
children: vec![input],
position: None,
}),
);
}
}
// Walk children front-to-back by reversing once and popping, so each
// child can be moved (taken by value) into the result.
children.reverse();
let mut result = vec![];
let mut head = true;
let empty = children.is_empty();
let mut tail_p = false;
while let Some(child) = children.pop() {
let mut is_p = false;
if let hast::Node::Element(el) = &child {
if el.tag_name == "p" {
is_p = true;
}
}
// Add eols before nodes, except if this is a tight, first paragraph.
if loose || !head || !is_p {
result.push(hast::Node::Text(hast::Text {
value: "\n".into(),
position: None,
}));
}
if is_p && !loose {
// Unwrap the paragraph.
if let hast::Node::Element(mut el) = child {
result.append(&mut el.children);
}
} else {
result.push(child);
}
head = false;
tail_p = is_p;
}
// Add eol after last node, except if it is tight or a paragraph.
if !empty && (loose || !tail_p) {
result.push(hast::Node::Text(hast::Text {
value: "\n".into(),
position: None,
}));
}
Result::Node(hast::Node::Element(hast::Element {
tag_name: "li".into(),
properties,
children: result,
position: list_item.position.clone(),
}))
}
/// [`List`][mdast::List] → `<ol>`/`<ul>`.
fn transform_list(state: &mut State, node: &mdast::Node, list: &mdast::List) -> Result {
    // A list is a task list when any of its items carries a checkbox.
    let contains_task_list = list
        .children
        .iter()
        .any(|child| matches!(child, mdast::Node::ListItem(item) if item.checked.is_some()));

    let mut properties = vec![];

    // Add `start` for ordered lists, except for the default of `1`.
    if let Some(start) = list.start {
        if list.ordered && start != 1 {
            properties.push((
                "start".into(),
                hast::PropertyValue::String(start.to_string()),
            ));
        }
    }

    // Like GitHub, add a class for custom styling.
    if contains_task_list {
        properties.push((
            "className".into(),
            hast::PropertyValue::SpaceSeparated(vec!["contains-task-list".into()]),
        ));
    }

    Result::Node(hast::Node::Element(hast::Element {
        tag_name: if list.ordered {
            "ol".into()
        } else {
            "ul".into()
        },
        properties,
        children: wrap(all(state, node), true),
        position: list.position.clone(),
    }))
}
/// [`Math`][mdast::Math] → `<pre><code class="language-math math-display">`.
fn transform_math(_state: &mut State, _node: &mdast::Node, math: &mdast::Math) -> Result {
    // The rendered value always ends with an eol.
    let value = format!("{}\n", math.value);
    let code = hast::Node::Element(hast::Element {
        tag_name: "code".into(),
        properties: vec![(
            "className".into(),
            hast::PropertyValue::SpaceSeparated(vec!["language-math".into(), "math-display".into()]),
        )],
        children: vec![hast::Node::Text(hast::Text {
            value,
            position: None,
        })],
        position: math.position.clone(),
    });
    Result::Node(hast::Node::Element(hast::Element {
        tag_name: "pre".into(),
        properties: vec![],
        children: vec![code],
        position: math.position.clone(),
    }))
}
/// [`MdxFlowExpression`][mdast::MdxFlowExpression],[`MdxTextExpression`][mdast::MdxTextExpression].
fn transform_mdx_expression(_state: &mut State, node: &mdast::Node) -> Result {
    // Flow and text expressions carry the same payload; handle them
    // uniformly (mirrors `transform_mdx_jsx_element`).
    let (value, position, stops) = match node {
        mdast::Node::MdxFlowExpression(expression) => {
            (&expression.value, &expression.position, &expression.stops)
        }
        mdast::Node::MdxTextExpression(expression) => {
            (&expression.value, &expression.position, &expression.stops)
        }
        _ => unreachable!("expected expression"),
    };
    Result::Node(hast::Node::MdxExpression(hast::MdxExpression {
        value: value.clone(),
        position: position.clone(),
        stops: stops.clone(),
    }))
}
/// [`MdxJsxFlowElement`][mdast::MdxJsxFlowElement],[`MdxJsxTextElement`][mdast::MdxJsxTextElement].
fn transform_mdx_jsx_element(state: &mut State, node: &mdast::Node) -> Result {
    // Flow and text JSX elements share the same payload.
    let (name, attributes) = match node {
        mdast::Node::MdxJsxFlowElement(element) => (&element.name, &element.attributes),
        mdast::Node::MdxJsxTextElement(element) => (&element.name, &element.attributes),
        _ => unreachable!("expected mdx jsx element"),
    };
    let children = all(state, node);
    Result::Node(hast::Node::MdxJsxElement(hast::MdxJsxElement {
        name: name.clone(),
        attributes: attributes.clone(),
        children,
        position: node.position().cloned(),
    }))
}
/// [`MdxjsEsm`][mdast::MdxjsEsm].
///
/// ESM (import/export) nodes are leaf literals; copy them through unchanged.
fn transform_mdxjs_esm(
  _state: &mut State,
  _node: &mdast::Node,
  mdxjs_esm: &mdast::MdxjsEsm,
) -> Result {
  let esm = hast::MdxjsEsm {
    value: mdxjs_esm.value.clone(),
    position: mdxjs_esm.position.clone(),
    stops: mdxjs_esm.stops.clone(),
  };
  Result::Node(hast::Node::MdxjsEsm(esm))
}
/// [`Paragraph`][mdast::Paragraph].
///
/// A paragraph whose children are only MDX constructs (JSX elements or
/// expressions) plus inter-element whitespace is unwrapped into a fragment,
/// so no spurious `<p>` surrounds pure MDX content; otherwise a `<p>` is
/// emitted.
fn transform_paragraph(
  state: &mut State,
  node: &mdast::Node,
  paragraph: &mdast::Paragraph,
) -> Result {
  let children = all(state, node);
  // Every child must be JSX/expression or whitespace-only text…
  let only_mdx_or_whitespace = children.iter().all(|child| match child {
    hast::Node::MdxJsxElement(_) | hast::Node::MdxExpression(_) => true,
    hast::Node::Text(text) => inter_element_whitespace(&text.value),
    _ => false,
  });
  // …and at least one actual MDX construct must be present.
  let has_mdx = children.iter().any(|child| {
    matches!(
      child,
      hast::Node::MdxJsxElement(_) | hast::Node::MdxExpression(_)
    )
  });
  if only_mdx_or_whitespace && has_mdx {
    Result::Fragment(children)
  } else {
    Result::Node(hast::Node::Element(hast::Element {
      tag_name: "p".into(),
      properties: vec![],
      children,
      position: paragraph.position.clone(),
    }))
  }
}
/// [`Root`][mdast::Root].
///
/// Transforms all children and wraps them (adding newlines between flow
/// content) into a hast root.
fn transform_root(state: &mut State, node: &mdast::Node, root: &mdast::Root) -> Result {
  let children = wrap(all(state, node), false);
  Result::Node(hast::Node::Root(hast::Root {
    children,
    position: root.position.clone(),
  }))
}
/// [`Strong`][mdast::Strong].
///
/// Emits a `<strong>` element around the transformed children.
fn transform_strong(state: &mut State, node: &mdast::Node, strong: &mdast::Strong) -> Result {
  let children = all(state, node);
  Result::Node(hast::Node::Element(hast::Element {
    tag_name: "strong".into(),
    properties: vec![],
    children,
    position: strong.position.clone(),
  }))
}
/// [`TableCell`][mdast::TableCell].
///
/// Emits `<th>` in the head row and `<td>` elsewhere, with an `align`
/// attribute when the column declares one.
fn transform_table_cell(
  state: &mut State,
  node: &mdast::Node,
  head: bool,
  align: mdast::AlignKind,
  table_cell: &mdast::TableCell,
) -> Result {
  let tag_name = if head { "th" } else { "td" };
  let align_value = match align {
    mdast::AlignKind::Left => Some("left"),
    mdast::AlignKind::Right => Some("right"),
    mdast::AlignKind::Center => Some("center"),
    mdast::AlignKind::None => None,
  };
  let properties = align_value
    .map(|value| vec![("align".into(), hast::PropertyValue::String(value.into()))])
    .unwrap_or_default();
  Result::Node(hast::Node::Element(hast::Element {
    tag_name: tag_name.into(),
    properties,
    children: all(state, node),
    position: table_cell.position.clone(),
  }))
}
/// [`TableRow`][mdast::TableRow].
///
/// Renders a `<tr>`. `head` decides whether cells become `<th>` or `<td>`.
/// The alignment row fixes the column count: rows shorter than it are padded
/// with empty cells so every row has the same number of columns.
fn transform_table_row(
  state: &mut State,
  _node: &mdast::Node,
  head: bool,
  align: Option<&[mdast::AlignKind]>,
  table_row: &mdast::TableRow,
) -> Result {
  let mut children = vec![];
  let mut index = 0;
  // Column count comes from the alignment row when available.
  #[allow(clippy::redundant_closure_for_method_calls)]
  let len = align.map_or(table_row.children.len(), |d| d.len());
  // Filler for columns this row does not provide.
  let empty_cell = mdast::Node::TableCell(mdast::TableCell {
    children: vec![],
    position: None,
  });
  while index < len {
    let align_value = align
      .and_then(|d| d.get(index))
      .unwrap_or(&mdast::AlignKind::None);
    let child = table_row.children.get(index).unwrap_or(&empty_cell);
    let result = if let mdast::Node::TableCell(table_cell) = child {
      transform_table_cell(state, child, head, *align_value, table_cell)
    } else {
      // Fixed typo in the panic message ("tale" -> "table").
      unreachable!("expected table cell in table row")
    };
    append_result(&mut children, result);
    index += 1;
  }
  Result::Node(hast::Node::Element(hast::Element {
    tag_name: "tr".into(),
    properties: vec![],
    children: wrap(children, true),
    position: table_row.position.clone(),
  }))
}
/// [`Table`][mdast::Table].
fn transform_table(state: &mut State, _node: &mdast::Node, table: &mdast::Table) -> Result {
let mut rows = vec![];
let mut index = 0;
while index < table.children.len() {
let child = &table.children[index];
let result = if let mdast::Node::TableRow(table_row) = child {
transform_table_row(
state,
&table.children[index],
index == 0,
Some(&table.align),
table_row,
)
} else {
unreachable!("expected table row as child of table")
};
append_result(&mut rows, result);
index += 1;
}
let body_rows = rows.split_off(1);
let head_row = rows.pop();
let mut children = vec![];
if let Some(row) = head_row {
let position = row.position().cloned();
children.push(hast::Node::Element(hast::Element {
tag_name: "thead".into(),
properties: vec![],
| rust | MIT | 04633f3cb2d8062e2578ea02213db42c2e4a5952 | 2026-01-04T20:17:17.706328Z | true |
web-infra-dev/mdx-rs | https://github.com/web-infra-dev/mdx-rs/blob/04633f3cb2d8062e2578ea02213db42c2e4a5952/crates/plugin_external_link/src/lib.rs | crates/plugin_external_link/src/lib.rs | //! Author: sanyuan0704
//!
//! This plugin is used to handle the external link in mdx.
//!
//! If the link is external, we will add the `target="_blank"` and `rel="noopener noreferrer"` attribute to the link element.
/// Whether `url` points outside the site, i.e. starts with an explicit
/// `http://` or `https://` scheme. Everything else (relative paths, other
/// schemes) is treated as internal.
fn is_external_url(url: &str) -> bool {
  url.starts_with("http://") || url.starts_with("https://")
}
/// Add `target="_blank"` and `rel="noopener noreferrer"` to `<a>` elements
/// whose first `href` property is a string holding an external URL.
fn transform_link_element(node: &mut hast::Node) {
  if let hast::Node::Element(element) = node {
    if element.tag_name != "a" {
      return;
    }
    // Inspect the first `href` property; only string values count.
    let external = match element.properties.iter().find(|(key, _)| key == "href") {
      Some((_, hast::PropertyValue::String(url))) => is_external_url(url),
      _ => false,
    };
    if external {
      element.properties.push((
        "target".into(),
        hast::PropertyValue::String("_blank".into()),
      ));
      element.properties.push((
        "rel".into(),
        hast::PropertyValue::String("noopener noreferrer".into()),
      ));
    }
  }
}
/// Depth-first walk: rewrite this node, then recurse into any children.
fn mdx_plugin_external_link_impl(node: &mut hast::Node) {
  transform_link_element(node);
  for child in node.children_mut().into_iter().flatten() {
    mdx_plugin_external_link_impl(child);
  }
}
/// Public entry point: walk the whole hast tree and mark external links
/// (`target="_blank"`, `rel="noopener noreferrer"`).
pub fn mdx_plugin_external_link(node: &mut hast::Node) {
  mdx_plugin_external_link_impl(node);
}
#[cfg(test)]
mod tests {
  use super::*;
  // Only URLs with a full `http://`/`https://` scheme are external; a single
  // slash after the colon does not count.
  #[test]
  fn test_is_external_url() {
    assert!(is_external_url("http://example.com"));
    assert!(is_external_url("https://example.com"));
    assert!(!is_external_url("doc/zh/config"));
    assert!(!is_external_url("http:/example.com"));
    assert!(!is_external_url("https:/example.com"));
  }
  // External `<a>` gains `target` and `rel` properties, appended after the
  // existing `href`.
  #[test]
  fn test_transform_link_element() {
    let mut node = hast::Node::Element(hast::Element {
      tag_name: "a".into(),
      properties: vec![(
        "href".into(),
        hast::PropertyValue::String("http://example.com".into()),
      )],
      children: vec![],
      position: None,
    });
    transform_link_element(&mut node);
    assert_eq!(
      node,
      hast::Node::Element(hast::Element {
        tag_name: "a".into(),
        properties: vec![
          (
            "href".into(),
            hast::PropertyValue::String("http://example.com".into()),
          ),
          (
            "target".into(),
            hast::PropertyValue::String("_blank".into())
          ),
          (
            "rel".into(),
            hast::PropertyValue::String("noopener noreferrer".into()),
          ),
        ],
        children: vec![],
        position: None,
      })
    );
  }
}
| rust | MIT | 04633f3cb2d8062e2578ea02213db42c2e4a5952 | 2026-01-04T20:17:17.706328Z | false |
web-infra-dev/mdx-rs | https://github.com/web-infra-dev/mdx-rs/blob/04633f3cb2d8062e2578ea02213db42c2e4a5952/crates/utils/src/lib.rs | crates/utils/src/lib.rs | //! Author: sanyuan0704
/// Split a heading text like `"Hello World {#custom-id}"` into its title
/// (`"Hello World"`, trailing whitespace trimmed) and custom id
/// (`"custom-id"`).
///
/// When there is no `{#` marker the whole text is the title. When the marker
/// is present but unterminated (no `}`), the id is empty.
pub fn extract_title_and_id(text_value: &str) -> (String, String) {
  match text_value.find("{#") {
    Some(start) => {
      let title = text_value[..start].trim_end().to_string();
      let rest = &text_value[start + 2..];
      let custom_id = match rest.find('}') {
        Some(end) => rest[..end].to_string(),
        None => String::new(),
      };
      (title, custom_id)
    }
    None => (text_value.to_string(), String::new()),
  }
}
#[cfg(test)]
mod tests {
  use super::*;
  // `{#id}` suffix is split off and the title is trimmed.
  #[test]
  fn test_extract_title_and_id_with_custom_id() {
    let (title, custom_id) = extract_title_and_id("Hello World {#id123}");
    assert_eq!(title, "Hello World");
    assert_eq!(custom_id, "id123");
  }
  // No marker: the whole text is the title, id is empty.
  #[test]
  fn test_extract_title_and_id_without_custom_id() {
    let (title, custom_id) = extract_title_and_id("Hello World");
    assert_eq!(title, "Hello World");
    assert_eq!(custom_id, "");
  }
  // Quotes inside the title are preserved verbatim.
  #[test]
  fn test_extract_title_and_id_with_quotes() {
    let (title, custom_id) = extract_title_and_id("\"Hello' World\" {#id123}");
    assert_eq!(title, "\"Hello\' World\"");
    assert_eq!(custom_id, "id123");
  }
}
| rust | MIT | 04633f3cb2d8062e2578ea02213db42c2e4a5952 | 2026-01-04T20:17:17.706328Z | false |
web-infra-dev/mdx-rs | https://github.com/web-infra-dev/mdx-rs/blob/04633f3cb2d8062e2578ea02213db42c2e4a5952/crates/hast/src/lib.rs | crates/hast/src/lib.rs | //! HTML syntax tree: [hast][].
//!
//! [hast]: https://github.com/syntax-tree/hast
#![allow(dead_code)]
extern crate alloc;
extern crate markdown;
pub use markdown::mdast::{AttributeContent, AttributeValue, MdxJsxAttribute, Stop};
use markdown::unist::Position;
/// Nodes.
///
/// The hast tree mirrors HTML structure; the `Mdx*` variants carry MDX
/// constructs (JSX, expressions, ESM) through untouched.
#[derive(Clone, PartialEq, Eq)]
pub enum Node {
  /// Root.
  Root(Root),
  /// Element.
  Element(Element),
  /// Document type.
  Doctype(Doctype),
  /// Comment.
  Comment(Comment),
  /// Text.
  Text(Text),
  // MDX being passed through.
  /// MDX: JSX element.
  MdxJsxElement(MdxJsxElement),
  /// MDX.js ESM.
  MdxjsEsm(MdxjsEsm),
  // MDX: expression.
  MdxExpression(MdxExpression),
}
impl alloc::fmt::Debug for Node {
/// Debug the wrapped struct.
fn fmt(&self, f: &mut alloc::fmt::Formatter<'_>) -> alloc::fmt::Result {
match self {
Node::Root(x) => write!(f, "{:?}", x),
Node::Element(x) => write!(f, "{:?}", x),
Node::Doctype(x) => write!(f, "{:?}", x),
Node::Comment(x) => write!(f, "{:?}", x),
Node::Text(x) => write!(f, "{:?}", x),
Node::MdxJsxElement(x) => write!(f, "{:?}", x),
Node::MdxExpression(x) => write!(f, "{:?}", x),
Node::MdxjsEsm(x) => write!(f, "{:?}", x),
}
}
}
/// Turn a slice of hast nodes into a string by concatenating each child's
/// text content.
fn children_to_string(children: &[Node]) -> String {
  let mut result = String::new();
  for child in children {
    result.push_str(&child.to_string());
  }
  result
}
impl ToString for Node {
/// Turn a hast node into a string.
fn to_string(&self) -> String {
match self {
// Parents.
Node::Root(x) => children_to_string(&x.children),
Node::Element(x) => children_to_string(&x.children),
Node::MdxJsxElement(x) => children_to_string(&x.children),
// Literals.
Node::Comment(x) => x.value.clone(),
Node::Text(x) => x.value.clone(),
Node::MdxExpression(x) => x.value.clone(),
Node::MdxjsEsm(x) => x.value.clone(),
// Voids.
Node::Doctype(_) => String::new(),
}
}
}
impl Node {
  /// Get children of a hast node.
  ///
  /// Returns `None` for non-parent variants (literals and voids).
  #[must_use]
  pub fn children(&self) -> Option<&Vec<Node>> {
    match self {
      // Parent.
      Node::Root(x) => Some(&x.children),
      Node::Element(x) => Some(&x.children),
      Node::MdxJsxElement(x) => Some(&x.children),
      // Non-parent.
      _ => None,
    }
  }
  /// Get children of a hast node, mutably.
  ///
  /// Returns `None` for non-parent variants (literals and voids).
  pub fn children_mut(&mut self) -> Option<&mut Vec<Node>> {
    match self {
      // Parent.
      Node::Root(x) => Some(&mut x.children),
      Node::Element(x) => Some(&mut x.children),
      Node::MdxJsxElement(x) => Some(&mut x.children),
      // Non-parent.
      _ => None,
    }
  }
  /// Get the position of a hast node (every variant carries one).
  pub fn position(&self) -> Option<&Position> {
    match self {
      Node::Root(x) => x.position.as_ref(),
      Node::Element(x) => x.position.as_ref(),
      Node::Doctype(x) => x.position.as_ref(),
      Node::Comment(x) => x.position.as_ref(),
      Node::Text(x) => x.position.as_ref(),
      Node::MdxJsxElement(x) => x.position.as_ref(),
      Node::MdxExpression(x) => x.position.as_ref(),
      Node::MdxjsEsm(x) => x.position.as_ref(),
    }
  }
  /// Get the position of a hast node, mutably.
  pub fn position_mut(&mut self) -> Option<&mut Position> {
    match self {
      Node::Root(x) => x.position.as_mut(),
      Node::Element(x) => x.position.as_mut(),
      Node::Doctype(x) => x.position.as_mut(),
      Node::Comment(x) => x.position.as_mut(),
      Node::Text(x) => x.position.as_mut(),
      Node::MdxJsxElement(x) => x.position.as_mut(),
      Node::MdxExpression(x) => x.position.as_mut(),
      Node::MdxjsEsm(x) => x.position.as_mut(),
    }
  }
  /// Set (or clear, with `None`) the position of a hast node.
  pub fn position_set(&mut self, position: Option<Position>) {
    match self {
      Node::Root(x) => x.position = position,
      Node::Element(x) => x.position = position,
      Node::Doctype(x) => x.position = position,
      Node::Comment(x) => x.position = position,
      Node::Text(x) => x.position = position,
      Node::MdxJsxElement(x) => x.position = position,
      Node::MdxExpression(x) => x.position = position,
      Node::MdxjsEsm(x) => x.position = position,
    }
  }
}
/// Document root.
///
/// ```html
/// > | a
///     ^
/// ```
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct Root {
  // Parent.
  /// Content model.
  pub children: Vec<Node>,
  /// Positional info.
  pub position: Option<Position>,
}
/// Element.
///
/// ```html
/// > | <a>b</a>
///     ^^^^^^^^
/// ```
///
/// (Doc fixed: this struct previously carried a copy-pasted "Document type"
/// comment belonging to [`Doctype`].)
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct Element {
  /// Tag name.
  pub tag_name: String,
  /// Properties: attribute name/value pairs, in source order.
  pub properties: Vec<(String, PropertyValue)>,
  // Parent.
  /// Children.
  pub children: Vec<Node>,
  /// Positional info.
  pub position: Option<Position>,
}
/// Property value: the typed value side of an element attribute.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum PropertyValue {
  /// A boolean (e.g. `hidden`).
  Boolean(bool),
  /// A plain string.
  String(String),
  /// A comma-separated list of strings (e.g. `srcset`).
  CommaSeparated(Vec<String>),
  /// A space-separated list of strings (e.g. `className`).
  SpaceSeparated(Vec<String>),
}
/// Document type.
///
/// ```html
/// > | <!doctype html>
///     ^^^^^^^^^^^^^^^
/// ```
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct Doctype {
  // Void: carries no value or children.
  /// Positional info.
  pub position: Option<Position>,
}
/// Comment.
///
/// ```html
/// > | <!-- a -->
///     ^^^^^^^^^^
/// ```
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct Comment {
  // Literal.
  /// Content model: the comment text, without delimiters.
  pub value: String,
  /// Positional info.
  pub position: Option<Position>,
}
/// Text.
///
/// ```html
/// > | a
///     ^
/// ```
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct Text {
  // Literal.
  /// Content model: the raw text.
  pub value: String,
  /// Positional info.
  pub position: Option<Position>,
}
/// MDX: JSX element.
///
/// ```markdown
/// > | <a />
///     ^^^^^
/// ```
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct MdxJsxElement {
  // JSX element.
  /// Name.
  ///
  /// Fragments (`<>…</>`) have no name.
  pub name: Option<String>,
  /// Attributes (re-exported from the `markdown` crate's mdast types).
  pub attributes: Vec<AttributeContent>,
  // Parent.
  /// Content model.
  pub children: Vec<Node>,
  /// Positional info.
  pub position: Option<Position>,
}
/// MDX: expression.
///
/// ```markdown
/// > | {a}
///     ^^^
/// ```
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct MdxExpression {
  // Literal.
  /// Content model: the expression source, without braces.
  pub value: String,
  /// Positional info.
  pub position: Option<Position>,
  /// Custom data on where each slice of `value` came from.
  pub stops: Vec<Stop>,
}
/// MDX: ESM (import/export statements).
///
/// ```markdown
/// > | import a from 'b'
///     ^^^^^^^^^^^^^^^^^
/// ```
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct MdxjsEsm {
  // Literal.
  /// Content model: the ESM source text.
  pub value: String,
  /// Positional info.
  pub position: Option<Position>,
  /// Custom data on where each slice of `value` came from.
  pub stops: Vec<Stop>,
}
#[cfg(test)]
mod tests {
  use super::*;
  use markdown::unist::Position;
  use pretty_assertions::assert_eq;
  // Each test exercises the full `Node` surface for one variant:
  // `Debug`, `ToString`, `children`/`children_mut`, `position`/`position_mut`,
  // and `position_set`.
  // Literals.
  #[test]
  fn text() {
    let mut node = Node::Text(Text {
      value: "a".into(),
      position: None,
    });
    assert_eq!(
      format!("{:?}", node),
      "Text { value: \"a\", position: None }",
      "should support `Debug`"
    );
    assert_eq!(node.to_string(), "a", "should support `ToString`");
    assert_eq!(node.children_mut(), None, "should support `children_mut`");
    assert_eq!(node.children(), None, "should support `children`");
    assert_eq!(node.position(), None, "should support `position`");
    assert_eq!(node.position_mut(), None, "should support `position`");
    node.position_set(Some(Position::new(1, 1, 0, 1, 2, 1)));
    assert_eq!(
      format!("{:?}", node),
      "Text { value: \"a\", position: Some(1:1-1:2 (0-1)) }",
      "should support `position_set`"
    );
  }
  #[test]
  fn comment() {
    let mut node = Node::Comment(Comment {
      value: "a".into(),
      position: None,
    });
    assert_eq!(
      format!("{:?}", node),
      "Comment { value: \"a\", position: None }",
      "should support `Debug`"
    );
    assert_eq!(node.to_string(), "a", "should support `ToString`");
    assert_eq!(node.children_mut(), None, "should support `children_mut`");
    assert_eq!(node.children(), None, "should support `children`");
    assert_eq!(node.position(), None, "should support `position`");
    assert_eq!(node.position_mut(), None, "should support `position`");
    node.position_set(Some(Position::new(1, 1, 0, 1, 2, 1)));
    assert_eq!(
      format!("{:?}", node),
      "Comment { value: \"a\", position: Some(1:1-1:2 (0-1)) }",
      "should support `position_set`"
    );
  }
  #[test]
  fn mdx_expression() {
    let mut node = Node::MdxExpression(MdxExpression {
      value: "a".into(),
      stops: vec![],
      position: None,
    });
    assert_eq!(
      format!("{:?}", node),
      "MdxExpression { value: \"a\", position: None, stops: [] }",
      "should support `Debug`"
    );
    assert_eq!(node.to_string(), "a", "should support `ToString`");
    assert_eq!(node.children_mut(), None, "should support `children_mut`");
    assert_eq!(node.children(), None, "should support `children`");
    assert_eq!(node.position(), None, "should support `position`");
    assert_eq!(node.position_mut(), None, "should support `position`");
    node.position_set(Some(Position::new(1, 1, 0, 1, 2, 1)));
    assert_eq!(
      format!("{:?}", node),
      "MdxExpression { value: \"a\", position: Some(1:1-1:2 (0-1)), stops: [] }",
      "should support `position_set`"
    );
  }
  #[test]
  fn mdxjs_esm() {
    let mut node = Node::MdxjsEsm(MdxjsEsm {
      value: "a".into(),
      stops: vec![],
      position: None,
    });
    assert_eq!(
      format!("{:?}", node),
      "MdxjsEsm { value: \"a\", position: None, stops: [] }",
      "should support `Debug`"
    );
    assert_eq!(node.to_string(), "a", "should support `ToString`");
    assert_eq!(node.children_mut(), None, "should support `children_mut`");
    assert_eq!(node.children(), None, "should support `children`");
    assert_eq!(node.position(), None, "should support `position`");
    assert_eq!(node.position_mut(), None, "should support `position`");
    node.position_set(Some(Position::new(1, 1, 0, 1, 2, 1)));
    assert_eq!(
      format!("{:?}", node),
      "MdxjsEsm { value: \"a\", position: Some(1:1-1:2 (0-1)), stops: [] }",
      "should support `position_set`"
    );
  }
  // Voids: no value, no children, stringify to "".
  #[test]
  fn doctype() {
    let mut node = Node::Doctype(Doctype { position: None });
    assert_eq!(
      format!("{:?}", node),
      "Doctype { position: None }",
      "should support `Debug`"
    );
    assert_eq!(node.to_string(), "", "should support `ToString`");
    assert_eq!(node.children_mut(), None, "should support `children_mut`");
    assert_eq!(node.children(), None, "should support `children`");
    assert_eq!(node.position(), None, "should support `position`");
    assert_eq!(node.position_mut(), None, "should support `position`");
    node.position_set(Some(Position::new(1, 1, 0, 1, 2, 1)));
    assert_eq!(
      format!("{:?}", node),
      "Doctype { position: Some(1:1-1:2 (0-1)) }",
      "should support `position_set`"
    );
  }
  // Parents: expose (mutable) children.
  #[test]
  fn root() {
    let mut node = Node::Root(Root {
      position: None,
      children: vec![],
    });
    assert_eq!(
      format!("{:?}", node),
      "Root { children: [], position: None }",
      "should support `Debug`"
    );
    assert_eq!(node.to_string(), "", "should support `ToString`");
    assert_eq!(
      node.children_mut(),
      Some(&mut vec![]),
      "should support `children_mut`"
    );
    assert_eq!(node.children(), Some(&vec![]), "should support `children`");
    assert_eq!(node.position(), None, "should support `position`");
    assert_eq!(node.position_mut(), None, "should support `position`");
    node.position_set(Some(Position::new(1, 1, 0, 1, 2, 1)));
    assert_eq!(
      format!("{:?}", node),
      "Root { children: [], position: Some(1:1-1:2 (0-1)) }",
      "should support `position_set`"
    );
  }
  #[test]
  fn element() {
    let mut node = Node::Element(Element {
      tag_name: "a".into(),
      properties: vec![],
      position: None,
      children: vec![],
    });
    assert_eq!(
      format!("{:?}", node),
      "Element { tag_name: \"a\", properties: [], children: [], position: None }",
      "should support `Debug`"
    );
    assert_eq!(node.to_string(), "", "should support `ToString`");
    assert_eq!(
      node.children_mut(),
      Some(&mut vec![]),
      "should support `children_mut`"
    );
    assert_eq!(node.children(), Some(&vec![]), "should support `children`");
    assert_eq!(node.position(), None, "should support `position`");
    assert_eq!(node.position_mut(), None, "should support `position`");
    node.position_set(Some(Position::new(1, 1, 0, 1, 2, 1)));
    assert_eq!(
      format!("{:?}", node),
      "Element { tag_name: \"a\", properties: [], children: [], position: Some(1:1-1:2 (0-1)) }",
      "should support `position_set`"
    );
  }
  #[test]
  fn mdx_jsx_element() {
    let mut node = Node::MdxJsxElement(MdxJsxElement {
      name: None,
      attributes: vec![],
      position: None,
      children: vec![],
    });
    assert_eq!(
      format!("{:?}", node),
      "MdxJsxElement { name: None, attributes: [], children: [], position: None }",
      "should support `Debug`"
    );
    assert_eq!(node.to_string(), "", "should support `ToString`");
    assert_eq!(
      node.children_mut(),
      Some(&mut vec![]),
      "should support `children_mut`"
    );
    assert_eq!(node.children(), Some(&vec![]), "should support `children`");
    assert_eq!(node.position(), None, "should support `position`");
    assert_eq!(node.position_mut(), None, "should support `position`");
    node.position_set(Some(Position::new(1, 1, 0, 1, 2, 1)));
    assert_eq!(
      format!("{:?}", node),
      "MdxJsxElement { name: None, attributes: [], children: [], position: Some(1:1-1:2 (0-1)) }",
      "should support `position_set`"
    );
  }
}
| rust | MIT | 04633f3cb2d8062e2578ea02213db42c2e4a5952 | 2026-01-04T20:17:17.706328Z | false |
web-infra-dev/mdx-rs | https://github.com/web-infra-dev/mdx-rs/blob/04633f3cb2d8062e2578ea02213db42c2e4a5952/crates/plugin_frontmatter/src/lib.rs | crates/plugin_frontmatter/src/lib.rs | //! Author: sanyuan0704
//!
//! This plugin is used to parse the front matter in markdown and export it in mdx file.
use markdown::mdast;
use serde_yaml::Value;
/// Convert a YAML front-matter string to a JSON string.
///
/// Empty input serializes to `"{}"`.
///
/// # Panics
///
/// Panics on malformed YAML so front-matter errors surface loudly at build
/// time. (Fixed: the panic message previously contained a stray full-width
/// `。` character.)
fn yaml_to_json(yaml_str: &str) -> String {
  if yaml_str.is_empty() {
    return "{}".into();
  }
  let parsed_value: Value =
    serde_yaml::from_str(yaml_str).unwrap_or_else(|_| panic!("Failed to parse yaml: {}", yaml_str));
  serde_json::to_string(&parsed_value).unwrap()
}
/// Extract YAML front matter from an mdast tree.
///
/// Removes the (last) top-level YAML node from the root, if any, and returns
/// its content serialized as a JSON string; returns `"{}"` when the node is
/// not a root or carries no front matter.
pub fn mdx_plugin_frontmatter(node: &mut mdast::Node) -> String {
  if let mdast::Node::Root(root) = node {
    // Match the original behavior: when several YAML nodes exist, the last
    // one wins and is the one removed.
    let yaml_index = root
      .children
      .iter()
      .rposition(|child| matches!(child, mdast::Node::Yaml(_)));
    if let Some(index) = yaml_index {
      if let mdast::Node::Yaml(yaml) = root.children.remove(index) {
        return yaml_to_json(&yaml.value);
      }
    }
  }
  "{}".into()
}
#[cfg(test)]
mod tests {
  use super::*;
  // Empty front matter maps to an empty JSON object.
  #[test]
  fn test_yaml_to_json_empty() {
    let yaml = "";
    let expected = "{}".to_string();
    assert_eq!(yaml_to_json(yaml), expected);
  }
}
| rust | MIT | 04633f3cb2d8062e2578ea02213db42c2e4a5952 | 2026-01-04T20:17:17.706328Z | false |
web-infra-dev/mdx-rs | https://github.com/web-infra-dev/mdx-rs/blob/04633f3cb2d8062e2578ea02213db42c2e4a5952/crates/plugin_code_block/src/lib.rs | crates/plugin_code_block/src/lib.rs | //! Author: sanyuan0704
//!
//! [Deprecated]
//! This plugin is used to construct the code block in mdx.
/// Rewrite a `<pre><code …>` pair into the rspress container markup:
/// `<div class="language-">` with a title `<div>` and a
/// `<div class="rspress-code-content">` wrapping the original code.
///
/// Bug fix: the original called `node.children.first().unwrap()`, which
/// panics on an empty `<pre></pre>`; the lookup is now guarded.
fn transform_pre_code_element(node: &mut hast::Node) {
  // find the <pre><code className="language-jsx">
  // and then transform it
  if let hast::Node::Element(node) = node {
    if node.tag_name == "pre" {
      // Check the `className` property for the `code` node
      // If the `className` is `code`, we stop the transformation
      // (that marker means this `<pre>` was already produced by this plugin).
      if let Some((_, hast::PropertyValue::SpaceSeparated(class_names))) =
        node.properties.iter().find(|(key, _)| key == "className")
      {
        if class_names.contains(&"code".into()) {
          return;
        }
      }
      // Locate the `<code>` child; an empty `<pre>` is simply left alone.
      let mut code_node = None;
      if let Some(hast::Node::Element(child)) = node.children.first() {
        if child.tag_name == "code" {
          code_node = Some(child);
        }
      }
      if let Some(code_node) = code_node {
        // get the className and meta of the code node, from its properties
        let mut meta = String::new();
        let mut title = None;
        for (key, value) in &code_node.properties {
          if key == "meta" {
            if let hast::PropertyValue::SpaceSeparated(values) = value {
              if let Some(value) = values.first() {
                meta = value.to_string();
              }
            }
          }
        }
        // Parse `title="…"` out of the space-separated meta string.
        for part in meta.split(' ') {
          let part = part.trim();
          if let Some(stripped) = part.strip_prefix("title=") {
            title = Some(stripped.trim_matches('"'));
          }
        }
        let title_node = title.map(|title| {
          hast::Node::Element(hast::Element {
            tag_name: "div".into(),
            properties: vec![(
              "className".into(),
              hast::PropertyValue::SpaceSeparated(vec!["rspress-code-title".into()]),
            )],
            children: vec![hast::Node::Text(hast::Text {
              value: title.to_string(),
              position: None,
            })],
            position: None,
          })
        });
        let content_node = hast::Node::Element(hast::Element {
          tag_name: "div".into(),
          properties: vec![(
            "className".into(),
            hast::PropertyValue::SpaceSeparated(vec!["rspress-code-content".into()]),
          )],
          children: vec![hast::Node::Element(hast::Element {
            tag_name: "pre".into(),
            // The `code` class marks this `<pre>` as already transformed.
            properties: vec![(
              "className".into(),
              hast::PropertyValue::SpaceSeparated(vec!["code".into()]),
            )],
            children: vec![hast::Node::Element(code_node.clone())],
            position: None,
          })],
          position: None,
        });
        node.tag_name = "div".into();
        node.properties = vec![(
          "className".into(),
          hast::PropertyValue::SpaceSeparated(vec!["language-".into()]),
        )];
        // Missing titles keep an empty text placeholder so the child layout
        // stays [title, content].
        node.children = vec![
          title_node.unwrap_or(hast::Node::Text(hast::Text {
            value: "".into(),
            position: None,
          })),
          content_node,
        ]
      }
      // if the className is "language-jsx", we regard the lang as "jsx"
      // and parse the title from the meta
    }
  }
}
/// Depth-first walk: rewrite this node, then recurse into any children.
fn mdx_plugin_code_block_impl(node: &mut hast::Node) {
  transform_pre_code_element(node);
  for child in node.children_mut().into_iter().flatten() {
    mdx_plugin_code_block_impl(child);
  }
}
/// Public entry point: walk the whole hast tree and wrap fenced code blocks
/// in the rspress container markup. (Marked deprecated in the file header,
/// kept for compatibility.)
pub fn mdx_plugin_code_block(root: &mut hast::Node) {
  // Traverse all the hast node, and find the code element within pre, and then find the className of the code element
  // If the className is "language-jsx", we regard the lang as "jsx"
  // for example:
  // <pre>
  //  <code className="language-jsx">
  //    <p>hello world</p>
  //  </code>
  // </pre>
  // Will be transformed to:
  // <div className="language-jsx">
  //  <div className="rspress-code-title">title</div>
  //  <div className="rspress-code-content">
  //   <pre>
  //    <code className="language-jsx">
  //     <p>hello world</p>
  //    </code>
  //   </pre>
  //  </div>
  // </div>
  mdx_plugin_code_block_impl(root);
}
#[cfg(test)]
mod tests {
  use super::*;
  // End-to-end: a `<pre><code meta='title="…"'>` pair becomes the rspress
  // container with a title div and a content div wrapping the original code.
  #[test]
  fn test_transform_pre_code_element() {
    // Create a sample hast node
    let mut root = hast::Node::Element(hast::Element {
      tag_name: "pre".into(),
      properties: vec![(
        "className".into(),
        hast::PropertyValue::SpaceSeparated(vec!["language-rust".into()]),
      )],
      children: vec![hast::Node::Element(hast::Element {
        tag_name: "code".into(),
        properties: vec![(
          "meta".into(),
          hast::PropertyValue::SpaceSeparated(vec!["title=\"My-Rust-Code\"".into()]),
        )],
        children: vec![hast::Node::Text(hast::Text {
          value: "fn main() {\n    println!(\"Hello, world!\");\n}".into(),
          position: None,
        })],
        position: None,
      })],
      position: None,
    });
    mdx_plugin_code_block_impl(&mut root);
    // Check if the transformation was successful
    assert_eq!(
      root,
      hast::Node::Element(hast::Element {
        tag_name: "div".into(),
        properties: vec![(
          "className".into(),
          hast::PropertyValue::SpaceSeparated(vec!["language-".into()]),
        )],
        children: vec![
          hast::Node::Element(hast::Element {
            tag_name: "div".into(),
            properties: vec![(
              "className".into(),
              hast::PropertyValue::SpaceSeparated(vec!["rspress-code-title".into()]),
            )],
            children: vec![hast::Node::Text(hast::Text {
              value: "My-Rust-Code".into(),
              position: None,
            })],
            position: None,
          }),
          hast::Node::Element(hast::Element {
            tag_name: "div".into(),
            properties: vec![(
              "className".into(),
              hast::PropertyValue::SpaceSeparated(vec!["rspress-code-content".into()]),
            )],
            children: vec![hast::Node::Element(hast::Element {
              tag_name: "pre".into(),
              properties: vec![(
                "className".into(),
                hast::PropertyValue::SpaceSeparated(vec!["code".into()]),
              )],
              children: vec![hast::Node::Element(hast::Element {
                tag_name: "code".into(),
                properties: vec![(
                  "meta".into(),
                  hast::PropertyValue::SpaceSeparated(vec!["title=\"My-Rust-Code\"".into()]),
                )],
                children: vec![hast::Node::Text(hast::Text {
                  value: "fn main() {\n    println!(\"Hello, world!\");\n}".into(),
                  position: None,
                }),],
                position: None,
              }),],
              position: None,
            }),],
            position: None,
          })
        ],
        position: None,
      })
    );
  }
}
| rust | MIT | 04633f3cb2d8062e2578ea02213db42c2e4a5952 | 2026-01-04T20:17:17.706328Z | false |
web-infra-dev/mdx-rs | https://github.com/web-infra-dev/mdx-rs/blob/04633f3cb2d8062e2578ea02213db42c2e4a5952/crates/plugin_header_anchor/src/lib.rs | crates/plugin_header_anchor/src/lib.rs | //! Author: sanyuan0704
//!
//! This plugin is used to add anchor to the header in link element.
use slugger::Slugger;
use utils::extract_title_and_id;
/// Collect the plain-text title of a heading element and any custom anchor id.
///
/// Text children may carry a `{#custom-id}` suffix (`.md` files); MDX files
/// express the same thing as an expression child starting with `#`, which is
/// removed from the tree after it is consumed. Text inside `code`/`a`/
/// `strong`/`em`/`del` children contributes to the title as well.
fn collect_title_in_hast(node: &mut hast::Element) -> (String, String) {
  let mut title = String::new();
  let mut id = String::new();
  let mut custom_id_expression_index = None;
  for (index, child) in node.children.iter_mut().enumerate() {
    match child {
      // `.md` case: "hello world {#custom-id}" — split into title and id,
      // and strip the marker from the text node itself.
      hast::Node::Text(text) => {
        let (title_part, id_part) = extract_title_and_id(&text.value);
        text.value = title_part.clone();
        title.push_str(&title_part);
        id = id_part;
      }
      // `.mdx` case: an expression child `{#custom-id}`.
      hast::Node::MdxExpression(expression) => {
        if let Some(rest) = expression.value.strip_prefix('#') {
          id.push_str(rest);
          custom_id_expression_index = Some(index);
        }
      }
      // Inline formatting elements contribute their text content.
      hast::Node::Element(element) => {
        if matches!(
          element.tag_name.as_str(),
          "code" | "a" | "strong" | "em" | "del"
        ) {
          for inner in &element.children {
            if let hast::Node::Text(text) = inner {
              title.push_str(&text.value);
            }
          }
        }
      }
      _ => {}
    }
  }
  // Drop the consumed `{#…}` expression so it is not rendered.
  if let Some(index) = custom_id_expression_index {
    node.children.remove(index);
  }
  (title.trim_end().to_string(), id)
}
/// Build the `<a class="header-anchor" aria-hidden="true" href="#id">#</a>`
/// element that is appended to every heading.
fn create_anchor_element(id: &str) -> hast::Element {
  let properties = vec![
    // Class for custom styling.
    (
      "className".to_string(),
      hast::PropertyValue::SpaceSeparated(vec!["header-anchor".to_string()]),
    ),
    // Hide from assistive tech: the heading text itself is the accessible label.
    (
      "aria-hidden".to_string(),
      hast::PropertyValue::String("true".to_string()),
    ),
    // Fragment link to the heading's id.
    (
      "href".to_string(),
      hast::PropertyValue::String(format!("#{}", id)),
    ),
  ];
  // The visible anchor content is a single `#`.
  let hash_sign = hast::Node::Text(hast::Text {
    value: "#".to_string(),
    position: None,
  });
  hast::Element {
    tag_name: "a".to_string(),
    properties,
    children: vec![hash_sign],
    position: None,
  }
}
// In this plugin, we do the following things:
// 1. add header anchor for every header element
/// Add an `id` and a trailing anchor `<a>` to every top-level heading
/// (`h1`..`h6`) in the tree. The id comes from a `{#custom-id}` marker when
/// present, otherwise from slugifying the heading text.
///
/// Robustness fix: headings are now identified as exactly two characters
/// starting with `h`; the original only checked that the second character was
/// a digit 1..=6, which would also match any other tag shaped like `x3`.
pub fn mdx_plugin_header_anchor(node: &mut hast::Node) {
  let mut slugger: Slugger = Slugger::new();
  if let hast::Node::Root(root) = node {
    for child in &mut root.children {
      if let hast::Node::Element(element) = child {
        // Only real heading tags: "h" + single digit.
        if element.tag_name.len() != 2 || !element.tag_name.starts_with('h') {
          continue;
        }
        if let Some(level) = element.tag_name.chars().nth(1).and_then(|c| c.to_digit(10)) {
          // h1 ~ h6
          if (1..=6).contains(&level) {
            // get the text of the header element
            let (header_text, mut id) = collect_title_in_hast(element);
            if id.is_empty() {
              id = slugger.slug(&header_text, false);
            }
            let id_property = ("id".to_string(), hast::PropertyValue::String(id.clone()));
            // add the id attribute to the header element
            element.properties.push(id_property);
            // add the anchor element to the header element
            element
              .children
              .push(hast::Node::Element(create_anchor_element(&id)));
          }
        }
      }
    }
  }
}
#[cfg(test)]
mod tests {
  use super::*;
  use hast::Node;
  // Text is gathered from plain text nodes and from inline element children
  // (code / a / strong / em / del); MdxJsxElement children are skipped and the
  // collected text is trimmed. No custom id is present, so the second tuple
  // field is always empty here.
  #[test]
  fn test_collect_title_in_hast() {
    let mut element1 = hast::Element {
      tag_name: "h1".to_string(),
      properties: vec![],
      children: vec![
        Node::Text(hast::Text {
          value: "Hello".to_string(),
          position: None,
        }),
        Node::Element(hast::Element {
          tag_name: "code".to_string(),
          properties: vec![],
          children: vec![Node::Text(hast::Text {
            value: "World".to_string(),
            position: None,
          })],
          position: None,
        }),
        Node::Element(hast::Element {
          tag_name: "a".to_string(),
          properties: vec![(
            "href".to_string(),
            hast::PropertyValue::String("https://example.com".to_string()),
          )],
          children: vec![Node::Text(hast::Text {
            value: "World".to_string(),
            position: None,
          })],
          position: None,
        }),
      ],
      position: None,
    };
    let mut element2 = hast::Element {
      tag_name: "h2".to_string(),
      properties: vec![],
      children: vec![
        Node::Text(hast::Text {
          value: "Hello World ".to_string(),
          position: None,
        }),
        // JSX elements inside a heading contribute no title text
        Node::MdxJsxElement(hast::MdxJsxElement {
          name: Some("foo".to_string()),
          attributes: vec![],
          children: vec![Node::Text(hast::Text {
            value: "bar".to_string(),
            position: None,
          })],
          position: None,
        }),
        Node::Text(hast::Text {
          value: " ".to_string(),
          position: None,
        }),
      ],
      position: None,
    };
    let mut element3 = hast::Element {
      tag_name: "h3".to_string(),
      properties: vec![],
      children: vec![Node::Element(hast::Element {
        tag_name: "strong".to_string(),
        properties: vec![],
        children: vec![Node::Text(hast::Text {
          value: "Bold".to_string(),
          position: None,
        })],
        position: None,
      })],
      position: None,
    };
    let mut element4 = hast::Element {
      tag_name: "h4".to_string(),
      properties: vec![],
      children: vec![Node::Element(hast::Element {
        tag_name: "em".to_string(),
        properties: vec![],
        children: vec![Node::Text(hast::Text {
          value: "Italic".to_string(),
          position: None,
        })],
        position: None,
      })],
      position: None,
    };
    let mut element5 = hast::Element {
      tag_name: "h5".to_string(),
      properties: vec![],
      children: vec![Node::Element(hast::Element {
        tag_name: "del".to_string(),
        properties: vec![],
        children: vec![Node::Text(hast::Text {
          value: "Strikethrough".to_string(),
          position: None,
        })],
        position: None,
      })],
      position: None,
    };
    // Child texts are concatenated without separators: "Hello" + "World" + "World"
    assert_eq!(
      collect_title_in_hast(&mut element1),
      ("HelloWorldWorld".to_string(), "".to_string())
    );
    // The MdxJsxElement is ignored and surrounding whitespace is trimmed
    assert_eq!(
      collect_title_in_hast(&mut element2),
      ("Hello World".to_string(), "".to_string())
    );
    assert_eq!(
      collect_title_in_hast(&mut element3),
      ("Bold".to_string(), "".to_string())
    );
    assert_eq!(
      collect_title_in_hast(&mut element4),
      ("Italic".to_string(), "".to_string())
    );
    assert_eq!(
      collect_title_in_hast(&mut element5),
      ("Strikethrough".to_string(), "".to_string())
    );
  }
  // The generated anchor is `<a class="header-anchor" aria-hidden="true"
  // href="#<id>">#</a>`.
  #[test]
  fn test_create_anchor_element() {
    let element = create_anchor_element("hello-world");
    assert_eq!(element.tag_name, "a");
    assert_eq!(
      element.properties,
      vec![
        (
          "className".to_string(),
          hast::PropertyValue::SpaceSeparated(vec!["header-anchor".to_string()])
        ),
        (
          "aria-hidden".to_string(),
          hast::PropertyValue::String("true".to_string())
        ),
        (
          "href".to_string(),
          hast::PropertyValue::String("#hello-world".to_string())
        ),
      ]
    );
    assert_eq!(
      element.children,
      vec![Node::Text(hast::Text {
        value: "#".to_string(),
        position: None,
      })]
    );
  }
  // End-to-end: both headings share the same text, so the slugger must
  // de-duplicate the generated ids ("hello-world", then "hello-world-1").
  #[test]
  fn test_mdx_plugin_header_anchor() {
    let mut root = hast::Node::Root(hast::Root {
      children: vec![
        Node::Element(hast::Element {
          tag_name: "h1".to_string(),
          properties: vec![],
          children: vec![Node::Text(hast::Text {
            value: "Hello World".to_string(),
            position: None,
          })],
          position: None,
        }),
        Node::Element(hast::Element {
          tag_name: "h2".to_string(),
          properties: vec![],
          children: vec![Node::Text(hast::Text {
            value: "Hello World".to_string(),
            position: None,
          })],
          position: None,
        }),
      ],
      position: None,
    });
    mdx_plugin_header_anchor(&mut root);
    let children = match root {
      hast::Node::Root(root) => root.children,
      _ => panic!("root should be a Root node"),
    };
    assert_eq!(
      children,
      vec![
        Node::Element(hast::Element {
          tag_name: "h1".to_string(),
          properties: vec![(
            "id".to_string(),
            hast::PropertyValue::String("hello-world".to_string())
          ),],
          children: vec![
            Node::Text(hast::Text {
              value: "Hello World".to_string(),
              position: None,
            }),
            Node::Element(hast::Element {
              tag_name: "a".to_string(),
              properties: vec![
                (
                  "className".to_string(),
                  hast::PropertyValue::SpaceSeparated(vec!["header-anchor".to_string()])
                ),
                (
                  "aria-hidden".to_string(),
                  hast::PropertyValue::String("true".to_string())
                ),
                (
                  "href".to_string(),
                  hast::PropertyValue::String("#hello-world".to_string())
                ),
              ],
              children: vec![Node::Text(hast::Text {
                value: "#".to_string(),
                position: None,
              })],
              position: None,
            }),
          ],
          position: None,
        }),
        Node::Element(hast::Element {
          tag_name: "h2".to_string(),
          properties: vec![(
            "id".to_string(),
            hast::PropertyValue::String("hello-world-1".to_string())
          ),],
          children: vec![
            Node::Text(hast::Text {
              value: "Hello World".to_string(),
              position: None,
            }),
            Node::Element(hast::Element {
              tag_name: "a".to_string(),
              properties: vec![
                (
                  "className".to_string(),
                  hast::PropertyValue::SpaceSeparated(vec!["header-anchor".to_string()])
                ),
                (
                  "aria-hidden".to_string(),
                  hast::PropertyValue::String("true".to_string())
                ),
                (
                  "href".to_string(),
                  hast::PropertyValue::String("#hello-world-1".to_string())
                ),
              ],
              children: vec![Node::Text(hast::Text {
                value: "#".to_string(),
                position: None,
              })],
              position: None,
            }),
          ],
          position: None,
        }),
      ]
    );
  }
}
| rust | MIT | 04633f3cb2d8062e2578ea02213db42c2e4a5952 | 2026-01-04T20:17:17.706328Z | false |
web-infra-dev/mdx-rs | https://github.com/web-infra-dev/mdx-rs/blob/04633f3cb2d8062e2578ea02213db42c2e4a5952/crates/binding/build.rs | crates/binding/build.rs | extern crate napi_build;
// Build script: emits the linker configuration needed to build the N-API
// native addon (delegates entirely to the napi-build crate).
fn main() {
  napi_build::setup();
}
| rust | MIT | 04633f3cb2d8062e2578ea02213db42c2e4a5952 | 2026-01-04T20:17:17.706328Z | false |
web-infra-dev/mdx-rs | https://github.com/web-infra-dev/mdx-rs/blob/04633f3cb2d8062e2578ea02213db42c2e4a5952/crates/binding/src/lib.rs | crates/binding/src/lib.rs | #[cfg(not(all(target_os = "linux", target_env = "musl", target_arch = "aarch64")))]
#[global_allocator]
static ALLOC: mimalloc_rust::GlobalMiMalloc = mimalloc_rust::GlobalMiMalloc;
use mdx_plugin_toc::TocItem;
use mdx_rs::{self, CompileResult};
#[macro_use]
extern crate napi_derive;
use napi::{
bindgen_prelude::{AsyncTask, Result, Task},
JsObject,
};
/// Table-of-contents entry exposed to JavaScript via N-API.
#[napi(object)]
pub struct Toc {
  /// Plain text of the heading.
  pub text: String,
  /// Slug id of the heading (the anchor target).
  pub id: String,
  /// Heading depth (e.g. 2 for an `h2`).
  pub depth: u8,
}
/// Compilation result exposed to JavaScript via N-API.
#[napi(object)]
pub struct Output {
  /// Compiled JavaScript code.
  pub code: String,
  /// Links collected from the document.
  pub links: Vec<String>,
  /// Rendered HTML output.
  pub html: String,
  /// Document title.
  pub title: String,
  /// Table of contents entries.
  pub toc: Vec<Toc>,
  /// Code-fence languages found in the document.
  pub languages: Vec<String>,
  /// Raw frontmatter string.
  pub frontmatter: String,
}
/// Options accepted by [`compile`] / [`compile_sync`] from JavaScript.
#[napi(object)]
pub struct CompileOptions {
  /// MDX source text.
  pub value: String,
  /// Path of the file being compiled.
  pub filepath: String,
  /// Whether to compile in development mode.
  pub development: bool,
  /// Project root directory.
  pub root: String,
}
impl From<TocItem> for Toc {
  /// Converts the internal `mdx_plugin_toc` item into the N-API-facing struct.
  fn from(item: TocItem) -> Self {
    Self {
      text: item.text,
      id: item.id,
      depth: item.depth,
    }
  }
}
impl From<CompileResult> for Output {
  /// Converts the internal compiler result into the N-API-facing struct,
  /// mapping each TOC item through `Toc::from`.
  fn from(res: CompileResult) -> Self {
    Self {
      code: res.code,
      links: res.links,
      html: res.html,
      title: res.title,
      toc: res.toc.into_iter().map(|item| item.into()).collect(),
      languages: res.languages,
      frontmatter: res.frontmatter,
    }
  }
}
impl Task for Compiler {
  type Output = CompileResult;
  type JsValue = JsObject;
  /// Runs the compilation; napi executes this off the JS main thread.
  fn compute(&mut self) -> Result<Self::Output> {
    Ok(self.compile())
  }
  /// Builds the JS result object on the main thread once `compute` finishes.
  /// Fields mirror [`Output`]; the TOC items are converted to [`Toc`] first.
  fn resolve(&mut self, env: napi::Env, output: CompileResult) -> Result<Self::JsValue> {
    let mut obj = env.create_object()?;
    obj.set_named_property("code", output.code)?;
    obj.set_named_property("links", output.links)?;
    obj.set_named_property("html", output.html)?;
    obj.set_named_property("title", output.title)?;
    obj.set_named_property(
      "toc",
      output
        .toc
        .into_iter()
        .map(|item| item.into())
        .collect::<Vec<Toc>>(),
    )?;
    obj.set_named_property("languages", output.languages)?;
    obj.set_named_property("frontmatter", output.frontmatter)?;
    Ok(obj)
  }
}
/// Holds the compile inputs so the work can run as an async napi task.
pub struct Compiler {
  // MDX source text.
  value: String,
  // Path of the file being compiled.
  filepath: String,
  // Whether to compile in development mode.
  development: bool,
  // Project root directory.
  root: String,
}
impl Compiler {
  /// Creates a compiler from the raw compile inputs.
  pub fn new(value: String, filepath: String, development: bool, root: String) -> Self {
    Self {
      value,
      filepath,
      development,
      root,
    }
  }
  /// Runs the actual MDX compilation via `mdx_rs::compile`.
  fn compile(&mut self) -> CompileResult {
    mdx_rs::compile(&self.value, &self.filepath, self.development, &self.root)
  }
}
/// Turn MDX into JavaScript.
///
/// Returns a napi `AsyncTask`, so the compilation runs on the thread pool and
/// the JS caller receives a `Promise<Output>`.
#[napi(ts_return_type = "Promise<Output>")]
pub fn compile(options: CompileOptions) -> AsyncTask<Compiler> {
  let CompileOptions {
    value,
    filepath,
    development,
    root,
  } = options;
  AsyncTask::new(Compiler::new(value, filepath, development, root))
}
/// Turn MDX into JavaScript synchronously, blocking the calling JS thread.
#[napi]
pub fn compile_sync(options: CompileOptions) -> Output {
  Compiler::new(
    options.value,
    options.filepath,
    options.development,
    options.root,
  )
  .compile()
  .into()
}
| rust | MIT | 04633f3cb2d8062e2578ea02213db42c2e4a5952 | 2026-01-04T20:17:17.706328Z | false |
rust-mobile/android_logger-rs | https://github.com/rust-mobile/android_logger-rs/blob/71fd4fd2ab3788063886fb56017597f424e511ac/src/config.rs | src/config.rs | use crate::{FormatFn, LogId};
use log::{Level, LevelFilter, Record};
use std::ffi::CString;
use std::fmt;
/// Filter for android logger.
#[derive(Default)]
pub struct Config {
    /// Maximum level to log; `None` falls back to `log::max_level()`.
    pub(crate) log_level: Option<LevelFilter>,
    /// Android log buffer to write to; `None` uses the default buffer.
    pub(crate) buf_id: Option<LogId>,
    /// Optional env_logger-style module/level filter.
    filter: Option<env_filter::Filter>,
    /// Tag used instead of the record's module path, if set.
    pub(crate) tag: Option<CString>,
    /// Custom callback used to format each log line, if set.
    pub(crate) custom_format: Option<FormatFn>,
}
impl fmt::Debug for Config {
    // Manual impl because `FormatFn` (a boxed closure) is not `Debug`;
    // it is rendered only as `Some(_)` / `None`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("Config")
            .field("log_level", &self.log_level)
            .field("buf_id", &self.buf_id)
            .field("filter", &self.filter)
            .field("tag", &self.tag)
            .field(
                "custom_format",
                match &self.custom_format {
                    Some(_) => &"Some(_)",
                    None => &"None",
                },
            )
            .finish()
    }
}
/// Maps a `log::Level` to the corresponding Android liblog priority.
#[cfg(all(target_os = "android", feature = "android-api-30"))]
fn android_log_priority_from_level(level: Level) -> android_log_sys::LogPriority {
    match level {
        Level::Warn => android_log_sys::LogPriority::WARN,
        Level::Info => android_log_sys::LogPriority::INFO,
        Level::Debug => android_log_sys::LogPriority::DEBUG,
        Level::Error => android_log_sys::LogPriority::ERROR,
        Level::Trace => android_log_sys::LogPriority::VERBOSE,
    }
}
/// Asks Android liblog if a message with given `tag` and `priority` should be logged, using
/// `default_prio` as the level filter in case no system- or process-wide overrides are set.
///
/// The `_len` FFI variant is used so `tag` does not need to be nul-terminated.
#[cfg(all(target_os = "android", feature = "android-api-30"))]
fn android_is_loggable_len(
    prio: log_ffi::LogPriority,
    tag: &str,
    default_prio: log_ffi::LogPriority,
) -> bool {
    // SAFETY: tag points to a valid string tag.len() bytes long.
    unsafe {
        log_ffi::__android_log_is_loggable_len(
            prio as log_ffi::c_int,
            tag.as_ptr() as *const log_ffi::c_char,
            tag.len() as log_ffi::c_size_t,
            default_prio as log_ffi::c_int,
        ) != 0
    }
}
/// Fallback level check used when liblog's `is_loggable` is unavailable:
/// the record passes if its level is at or below the configured level,
/// or `log::max_level()` when no level was configured.
#[cfg(not(all(target_os = "android", feature = "android-api-30")))]
fn default_is_loggable(_tag: &str, record_level: Level, config_level: Option<LevelFilter>) -> bool {
    record_level <= config_level.unwrap_or_else(log::max_level)
}
/// Level check backed by Android liblog (API 30+): delegates the decision to
/// `__android_log_is_loggable_len`, translating the configured level into the
/// default priority used when no overrides apply.
#[cfg(all(target_os = "android", feature = "android-api-30"))]
fn android_is_loggable(tag: &str, record_level: Level, config_level: Option<LevelFilter>) -> bool {
    let prio = android_log_priority_from_level(record_level);
    // Priority to use in case no system-wide or process-wide overrides are set.
    let default_prio = match config_level {
        Some(level_filter) => match level_filter.to_level() {
            Some(level) => android_log_priority_from_level(level),
            // LevelFilter::to_level() returns None only for LevelFilter::Off
            None => android_log_sys::LogPriority::SILENT,
        },
        None => android_log_sys::LogPriority::INFO,
    };
    android_is_loggable_len(prio, tag, default_prio)
}
impl Config {
    /// Changes the maximum log level.
    ///
    /// Note, that `Trace` is the maximum level, because it provides the
    /// maximum amount of detail in the emitted logs.
    ///
    /// If `Off` level is provided, then nothing is logged at all.
    ///
    /// [`log::max_level()`] is considered as the default level.
    pub fn with_max_level(mut self, level: LevelFilter) -> Self {
        self.log_level = Some(level);
        self
    }
    /// Changes the Android logging system buffer to be used.
    ///
    /// By default, logs are sent to the [`Main`] log. Other logging buffers may
    /// only be accessible to certain processes.
    ///
    /// [`Main`]: LogId::Main
    pub fn with_log_buffer(mut self, buf_id: LogId) -> Self {
        self.buf_id = Some(buf_id);
        self
    }
    /// Returns `true` if `record` passes the installed module/level filter,
    /// or unconditionally when no filter is installed.
    pub(crate) fn filter_matches(&self, record: &Record) -> bool {
        if let Some(ref filter) = self.filter {
            filter.matches(record)
        } else {
            true
        }
    }
    /// Decides whether a message with this tag and level should be logged,
    /// delegating to Android liblog on API 30+ builds and to the local level
    /// comparison otherwise.
    pub(crate) fn is_loggable(&self, tag: &str, level: Level) -> bool {
        // Select the backend at compile time based on target/feature.
        #[cfg(all(target_os = "android", feature = "android-api-30"))]
        use android_is_loggable as is_loggable;
        #[cfg(not(all(target_os = "android", feature = "android-api-30")))]
        use default_is_loggable as is_loggable;
        is_loggable(tag, level, self.log_level)
    }
    /// Installs an `env_logger`-style module/level filter for log records.
    pub fn with_filter(mut self, filter: env_filter::Filter) -> Self {
        self.filter = Some(filter);
        self
    }
    /// Overrides the logcat tag (by default, the record's module path is used).
    ///
    /// Panics if `tag` contains an interior nullbyte.
    pub fn with_tag<S: Into<Vec<u8>>>(mut self, tag: S) -> Self {
        self.tag = Some(CString::new(tag).expect("Can't convert tag to CString"));
        self
    }
    /// Sets the format function for formatting the log output.
    /// ```
    /// # use android_logger::Config;
    /// android_logger::init_once(
    ///     Config::default()
    ///         .with_max_level(log::LevelFilter::Trace)
    ///         .format(|f, record| write!(f, "my_app: {}", record.args()))
    /// )
    /// ```
    pub fn format<F>(mut self, format: F) -> Self
    where
        F: Fn(&mut dyn fmt::Write, &Record) -> fmt::Result + Sync + Send + 'static,
    {
        self.custom_format = Some(Box::new(format));
        self
    }
}
| rust | Apache-2.0 | 71fd4fd2ab3788063886fb56017597f424e511ac | 2026-01-04T20:17:15.588112Z | false |
rust-mobile/android_logger-rs | https://github.com/rust-mobile/android_logger-rs/blob/71fd4fd2ab3788063886fb56017597f424e511ac/src/lib.rs | src/lib.rs | // Copyright 2016 The android_logger Developers
//
// Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or
// http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or
// http://opensource.org/licenses/MIT>, at your option. This file may not be
// copied, modified, or distributed except according to those terms.
//! A logger which writes to android output.
//!
//! ## Example
//!
//! ```
//! #[macro_use] extern crate log;
//! extern crate android_logger;
//!
//! use log::LevelFilter;
//! use android_logger::Config;
//!
//! /// Android code may not have obvious "main", this is just an example.
//! fn main() {
//! android_logger::init_once(
//! Config::default().with_max_level(LevelFilter::Trace),
//! );
//!
//! debug!("this is a debug {}", "message");
//! error!("this is printed by default");
//! }
//! ```
//!
//! ## Example with module path filter
//!
//! It is possible to limit log messages to output from a specific crate,
//! and override the logcat tag name (by default, the crate name is used):
//!
//! ```
//! #[macro_use] extern crate log;
//! extern crate android_logger;
//!
//! use log::LevelFilter;
//! use android_logger::{Config,FilterBuilder};
//!
//! fn main() {
//! android_logger::init_once(
//! Config::default()
//! .with_max_level(LevelFilter::Trace)
//! .with_tag("mytag")
//! .with_filter(FilterBuilder::new().parse("debug,hello::crate=trace").build()),
//! );
//!
//! // ..
//! }
//! ```
//!
//! ## Example with a custom log formatter
//!
//! ```
//! use android_logger::Config;
//!
//! android_logger::init_once(
//! Config::default()
//! .with_max_level(log::LevelFilter::Trace)
//! .format(|f, record| write!(f, "my_app: {}", record.args()))
//! )
//! ```
#[cfg(target_os = "android")]
extern crate android_log_sys as log_ffi;
use log::{Log, Metadata, Record};
use std::ffi::{CStr, CString};
use std::fmt;
use std::mem::MaybeUninit;
use std::sync::OnceLock;
use crate::arrays::{fill_tag_bytes, uninit_array};
use crate::platform_log_writer::PlatformLogWriter;
pub use config::Config;
pub use env_filter::{Builder as FilterBuilder, Filter};
pub use id::LogId;
pub(crate) type FormatFn = Box<dyn Fn(&mut dyn fmt::Write, &Record) -> fmt::Result + Sync + Send>;
mod arrays;
mod config;
mod id;
mod platform_log_writer;
#[cfg(test)]
mod tests;
/// Outputs log to Android system.
///
/// Writes to the explicit buffer via `__android_log_buf_write` when `buf_id`
/// is set, otherwise to the default buffer via `__android_log_write`.
#[cfg(target_os = "android")]
fn android_log(
    buf_id: Option<log_ffi::log_id_t>,
    prio: log_ffi::LogPriority,
    tag: &CStr,
    msg: &CStr,
) {
    if let Some(buf_id) = buf_id {
        // SAFETY: `tag` and `msg` are valid, nul-terminated C strings (&CStr).
        unsafe {
            log_ffi::__android_log_buf_write(
                buf_id as log_ffi::c_int,
                prio as log_ffi::c_int,
                tag.as_ptr() as *const log_ffi::c_char,
                msg.as_ptr() as *const log_ffi::c_char,
            );
        };
    } else {
        // SAFETY: `tag` and `msg` are valid, nul-terminated C strings (&CStr).
        unsafe {
            log_ffi::__android_log_write(
                prio as log_ffi::c_int,
                tag.as_ptr() as *const log_ffi::c_char,
                msg.as_ptr() as *const log_ffi::c_char,
            );
        };
    }
}
/// Dummy output placeholder for tests.
///
/// On non-Android targets there is no logging backend; messages are dropped,
/// but the formatting/chunking code paths above still run.
#[cfg(not(target_os = "android"))]
fn android_log(_buf_id: Option<LogId>, _priority: log::Level, _tag: &CStr, _msg: &CStr) {}
/// Underlying android logger backend
#[derive(Debug, Default)]
pub struct AndroidLogger {
    // Lazily-initialized configuration; defaults are used if never set.
    config: OnceLock<Config>,
}
impl AndroidLogger {
    /// Create new logger instance from config
    pub fn new(config: Config) -> AndroidLogger {
        AndroidLogger {
            config: OnceLock::from(config),
        }
    }
    /// Returns the logger's config, initializing it to defaults if unset.
    fn config(&self) -> &Config {
        self.config.get_or_init(Config::default)
    }
}
/// Process-wide logger instance used by [`log`] and [`init_once`].
static ANDROID_LOGGER: OnceLock<AndroidLogger> = OnceLock::new();
/// Maximum length of a tag that does not require allocation.
///
/// Tags configured explicitly in [Config] will not cause an extra allocation. When the tag is
/// derived from the module path, paths longer than this limit will trigger an allocation for each
/// log statement.
///
/// The terminating nullbyte does not count towards this limit.
const LOGGING_TAG_MAX_LEN: usize = 127;
/// Maximum number of message bytes per log call; longer messages are split
/// into chunks (see `PlatformLogWriter`).
const LOGGING_MSG_MAX_LEN: usize = 4000;
impl Log for AndroidLogger {
    /// Level check via the config (liblog-backed on API 30+ Android builds).
    fn enabled(&self, metadata: &Metadata) -> bool {
        self.config()
            .is_loggable(metadata.target(), metadata.level())
    }
    /// Formats the record (custom formatter, or "module: args" when a custom
    /// tag is set) and writes it out in <= LOGGING_MSG_MAX_LEN chunks.
    fn log(&self, record: &Record) {
        let config = self.config();
        if !self.enabled(record.metadata()) {
            return;
        }
        // this also checks the level, but only if a filter was
        // installed.
        if !config.filter_matches(record) {
            return;
        }
        // Temporary storage for null-terminating record.module_path() if it's needed.
        // Tags too long to fit here cause allocation.
        let mut tag_bytes: [MaybeUninit<u8>; LOGGING_TAG_MAX_LEN + 1] = uninit_array();
        // In case we end up allocating, keep the CString alive.
        let _owned_tag;
        let module_path = record.module_path().unwrap_or_default();
        let tag = if let Some(tag) = &config.tag {
            tag
        } else if module_path.len() < tag_bytes.len() {
            fill_tag_bytes(&mut tag_bytes, module_path.as_bytes())
        } else {
            // Tag longer than available stack buffer; allocate.
            _owned_tag = CString::new(module_path.as_bytes())
                .expect("record.module_path() shouldn't contain nullbytes");
            _owned_tag.as_ref()
        };
        // message must not exceed LOGGING_MSG_MAX_LEN
        // therefore split log message into multiple log calls
        let mut writer = PlatformLogWriter::new(config.buf_id, record.level(), tag);
        // If a custom tag is used, add the module path to the message.
        // Use PlatformLogWriter to output chunks if they exceed max size.
        let _ = match (&config.tag, &config.custom_format) {
            (_, Some(format)) => format(&mut writer, record),
            (Some(_), _) => fmt::write(
                &mut writer,
                format_args!("{}: {}", module_path, *record.args()),
            ),
            _ => fmt::write(&mut writer, *record.args()),
        };
        // output the remaining message (this would usually be the most common case)
        writer.flush();
    }
    /// No-op: every `log` call flushes its own writer.
    fn flush(&self) {}
}
/// Send a log record to Android logging backend.
///
/// This action does not require initialization. However, without initialization it
/// will use the default filter, which allows all logs.
pub fn log(record: &Record) {
    let logger = ANDROID_LOGGER.get_or_init(AndroidLogger::default);
    logger.log(record);
}
/// Initializes the global logger with an android logger.
///
/// This can be called many times, but will only initialize logging once,
/// and will not replace any other previously initialized logger.
///
/// It is ok to call this at the activity creation, and it will be
/// repeatedly called on every lifecycle restart (i.e. screen rotation).
pub fn init_once(config: Config) {
    // Capture the level before `config` is moved into the logger.
    let log_level = config.log_level;
    let logger = ANDROID_LOGGER.get_or_init(|| AndroidLogger::new(config));
    // set_logger fails if a logger was already installed (e.g. repeated init).
    if let Err(err) = log::set_logger(logger) {
        log::debug!("android_logger: log::set_logger failed: {}", err);
    } else if let Some(level) = log_level {
        log::set_max_level(level);
    }
}
| rust | Apache-2.0 | 71fd4fd2ab3788063886fb56017597f424e511ac | 2026-01-04T20:17:15.588112Z | false |
rust-mobile/android_logger-rs | https://github.com/rust-mobile/android_logger-rs/blob/71fd4fd2ab3788063886fb56017597f424e511ac/src/arrays.rs | src/arrays.rs | use crate::LOGGING_TAG_MAX_LEN;
use std::ffi::CStr;
use std::mem::MaybeUninit;
// FIXME: When `maybe_uninit_uninit_array` is stabilized, use it instead of this helper
/// Creates an array of uninitialized `MaybeUninit<T>` without writing memory.
pub fn uninit_array<const N: usize, T>() -> [MaybeUninit<T>; N] {
    // SAFETY: Array contains MaybeUninit, which is fine to be uninit
    unsafe { MaybeUninit::uninit().assume_init() }
}
// FIXME: Remove when maybe_uninit_slice is stabilized to provide MaybeUninit::slice_assume_init_ref()
/// Casts a slice of `MaybeUninit<T>` to a slice of `T`.
///
/// # Safety
/// Every element of `slice` must be fully initialized.
pub unsafe fn slice_assume_init_ref<T>(slice: &[MaybeUninit<T>]) -> &[T] {
    &*(slice as *const [MaybeUninit<T>] as *const [T])
}
/// Fills up `storage` with `tag` and a necessary NUL terminator, optionally ellipsizing the input
/// `tag` if it's too large.
///
/// Returns a [`CStr`] containing the initialized portion of `storage`, including its NUL
/// terminator.
pub fn fill_tag_bytes<'a>(
    storage: &'a mut [MaybeUninit<u8>; LOGGING_TAG_MAX_LEN + 1],
    tag: &[u8],
) -> &'a CStr {
    // FIXME: Simplify when maybe_uninit_fill with MaybeUninit::fill_from() is stabilized
    let initialized = if tag.len() > LOGGING_TAG_MAX_LEN {
        for (input, output) in tag
            .iter()
            // Ellipsize the last two characters (TODO: use special … character)?
            .take(LOGGING_TAG_MAX_LEN - 2)
            .chain(b"..\0")
            .zip(storage.iter_mut())
        {
            output.write(*input);
        }
        // Truncated case fills the entire buffer.
        storage.as_slice()
    } else {
        for (input, output) in tag.iter().chain(b"\0").zip(storage.iter_mut()) {
            output.write(*input);
        }
        // Only tag bytes plus the terminator were written.
        &storage[..tag.len() + 1]
    };
    // SAFETY: The above code ensures that `initialized` only refers to a portion of the `array`
    // slice that was initialized, thus it is safe to cast those `MaybeUninit<u8>`s to `u8`:
    let initialized = unsafe { slice_assume_init_ref(initialized) };
    CStr::from_bytes_with_nul(initialized).expect("Unreachable: we wrote a nul terminator")
}
| rust | Apache-2.0 | 71fd4fd2ab3788063886fb56017597f424e511ac | 2026-01-04T20:17:15.588112Z | false |
rust-mobile/android_logger-rs | https://github.com/rust-mobile/android_logger-rs/blob/71fd4fd2ab3788063886fb56017597f424e511ac/src/tests.rs | src/tests.rs | use super::*;
use log::LevelFilter;
use std::sync::atomic::{AtomicBool, Ordering};
// Builder methods should store exactly what was passed in.
#[test]
fn check_config_values() {
    // Filter is checked in config_filter_match below.
    let config = Config::default()
        .with_max_level(LevelFilter::Trace)
        .with_log_buffer(LogId::System)
        .with_tag("my_app");
    assert_eq!(config.log_level, Some(LevelFilter::Trace));
    assert_eq!(config.buf_id, Some(LogId::System));
    assert_eq!(config.tag, Some(CString::new("my_app").unwrap()));
}
// A configured custom formatter must be invoked for an enabled record.
#[test]
fn log_calls_formatter() {
    static FORMAT_FN_WAS_CALLED: AtomicBool = AtomicBool::new(false);
    let config = Config::default()
        .with_max_level(LevelFilter::Info)
        .format(|_, _| {
            FORMAT_FN_WAS_CALLED.store(true, Ordering::SeqCst);
            Ok(())
        });
    let logger = AndroidLogger::new(config);
    logger.log(&Record::builder().level(log::Level::Info).build());
    assert!(FORMAT_FN_WAS_CALLED.load(Ordering::SeqCst));
}
// `enabled` passes levels at or above the configured max, rejects below.
#[test]
fn logger_enabled_threshold() {
    let logger = AndroidLogger::new(Config::default().with_max_level(LevelFilter::Info));
    assert!(logger.enabled(&log::MetadataBuilder::new().level(log::Level::Warn).build()));
    assert!(logger.enabled(&log::MetadataBuilder::new().level(log::Level::Info).build()));
    assert!(!logger.enabled(&log::MetadataBuilder::new().level(log::Level::Debug).build()));
}
// Test whether the filter gets called correctly. Not meant to be exhaustive for all filter
// options, as these are handled directly by the filter itself.
#[test]
fn config_filter_match() {
    let info_record = Record::builder().level(log::Level::Info).build();
    let debug_record = Record::builder().level(log::Level::Debug).build();
    // "info" filter: Info passes, Debug is rejected.
    let info_all_filter = env_filter::Builder::new().parse("info").build();
    let info_all_config = Config::default().with_filter(info_all_filter);
    assert!(info_all_config.filter_matches(&info_record));
    assert!(!info_all_config.filter_matches(&debug_record));
}
// Over-long tags are truncated to LOGGING_TAG_MAX_LEN with a ".." ellipsis.
#[test]
fn fill_tag_bytes_truncates_long_tag() {
    let too_long_tag = [b'a'; LOGGING_TAG_MAX_LEN + 20];
    let mut result = uninit_array();
    let tag = fill_tag_bytes(&mut result, &too_long_tag);
    // Expected: MAX_LEN - 2 'a's, then "..", then the NUL terminator.
    let mut expected_result = vec![b'a'; LOGGING_TAG_MAX_LEN - 2];
    expected_result.extend("..\0".as_bytes());
    assert_eq!(tag.to_bytes_with_nul(), expected_result);
}
// Tags within the limit are copied verbatim, plus a NUL terminator.
#[test]
fn fill_tag_bytes_keeps_short_tag() {
    let short_tag = [b'a'; 3];
    let mut result = uninit_array();
    let tag = fill_tag_bytes(&mut result, &short_tag);
    let mut expected_result = short_tag.to_vec();
    expected_result.push(0);
    assert_eq!(tag.to_bytes_with_nul(), expected_result);
}
| rust | Apache-2.0 | 71fd4fd2ab3788063886fb56017597f424e511ac | 2026-01-04T20:17:15.588112Z | false |
rust-mobile/android_logger-rs | https://github.com/rust-mobile/android_logger-rs/blob/71fd4fd2ab3788063886fb56017597f424e511ac/src/id.rs | src/id.rs | /// Possible identifiers of a specific buffer of Android logging system for
/// logging a message.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum LogId {
/// Main log buffer.
///
/// This is the only log buffer available to apps.
Main,
/// Radio log buffer.
Radio,
/// Event log buffer.
Events,
/// System log buffer.
System,
/// Crash log buffer.
Crash,
/// Kernel log buffer.
Kernel,
/// Security log buffer.
Security,
/// Statistics log buffer.
Stats,
}
#[cfg(target_os = "android")]
impl LogId {
    /// Maps the crate's `LogId` to liblog's native `log_id_t`, preserving
    /// `None` (meaning "use the default buffer").
    pub(crate) const fn to_native(log_id: Option<Self>) -> Option<log_ffi::log_id_t> {
        match log_id {
            Some(Self::Main) => Some(log_ffi::log_id_t::MAIN),
            Some(Self::Radio) => Some(log_ffi::log_id_t::RADIO),
            Some(Self::Events) => Some(log_ffi::log_id_t::EVENTS),
            Some(Self::System) => Some(log_ffi::log_id_t::SYSTEM),
            Some(Self::Crash) => Some(log_ffi::log_id_t::CRASH),
            Some(Self::Kernel) => Some(log_ffi::log_id_t::KERNEL),
            Some(Self::Security) => Some(log_ffi::log_id_t::SECURITY),
            Some(Self::Stats) => Some(log_ffi::log_id_t::STATS),
            None => None,
        }
    }
}
| rust | Apache-2.0 | 71fd4fd2ab3788063886fb56017597f424e511ac | 2026-01-04T20:17:15.588112Z | false |
rust-mobile/android_logger-rs | https://github.com/rust-mobile/android_logger-rs/blob/71fd4fd2ab3788063886fb56017597f424e511ac/src/platform_log_writer.rs | src/platform_log_writer.rs | use crate::arrays::slice_assume_init_ref;
use crate::{LOGGING_MSG_MAX_LEN, LogId, android_log, uninit_array};
use log::Level;
#[cfg(target_os = "android")]
use log_ffi::LogPriority;
use std::ffi::CStr;
use std::mem::MaybeUninit;
use std::{fmt, mem, ptr};
/// The purpose of this "writer" is to split logged messages on whitespace when the log message
/// length exceeds the maximum. Without allocations.
pub struct PlatformLogWriter<'a> {
    // Priority the chunks are emitted with (native type on Android).
    #[cfg(target_os = "android")]
    priority: LogPriority,
    #[cfg(not(target_os = "android"))]
    priority: Level,
    // Target log buffer; `None` means the default buffer.
    #[cfg(target_os = "android")]
    buf_id: Option<log_ffi::log_id_t>,
    #[cfg(not(target_os = "android"))]
    buf_id: Option<LogId>,
    // Number of initialized bytes currently held in `buffer`.
    len: usize,
    // Index of the last '\n' seen in `buffer` (0 when none) — preferred split point.
    last_newline_index: usize,
    // Tag used for every emitted chunk.
    tag: &'a CStr,
    // Staging buffer; one extra byte leaves room for a NUL terminator.
    buffer: [MaybeUninit<u8>; LOGGING_MSG_MAX_LEN + 1],
}
impl PlatformLogWriter<'_> {
    /// Creates a writer emitting at an explicit native liblog priority.
    #[cfg(target_os = "android")]
    pub fn new_with_priority(
        buf_id: Option<LogId>,
        priority: log_ffi::LogPriority,
        tag: &CStr,
    ) -> PlatformLogWriter<'_> {
        #[allow(deprecated)] // created an issue #35 for this
        PlatformLogWriter {
            priority,
            buf_id: LogId::to_native(buf_id),
            len: 0,
            last_newline_index: 0,
            tag,
            buffer: uninit_array(),
        }
    }
    /// Creates a writer, translating the `log::Level` to a liblog priority.
    #[cfg(target_os = "android")]
    pub fn new(buf_id: Option<LogId>, level: Level, tag: &CStr) -> PlatformLogWriter<'_> {
        PlatformLogWriter::new_with_priority(
            buf_id,
            match level {
                Level::Warn => LogPriority::WARN,
                Level::Info => LogPriority::INFO,
                Level::Debug => LogPriority::DEBUG,
                Level::Error => LogPriority::ERROR,
                Level::Trace => LogPriority::VERBOSE,
            },
            tag,
        )
    }
    /// Creates a writer for non-Android targets (keeps the `log::Level` as-is).
    #[cfg(not(target_os = "android"))]
    pub fn new(buf_id: Option<LogId>, level: Level, tag: &CStr) -> PlatformLogWriter<'_> {
        #[allow(deprecated)] // created an issue #35 for this
        PlatformLogWriter {
            priority: level,
            buf_id,
            len: 0,
            last_newline_index: 0,
            tag,
            buffer: uninit_array(),
        }
    }
    /// Flush some bytes to android logger.
    ///
    /// If there is a newline, flush up to it.
    /// If there was no newline, flush all.
    ///
    /// Not guaranteed to flush everything.
    fn temporal_flush(&mut self) {
        let total_len = self.len;
        if total_len == 0 {
            return;
        }
        if self.last_newline_index > 0 {
            // Emit up to the last newline, then shift the tail to the front.
            let copy_from_index = self.last_newline_index;
            let remaining_chunk_len = total_len - copy_from_index;
            unsafe { self.output_specified_len(copy_from_index) };
            self.copy_bytes_to_start(copy_from_index, remaining_chunk_len);
            self.len = remaining_chunk_len;
        } else {
            // No newline to split on: emit the whole buffer.
            unsafe { self.output_specified_len(total_len) };
            self.len = 0;
        }
        self.last_newline_index = 0;
    }
    /// Flush everything remaining to android logger.
    pub fn flush(&mut self) {
        let total_len = self.len;
        if total_len == 0 {
            return;
        }
        unsafe { self.output_specified_len(total_len) };
        self.len = 0;
        self.last_newline_index = 0;
    }
    /// Output buffer up until the \0 which will be placed at `len` position.
    ///
    /// The byte previously at `buffer[len]` is saved and restored afterwards,
    /// so the buffer contents are unchanged on return.
    ///
    /// # Safety
    /// The first `len` bytes of `self.buffer` must be initialized and not contain nullbytes.
    unsafe fn output_specified_len(&mut self, len: usize) {
        // Temporarily swap a NUL terminator into position `len`.
        let mut last_byte = MaybeUninit::new(b'\0');
        mem::swap(
            &mut last_byte,
            self.buffer.get_mut(len).expect("`len` is out of bounds"),
        );
        let initialized = unsafe { slice_assume_init_ref(&self.buffer[..len + 1]) };
        let msg = CStr::from_bytes_with_nul(initialized)
            .expect("Unreachable: nul terminator was placed at `len`");
        android_log(self.buf_id, self.priority, self.tag, msg);
        // Restore the displaced byte.
        unsafe { *self.buffer.get_unchecked_mut(len) = last_byte };
    }
    /// Copy `len` bytes from `index` position to starting position.
    fn copy_bytes_to_start(&mut self, index: usize, len: usize) {
        let dst = self.buffer.as_mut_ptr();
        let src = unsafe { self.buffer.as_ptr().add(index) };
        // ptr::copy handles the overlapping ranges (memmove semantics).
        unsafe { ptr::copy(src, dst, len) };
    }
}
impl fmt::Write for PlatformLogWriter<'_> {
    /// Appends `s` to the buffer, flushing (preferably at the last newline)
    /// whenever LOGGING_MSG_MAX_LEN would be exceeded. Interior nullbytes are
    /// replaced with spaces so the buffer stays CStr-compatible.
    fn write_str(&mut self, s: &str) -> fmt::Result {
        let mut incoming_bytes = s.as_bytes();
        while !incoming_bytes.is_empty() {
            let len = self.len;
            // write everything possible to buffer and mark last \n
            let new_len = len + incoming_bytes.len();
            let last_newline = self.buffer[len..LOGGING_MSG_MAX_LEN]
                .iter_mut()
                .zip(incoming_bytes)
                .enumerate()
                .fold(None, |acc, (i, (output, input))| {
                    if *input == b'\0' {
                        // Replace nullbytes with whitespace, so we can put the message in a CStr
                        // later to pass it through a const char*.
                        output.write(b' ');
                    } else {
                        output.write(*input);
                    }
                    if *input == b'\n' { Some(i) } else { acc }
                });
            // update last \n index
            if let Some(newline) = last_newline {
                self.last_newline_index = len + newline;
            }
            // calculate how many bytes were written
            let written_len = if new_len <= LOGGING_MSG_MAX_LEN {
                // if the len was not exceeded
                self.len = new_len;
                new_len - len // written len
            } else {
                // if new length was exceeded
                self.len = LOGGING_MSG_MAX_LEN;
                self.temporal_flush();
                LOGGING_MSG_MAX_LEN - len // written len
            };
            // Loop again for whatever did not fit this round.
            incoming_bytes = &incoming_bytes[written_len..];
        }
        Ok(())
    }
}
#[cfg(test)]
pub mod tests {
    use crate::arrays::slice_assume_init_ref;
    use crate::platform_log_writer::PlatformLogWriter;
    use log::Level;
    use std::ffi::CStr;
    use std::fmt::Write;
    // A fresh writer stores the tag and (off Android) the level unchanged.
    #[test]
    fn platform_log_writer_init_values() {
        let tag = CStr::from_bytes_with_nul(b"tag\0").unwrap();
        let writer = PlatformLogWriter::new(None, Level::Warn, tag);
        assert_eq!(writer.tag, tag);
        // Android uses LogPriority instead, which doesn't implement equality checks
        #[cfg(not(target_os = "android"))]
        assert_eq!(writer.priority, Level::Warn);
    }
    // temporal_flush emits up to the last newline first, then the remainder.
    #[test]
    fn temporal_flush() {
        let mut writer = get_tag_writer();
        writer
            .write_str("12\n\n567\n90")
            .expect("Unable to write to PlatformLogWriter");
        assert_eq!(writer.len, 10);
        writer.temporal_flush();
        // Should have flushed up until the last newline.
        assert_eq!(writer.len, 3);
        assert_eq!(writer.last_newline_index, 0);
        assert_eq!(
            unsafe { slice_assume_init_ref(&writer.buffer[..writer.len]) },
            "\n90".as_bytes()
        );
        writer.temporal_flush();
        // Should have flushed all remaining bytes.
        assert_eq!(writer.len, 0);
        assert_eq!(writer.last_newline_index, 0);
    }
    // flush drains the whole buffer regardless of newlines.
    #[test]
    fn flush() {
        let mut writer = get_tag_writer();
        writer
            .write_str("abcdefghij\n\nklm\nnopqr\nstuvwxyz")
            .expect("Unable to write to PlatformLogWriter");
        writer.flush();
        assert_eq!(writer.last_newline_index, 0);
        assert_eq!(writer.len, 0);
    }
    // The last '\n' in the written data is tracked as the split point.
    #[test]
    fn last_newline_index() {
        let mut writer = get_tag_writer();
        writer
            .write_str("12\n\n567\n90")
            .expect("Unable to write to PlatformLogWriter");
        assert_eq!(writer.last_newline_index, 7);
    }
    // output_specified_len must restore the byte it temporarily replaces.
    #[test]
    fn output_specified_len_leaves_buffer_unchanged() {
        let mut writer = get_tag_writer();
        let log_string = "abcdefghij\n\nklm\nnopqr\nstuvwxyz";
        writer
            .write_str(log_string)
            .expect("Unable to write to PlatformLogWriter");
        unsafe { writer.output_specified_len(5) };
        assert_eq!(
            unsafe { slice_assume_init_ref(&writer.buffer[..log_string.len()]) },
            log_string.as_bytes()
        );
    }
    // copy_bytes_to_start moves a tail chunk to the front of the buffer.
    #[test]
    fn copy_bytes_to_start() {
        let mut writer = get_tag_writer();
        writer
            .write_str("0123456789")
            .expect("Unable to write to PlatformLogWriter");
        writer.copy_bytes_to_start(3, 2);
        assert_eq!(
            unsafe { slice_assume_init_ref(&writer.buffer[..10]) },
            "3423456789".as_bytes()
        );
    }
    // Degenerate copies (from index 0, or zero length) leave the data intact.
    #[test]
    fn copy_bytes_to_start_nop() {
        let test_string = "Test_string_with\n\n\n\nnewlines\n";
        let mut writer = get_tag_writer();
        writer
            .write_str(test_string)
            .expect("Unable to write to PlatformLogWriter");
        writer.copy_bytes_to_start(0, 20);
        writer.copy_bytes_to_start(10, 0);
        assert_eq!(
            unsafe { slice_assume_init_ref(&writer.buffer[..test_string.len()]) },
            test_string.as_bytes()
        );
    }
    // Interior nullbytes must be replaced with spaces (CStr compatibility).
    #[test]
    fn writer_substitutes_nullbytes_with_spaces() {
        let test_string = "Test_string_with\0\0\0\0nullbytes\0";
        let mut writer = get_tag_writer();
        writer
            .write_str(test_string)
            .expect("Unable to write to PlatformLogWriter");
        assert_eq!(
            unsafe { slice_assume_init_ref(&writer.buffer[..test_string.len()]) },
            test_string.replace("\0", " ").as_bytes()
        );
    }
    // Helper: writer with a fixed "tag" and Warn level, default buffer.
    fn get_tag_writer() -> PlatformLogWriter<'static> {
        PlatformLogWriter::new(
            None,
            Level::Warn,
            CStr::from_bytes_with_nul(b"tag\0").unwrap(),
        )
    }
}
| rust | Apache-2.0 | 71fd4fd2ab3788063886fb56017597f424e511ac | 2026-01-04T20:17:15.588112Z | false |
rust-mobile/android_logger-rs | https://github.com/rust-mobile/android_logger-rs/blob/71fd4fd2ab3788063886fb56017597f424e511ac/tests/multiple_init.rs | tests/multiple_init.rs | extern crate android_logger;
extern crate log;
// `init_once` must only take effect the first time it is called.
#[test]
fn multiple_init() {
    android_logger::init_once(
        android_logger::Config::default().with_max_level(log::LevelFilter::Trace),
    );
    // Second initialization should be silently ignored
    android_logger::init_once(
        android_logger::Config::default().with_max_level(log::LevelFilter::Error),
    );
    // The max level configured by the first call wins.
    assert_eq!(log::max_level(), log::LevelFilter::Trace);
}
| rust | Apache-2.0 | 71fd4fd2ab3788063886fb56017597f424e511ac | 2026-01-04T20:17:15.588112Z | false |
rust-mobile/android_logger-rs | https://github.com/rust-mobile/android_logger-rs/blob/71fd4fd2ab3788063886fb56017597f424e511ac/tests/config_log_level.rs | tests/config_log_level.rs | extern crate android_logger;
extern crate log;
// `with_max_level` must be propagated to the global `log` crate max level.
#[test]
fn config_log_level() {
    android_logger::init_once(
        android_logger::Config::default().with_max_level(log::LevelFilter::Trace),
    );
    assert_eq!(log::max_level(), log::LevelFilter::Trace);
}
| rust | Apache-2.0 | 71fd4fd2ab3788063886fb56017597f424e511ac | 2026-01-04T20:17:15.588112Z | false |
rust-mobile/android_logger-rs | https://github.com/rust-mobile/android_logger-rs/blob/71fd4fd2ab3788063886fb56017597f424e511ac/tests/default_init.rs | tests/default_init.rs | extern crate android_logger;
extern crate log;
// Initializing with the default config leaves logging disabled.
#[test]
fn default_init() {
    android_logger::init_once(Default::default());
    // android_logger has default log level "off"
    assert_eq!(log::max_level(), log::LevelFilter::Off);
}
| rust | Apache-2.0 | 71fd4fd2ab3788063886fb56017597f424e511ac | 2026-01-04T20:17:15.588112Z | false |
rust-mobile/android_logger-rs | https://github.com/rust-mobile/android_logger-rs/blob/71fd4fd2ab3788063886fb56017597f424e511ac/examples/system_log_level_overrides.rs | examples/system_log_level_overrides.rs | //! An utility for testing the behavior of `android_logger` crate.
//!
//! ## Build
//!
//! 1. Setup [`cargo-ndk`](https://github.com/bbqsrc/cargo-ndk)
//!
//! ```
//! cargo install cargo-ndk
//! rustup target add x86_64-linux-android
//! ```
//!
//! 2. Build with `cargo ndk`:
//!
//! ```
//! ANDROID_NDK_HOME=/usr/lib/android-sdk/ndk/27.1.12297006 \
//! cargo ndk -t x86_64 build --release --features android-api-30 \
//! --example system_log_level_overrides
//! ```
//!
//! ## Run on emulator
//!
//! 1. Grab a [Cuttlefish](https://source.android.com/docs/devices/cuttlefish)
//! virtual device + Android build from [Android
//! CI](https://ci.android.com/builds/branches/aosp-main/grid?legacy=1). Select
//! the last green `aosp_cf_x86_64_phone` `trunk_staging-userdebug` build and
//! open "Artifacts" link, download:
//!
//! - `aosp_cf_x86_64_phone-img-BUILDNUMBER.zip`
//! - `cvd-host_package.tar.gz`
//!
//! 2. Unpack both archives & start the emulator.
//!
//! ```
//! cd $(mktemp -d)
//! unzip ~/Downloads/aosp_cf_x86_64_phone-img-*.zip
//! tar xf ~/Downloads/cvd-host_package.tar.gz
//! HOME=$PWD bin/launch_cvd
//! ```
//!
//! Once emulator launches, `adb` should detect it on `0.0.0.0:6520`
//! automatically. Shut down the `launch_cvd` command to exit the emulator.
//!
//! 3. Upload & run:
//!
//! ```
//! adb push ./target/x86_64-linux-android/release/examples/system_log_level_overrides /data/local/tmp/
//! adb shell /data/local/tmp/system_log_level_overrides
//! ```
//!
//! ## Test interaction with Android system properties
//!
//! See [`logd`
//! README](https://cs.android.com/android/platform/superproject/main/+/main:system/logging/logd/README.property)
//! in AOSP for details.
//!
//! ```
//! # default: should print info+ logs in `adb logcat -s log_test`
//! # hint: use `adb logcat -v color` is awesome too
//! adb shell /data/local/tmp/system_log_level_overrides
//!
//! # should print trace+ logs in `adb logcat -s log_test`
//! adb shell setprop log.tag V
//! adb shell /data/local/tmp/system_log_level_overrides
//!
//! # should print warn+ logs in `adb logcat -s log_test`
//! adb shell setprop log.tag.log_test W
//! adb shell /data/local/tmp/system_log_level_overrides
//! ```
fn main() {
    android_logger::init_once(
        android_logger::Config::default()
            .with_tag("log_test")
            // If set, this is the highest level to log unless overridden by the system.
            // Note the verbosity can be *increased* through system properties.
            .with_max_level(log::LevelFilter::Info),
    );
    // The log crate applies its filtering before we even get to android_logger.
    // Pass everything down so that Android's liblog can determine the log level instead.
    log::set_max_level(log::LevelFilter::Trace);
    // Emit one record at every level so the effect of the `log.tag` /
    // `log.tag.log_test` system-property overrides is visible in logcat.
    log::trace!("trace");
    log::debug!("debug");
    log::info!("info");
    log::warn!("warn");
    log::error!("error");
}
| rust | Apache-2.0 | 71fd4fd2ab3788063886fb56017597f424e511ac | 2026-01-04T20:17:15.588112Z | false |
naomijub/wooridb | https://github.com/naomijub/wooridb/blob/bcef59acbf1bb1509a8b3d1b2f914e45921ba026/wql/src/test.rs | wql/src/test.rs | use super::*;
use std::collections::HashMap;
use uuid::Uuid;
// Parser tests for `CREATE ENTITY`, covering the optional UNIQUES and
// ENCRYPT key sets and the rule that an encrypted key cannot also be UNIQUE.
#[cfg(test)]
mod test_create {
    use std::str::FromStr;
    use super::*;
    #[test]
    fn empty_wql() {
        let wql = Wql::from_str("");
        assert_eq!(wql.err(), Some(String::from("Empty WQL")));
    }
    #[test]
    fn create_shit() {
        let wql = Wql::from_str("CREATE SHIT oh_yeah");
        assert_eq!(
            wql.err(),
            Some(String::from("Keyword ENTITY is required for CREATE"))
        );
    }
    #[test]
    fn create_misspelled() {
        let wql = Wql::from_str("KREATE ENTITY misspelled");
        assert_eq!(
            wql.err(),
            Some(String::from("Symbol `KREATE` not implemented"))
        );
    }
    #[test]
    fn create_entity() {
        let wql = Wql::from_str("CREATE ENTITY entity");
        assert_eq!(
            wql.unwrap(),
            Wql::CreateEntity(String::from("entity"), Vec::new(), Vec::new())
        );
    }
    // Trailing commas inside `#{...}` sets are accepted.
    #[test]
    fn create_entity_with_uniques() {
        let wql = Wql::from_str("CREATE ENTITY entity UNIQUES #{name, ssn,something,}");
        assert_eq!(
            wql.unwrap(),
            Wql::CreateEntity(
                String::from("entity"),
                vec![
                    "name".to_string(),
                    "ssn".to_string(),
                    "something".to_string()
                ],
                Vec::new()
            )
        );
    }
    #[test]
    fn create_entity_with_encrypt() {
        let wql = Wql::from_str("CREATE ENTITY entity ENCRYPT #{name, ssn,something,}");
        assert_eq!(
            wql.unwrap(),
            Wql::CreateEntity(
                String::from("entity"),
                Vec::new(),
                vec![
                    "name".to_string(),
                    "ssn".to_string(),
                    "something".to_string()
                ],
            )
        );
    }
    #[test]
    fn create_entity_with_encrypt_and_uniques() {
        let wql = Wql::from_str(
            "CREATE ENTITY entity ENCRYPT #{password,something,} UNIQUES #{name, ssn,}",
        );
        assert_eq!(
            wql.unwrap(),
            Wql::CreateEntity(
                String::from("entity"),
                vec!["name".to_string(), "ssn".to_string(),],
                vec!["password".to_string(), "something".to_string()],
            )
        );
    }
    // Overlapping ENCRYPT/UNIQUES keys are rejected regardless of clause order.
    #[test]
    fn create_uniques_in_encrypt() {
        let wql = Wql::from_str(
            "CREATE ENTITY entity ENCRYPT #{password,something,} UNIQUES #{name, something,}",
        );
        assert_eq!(
            wql.err(),
            Some(String::from("Encrypted arguments cannot be set to UNIQUE"))
        );
    }
    #[test]
    fn create_encrypts_in_uniques() {
        let wql = Wql::from_str(
            "CREATE ENTITY entity UNIQUES #{name, something,} ENCRYPT #{password,something,}",
        );
        assert_eq!(
            wql.err(),
            Some(String::from("Encrypted arguments cannot be set to UNIQUE"))
        );
    }
    #[test]
    fn create_entity_with_uniques_and_encrypt() {
        let wql = Wql::from_str(
            "CREATE ENTITY entity UNIQUES #{name, ssn,} ENCRYPT #{password,something,}",
        );
        assert_eq!(
            wql.unwrap(),
            Wql::CreateEntity(
                String::from("entity"),
                vec!["name".to_string(), "ssn".to_string(),],
                vec!["password".to_string(), "something".to_string()],
            )
        );
    }
}
// Parser tests for `DELETE <uuid> FROM <entity>` and its error messages.
#[cfg(test)]
mod test_delete {
    use std::str::FromStr;
    use super::*;
    // The id is kept as raw text at parse time (not validated as a UUID here).
    #[test]
    fn delete_id() {
        let wql = Wql::from_str("DELETE this-is-an-uuid FROM my_entity");
        assert_eq!(
            wql.unwrap(),
            Wql::Delete("my_entity".to_string(), "this-is-an-uuid".to_string())
        )
    }
    #[test]
    fn delete_missing_id() {
        let wql = Wql::from_str("DELETE FROM my_entity");
        assert_eq!(
            wql.err(),
            Some(String::from("Entity UUID is required for DELETE"))
        );
    }
    #[test]
    fn delete_missing_keyword_from() {
        let wql = Wql::from_str("DELETE this-is-an-uuid my_entity");
        assert_eq!(
            wql.err(),
            Some(String::from("Keyword FROM is required for DELETE"))
        );
    }
    #[test]
    fn delete_missing_entity() {
        let wql = Wql::from_str("DELETE this-is-an-uuid FROM");
        assert_eq!(
            wql.err(),
            Some(String::from("Entity name is required after FROM"))
        );
    }
}
// Parser tests for `INSERT {..} INTO <entity>`, covering every primitive
// value type plus the arbitrary-precision `P` suffix.
#[cfg(test)]
mod test_insert {
    use std::str::FromStr;
    use super::*;
    #[test]
    fn insert_entity() {
        let wql = Wql::from_str(
            "INSERT {
            a: 123,
            b: 12.3,
            c: 'd' ,
            d: true ,
            e: false,
            f: \"hello\",
            g: NiL
        } INTO my_entity",
        );
        assert_eq!(
            wql.unwrap(),
            Wql::Insert("my_entity".to_string(), hashmap(), None)
        );
    }
    // A trailing `P` parses the number as `Types::Precise`, preserved as text.
    #[test]
    fn insert_precise() {
        let wql = Wql::from_str(
            "INSERT {
            a: 98347883122138743294728345738925783257325789353593473247832493483478935673.9347324783249348347893567393473247832493483478935673P,
        } INTO my_entity",
        );
        let mut hm = HashMap::new();
        hm.insert("a".to_string(), Types::Precise("98347883122138743294728345738925783257325789353593473247832493483478935673.9347324783249348347893567393473247832493483478935673".to_string()));
        assert_eq!(wql.unwrap(), Wql::Insert("my_entity".to_string(), hm, None));
    }
    #[test]
    fn insert_missing_into() {
        let wql = Wql::from_str(
            "INSERT {
            a: 123,
        } INTRO my_entity",
        );
        assert_eq!(
            wql.err(),
            Some(String::from("Keyword INTO is required for INSERT"))
        );
    }
    #[test]
    fn insert_missing_entity_name() {
        let wql = Wql::from_str(
            "INSERT {
            a: 123,
        } INTO ",
        );
        assert_eq!(
            wql.err(),
            Some(String::from("Entity name is required after INTO"))
        );
    }
    // Expected entity map for `insert_entity`.
    fn hashmap() -> Entity {
        let mut hm = HashMap::new();
        hm.insert("a".to_string(), Types::Integer(123));
        hm.insert("b".to_string(), Types::Float(12.3));
        hm.insert("c".to_string(), Types::Char('d'));
        hm.insert("d".to_string(), Types::Boolean(true));
        hm.insert("e".to_string(), Types::Boolean(false));
        hm.insert("f".to_string(), Types::String("hello".to_string()));
        hm.insert("g".to_string(), Types::Nil);
        hm
    }
}
// Parser tests for `UPDATE <entity> SET/CONTENT {..} INTO <uuid>`.
#[cfg(test)]
mod test_update {
    use std::str::FromStr;
    use super::*;
    #[test]
    fn update_set_entity() {
        let wql = Wql::from_str(
            "UPDATE this_entity
        SET {
            a: 123,
            g: NiL
        }
        INTO d6ca73c0-41ff-4975-8a60-fc4a061ce536",
        );
        assert_eq!(
            wql.unwrap(),
            Wql::UpdateSet(
                "this_entity".to_string(),
                hashmap(),
                Uuid::from_str("d6ca73c0-41ff-4975-8a60-fc4a061ce536").unwrap()
            )
        );
    }
    // Action keywords are case-insensitive ("Content" here).
    #[test]
    fn update_content_entity() {
        let wql = Wql::from_str(
            "UPDATE this_entity
        Content {
            a: 123,
            g: NiL
        }
        INTO d6ca73c0-41ff-4975-8a60-fc4a061ce536",
        );
        assert_eq!(
            wql.unwrap(),
            Wql::UpdateContent(
                "this_entity".to_string(),
                hashmap(),
                Uuid::from_str("d6ca73c0-41ff-4975-8a60-fc4a061ce536").unwrap()
            )
        );
    }
    #[test]
    fn update_set_missing_entity() {
        let wql = Wql::from_str(
            "UPDATE
        SET {
            a: 123,
            g: NiL
        }
        INTO d6ca73c0-41ff-4975-8a60-fc4a061ce536",
        );
        assert_eq!(
            wql.err(),
            Some(String::from("Entity name is required for UPDATE"))
        );
    }
    // Expected update map shared by the happy-path tests above.
    fn hashmap() -> Entity {
        let mut hm = HashMap::new();
        hm.insert("a".to_string(), Types::Integer(123));
        hm.insert("g".to_string(), Types::Nil);
        hm
    }
    #[test]
    fn update_entity_misspelled_action() {
        let wql = Wql::from_str(
            "UPDATE this_entity
        TO {
            a: 123,
            g: NiL
        }
        INTO d6ca73c0-41ff-4975-8a60-fc4a061ce536",
        );
        assert_eq!(
            wql.err(),
            Some(String::from(
                "UPDATE type is required after entity. Keywords are SET or CONTENT"
            ))
        );
    }
    #[test]
    fn update_entity_missing_into() {
        let wql = Wql::from_str(
            "UPDATE this_entity
        SET {
            a: 123,
            g: NiL
        }
        to d6ca73c0-41ff-4975-8a60-fc4a061ce536",
        );
        assert_eq!(
            wql.err(),
            Some(String::from("Keyword INTO is required for UPDATE"))
        );
    }
    // Only the error prefix is checked: the message embeds the uuid crate's
    // parse error, whose exact text is not this crate's contract.
    #[test]
    fn update_entity_missing_uuid() {
        let wql = Wql::from_str(
            "UPDATE this_entity
        SET {
            a: 123,
            g: NiL
        }
        into Some-crazy-id",
        );
        assert!(wql
            .err()
            .unwrap()
            .starts_with("Couldn\'t create uuid from Some-crazy-id"));
    }
}
// Parser tests for `MATCH ALL/ANY(..) UPDATE .. SET {..} INTO <uuid>` and
// for the low-level `read_match_args` comparison parser.
#[cfg(test)]
mod test_match {
    use super::*;
    use std::str::FromStr;
    // All six comparison operators map to their MatchCondition variants.
    #[test]
    fn test_read_match_args() {
        let mut args = "(a == 1, b != 2, c > 3, d >= 4, e < 5, f <= 6)".chars();
        let actual = read_match_args(&mut args).unwrap();
        let expected = vec![
            MatchCondition::Eq("a".to_string(), Types::Integer(1)),
            MatchCondition::NotEq("b".to_string(), Types::Integer(2)),
            MatchCondition::G("c".to_string(), Types::Integer(3)),
            MatchCondition::GEq("d".to_string(), Types::Integer(4)),
            MatchCondition::L("e".to_string(), Types::Integer(5)),
            MatchCondition::LEq("f".to_string(), Types::Integer(6)),
        ];
        assert_eq!(actual, expected);
    }
    #[test]
    fn match_update_set_entity() {
        let wql = Wql::from_str(
            " MATCH ALL(a == 1, b >= 3, c != \"hello\", d < 7,)
        UPDATE this_entity
        SET {
            a: 123,
            g: NiL
        }
        INTO d6ca73c0-41ff-4975-8a60-fc4a061ce536",
        );
        assert_eq!(
            wql.unwrap(),
            Wql::MatchUpdate(
                "this_entity".to_string(),
                hashmap(),
                Uuid::from_str("d6ca73c0-41ff-4975-8a60-fc4a061ce536").unwrap(),
                MatchCondition::All(vec![
                    MatchCondition::Eq("a".to_string(), Types::Integer(1)),
                    MatchCondition::GEq("b".to_string(), Types::Integer(3)),
                    MatchCondition::NotEq("c".to_string(), Types::String("hello".to_string())),
                    MatchCondition::L("d".to_string(), Types::Integer(7)),
                ])
            )
        );
    }
    // The ALL/ANY combinator in front of the condition list is mandatory.
    #[test]
    fn match_update_missing_logical_arg() {
        let wql = Wql::from_str(
            " MATCH (a == 1, b >= 3, c != \"hello\", d < 7)
        UPDATE this_entity
        SET {
            a: 123,
            g: NiL
        }
        INTO d6ca73c0-41ff-4975-8a60-fc4a061ce536",
        );
        assert_eq!(
            wql.err().unwrap(),
            String::from("MATCH requires ALL or ANY symbols")
        );
    }
    #[test]
    fn match_update_missing_update_key() {
        let wql = Wql::from_str(
            " MATCH Any(a == 1, b >= 3, c != \"hello\", d < 7)
        this_entity
        SET {
            a: 123,
            g: NiL
        }
        INTO d6ca73c0-41ff-4975-8a60-fc4a061ce536",
        );
        assert_eq!(
            wql.err().unwrap(),
            String::from("UPDATE keyword is required for MATCH UPDATE")
        );
    }
    #[test]
    fn match_update_missing_entity_name() {
        let wql = Wql::from_str(
            " MATCH All(a == 1, b >= 3, c != \"hello\", d < 7)
        UPDATE
        SET {
            a: 123,
            g: NiL
        }
        INTO d6ca73c0-41ff-4975-8a60-fc4a061ce536",
        );
        assert_eq!(
            wql.err().unwrap(),
            String::from("Entity name is required for MATCH UPDATE")
        );
    }
    #[test]
    fn match_update_missing_set() {
        let wql = Wql::from_str(
            " MATCH All(a == 1, b >= 3, c != \"hello\", d < 7)
        UPDATE this_entity
        {
            a: 123,
            g: NiL
        }
        INTO d6ca73c0-41ff-4975-8a60-fc4a061ce536",
        );
        assert_eq!(
            wql.err().unwrap(),
            String::from("MATCH UPDATE type is required after entity. Keyword is SET")
        );
    }
    #[test]
    fn match_update_missing_content() {
        let wql = Wql::from_str(
            " MATCH All(a == 1, b >= 3, c != \"hello\", d < 7)
        UPDATE this_entity
        SET
        INTO d6ca73c0-41ff-4975-8a60-fc4a061ce536",
        );
        assert_eq!(
            wql.err().unwrap(),
            String::from("Entity map should start with `{` and end with `}`")
        );
    }
    #[test]
    fn match_update_missing_into() {
        let wql = Wql::from_str(
            " MATCH All(a == 1, b >= 3, c != \"hello\", d < 7)
        UPDATE this_entity
        SET {
            a: 123,
            g: NiL
        }
        d6ca73c0-41ff-4975-8a60-fc4a061ce536",
        );
        assert_eq!(
            wql.err().unwrap(),
            String::from("Keyword INTO is required for MATCH UPDATE")
        );
    }
    #[test]
    fn match_update_missing_id() {
        let wql = Wql::from_str(
            " MATCH All(a == 1, b >= 3, c != \"hello\", d < 7)
        UPDATE this_entity
        SET {
            a: 123,
            g: NiL
        }
        INTO",
        );
        assert!(wql
            .err()
            .unwrap()
            .starts_with("Couldn\'t create uuid from "));
    }
    // Expected update map shared by `match_update_set_entity`.
    fn hashmap() -> Entity {
        let mut hm = HashMap::new();
        hm.insert("a".to_string(), Types::Integer(123));
        hm.insert("g".to_string(), Types::Nil);
        hm
    }
}
// Parser tests for `EVICT <entity>` and `EVICT <uuid> FROM <entity>`.
#[cfg(test)]
mod evict {
    use std::str::FromStr;
    use super::*;
    // Evicting by name alone carries no uuid.
    #[test]
    fn evict_entity() {
        let wql = Wql::from_str("EVICT my_entity");
        assert_eq!(wql.unwrap(), Wql::Evict(String::from("my_entity"), None));
    }
    #[test]
    fn evict_entity_with_dash() {
        let wql = Wql::from_str("EVICT my-entity");
        assert_eq!(
            wql.err(),
            Some(String::from("Entity name cannot contain `-`"))
        );
    }
    #[test]
    fn evict_entity_from_id() {
        let wql = Wql::from_str("EVICT d6ca73c0-41ff-4975-8a60-fc4a061ce536 FROM my_entity");
        assert_eq!(
            wql.unwrap(),
            Wql::Evict(
                String::from("my_entity"),
                Uuid::from_str("d6ca73c0-41ff-4975-8a60-fc4a061ce536").ok()
            )
        );
    }
    #[test]
    fn evict_entity_without_from() {
        let wql = Wql::from_str("EVICT d6ca73c0-41ff-4975-8a60-fc4a061ce536 my_entity");
        assert_eq!(
            wql.err(),
            Some(String::from("Keyword FROM is required to EVICT an UUID"))
        );
    }
    #[test]
    fn evict_entity_without_entity_name() {
        let wql = Wql::from_str("EVICT d6ca73c0-41ff-4975-8a60-fc4a061ce536 FROM");
        assert_eq!(
            wql.err(),
            Some(String::from("Entity name is required for EVICT"))
        );
    }
}
// Parser tests for composite INSERT payloads: vectors, nested vectors,
// maps, date-times, and explicit `WITH <uuid>` ids.
// (Module renamed from the misspelled `test_data_sructures`; cfg(test)
// module names are not referenced elsewhere, so the rename is safe.)
#[cfg(test)]
mod test_data_structures {
    use super::*;
    #[test]
    fn insert_vec() {
        let wql = Wql::from_str(
            "INSERT {
            a: 123,
            b: [12.3, 34, \"hello\",]
        } INTO my_entity",
        );
        assert_eq!(
            wql.unwrap(),
            Wql::Insert("my_entity".to_string(), hashmap(), None)
        );
    }
    // RFC 3339 timestamps parse into `Types::DateTime`.
    #[test]
    fn insert_time() {
        use chrono::{DateTime, Utc};
        let wql = Wql::from_str(
            "INSERT {
            time: 2014-11-28T12:00:09Z,
        } INTO my_entity",
        );
        let hm: HashMap<String, Types> = vec![(
            "time".to_string(),
            Types::DateTime("2014-11-28T12:00:09Z".parse::<DateTime<Utc>>().unwrap()),
        )]
        .iter()
        .cloned()
        .collect();
        assert_eq!(wql.unwrap(), Wql::Insert("my_entity".to_string(), hm, None));
    }
    #[test]
    fn insert_vec_in_vec() {
        let wql = Wql::from_str(
            "INSERT {
            a: 123,
            b: [12.3, 34, [\"hello\"]]
        } INTO my_entity",
        );
        assert_eq!(
            wql.unwrap(),
            Wql::Insert("my_entity".to_string(), hashmap2(), None)
        );
    }
    // Bare identifiers are not valid values inside a vector.
    #[test]
    fn insert_vec_err() {
        let wql = Wql::from_str(
            "INSERT {
            a: 123,
            b: [12.3, 34, \"hello\", nkjsld,]
        } INTO my_entity",
        );
        assert_eq!(
            wql.err(),
            Some(String::from("Value Type could not be created from nkjsld"))
        );
    }
    // An explicit id must be introduced by WITH, not ID.
    #[test]
    fn insert_with_err() {
        let wql = Wql::from_str(
            "INSERT {
            a: 123,
            b: [12.3, 34, \"hello\",]
        } INTO my_entity
        ID 555555-5555-444444",
        );
        assert_eq!(
            wql.err(),
            Some(String::from(
                "Keyword WITH is required for INSERT with Uuid"
            ))
        );
    }
    #[test]
    fn insert_vec_with_map() {
        let wql = Wql::from_str(
            "INSERT {
            a: 123,
            b: { a: 12.3, b: 34, }
        } INTO my_entity",
        );
        assert_eq!(
            wql.unwrap(),
            Wql::Insert("my_entity".to_string(), hashmap3(), None)
        );
    }
    #[test]
    fn insert_vec_with_map_and_id() {
        let uuid = Uuid::parse_str("13ca62fc-241b-4af6-87c3-0ae4015f9967").ok();
        let wql = Wql::from_str(
            "INSERT {
            a: 123,
            b: { a: 12.3, b: 34, }
        } INTO my_entity
        WITH 13ca62fc-241b-4af6-87c3-0ae4015f9967",
        );
        assert_eq!(
            wql.unwrap(),
            Wql::Insert("my_entity".to_string(), hashmap3(), uuid)
        );
    }
    // Expected map with a flat vector value.
    fn hashmap() -> HashMap<String, Types> {
        let mut hm = HashMap::new();
        hm.insert("a".to_string(), Types::Integer(123));
        hm.insert(
            "b".to_string(),
            Types::Vector(vec![
                Types::Float(12.3),
                Types::Integer(34),
                Types::String("hello".to_string()),
            ]),
        );
        hm
    }
    // Expected map with a nested vector value.
    fn hashmap2() -> HashMap<String, Types> {
        let mut hm = HashMap::new();
        hm.insert("a".to_string(), Types::Integer(123));
        hm.insert(
            "b".to_string(),
            Types::Vector(vec![
                Types::Float(12.3),
                Types::Integer(34),
                Types::Vector(vec![Types::String("hello".to_string())]),
            ]),
        );
        hm
    }
    // Expected map with a nested map value.
    fn hashmap3() -> HashMap<String, Types> {
        let mut inner_map = HashMap::new();
        inner_map.insert("a".to_string(), Types::Float(12.3));
        inner_map.insert("b".to_string(), Types::Integer(34));
        let mut hm = HashMap::new();
        hm.insert("a".to_string(), Types::Integer(123));
        hm.insert("b".to_string(), Types::Map(inner_map));
        hm
    }
}
// Parser tests for `CHECK {..} FROM <entity> ID <uuid>` (verification of
// encrypted values).
#[cfg(test)]
mod check {
    use std::collections::HashMap;
    use std::str::FromStr;
    use super::*;
    #[test]
    fn check_encrypt_values() {
        let wql = Wql::from_str(
            "CHECK {
            ssn: 123,
            pswd: \"my-password\"
        } FROM my_entity ID d6ca73c0-41ff-4975-8a60-fc4a061ce536",
        );
        let uuid = Uuid::from_str("d6ca73c0-41ff-4975-8a60-fc4a061ce536").unwrap();
        assert_eq!(
            wql.unwrap(),
            Wql::CheckValue("my_entity".to_string(), uuid, hashmap())
        );
    }
    // CHECK keeps the raw textual values — no type coercion of `123`.
    fn hashmap() -> HashMap<String, String> {
        let mut hm = HashMap::new();
        hm.insert("ssn".to_string(), "123".to_string());
        hm.insert("pswd".to_string(), "my-password".to_string());
        hm
    }
}
// Parser tests for `SELECT .. WHERE { .. }` clauses: IN/BETWEEN functions,
// OR groups, `?var` attribute bindings, and a trailing GROUP BY.
#[cfg(test)]
mod test_where {
    use super::*;
    // Keywords are case-insensitive ("WherE").
    #[test]
    fn where_ok() {
        let query = "Select * FROM my_entity WherE {
            (in ?id 32434 45345 345346436),
            (between ?age 30 35),
        }";
        let wql = Wql::from_str(query);
        assert_eq!(
            wql.unwrap(),
            Wql::SelectWhere(
                "my_entity".to_string(),
                ToSelect::All,
                vec![
                    Clause::ComplexComparisonFunctions(
                        where_clause::Function::In,
                        "?id".to_string(),
                        vec![
                            Types::Integer(32434),
                            Types::Integer(45345),
                            Types::Integer(345346436),
                        ]
                    ),
                    Clause::ComplexComparisonFunctions(
                        where_clause::Function::Between,
                        "?age".to_string(),
                        vec![Types::Integer(30), Types::Integer(35),]
                    )
                ],
                HashMap::new()
            )
        )
    }
    // `?* entity:key ?var` binds an attribute; `(or ...)` groups conditions.
    #[test]
    fn or_clause() {
        let query = "Select * FROM my_entity WherE {
            ?* my_entity:a ?a,
            ?* my_entity:c ?c,
            (== ?a 123),
            (or
                (>= c 4300.0)
                (< c 6.9)
            ),
        }";
        let wql = Wql::from_str(query);
        assert_eq!(
            wql.unwrap(),
            Wql::SelectWhere(
                "my_entity".to_string(),
                ToSelect::All,
                vec![
                    Clause::ValueAttribution(
                        "my_entity".to_string(),
                        "a".to_string(),
                        Value("?a".to_string())
                    ),
                    Clause::ValueAttribution(
                        "my_entity".to_string(),
                        "c".to_string(),
                        Value("?c".to_string())
                    ),
                    Clause::SimpleComparisonFunction(
                        Function::Eq,
                        "?a".to_string(),
                        Types::Integer(123)
                    ),
                    Clause::Or(
                        Function::Or,
                        vec![
                            Clause::SimpleComparisonFunction(
                                Function::GEq,
                                "c".to_string(),
                                Types::Float(4300.0)
                            ),
                            Clause::SimpleComparisonFunction(
                                Function::L,
                                "c".to_string(),
                                Types::Float(6.9)
                            )
                        ]
                    )
                ],
                HashMap::new()
            )
        )
    }
    // A trailing GROUP BY lands in the algebra-functions map keyed "GROUP".
    #[test]
    fn select_where_groupby() {
        let query = "Select * FROM my_entity WHERE {
            ?* my_entity:name \"julia\",
            ?* my_entity:id 349875325,
        } GROUP BY amazing_key";
        let wql = Wql::from_str(query);
        let hm: HashMap<String, Algebra> = vec![(
            "GROUP".to_string(),
            Algebra::GroupBy(String::from("amazing_key")),
        )]
        .iter()
        .cloned()
        .collect();
        assert_eq!(
            wql.unwrap(),
            Wql::SelectWhere(
                "my_entity".to_string(),
                ToSelect::All,
                vec![
                    Clause::ContainsKeyValue(
                        "my_entity".to_string(),
                        "name".to_string(),
                        Types::String("julia".to_string())
                    ),
                    Clause::ContainsKeyValue(
                        "my_entity".to_string(),
                        "id".to_string(),
                        Types::Integer(349875325)
                    ),
                ],
                hm
            )
        )
    }
}
// Parser tests for relation queries: INTERSECT/DIFFERENCE/UNION over two
// `|`-separated SELECTs, with KEY or KEY-VALUE comparison semantics.
#[cfg(test)]
mod diff_intersect {
    use std::str::FromStr;
    use super::*;
    #[test]
    fn intersect_key() {
        let f_uuid = Uuid::from_str("2df2b8cf-49da-474d-8a00-c596c0bb6fd1").ok();
        let s_uuid = Uuid::from_str("49dab8cf-2df2-474d-6fd1-c596c0bb8a00").ok();
        let query = "INTERSECT KEY SelEct * FROM my_entity ID 2df2b8cf-49da-474d-8a00-c596c0bb6fd1 | SelEct * FROM my_entity ID 49dab8cf-2df2-474d-6fd1-c596c0bb8a00";
        let wql = Wql::from_str(query);
        assert_eq!(
            wql.unwrap(),
            Wql::RelationQuery(
                vec![
                    Wql::Select(
                        "my_entity".to_string(),
                        ToSelect::All,
                        f_uuid,
                        HashMap::new()
                    ),
                    Wql::Select(
                        "my_entity".to_string(),
                        ToSelect::All,
                        s_uuid,
                        HashMap::new()
                    ),
                ],
                Relation::Intersect,
                RelationType::Key
            )
        );
    }
    // A `WHEN AT` on the second select becomes a nested SelectWhen.
    #[test]
    fn diff_key_value() {
        let f_uuid = Uuid::from_str("2df2b8cf-49da-474d-8a00-c596c0bb6fd1").ok();
        let s_uuid = Uuid::from_str("49dab8cf-2df2-474d-6fd1-c596c0bb8a00").ok();
        let query = "DIFFERENCE KEY-VALUE SelEct * FROM my_entity ID 2df2b8cf-49da-474d-8a00-c596c0bb6fd1 | SelEct * FROM my_entity ID 49dab8cf-2df2-474d-6fd1-c596c0bb8a00 WHEN AT 2020-01-01T00:00:00Z";
        let wql = Wql::from_str(query);
        assert_eq!(
            wql.unwrap(),
            Wql::RelationQuery(
                vec![
                    Wql::Select(
                        "my_entity".to_string(),
                        ToSelect::All,
                        f_uuid,
                        HashMap::new()
                    ),
                    Wql::SelectWhen(
                        "my_entity".to_string(),
                        ToSelect::All,
                        s_uuid,
                        "2020-01-01T00:00:00Z".to_string()
                    ),
                ],
                Relation::Difference,
                RelationType::KeyValue
            )
        );
    }
    #[test]
    fn union_key() {
        let f_uuid = Uuid::from_str("2df2b8cf-49da-474d-8a00-c596c0bb6fd1").ok();
        let s_uuid = Uuid::from_str("49dab8cf-2df2-474d-6fd1-c596c0bb8a00").ok();
        let query = "UNION KEY SelEct * FROM my_entity ID 2df2b8cf-49da-474d-8a00-c596c0bb6fd1 | SelEct * FROM my_entity ID 49dab8cf-2df2-474d-6fd1-c596c0bb8a00 WHEN AT 2020-01-01T00:00:00Z";
        let wql = Wql::from_str(query);
        assert_eq!(
            wql.unwrap(),
            Wql::RelationQuery(
                vec![
                    Wql::Select(
                        "my_entity".to_string(),
                        ToSelect::All,
                        f_uuid,
                        HashMap::new()
                    ),
                    Wql::SelectWhen(
                        "my_entity".to_string(),
                        ToSelect::All,
                        s_uuid,
                        "2020-01-01T00:00:00Z".to_string()
                    ),
                ],
                Relation::Union,
                RelationType::Key
            )
        );
    }
}
| rust | MIT | bcef59acbf1bb1509a8b3d1b2f914e45921ba026 | 2026-01-04T20:17:21.692961Z | false |
naomijub/wooridb | https://github.com/naomijub/wooridb/blob/bcef59acbf1bb1509a8b3d1b2f914e45921ba026/wql/src/select.rs | wql/src/select.rs | use serde::{Deserialize, Serialize};
use std::{collections::HashMap, str::FromStr};
use uuid::Uuid;
// Relational-algebra function keywords that may trail a SELECT body.
const ALGEBRA: [&str; 6] = ["DEDUP", "GROUP", "ORDER", "OFFSET", "LIMIT", "COUNT"];
// All keywords accepted after `SELECT ... FROM <entity>`; anything else
// following the entity name is a parse error.
const OPERATORS: [&str; 10] = [
    "ID", "IDS", "WHERE", "WHEN", "DEDUP", "GROUP", "ORDER", "OFFSET", "LIMIT", "COUNT",
];
/// Sort direction for `ORDER BY`, parsed from `:asc` / `:desc`.
#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
pub enum Order {
    Asc,
    Desc,
}
impl std::str::FromStr for Order {
    type Err = String;

    /// Parses the textual sort direction: exactly `":asc"` or `":desc"`;
    /// any other input yields an error message.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s {
            ":asc" => Ok(Self::Asc),
            ":desc" => Ok(Self::Desc),
            _ => Err(String::from("Order parameter should be :asc/:desc")),
        }
    }
}
/// Parsed relational-algebra modifier attached to a SELECT; stored in a
/// map keyed by its keyword (see `algebra_functions`).
#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
pub enum Algebra {
    Dedup(String),          // DEDUP <key>
    GroupBy(String),        // GROUP BY <key>
    OrderBy(String, Order), // ORDER BY <key> :asc/:desc
    Limit(usize),           // LIMIT <n>
    Offset(usize),          // OFFSET <n>
    Count,                  // COUNT
}
use crate::where_clause::where_selector;
use super::{
logic::{read_select_args, read_uuids},
ToSelect, Wql,
};
/// Parses the remainder of a `SELECT *` statement (everything after `*`),
/// delegating to the shared body parser with a wildcard projection.
#[allow(clippy::redundant_pub_crate)]
pub(crate) fn select_all(chars: &mut std::str::Chars) -> Result<Wql, String> {
    select_body(ToSelect::All, chars)
}
/// Parses the remainder of a `SELECT #{k1, k2, ...}` statement: reads the
/// projected key set, then delegates to the shared body parser.
#[allow(clippy::redundant_pub_crate)]
pub(crate) fn select_args(chars: &mut std::str::Chars) -> Result<Wql, String> {
    let keys: Vec<String> = read_select_args(chars)?;
    select_body(ToSelect::Keys(keys), chars)
}
/// Shared parser for the tail of a SELECT statement (after `*` or the key
/// set): consumes `FROM <entity>` and then dispatches on the optional
/// trailing operator (`ID`, `IDS IN`, `WHEN`, `WHERE`, or one of the
/// relational-algebra keywords in `ALGEBRA`).
///
/// Each token is read by skipping whitespace and collecting until the next
/// whitespace; `take_while` also consumes the terminating character.
fn select_body(arg: ToSelect, chars: &mut std::str::Chars) -> Result<Wql, String> {
    let entity_symbol = chars
        .skip_while(|c| c.is_whitespace())
        .take_while(|c| !c.is_whitespace())
        .collect::<String>();
    if entity_symbol.to_uppercase() != "FROM" {
        return Err(String::from("Keyword FROM is required for SELECT"));
    }
    // Entity names are alphanumeric plus `_` (dashes etc. end the token).
    let entity_name = chars
        .skip_while(|c| c.is_whitespace())
        .take_while(|c| c.is_alphanumeric() || c == &'_')
        .collect::<String>();
    if entity_name.is_empty() {
        return Err(String::from("Entity name is required for SELECT"));
    }
    // Already uppercased here; the extra `.to_uppercase()` calls below are
    // redundant but harmless.
    let next_symbol = chars
        .skip_while(|c| c.is_whitespace())
        .take_while(|c| !c.is_whitespace())
        .collect::<String>()
        .to_uppercase();
    if next_symbol == "ID" {
        let id = chars
            .skip_while(|c| c.is_whitespace())
            .take_while(|c| c.is_alphanumeric() || c == &'-')
            .collect::<String>();
        let uuid = uuid::Uuid::from_str(&id);
        if uuid.is_err() {
            return Err(String::from("Field ID must be a UUID v4"));
        }
        // `ID <uuid>` may be followed by a WHEN (point-in-time / range) query.
        let next_symbol = chars
            .skip_while(|c| c.is_whitespace())
            .take_while(|c| !c.is_whitespace())
            .collect::<String>()
            .to_uppercase();
        if next_symbol.to_uppercase() == "WHEN" {
            return when_selector(entity_name, arg, uuid.ok(), chars);
        }
        Ok(Wql::Select(entity_name, arg, uuid.ok(), HashMap::new()))
    } else if next_symbol == "IDS" {
        let in_symbol = chars
            .skip_while(|c| c.is_whitespace())
            .take_while(|c| !c.is_whitespace())
            .collect::<String>()
            .to_uppercase();
        if in_symbol == "IN" {
            let uuids: Vec<Uuid> = read_uuids(chars)?;
            let next_symbol = chars
                .skip_while(|c| c.is_whitespace())
                .take_while(|c| !c.is_whitespace())
                .collect::<String>()
                .to_uppercase();
            // Time-travel queries are not supported over id sets.
            if next_symbol.to_uppercase() == "WHEN" {
                return Err(String::from("WHEN not allowed after IDS IN"));
            }
            Ok(Wql::SelectIds(
                entity_name,
                arg,
                uuids,
                algebra_functions(next_symbol, chars)?,
            ))
        } else {
            Err(String::from(
                "Keyword IN is required after IDS to define a set of uuids",
            ))
        }
    } else if next_symbol.to_uppercase() == "WHEN" {
        when_selector(entity_name, arg, None, chars)
    } else if next_symbol.to_uppercase() == "WHERE" {
        where_selector(entity_name, arg, chars)
    } else if ALGEBRA.contains(&&next_symbol.to_uppercase()[..]) {
        // Algebra functions directly after the entity: plain select + functions.
        Ok(Wql::Select(
            entity_name,
            arg,
            None,
            algebra_functions(next_symbol, chars)?,
        ))
    } else if !next_symbol.is_empty() && !OPERATORS.contains(&&next_symbol.to_uppercase()[..]) {
        Err(String::from(
            "Keyword ID/IDS is required to set an uuid in SELECT or functions WHEN/WHERE/OFFSET/LIMIT/DEDUP/GROUP BY/ORDER BY. Key was ",
        ) + &next_symbol)
    } else {
        // No trailing operator at all: select everything for the entity.
        Ok(Wql::Select(entity_name, arg, None, HashMap::new()))
    }
}
/// Parses a run of trailing relational-algebra functions — `DEDUP <key>`,
/// `GROUP BY <key>`, `ORDER BY <key> :asc/:desc`, `OFFSET <n>`,
/// `LIMIT <n>`, `COUNT` — into a map keyed by the (uppercased) keyword.
///
/// `next` is the first keyword, already consumed and uppercased by the
/// caller; subsequent keywords are read from `chars` until input ends.
///
/// # Errors
/// Unknown keyword, missing `BY` after GROUP/ORDER, an order parameter
/// other than `:asc`/`:desc`, or a non-numeric OFFSET/LIMIT value.
pub fn algebra_functions(
    next: String,
    chars: &mut std::str::Chars,
) -> Result<HashMap<String, Algebra>, String> {
    let mut functions = HashMap::new();
    let mut next_symbol = next;
    loop {
        if ALGEBRA.contains(&&next_symbol[..]) {
            // GROUP and ORDER are two-word keywords: require the `BY`.
            if next_symbol == "GROUP" || next_symbol == "ORDER" {
                let by = chars
                    .skip_while(|c| c.is_whitespace())
                    .take_while(|c| !c.is_whitespace())
                    .collect::<String>()
                    .to_uppercase();
                if by != "BY" {
                    return Err(String::from("ORDER and GROUP must be followed by BY"));
                }
            }
            // Argument token (key or number). NOTE(review): this is read
            // even for COUNT, so a keyword directly after COUNT would be
            // swallowed — confirm COUNT is only ever used last.
            let next_value = chars
                .skip_while(|c| c.is_whitespace())
                .take_while(|c| !c.is_whitespace())
                .collect::<String>();
            match &next_symbol[..] {
                "DEDUP" => functions.insert("DEDUP".to_string(), Algebra::Dedup(next_value)),
                "GROUP" => functions.insert("GROUP".to_string(), Algebra::GroupBy(next_value)),
                "ORDER" => {
                    // ORDER BY takes a second argument: the direction.
                    let order = chars
                        .skip_while(|c| c.is_whitespace())
                        .take_while(|c| !c.is_whitespace())
                        .collect::<String>()
                        .to_lowercase();
                    let order = Order::from_str(&order)?;
                    functions.insert("ORDER".to_string(), Algebra::OrderBy(next_value, order))
                }
                "OFFSET" => {
                    let value = next_value
                        .parse::<usize>()
                        .map_err(|e| format!("Error parsing value: {:?}", e))?;
                    functions.insert("OFFSET".to_string(), Algebra::Offset(value))
                }
                "LIMIT" => {
                    let value = next_value
                        .parse::<usize>()
                        .map_err(|e| format!("Error parsing value: {:?}", e))?;
                    functions.insert("LIMIT".to_string(), Algebra::Limit(value))
                }
                "COUNT" => functions.insert("COUNT".to_string(), Algebra::Count),
                _ => {
                    return Err(String::from(
                        "Available functions are DEDUP, GROUP BY, ORDER BY, OFFSET, LIMIT, COUNT",
                    ))
                }
            };
            next_symbol = chars
                .skip_while(|c| c.is_whitespace())
                .take_while(|c| !c.is_whitespace())
                .collect::<String>()
                .to_uppercase();
        } else if chars.count() == 0 {
            // End of input — done. `count()` consumes the remaining iterator,
            // which is fine because we break immediately afterwards.
            break;
        } else {
            return Err(String::from(
                "Available functions are DEDUP, GROUP BY, ORDER BY, OFFSET, LIMIT, COUNT",
            ));
        }
    }
    Ok(functions)
}
/// Parses the tail of `SELECT ... WHEN`: either `AT <date>` (point-in-time
/// query) or — only for `SELECT *` with a concrete `ID` — `START <date>
/// END <date>` (same-day range, handled by `when_time_range`).
fn when_selector(
    entity_name: String,
    arg: ToSelect,
    uuid: Option<Uuid>,
    chars: &mut std::str::Chars,
) -> Result<Wql, String> {
    let next_symbol = chars
        .skip_while(|c| c.is_whitespace())
        .take_while(|c| !c.is_whitespace())
        .collect::<String>()
        .to_uppercase();
    // Time ranges require the wildcard projection AND an explicit id.
    if let (&ToSelect::All, Some(uuid), "START") = (&arg, uuid, next_symbol.to_uppercase().as_str())
    {
        return when_time_range(entity_name, uuid, chars);
    }
    if next_symbol.to_uppercase() != "AT" {
        return Err(String::from("Keyword AT is required after WHEN"));
    };
    // The date token is captured as raw text; no validation happens here.
    let date = chars
        .skip_while(|c| c.is_whitespace())
        .take_while(|c| !c.is_whitespace())
        .collect::<String>();
    Ok(Wql::SelectWhen(entity_name, arg, uuid, date))
}
/// Parses `START <date> END <date>` for `SELECT * ... WHEN START ... END ...`.
///
/// Both dates must fall on the same calendar day: the first 10 characters of
/// the START date (`YYYY-MM-DD` for RFC 3339 input) must prefix the END date.
///
/// # Errors
/// - missing END keyword;
/// - START/END dates on different days;
/// - START date shorter than 10 characters (previously this PANICKED on the
///   byte-slice `start_date[0..10]`; it is now reported as a mismatch).
fn when_time_range(
    entity_name: String,
    uuid: Uuid,
    chars: &mut std::str::Chars,
) -> Result<Wql, String> {
    let start_date = chars
        .skip_while(|c| c.is_whitespace())
        .take_while(|c| !c.is_whitespace())
        .collect::<String>();
    let next_symbol = chars
        .skip_while(|c| c.is_whitespace())
        .take_while(|c| !c.is_whitespace())
        .collect::<String>()
        .to_uppercase();
    // Already uppercased; no second `to_uppercase()` needed.
    if next_symbol != "END" {
        return Err(String::from(
            "Keyword END is required after START date for SELECT WHEN",
        ));
    };
    let end_date = chars
        .skip_while(|c| c.is_whitespace())
        .take_while(|c| !c.is_whitespace())
        .collect::<String>();
    // `str::get` avoids the panic the old indexing caused for short or
    // non-char-boundary input; malformed START dates now fail cleanly.
    let same_day = start_date
        .get(0..10)
        .map_or(false, |day| end_date.starts_with(day));
    if !same_day {
        return Err(String::from(
            "START date and END date should be the same date.",
        ));
    }
    Ok(Wql::SelectWhenRange(
        entity_name,
        uuid,
        start_date,
        end_date,
    ))
}
#[cfg(test)]
mod test {
use uuid::Uuid;
use crate::{ToSelect, Wql};
use std::{collections::HashMap, str::FromStr};
#[test]
fn select_all() {
let wql = Wql::from_str("SelEct * FROM my_entity");
assert_eq!(
wql.unwrap(),
Wql::Select("my_entity".to_string(), ToSelect::All, None, HashMap::new())
);
}
#[test]
fn select_all_from_missing() {
let wql = Wql::from_str("SelEct * my_entity");
assert_eq!(
wql.err(),
Some(String::from("Keyword FROM is required for SELECT"))
);
}
#[test]
fn select_all_from_entity() {
let wql = Wql::from_str("SelEct * FROM");
assert_eq!(
wql.err(),
Some(String::from("Entity name is required for SELECT"))
);
}
#[test]
fn select_arg() {
let wql = Wql::from_str("SelEct #{hello,} FROM my_entity");
assert_eq!(
wql.unwrap(),
Wql::Select(
"my_entity".to_string(),
ToSelect::Keys(vec!["hello".to_string()]),
None,
HashMap::new()
)
);
}
#[test]
fn select_args() {
let wql = Wql::from_str("SelEct #{hello,world, by_me,} FROM my_entity");
assert_eq!(
wql.unwrap(),
Wql::Select(
"my_entity".to_string(),
ToSelect::Keys(vec![
"hello".to_string(),
"world".to_string(),
"by_me".to_string()
]),
None,
HashMap::new()
)
);
}
#[test]
fn select_all_id() {
let wql = Wql::from_str("SelEct * FROM my_entity ID 2df2b8cf-49da-474d-8a00-c596c0bb6fd1");
let uuid = Uuid::from_str("2df2b8cf-49da-474d-8a00-c596c0bb6fd1");
assert_eq!(
wql.unwrap(),
Wql::Select(
"my_entity".to_string(),
ToSelect::All,
uuid.ok(),
HashMap::new()
)
);
}
#[test]
fn select_all_id_missing() {
let wql = Wql::from_str("SelEct * FROM my_entity ID ");
assert_eq!(wql.err(), Some(String::from("Field ID must be a UUID v4")));
}
#[test]
fn select_all_id_key_missing() {
let wql = Wql::from_str("SelEct * FROM my_entity 2df2b8cf-49da-474d-8a00-c596c0bb6fd1 ");
assert!(
wql.err().unwrap().contains("Keyword ID/IDS is required to set an uuid in SELECT or functions WHEN/WHERE/OFFSET/LIMIT/DEDUP/GROUP BY/ORDER BY"),
);
}
#[test]
fn select_all_wrong_key() {
let wql = Wql::from_str("SelEct * FROM my_entity ops");
assert_eq!(
wql.err(),
Some(String::from("Keyword ID/IDS is required to set an uuid in SELECT or functions WHEN/WHERE/OFFSET/LIMIT/DEDUP/GROUP BY/ORDER BY. Key was OPS")),
);
}
#[test]
fn select_all_ids() {
let wql = Wql::from_str("SelEct * FROM my_entity IDS IN #{2df2b8cf-49da-474d-8a00-c596c0bb6fd1, 53315090-e14d-4738-a4d2-f1ec2a93664c,}");
let uuid1 = Uuid::from_str("2df2b8cf-49da-474d-8a00-c596c0bb6fd1").unwrap();
let uuid2 = Uuid::from_str("53315090-e14d-4738-a4d2-f1ec2a93664c").unwrap();
assert_eq!(
wql.unwrap(),
Wql::SelectIds(
"my_entity".to_string(),
ToSelect::All,
vec![uuid1, uuid2],
HashMap::new()
)
);
}
#[test]
fn select_keys_ids() {
let wql = Wql::from_str("SelEct #{a, b, c,} FROM my_entity IDS IN #{2df2b8cf-49da-474d-8a00-c596c0bb6fd1, 53315090-e14d-4738-a4d2-f1ec2a93664c,}");
let uuid1 = Uuid::from_str("2df2b8cf-49da-474d-8a00-c596c0bb6fd1").unwrap();
let uuid2 = Uuid::from_str("53315090-e14d-4738-a4d2-f1ec2a93664c").unwrap();
assert_eq!(
wql.unwrap(),
Wql::SelectIds(
"my_entity".to_string(),
ToSelect::Keys(vec!["a".to_string(), "b".to_string(), "c".to_string()]),
vec![uuid1, uuid2],
HashMap::new()
)
);
}
#[test]
fn select_all_ids_missing_in() {
let wql = Wql::from_str("SelEct * FROM my_entity IDS #{2df2b8cf-49da-474d-8a00-c596c0bb6fd1, 53315090-e14d-4738-a4d2-f1ec2a93664c,}");
assert_eq!(
wql.err(),
Some(String::from(
"Keyword IN is required after IDS to define a set of uuids"
))
);
}
#[test]
fn when_at() {
let wql = Wql::from_str("SelEct * FROM my_entity ID 2df2b8cf-49da-474d-8a00-c596c0bb6fd1 WHEN AT 2020-01-01T00:00:00Z");
let uuid = Uuid::from_str("2df2b8cf-49da-474d-8a00-c596c0bb6fd1").unwrap();
assert_eq!(
wql.unwrap(),
Wql::SelectWhen(
"my_entity".to_string(),
ToSelect::All,
Some(uuid),
"2020-01-01T00:00:00Z".to_string()
)
);
}
#[test]
fn when_at_args() {
let wql = Wql::from_str("SelEct #{a,b,c,} FROM my_entity ID 2df2b8cf-49da-474d-8a00-c596c0bb6fd1 WHEN AT 2020-01-01T00:00:00Z");
let uuid = Uuid::from_str("2df2b8cf-49da-474d-8a00-c596c0bb6fd1").unwrap();
assert_eq!(
wql.unwrap(),
Wql::SelectWhen(
"my_entity".to_string(),
ToSelect::Keys(vec!["a".to_string(), "b".to_string(), "c".to_string()]),
Some(uuid),
"2020-01-01T00:00:00Z".to_string()
)
);
}
#[test]
fn when_at_args_no_id() {
let wql = Wql::from_str("SelEct #{a,b,c,} FROM my_entity WHEN AT 2020-01-01T00:00:00Z");
assert_eq!(
wql.unwrap(),
Wql::SelectWhen(
"my_entity".to_string(),
ToSelect::Keys(vec!["a".to_string(), "b".to_string(), "c".to_string()]),
None,
"2020-01-01T00:00:00Z".to_string()
)
);
}
#[test]
fn when_range_all() {
let wql = Wql::from_str("SelEct * FROM my_entity ID 2df2b8cf-49da-474d-8a00-c596c0bb6fd1 WHEN START 2020-01-01T00:00:00Z END 2020-01-01T03:00:00Z");
let uuid = Uuid::from_str("2df2b8cf-49da-474d-8a00-c596c0bb6fd1").unwrap();
assert_eq!(
wql.unwrap(),
Wql::SelectWhenRange(
"my_entity".to_string(),
uuid,
"2020-01-01T00:00:00Z".to_string(),
"2020-01-01T03:00:00Z".to_string()
)
);
}
#[test]
fn when_range_args_err() {
let wql = Wql::from_str("SelEct * FROM my_entity ID 2df2b8cf-49da-474d-8a00-c596c0bb6fd1 WHEN START 2020-01-01T00:00:00Z 2020-01-01T03:00:00Z");
assert_eq!(
wql.err(),
Some(String::from(
"Keyword END is required after START date for SELECT WHEN"
))
);
}
}
#[cfg(test)]
mod functions_test {
use super::*;
use crate::{ToSelect, Wql};
use std::str::FromStr;
#[test]
fn select_all_limit_offset() {
let wql = Wql::from_str("SelEct * FROM my_entity LIMIT 3 OFFSET 5");
let hm: HashMap<String, Algebra> = vec![
("LIMIT".to_string(), Algebra::Limit(3)),
("OFFSET".to_string(), Algebra::Offset(5)),
]
.iter()
.cloned()
.collect();
assert_eq!(
wql.unwrap(),
Wql::Select("my_entity".to_string(), ToSelect::All, None, hm)
);
}
#[test]
fn select_all_order_by() {
let wql = Wql::from_str("SelEct * FROM my_entity ORDER BY key_1 :asc");
let hm: HashMap<String, Algebra> = vec![(
"ORDER".to_string(),
Algebra::OrderBy("key_1".to_string(), Order::Asc),
)]
.iter()
.cloned()
.collect();
assert_eq!(
wql.unwrap(),
Wql::Select("my_entity".to_string(), ToSelect::All, None, hm)
);
}
#[test]
fn select_all_group_by() {
let wql = Wql::from_str("SelEct * FROM my_entity GROUP BY key_1");
let hm: HashMap<String, Algebra> =
vec![("GROUP".to_string(), Algebra::GroupBy("key_1".to_string()))]
.iter()
.cloned()
.collect();
assert_eq!(
wql.unwrap(),
Wql::Select("my_entity".to_string(), ToSelect::All, None, hm)
);
}
#[test]
fn select_all_dedup() {
let wql = Wql::from_str("SelEct * FROM my_entity DEDUP key_1 COUNT");
let hm: HashMap<String, Algebra> = vec![
("DEDUP".to_string(), Algebra::Dedup("key_1".to_string())),
("COUNT".to_string(), Algebra::Count),
]
.iter()
.cloned()
.collect();
assert_eq!(
wql.unwrap(),
Wql::Select("my_entity".to_string(), ToSelect::All, None, hm)
);
}
#[test]
fn select_all_ids_order() {
let wql = Wql::from_str("SelEct * FROM my_entity IDS IN #{2df2b8cf-49da-474d-8a00-c596c0bb6fd1, 53315090-e14d-4738-a4d2-f1ec2a93664c,} ORDER BY my_key :desc DEDUP ley");
let uuid1 = Uuid::from_str("2df2b8cf-49da-474d-8a00-c596c0bb6fd1").unwrap();
let uuid2 = Uuid::from_str("53315090-e14d-4738-a4d2-f1ec2a93664c").unwrap();
let hm: HashMap<String, Algebra> = vec![
(
"ORDER".to_string(),
Algebra::OrderBy("my_key".to_string(), Order::Desc),
),
("DEDUP".to_string(), Algebra::Dedup("ley".to_string())),
]
.iter()
.cloned()
.collect();
assert_eq!(
wql.unwrap(),
Wql::SelectIds(
"my_entity".to_string(),
ToSelect::All,
vec![uuid1, uuid2],
hm
)
);
}
}
| rust | MIT | bcef59acbf1bb1509a8b3d1b2f914e45921ba026 | 2026-01-04T20:17:21.692961Z | false |
naomijub/wooridb | https://github.com/naomijub/wooridb/blob/bcef59acbf1bb1509a8b3d1b2f914e45921ba026/wql/src/lib.rs | wql/src/lib.rs | use chrono::{DateTime, Utc};
use language_parser::read_symbol;
use serde::{Deserialize, Serialize};
use std::{cmp::Ordering, hash::Hash};
use std::{collections::HashMap, str::FromStr};
use uuid::Uuid;
mod join;
mod language_parser;
mod logic;
mod relation;
mod select;
#[cfg(test)]
mod test;
mod where_clause;
pub use logic::parse_value as parse_types;
use logic::{integer_decode, read_map, read_match_args};
pub use relation::{Relation, RelationType};
pub use where_clause::{Clause, Function, Value};
/// A fully parsed WQL statement. Tuple fields appear in the order the parser
/// reads them from the query text.
#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
pub enum Wql {
/// `CREATE ENTITY <name> [UNIQUES #{..}] [ENCRYPT #{..}]` -> (name, unique keys, encrypted keys).
CreateEntity(String, Vec<String>, Vec<String>),
/// `INSERT {..} INTO <entity> [WITH <uuid>]` -> (entity, content, optional id).
Insert(String, Entity, Option<Uuid>),
/// `UPDATE <entity> CONTENT {..} INTO <uuid>`.
UpdateContent(String, Entity, Uuid),
/// `UPDATE <entity> SET {..} INTO <uuid>`.
UpdateSet(String, Entity, Uuid),
/// `DELETE <uuid> FROM <entity>` -> (entity, id kept as its raw string).
Delete(String, String),
/// `MATCH ALL|ANY (..) UPDATE <entity> SET {..} INTO <uuid>`.
MatchUpdate(String, Entity, Uuid, MatchCondition),
/// Evicts an entity (or one id of it). NOTE(review): the EVICT parser is not
/// visible in this file view — confirm field semantics against it.
Evict(String, Option<Uuid>),
/// `SELECT */#{..} FROM <entity> [ID <uuid>]` plus algebra functions (LIMIT, ORDER BY, ...).
Select(String, ToSelect, Option<Uuid>, HashMap<String, Algebra>),
/// `SELECT ... WHEN AT <date>` -> (entity, keys, optional id, date string).
SelectWhen(String, ToSelect, Option<Uuid>, String),
/// `SELECT * ... ID <uuid> WHEN START <date> END <date>` (same-day range).
SelectWhenRange(String, Uuid, String, String),
/// `SELECT ... IDS IN #{..}` -> (entity, keys, ids, algebra functions).
SelectIds(String, ToSelect, Vec<Uuid>, HashMap<String, Algebra>),
/// `SELECT ... WHERE {..}` -> (entity, keys, parsed clauses, algebra functions).
SelectWhere(String, ToSelect, Vec<Clause>, HashMap<String, Algebra>),
/// `CHECK {key: value,} FROM <entity> ID <uuid>` -> raw key/value strings to verify.
CheckValue(String, Uuid, HashMap<String, String>),
/// INTERSECT/DIFFERENCE/UNION over exactly two single-value sub-queries.
RelationQuery(Vec<Wql>, Relation, RelationType),
/// JOIN of two (entity, key) sources. NOTE(review): parser lives in the
/// `join` module, not visible here — confirm tuple meaning there.
Join((String, String), (String, String), Vec<Wql>),
}
pub use select::{Algebra, Order};
/// Projection of a SELECT: every key (`*`) or an explicit key set (`#{..}`).
#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
pub enum ToSelect {
All,
Keys(Vec<String>),
}
pub type Entity = HashMap<String, Types>;
/// Condition tree used by `MATCH ... UPDATE`.
#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
pub enum MatchCondition {
/// Every sub-condition must hold.
All(Vec<MatchCondition>),
/// At least one sub-condition must hold.
Any(Vec<MatchCondition>),
/// key == value
Eq(String, Types),
/// key != value
NotEq(String, Types),
/// key >= value
GEq(String, Types),
/// key > value
G(String, Types),
/// key <= value
LEq(String, Types),
/// key < value
L(String, Types),
}
#[allow(clippy::redundant_pub_crate)]
/// Wraps the query text in a `Chars` iterator. Every sub-parser advances this
/// one shared stream (via `skip_while`/`take_while`), so consumption order is
/// significant throughout the parser.
pub(crate) fn tokenize(wql: &str) -> std::str::Chars {
wql.chars()
}
impl std::str::FromStr for Wql {
type Err = String;
/// Entry point of the parser: trims leading whitespace, pulls the first
/// character, and hands the remaining char stream to `parse`.
fn from_str(s: &str) -> Result<Self, Self::Err> {
let mut tokens = tokenize(s.trim_start());
let wql = parse(tokens.next(), &mut tokens)?;
Ok(wql)
}
}
#[allow(clippy::redundant_pub_crate)]
/// Dispatches on the first character of the query. An exhausted input is an
/// "Empty WQL" error; anything else is handed to the keyword reader together
/// with the rest of the stream.
pub(crate) fn parse(c: Option<char>, chars: &mut std::str::Chars) -> Result<Wql, String> {
    match c {
        Some(initial) => read_symbol(initial, chars),
        None => Err(String::from("Empty WQL")),
    }
}
/// Dynamically-typed WQL value stored under an entity's keys.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub enum Types {
Char(char),
Integer(isize),
String(String),
Uuid(Uuid),
/// The `f64` payload is why this enum only derives `PartialEq`; equality,
/// ordering and hashing for floats are handled by the manual impls below.
Float(f64),
Boolean(bool),
Vector(Vec<Types>),
Map(HashMap<String, Types>),
/// A bcrypt-hashed value (the output of `Types::to_hash`).
Hash(String),
/// Numeric value kept as text — presumably arbitrary precision; confirm
/// against the `logic` module.
Precise(String),
DateTime(DateTime<Utc>),
Nil,
}
impl Types {
    /// Returns a zero/empty placeholder of the same variant as `self`.
    ///
    /// NOTE(review): `Uuid` defaults to a *random* v4 and `DateTime` to
    /// `Utc::now()`, so these two are not deterministic — confirm callers only
    /// rely on the variant, not on the value.
    pub fn default_values(&self) -> Self {
        match self {
            Self::Char(_) => Self::Char(' '),
            Self::Integer(_) => Self::Integer(0),
            Self::String(_) => Self::String(String::new()),
            Self::Uuid(_) => Self::Uuid(Uuid::new_v4()),
            Self::Float(_) => Self::Float(0_f64),
            Self::Boolean(_) => Self::Boolean(false),
            Self::Vector(_) => Self::Vector(Vec::new()),
            Self::Map(_) => Self::Map(HashMap::new()),
            Self::Hash(_) => Self::Hash(String::new()),
            Self::Precise(_) => Self::Precise(String::from("0")),
            Self::DateTime(_) => Self::DateTime(Utc::now()),
            Self::Nil => Self::Nil,
        }
    }

    /// Bcrypt-hashes the value's canonical string form, returning
    /// `Types::Hash`.
    ///
    /// NOTE(review): `Map` is rendered via `Debug`, whose entry order follows
    /// `HashMap` iteration order and is therefore not stable between runs —
    /// confirm maps are never round-tripped through hash verification.
    ///
    /// # Errors
    /// `Hash` and `Nil` values cannot be hashed; bcrypt failures are
    /// stringified with `Debug`.
    pub fn to_hash(&self, cost: Option<u32>) -> Result<Self, String> {
        use bcrypt::{hash, DEFAULT_COST};
        let value = match self {
            // Idiom: `x.to_string()` replaces `format!("{}", x)`.
            Self::Char(c) => c.to_string(),
            Self::Integer(i) => i.to_string(),
            Self::String(s) => s.to_string(),
            Self::DateTime(date) => date.to_string(),
            Self::Uuid(id) => id.to_string(),
            // Floats are decomposed so the hashed text is bit-exact.
            Self::Float(f) => format!("{:?}", integer_decode(f.to_owned())),
            Self::Boolean(b) => b.to_string(),
            Self::Vector(vec) => format!("{:?}", vec),
            Self::Map(map) => format!("{:?}", map),
            Self::Precise(p) => p.to_string(),
            Self::Hash(_) => return Err(String::from("Hash cannot be hashed")),
            Self::Nil => return Err(String::from("Nil cannot be hashed")),
        };
        // `unwrap_or` replaces the redundant `map_or(DEFAULT_COST, |c| c)`.
        hash(value, cost.unwrap_or(DEFAULT_COST))
            .map(Self::Hash)
            .map_err(|e| format!("{:?}", e))
    }

    /// True when the value is an already-bcrypt-hashed payload.
    pub const fn is_hash(&self) -> bool {
        matches!(self, Self::Hash(_))
    }
}
impl Eq for Types {}
impl PartialOrd for Types {
    /// Orders two values of the same (or numerically compatible) variant.
    ///
    /// Mixed numeric comparisons promote the integer side to `f64`; vectors
    /// compare by length; any other cross-variant pair is unordered (`None`).
    ///
    /// Fix: float arms now delegate to `f64::partial_cmp`, so equal floats
    /// compare as `Equal` and NaN yields `None`. The previous code could only
    /// produce `Less`/`Greater`, violating the `PartialOrd` contract
    /// (`a.partial_cmp(&a)` was never `Equal` for floats, and NaN compared as
    /// ordered).
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        match (self, other) {
            (Self::Integer(a), Self::Integer(b)) => Some(a.cmp(b)),
            (Self::Float(a), Self::Float(b)) => a.partial_cmp(b),
            (Self::Integer(a), Self::Float(b)) => (*a as f64).partial_cmp(b),
            (Self::Float(a), Self::Integer(b)) => a.partial_cmp(&(*b as f64)),
            (Self::Char(a), Self::Char(b)) => Some(a.cmp(b)),
            (Self::String(a), Self::String(b)) | (Self::Precise(a), Self::Precise(b)) => {
                Some(a.cmp(b))
            }
            (Self::Uuid(a), Self::Uuid(b)) => Some(a.cmp(b)),
            (Self::Boolean(a), Self::Boolean(b)) => Some(a.cmp(b)),
            // Vectors are ordered by length only, not element-wise.
            (Self::Vector(a), Self::Vector(b)) => Some(a.len().cmp(&b.len())),
            _ => None,
        }
    }
}
// Hashing delegates to each payload. Floats are decomposed via
// `integer_decode` so bit-identical floats hash identically.
// NOTE(review): 0.0 and -0.0 are `==` but decode differently, so they hash
// differently — confirm stored data avoids negative zero; also `Nil` hashes
// like `String("")`, which can collide across variants.
impl Hash for Types {
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        match self {
            Self::Char(t) => t.hash(state),
            Self::Integer(t) => t.hash(state),
            Self::String(t) => t.hash(state),
            Self::Uuid(t) => t.hash(state),
            Self::Float(t) => {
                let int_t = integer_decode(t.to_owned());
                int_t.hash(state)
            }
            Self::Boolean(t) => t.hash(state),
            Self::Vector(t) => t.hash(state),
            // Fix: `HashMap` iteration order is arbitrary, so hashing entries
            // in iteration order gave equal maps different hashes. Sorting by
            // key first makes the hash a pure function of the map's contents.
            Self::Map(t) => {
                let mut entries: Vec<(&String, &Types)> = t.iter().collect();
                entries.sort_by(|a, b| a.0.cmp(b.0));
                for (k, v) in entries {
                    k.hash(state);
                    v.hash(state);
                }
            }
            Self::Hash(t) => t.hash(state),
            Self::Precise(t) => t.hash(state),
            Self::DateTime(t) => t.hash(state),
            Self::Nil => "".hash(state),
        }
    }
}
| rust | MIT | bcef59acbf1bb1509a8b3d1b2f914e45921ba026 | 2026-01-04T20:17:21.692961Z | false |
naomijub/wooridb | https://github.com/naomijub/wooridb/blob/bcef59acbf1bb1509a8b3d1b2f914e45921ba026/wql/src/where_clause.rs | wql/src/where_clause.rs | use std::str::FromStr;
use crate::{logic::parse_value, select::algebra_functions, ToSelect, Types, Wql};
use serde::{Deserialize, Serialize};
/// Parses `SELECT ... WHERE { clause, clause, ... }` plus any trailing algebra
/// functions (LIMIT/OFFSET/ORDER BY/...).
///
/// Clauses are split on commas and parsed individually; anything unparsable
/// becomes `Clause::Error` (the caller decides how to react to those).
pub fn where_selector(
entity_name: String,
arg: ToSelect,
chars: &mut std::str::Chars,
) -> Result<Wql, String> {
// The clause block must open with `{` (after optional whitespace).
let mut open = chars.skip_while(|c| c.is_whitespace()).take(1);
if open.next() != Some('{') {
return Err(String::from(
"WHERE clauses must be contained inside ` {...}`",
));
}
// Accumulate raw clause text, splitting on commas until the closing `}`.
let mut clauses = Vec::new();
let mut clause = String::new();
loop {
match chars.next() {
Some(',') => {
clauses.push(clause);
clause = String::new();
}
// NOTE(review): a final clause without a trailing comma is dropped here —
// the grammar apparently requires trailing commas (all tests use them);
// confirm before accepting comma-less input.
Some('}') | None => break,
Some(c) => clause.push(c),
}
}
let clauses = clauses
.into_iter()
.filter(|c| !c.is_empty())
.map(|c| {
let mut chs = c.trim().chars();
set_clause(&entity_name, &mut chs)
})
.collect::<Vec<Clause>>();
if clauses.is_empty() {
return Err(String::from("WHERE clause cannot be empty"));
}
// Whatever follows `}` may start an algebra-function chain.
let next_symbol = chars
.skip_while(|c| c.is_whitespace())
.take_while(|c| !c.is_whitespace())
.collect::<String>()
.to_uppercase();
Ok(Wql::SelectWhere(
entity_name,
arg,
clauses,
algebra_functions(next_symbol, chars)?,
))
}
/// Classifies one raw clause: `?*` prefixes an entity attribution/equality,
/// `( ... )` wraps a comparison function; anything else is `Clause::Error`.
fn set_clause(entity_name: &str, chs: &mut std::str::Chars) -> Clause {
let c_str: String = chs
.skip_while(|c| c.is_whitespace())
.take_while(|c| c != &',')
.collect();
if c_str.starts_with("?*") {
clause_entity_definition(entity_name, &c_str)
} else if c_str.starts_with('(') && c_str.ends_with(')') {
// Strip the surrounding parentheses before parsing the function body.
clause_function(entity_name, &c_str[1..c_str.len() - 1])
} else {
Clause::Error
}
}
/// Parses the inside of a `( ... )` clause: a simple comparison operator,
/// `in`/`between` with a value list, or a nested `or` group.
fn clause_function(entity_name: &str, clause: &str) -> Clause {
let args: Vec<&str> = clause
.split(' ')
.filter(|c| !c.is_empty())
.map(str::trim)
.collect();
// The smallest valid form is `<op> <key> <value>`.
if args.len() < 3 {
return Clause::Error;
}
match &args[0].to_lowercase()[..] {
">=" | ">" | "==" | "<=" | "<" | "like" => {
let mut chs = args[2].chars();
// `Function::from_str` is infallible (`Err = ()`); unknown tokens map
// to `Function::Error`, hence the explicit guard below.
let function = Function::from_str(args[0]).unwrap();
if Function::Error == function {
Clause::Error
} else if let Ok(value) = parse_value(chs.next().unwrap(), &mut chs) {
Clause::SimpleComparisonFunction(function, args[1].to_string(), value)
} else {
Clause::Error
}
}
"in" | "between" => {
let function = Function::from_str(args[0]).unwrap();
let key = args[1].to_string();
// Every remaining token is parsed as a literal value; unparsable
// tokens are silently skipped by `filter_map`.
let values = args[2..]
.iter()
.filter(|s| !s.is_empty())
.filter_map(|s| {
let mut chs = s.chars();
parse_value(chs.next().unwrap(), &mut chs).ok()
})
.collect::<Vec<Types>>();
// BETWEEN needs exactly a lower and an upper bound; Nil is never a
// valid comparison operand.
if (Function::Between == function && values.len() != 2)
|| values.iter().any(|t| t == &Types::Nil)
{
Clause::Error
} else {
Clause::ComplexComparisonFunctions(function, key, values)
}
}
"or" => {
// `clause` still carries the leading `or`; `or_clauses` skips it.
let clauses = or_clauses(entity_name, clause);
Clause::Or(Function::Or, clauses)
}
_ => Clause::Error,
}
}
/// Splits the body of an `or` group into its parenthesised sub-clauses and
/// parses each with `set_clause`.
fn or_clauses(entity_name: &str, clause: &str) -> Vec<Clause> {
// Skip the `or` keyword itself (first two characters).
let mut chars = clause[2..].chars();
let mut clauses = Vec::new();
let mut clause = String::new();
loop {
match chars.next() {
Some(',') => {
clauses.push(clause);
clause = String::new();
}
// `)` closes the current sub-clause.
Some(')') => {
clause.push(')');
clauses.push(clause);
clause = String::new();
}
// A fresh `(` starts a new sub-clause, discarding any whitespace
// noise accumulated since the previous one.
Some('(') => clause = String::from('('),
Some(c) => clause.push(c),
None => break,
}
}
clauses
.iter()
.filter(|c| !c.is_empty())
.map(|c| {
let mut chs = c.trim().chars();
set_clause(entity_name, &mut chs)
})
.collect::<Vec<Clause>>()
}
/// Parses a `?* entity:key value` clause: either binds the key to a `?var`
/// (`ValueAttribution`) or requires a literal match (`ContainsKeyValue`).
fn clause_entity_definition(entity_name: &str, clause: &str) -> Clause {
let elements = clause
.split(' ')
.filter(|c| !c.is_empty())
.map(str::trim)
.collect::<Vec<&str>>();
// Exactly `?*`, `entity:key`, and one value/variable token.
if elements.len() != 3 {
return Clause::Error;
}
let last_element = elements.last().unwrap();
let entity_key = elements[1].split(':').collect::<Vec<&str>>();
if entity_key.len() != 2 {
return Clause::Error;
}
let (entity, key) = (entity_key[0], entity_key[1]);
// The clause must target the entity being selected.
if entity != entity_name {
return Clause::Error;
}
let mut last = last_element.chars();
if last_element.starts_with('?') {
Clause::ValueAttribution(
entity.to_owned(),
key.to_owned(),
Value((*last_element).to_string()),
)
} else if let Ok(value) = parse_value(last.next().unwrap(), &mut last) {
Clause::ContainsKeyValue(entity.to_owned(), key.to_owned(), value)
} else {
Clause::Error
}
}
/// One parsed WHERE clause.
#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
pub enum Clause {
/// `?* entity:key <literal>` — the key must equal the literal value.
ContainsKeyValue(String, String, Types),
/// `?* entity:key ?var` — binds the key's value to a variable.
ValueAttribution(String, String, Value),
/// `(<op> ?var <literal>)` with op in `>= > == <= < like`.
SimpleComparisonFunction(Function, String, Types),
/// `(in ?var v1 v2 ...)` or `(between ?var lo hi)`.
ComplexComparisonFunctions(Function, String, Vec<Types>),
/// `(or <clause> <clause> ...)`.
Or(Function, Vec<Clause>),
/// Anything that failed to parse; surfaced in the clause list instead of an
/// early `Err`.
Error,
}
/// Comparison/combinator operators usable inside WHERE clauses.
#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
pub enum Function {
Eq,
GEq,
G,
LEq,
L,
NotEq,
Like,
Between,
Or,
In,
/// Unrecognised operator token (`FromStr` never fails; it yields this).
Error,
}
impl FromStr for Function {
    type Err = ();

    /// Maps a comparison token (case-insensitive) to its `Function`. This
    /// conversion is infallible: unknown tokens become `Function::Error`
    /// rather than `Err`.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let lowered = s.to_lowercase();
        let function = match lowered.as_str() {
            "==" => Self::Eq,
            ">=" => Self::GEq,
            ">" => Self::G,
            "<=" => Self::LEq,
            "<" => Self::L,
            "!=" | "<>" => Self::NotEq,
            "like" => Self::Like,
            "between" => Self::Between,
            "in" => Self::In,
            _ => Self::Error,
        };
        Ok(function)
    }
}
/// A WHERE-clause variable binding such as `?age`; wraps the raw token
/// (including the leading `?`).
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct Value(pub String);
// Tests for WHERE-clause parsing: block delimiters, literal equality,
// simple/complex comparison functions and `or` groups.
#[cfg(test)]
mod test {
use super::*;
use std::collections::HashMap;
#[test]
fn test_error_open() {
let mut chars = " [".chars();
let wql = where_selector("hello".to_string(), ToSelect::All, &mut chars);
assert_eq!(
wql.err(),
Some(String::from(
"WHERE clauses must be contained inside ` {...}`"
))
);
}
#[test]
fn simple_equality() {
let mut chars = " {
?* my_entity:name \"julia\",
?* my_entity:id 349875325,
}"
.chars();
let wql = where_selector("my_entity".to_string(), ToSelect::All, &mut chars);
assert_eq!(
wql.unwrap(),
Wql::SelectWhere(
"my_entity".to_string(),
ToSelect::All,
vec![
Clause::ContainsKeyValue(
"my_entity".to_string(),
"name".to_string(),
Types::String("julia".to_string())
),
Clause::ContainsKeyValue(
"my_entity".to_string(),
"id".to_string(),
Types::Integer(349875325)
),
],
HashMap::new()
)
)
}
#[test]
fn simple_comparison() {
let mut chars = " {
?* my_entity:age ?age,
(>= ?age 30),
(> ?age 30),
(== ?age 30),
(<= ?age 30),
(< ?age 30),
(like ?name \"%uli%\"),
}"
.chars();
let wql = where_selector("my_entity".to_string(), ToSelect::All, &mut chars);
assert_eq!(
wql.unwrap(),
Wql::SelectWhere(
"my_entity".to_string(),
ToSelect::All,
vec![
Clause::ValueAttribution(
"my_entity".to_string(),
"age".to_string(),
Value("?age".to_string())
),
Clause::SimpleComparisonFunction(
Function::GEq,
"?age".to_string(),
Types::Integer(30)
),
Clause::SimpleComparisonFunction(
Function::G,
"?age".to_string(),
Types::Integer(30)
),
Clause::SimpleComparisonFunction(
Function::Eq,
"?age".to_string(),
Types::Integer(30)
),
Clause::SimpleComparisonFunction(
Function::LEq,
"?age".to_string(),
Types::Integer(30)
),
Clause::SimpleComparisonFunction(
Function::L,
"?age".to_string(),
Types::Integer(30)
),
Clause::SimpleComparisonFunction(
Function::Like,
"?name".to_string(),
Types::String("%uli%".to_string())
),
],
HashMap::new()
)
)
}
#[test]
fn complex_comp_func() {
let mut chars = " {
(in ?id 32434 45345 345346436),
(between ?age 30 35),
}"
.chars();
let wql = where_selector("my_entity".to_string(), ToSelect::All, &mut chars);
assert_eq!(
wql.unwrap(),
Wql::SelectWhere(
"my_entity".to_string(),
ToSelect::All,
vec![
Clause::ComplexComparisonFunctions(
Function::In,
"?id".to_string(),
vec![
Types::Integer(32434),
Types::Integer(45345),
Types::Integer(345346436),
]
),
Clause::ComplexComparisonFunctions(
Function::Between,
"?age".to_string(),
vec![Types::Integer(30), Types::Integer(35)]
)
],
HashMap::new()
)
)
}
// BETWEEN with the wrong arity degrades to Clause::Error, not a parse Err.
#[test]
fn between_err() {
let mut chars = " {
(between ?id 32434),
(between ?age 30 35 34),
}"
.chars();
let wql = where_selector("my_entity".to_string(), ToSelect::All, &mut chars);
assert_eq!(
wql.unwrap(),
Wql::SelectWhere(
"my_entity".to_string(),
ToSelect::All,
vec![Clause::Error, Clause::Error,],
HashMap::new()
)
)
}
#[test]
fn or() {
let mut chars = " {
?* my_entity:age ?age,
?* my_entity:name ?name,
(or
(>= ?age 30)
(like ?name \"%uli%\")
),
}"
.chars();
let wql = where_selector("my_entity".to_string(), ToSelect::All, &mut chars);
assert_eq!(
wql.unwrap(),
Wql::SelectWhere(
"my_entity".to_string(),
ToSelect::All,
vec![
Clause::ValueAttribution(
"my_entity".to_string(),
"age".to_string(),
Value("?age".to_string())
),
Clause::ValueAttribution(
"my_entity".to_string(),
"name".to_string(),
Value("?name".to_string())
),
Clause::Or(
Function::Or,
vec![
Clause::SimpleComparisonFunction(
Function::GEq,
"?age".to_string(),
Types::Integer(30)
),
Clause::SimpleComparisonFunction(
Function::Like,
"?name".to_string(),
Types::String("%uli%".to_string())
),
]
),
],
HashMap::new()
)
)
}
}
| rust | MIT | bcef59acbf1bb1509a8b3d1b2f914e45921ba026 | 2026-01-04T20:17:21.692961Z | false |
naomijub/wooridb | https://github.com/naomijub/wooridb/blob/bcef59acbf1bb1509a8b3d1b2f914e45921ba026/wql/src/relation.rs | wql/src/relation.rs | use serde::{Deserialize, Serialize};
use std::str::FromStr;
use crate::Wql;
const ERROR: &str = "Supported operations for INTERSECT and DIFFERECE are KEY for mathching keys and KEY_VALUE for matching key_values";
/// Set operation combining two sub-query results.
#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
pub enum Relation {
Difference,
Intersect,
Union,
}
/// Granularity of the set operation: match on keys only, or on key/value pairs.
#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
pub enum RelationType {
Key,
KeyValue,
}
impl FromStr for RelationType {
fn from_str(s: &str) -> Result<Self, String> {
match &s.to_uppercase()[..] {
"KEY" => Ok(Self::Key),
"KEY-VALUE" => Ok(Self::KeyValue),
_ => Err(String::from(ERROR)),
}
}
type Err = String;
}
const POSSIBLE_RELATION_TYPES: [&str; 2] = ["KEY", "KEY-VALUE"];
/// Parses the tail of `INTERSECT|DIFFERENCE|UNION <KEY|KEY-VALUE> <q1> | <q2>`.
///
/// Both sub-queries must be single-result selects: `SELECT ... ID <uuid>`
/// without algebra functions, or `SELECT ... ID <uuid> WHEN AT <date>`.
pub fn relation(chars: &mut std::str::Chars, relation: Relation) -> Result<Wql, String> {
let type_symbol = chars
.skip_while(|c| c.is_whitespace())
.take_while(|c| !c.is_whitespace())
.collect::<String>()
.to_uppercase();
if !POSSIBLE_RELATION_TYPES.contains(&&type_symbol[..]) {
return Err(String::from(ERROR));
}
// The remaining text holds the two sub-queries separated by `|`.
let all: String = chars.collect();
let queries: Vec<&str> = all.split('|').collect();
// NOTE(review): this error mentions only intersect/difference but the same
// path handles UNION too — confirm UNION should also be limited to 2 queries.
if queries.len() != 2 {
return Err(String::from(
"Intersect and difference should have exactly 2 queries",
));
}
let queries = queries
.into_iter()
.map(Wql::from_str)
.collect::<Result<Vec<Wql>, String>>()?;
// Only single-value selects are allowed as operands.
let queries = queries.into_iter()
.map(|q| {
match &q {
Wql::Select(_, _, Some(_), hm) if hm.is_empty()
=> Ok(q),
Wql::SelectWhen(_, _, Some(_), _)
=> Ok(q),
_ => Err(String::from("Only single value queries are allowed, so key `ID` is required and keys `WHEN AT` are optional"))
}
})
.collect::<Result<Vec<Wql>, String>>()?;
let operation = RelationType::from_str(&type_symbol)?;
Ok(Wql::RelationQuery(queries, relation, operation))
}
| rust | MIT | bcef59acbf1bb1509a8b3d1b2f914e45921ba026 | 2026-01-04T20:17:21.692961Z | false |
naomijub/wooridb | https://github.com/naomijub/wooridb/blob/bcef59acbf1bb1509a8b3d1b2f914e45921ba026/wql/src/language_parser.rs | wql/src/language_parser.rs | use crate::{
join::join,
logic::{read_args, read_map_as_str},
relation::{relation, Relation},
select::{select_all, select_args},
};
use super::{read_map, read_match_args, FromStr, MatchCondition, Uuid, Wql};
#[allow(clippy::redundant_pub_crate)]
/// Dispatches on the statement keyword. The caller has already consumed the
/// first character (`a`), so only the keyword's tail is collected here; the
/// trailing whitespace is consumed by `take_while` as a side effect.
pub(crate) fn read_symbol(a: char, chars: &mut std::str::Chars) -> Result<Wql, String> {
let symbol = chars.take_while(|c| !c.is_whitespace()).collect::<String>();
// Match (first char as lower/upper, uppercased tail), which makes the
// keyword effectively case-insensitive.
match (a, &symbol.to_uppercase()[..]) {
('c', "REATE") | ('C', "REATE") => create_entity(chars),
('i', "NSERT") | ('I', "NSERT") => insert(chars),
('u', "PDATE") | ('U', "PDATE") => update(chars),
('d', "ELETE") | ('D', "ELETE") => delete(chars),
('m', "ATCH") | ('M', "ATCH") => match_update(chars),
('e', "VICT") | ('E', "VICT") => evict(chars),
('s', "ELECT") | ('S', "ELECT") => select(chars),
('c', "HECK") | ('C', "HECK") => check(chars),
('i', "NTERSECT") | ('I', "NTERSECT") => relation(chars, Relation::Intersect),
('d', "IFFERENCE") | ('D', "IFFERENCE") => relation(chars, Relation::Difference),
('u', "NION") | ('U', "NION") => relation(chars, Relation::Union),
('j', "OIN") | ('J', "OIN") => join(chars),
_ => Err(format!("Symbol `{}{}` not implemented", a, symbol)),
}
}
/// Parses `CREATE ENTITY <name> [UNIQUES #{..}] [ENCRYPT #{..}]`; the two
/// argument sets may appear in either order.
fn create_entity(chars: &mut std::str::Chars) -> Result<Wql, String> {
let entity_symbol = chars.take_while(|c| !c.is_whitespace()).collect::<String>();
if entity_symbol.to_uppercase() != "ENTITY" {
return Err(String::from("Keyword ENTITY is required for CREATE"));
}
// `take_while` stops at (and consumes) the first non-identifier char.
let entity_name = chars
.take_while(|c| c.is_alphanumeric() || c == &'_')
.collect::<String>()
.trim()
.to_string();
let next_symbol = chars.take_while(|c| !c.is_whitespace()).collect::<String>();
if next_symbol.to_uppercase() == "UNIQUES" {
// UNIQUES first, ENCRYPT optionally second.
let (uniques, encrypts) = create_uniques_and_encrypts(chars, "ENCRYPT")?;
Ok(Wql::CreateEntity(entity_name, uniques, encrypts))
} else if next_symbol.to_uppercase() == "ENCRYPT" {
// ENCRYPT first, UNIQUES optionally second — note the swapped tuple.
let (encrypts, uniques) = create_uniques_and_encrypts(chars, "UNIQUES")?;
Ok(Wql::CreateEntity(entity_name, uniques, encrypts))
} else if next_symbol.to_uppercase() == "ENCRYPTS" {
// Friendly hints for the two most likely misspellings.
Err(String::from("Correct wording is ENCRYPT"))
} else if next_symbol.to_uppercase() == "UNIQUE" {
Err(String::from("Correct wording is UNIQUES"))
} else {
Ok(Wql::CreateEntity(entity_name, Vec::new(), Vec::new()))
}
}
/// Reads one `#{...}` argument set, then optionally a second one introduced by
/// `next_element` ("ENCRYPT" or "UNIQUES"). Returns `(first set, second set)`.
///
/// # Errors
/// Malformed `#{` delimiters, or any key present in both sets (an encrypted
/// key cannot also be UNIQUE).
fn create_uniques_and_encrypts(
chars: &mut std::str::Chars,
next_element: &str,
) -> Result<(Vec<String>, Vec<String>), String> {
let mut aux_vec = Vec::new();
if chars.next() != Some('#') {
return Err(String::from(
"Arguments set should start with `#{` and end with `}`",
));
}
let main_vec = read_args(chars)?;
// Peek at the next keyword; only parse a second set when it matches.
let encrypt_symbol = chars
.skip_while(|c| c.is_whitespace())
.take_while(|c| !c.is_whitespace())
.collect::<String>();
if encrypt_symbol.to_uppercase() == next_element {
if chars.next() != Some('#') {
return Err(String::from(
"Arguments set should start with `#{` and end with `}`",
));
}
aux_vec = read_args(chars)?;
}
// Overlap between the two sets is illegal.
if aux_vec.iter().any(|e| main_vec.contains(e)) {
return Err(String::from("Encrypted arguments cannot be set to UNIQUE"));
}
Ok((main_vec, aux_vec))
}
/// Parses the projection right after SELECT: `*` (all keys) or `#{..}` (a key
/// set). Spaces are skipped; anything else — including end of input — errors.
fn select(chars: &mut std::str::Chars) -> Result<Wql, String> {
loop {
match chars.next() {
Some(' ') => (),
Some('*') => return select_all(chars),
Some('#') => return select_args(chars),
_ => return Err(String::from("SELECT expression should be followed by `*` for ALL keys or `#{key_names...}` for some keys"))
}
}
}
/// Parses `DELETE <uuid> FROM <entity>` — note the id comes *before* FROM.
/// The id is kept as a raw string (not validated as a Uuid here).
fn delete(chars: &mut std::str::Chars) -> Result<Wql, String> {
let entity_id = chars
.take_while(|c| c.is_alphanumeric() || c == &'-')
.collect::<String>()
.trim()
.to_string();
// An immediate `FROM` means the id was omitted.
if entity_id.is_empty() || entity_id == "FROM" {
return Err(String::from("Entity UUID is required for DELETE"));
}
let entity_symbol = chars
.skip_while(|c| c.is_whitespace())
.take_while(|c| !c.is_whitespace())
.collect::<String>();
if entity_symbol.to_uppercase() != "FROM" {
return Err(String::from("Keyword FROM is required for DELETE"));
}
let entity_name = chars
.take_while(|c| c.is_alphanumeric() || c == &'_')
.collect::<String>()
.trim()
.to_string();
if entity_name.is_empty() {
return Err(String::from("Entity name is required after FROM"));
}
Ok(Wql::Delete(entity_name, entity_id))
}
/// Parses `INSERT {..} INTO <entity> [WITH <uuid>]`.
fn insert(chars: &mut std::str::Chars) -> Result<Wql, String> {
let entity_map = read_map(chars)?;
let entity_symbol = chars
.skip_while(|c| c.is_whitespace())
.take_while(|c| !c.is_whitespace())
.collect::<String>();
if entity_symbol.to_uppercase() != "INTO" {
return Err(String::from("Keyword INTO is required for INSERT"));
}
let entity_name = chars
.take_while(|c| c.is_alphanumeric() || c == &'_')
.collect::<String>()
.trim()
.to_string();
if entity_name.is_empty() {
return Err(String::from("Entity name is required after INTO"));
}
// Optional WITH clause: absent means the store assigns the id.
let with_symbol = chars
.skip_while(|c| c.is_whitespace())
.take_while(|c| !c.is_whitespace())
.collect::<String>();
if with_symbol.is_empty() {
Ok(Wql::Insert(entity_name, entity_map, None))
} else if with_symbol.to_uppercase() != "WITH" {
Err(String::from(
"Keyword WITH is required for INSERT with Uuid",
))
} else {
let entity_id = chars
.take_while(|c| c.is_alphanumeric() || c == &'-')
.collect::<String>()
.trim()
.to_string();
if entity_id.is_empty() {
return Err(String::from("Entity UUID is required for INSERT WITH id"));
}
// NOTE(review): `.ok()` silently downgrades a malformed (non-empty) uuid
// to `None`, turning `INSERT ... WITH <garbage>` into a plain insert —
// confirm this is intended rather than an error.
Ok(Wql::Insert(
entity_name,
entity_map,
Uuid::parse_str(&entity_id).ok(),
))
}
}
/// Parses `CHECK {key: value,} FROM <entity> ID <uuid>`, used to verify
/// (encrypted) values against stored content. Key/value pairs are kept as raw
/// strings.
///
/// # Errors
/// Missing FROM/ID keywords, missing entity name, or an unparsable UUID.
fn check(chars: &mut std::str::Chars) -> Result<Wql, String> {
    let entity_map = read_map_as_str(chars)?;
    let entity_symbol = chars
        .skip_while(|c| c.is_whitespace())
        .take_while(|c| !c.is_whitespace())
        .collect::<String>();
    if entity_symbol.to_uppercase() != "FROM" {
        return Err(String::from("Keyword FROM is required for CHECK"));
    }
    let entity_name = chars
        .take_while(|c| c.is_alphanumeric() || c == &'_')
        .collect::<String>()
        .trim()
        .to_string();
    if entity_name.is_empty() {
        return Err(String::from("Entity name is required after FROM"));
    }
    let id_symbol = chars
        .skip_while(|c| c.is_whitespace())
        .take_while(|c| !c.is_whitespace())
        .collect::<String>();
    // Fix: this branch previously reused the FROM error text verbatim
    // (copy-paste); the keyword missing here is ID.
    if id_symbol.to_uppercase() != "ID" {
        return Err(String::from("Keyword ID is required for CHECK"));
    }
    let entity_id = chars
        .take_while(|c| c.is_alphanumeric() || c == &'-')
        .collect::<String>()
        .trim()
        .to_owned();
    let id = Uuid::from_str(&entity_id).map_err(|e| format!("{:?}", e))?;
    Ok(Wql::CheckValue(entity_name, id, entity_map))
}
/// Parses `UPDATE <entity> SET|CONTENT {..} INTO <uuid>`.
fn update(chars: &mut std::str::Chars) -> Result<Wql, String> {
let entity_name = chars
.take_while(|c| c.is_alphanumeric() || c == &'_')
.collect::<String>()
.trim()
.to_string();
if entity_name.is_empty() {
return Err(String::from("Entity name is required for UPDATE"));
};
let entity_symbol = chars
.skip_while(|c| c.is_whitespace())
.take_while(|c| !c.is_whitespace())
.collect::<String>();
if entity_symbol.to_uppercase() != "SET" && entity_symbol.to_uppercase() != "CONTENT" {
return Err(String::from(
"UPDATE type is required after entity. Keywords are SET or CONTENT",
));
};
let entity_map = read_map(chars)?;
let into_symbol = chars
.skip_while(|c| c.is_whitespace())
.take_while(|c| !c.is_whitespace())
.collect::<String>();
if into_symbol.to_uppercase() != "INTO" {
return Err(String::from("Keyword INTO is required for UPDATE"));
};
let uuid_str = chars
.take_while(|c| c.is_alphanumeric() || c == &'-')
.collect::<String>()
.trim()
.to_string();
let uuid = Uuid::from_str(&uuid_str)
.map_err(|e| format!("Couldn't create uuid from {}. Error: {:?}", uuid_str, e))?;
// The keyword was validated above, so the `_` arm is unreachable in
// practice; kept for match exhaustiveness.
match &entity_symbol.to_uppercase()[..] {
"SET" => Ok(Wql::UpdateSet(entity_name, entity_map, uuid)),
"CONTENT" => Ok(Wql::UpdateContent(entity_name, entity_map, uuid)),
_ => Err("Couldn't parse UPDATE query".to_string()),
}
}
/// Parses `MATCH ALL|ANY (conds) UPDATE <entity> SET {map} INTO <uuid>`
/// into `Wql::MatchUpdate`.
///
/// `ALL`/`ANY` selects the boolean combination of the parsed conditions.
fn match_update(chars: &mut std::str::Chars) -> Result<Wql, String> {
    let match_arg_symbol = chars
        .skip_while(|c| c.is_whitespace())
        .take_while(|c| c.is_alphabetic())
        .collect::<String>();
    if &match_arg_symbol.to_uppercase() != "ALL" && &match_arg_symbol.to_uppercase() != "ANY" {
        return Err(String::from("MATCH requires ALL or ANY symbols"));
    }
    let logical_args = read_match_args(chars)?;
    // `match_arg_symbol` was already validated to be ALL or ANY, so this is a
    // plain two-way choice (the former `Result` + trailing `?` had an
    // unreachable error branch).
    let match_args = if match_arg_symbol.to_uppercase().eq("ALL") {
        MatchCondition::All(logical_args)
    } else {
        MatchCondition::Any(logical_args)
    };
    let update_symbol = chars
        .skip_while(|c| c.is_whitespace())
        .take_while(|c| c.is_alphabetic())
        .collect::<String>();
    if update_symbol.to_uppercase() != "UPDATE" {
        return Err(String::from("UPDATE keyword is required for MATCH UPDATE"));
    }
    let entity_name = chars
        .take_while(|c| c.is_alphanumeric() || c == &'_')
        .collect::<String>()
        .trim()
        .to_string();
    if entity_name.is_empty() {
        return Err(String::from("Entity name is required for MATCH UPDATE"));
    }
    let entity_symbol = chars
        .skip_while(|c| c.is_whitespace())
        .take_while(|c| !c.is_whitespace())
        .collect::<String>();
    if entity_symbol.to_uppercase() != "SET" {
        return Err(String::from(
            "MATCH UPDATE type is required after entity. Keyword is SET",
        ));
    }
    let entity_map = read_map(chars)?;
    let into_symbol = chars
        .skip_while(|c| c.is_whitespace())
        .take_while(|c| !c.is_whitespace())
        .collect::<String>();
    if into_symbol.to_uppercase() != "INTO" {
        return Err(String::from("Keyword INTO is required for MATCH UPDATE"));
    }
    let uuid_str = chars
        .take_while(|c| c.is_alphanumeric() || c == &'-')
        .collect::<String>()
        .trim()
        .to_string();
    let uuid = Uuid::from_str(&uuid_str)
        .map_err(|e| format!("Couldn't create uuid from {}, Error: {:?}", uuid_str, e))?;
    // SET was validated above, so the former `match` on entity_symbol (whose
    // non-SET arm was unreachable) is no longer needed.
    Ok(Wql::MatchUpdate(entity_name, entity_map, uuid, match_args))
}
/// Parses `EVICT <entity_name>` or `EVICT <uuid> FROM <entity_name>` into
/// `Wql::Evict`.
fn evict(chars: &mut std::str::Chars) -> Result<Wql, String> {
    // First token may be either an entity name or a Uuid, so both charsets
    // are accepted here and disambiguated by trying to parse a Uuid.
    let info = chars
        .take_while(|c| c.is_alphanumeric() || c == &'-' || c == &'_')
        .collect::<String>()
        .trim()
        .to_string();
    let uuid = Uuid::from_str(&info);
    if uuid.is_err() {
        // Not a Uuid: treat it as an entity name, which must not contain `-`.
        if info.chars().any(|c| c == '-') {
            return Err("Entity name cannot contain `-`".to_string());
        }
        // Evict by entity name only (no specific id).
        Ok(Wql::Evict(info, None))
    } else {
        // Uuid form: `FROM <entity_name>` is mandatory so the id's entity is known.
        let from_symbol = chars
            .skip_while(|c| c.is_whitespace())
            .take_while(|c| !c.is_whitespace())
            .collect::<String>()
            .trim()
            .to_string();
        if from_symbol.to_uppercase() != "FROM" {
            return Err(String::from("Keyword FROM is required to EVICT an UUID"));
        }
        let name = chars
            .take_while(|c| c.is_alphanumeric() || c == &'_')
            .collect::<String>()
            .trim()
            .to_string();
        if name.is_empty() {
            return Err(String::from("Entity name is required for EVICT"));
        }
        Ok(Wql::Evict(name, uuid.ok()))
    }
}
| rust | MIT | bcef59acbf1bb1509a8b3d1b2f914e45921ba026 | 2026-01-04T20:17:21.692961Z | false |
naomijub/wooridb | https://github.com/naomijub/wooridb/blob/bcef59acbf1bb1509a8b3d1b2f914e45921ba026/wql/src/join.rs | wql/src/join.rs | use std::str::FromStr;
use crate::Wql;
/// Parses `JOIN (entA:key, entB:key) <select> | <select>` into `Wql::Join`.
///
/// The header names two `entity:key` pairs; the two `|`-separated SELECT
/// queries that follow are parsed recursively, and each must mention its
/// corresponding entity name.
pub fn join(chars: &mut std::str::Chars) -> Result<Wql, String> {
    // The two (entity, key) pairs being accumulated.
    let mut entity_a = (String::new(), String::new());
    let mut entity_b = (String::new(), String::new());
    // Scratch buffers; `is_entity` tracks whether the current characters
    // belong to the entity name (before `:`) or to the key (after `:`).
    let mut ent = String::new();
    let mut key = String::new();
    let mut is_entity = true;
    loop {
        match chars.next() {
            Some(' ') | Some('(') => (),
            Some(c) if c.is_alphanumeric() || c == '_' => {
                if is_entity {
                    ent.push(c);
                } else {
                    key.push(c);
                }
            }
            // `:` ends an entity name; the first fills entity_a, the second entity_b.
            Some(':') => {
                is_entity = false;
                if entity_a.0.is_empty() {
                    entity_a.0 = ent;
                } else {
                    entity_b.0 = ent;
                }
                ent = String::new();
            }
            // `,` ends a key.
            Some(',') => {
                is_entity = true;
                if entity_a.1.is_empty() {
                    entity_a.1 = key;
                } else {
                    entity_b.1 = key;
                }
                key = String::new();
            }
            // `)` closes the header; whatever is buffered is entity_b's key.
            Some(')') => {
                entity_b.1 = key;
                break;
            }
            _ => return Err(String::from("Invalid char for Join")),
        }
    }
    // Remainder: the two sub-queries, separated by `|` (an optional wrapping
    // `(` is skipped and parsing stops at `)` if present).
    let queries = chars
        .skip_while(|c| c == &'(' || c.is_whitespace())
        .take_while(|c| c != &')')
        .collect::<String>();
    let queries = queries.split('|').collect::<Vec<&str>>();
    if queries.len() != 2 {
        return Err(String::from("Join can only support 2 select queries"));
    } else if !queries[0].contains(&entity_a.0) {
        return Err(format!(
            "{} must be present as entity tree key in `SELECT * FROM {}`",
            entity_a.0, queries[0]
        ));
    } else if !queries[1].contains(&entity_b.0) {
        return Err(format!(
            "{} must be present as entity tree key in `SELECT * FROM {}`",
            entity_b.0, queries[1]
        ));
    }
    // Recursively parse each sub-query into its own Wql node.
    let queries_wql = queries
        .into_iter()
        .map(Wql::from_str)
        .collect::<Result<Vec<Wql>, String>>()?;
    // WITH clause
    Ok(Wql::Join(entity_a, entity_b, queries_wql))
}
#[cfg(test)]
mod test {
    use crate::{ToSelect, Wql};
    use std::collections::HashMap;
    use std::str::FromStr;

    // Round-trips a JOIN header plus two SELECTs through the parser and
    // checks the resulting Wql::Join structure field by field.
    #[test]
    fn test_join() {
        let wql = Wql::from_str(
            "JOIN (entity_A:c, entity_B:c) Select * FROM entity_A | Select * FROM entity_B",
        );

        assert_eq!(
            wql.unwrap(),
            Wql::Join(
                ("entity_A".to_string(), "c".to_string()),
                ("entity_B".to_string(), "c".to_string()),
                vec![
                    Wql::Select("entity_A".to_string(), ToSelect::All, None, HashMap::new()),
                    Wql::Select("entity_B".to_string(), ToSelect::All, None, HashMap::new())
                ]
            )
        )
    }
}
| rust | MIT | bcef59acbf1bb1509a8b3d1b2f914e45921ba026 | 2026-01-04T20:17:21.692961Z | false |
naomijub/wooridb | https://github.com/naomijub/wooridb/blob/bcef59acbf1bb1509a8b3d1b2f914e45921ba026/wql/src/logic.rs | wql/src/logic.rs | use chrono::{DateTime, Utc};
use uuid::Uuid;
use super::{FromStr, HashMap, MatchCondition, Types};
#[allow(clippy::redundant_pub_crate)]
pub(crate) fn read_match_args(chars: &mut std::str::Chars) -> Result<Vec<MatchCondition>, String> {
let base = chars
.skip_while(|c| c == &'(' || c.is_whitespace())
.take_while(|c| c != &')')
.collect::<String>()
.trim()
.to_string();
let mut conditions: Vec<MatchCondition> = Vec::new();
base.split(',')
.filter(|l| !l.is_empty())
.map(|l| {
let k = l
.split(' ')
.filter(|f| !f.is_empty())
.collect::<Vec<&str>>();
let mut c = k[2].chars();
match k.get(1) {
Some(&"==") => Ok(MatchCondition::Eq(
k[0].to_string(),
parse_value(
c.next()
.ok_or_else(|| String::from("Not able to parse match argument"))?,
&mut c,
)?,
)),
Some(&"!=") => Ok(MatchCondition::NotEq(
k[0].to_string(),
parse_value(
c.next()
.ok_or_else(|| String::from("Not able to parse match argument"))?,
&mut c,
)?,
)),
Some(&">=") => Ok(MatchCondition::GEq(
k[0].to_string(),
parse_value(
c.next()
.ok_or_else(|| String::from("Not able to parse match argument"))?,
&mut c,
)?,
)),
Some(&"<=") => Ok(MatchCondition::LEq(
k[0].to_string(),
parse_value(
c.next()
.ok_or_else(|| String::from("Not able to parse match argument"))?,
&mut c,
)?,
)),
Some(&">") => Ok(MatchCondition::G(
k[0].to_string(),
parse_value(
c.next()
.ok_or_else(|| String::from("Not able to parse match argument"))?,
&mut c,
)?,
)),
Some(&"<") => Ok(MatchCondition::L(
k[0].to_string(),
parse_value(
c.next()
.ok_or_else(|| String::from("Not able to parse match argument"))?,
&mut c,
)?,
)),
_ => Err(String::from("Unidentified Match Condition")),
}
})
.try_for_each(|e: Result<MatchCondition, String>| {
conditions.push(e?);
Ok::<(), String>(())
})?;
Ok(conditions)
}
#[allow(clippy::redundant_pub_crate)]
/// Parses `{ key: value, ... }` into typed values; values may themselves be
/// nested maps (`{...}`) or vectors (`[...]`).
pub(crate) fn read_map(chars: &mut std::str::Chars) -> Result<HashMap<String, Types>, String> {
    let mut res: HashMap<String, Types> = HashMap::new();
    // Slots for the key/value pair currently being parsed.
    let mut key: Option<String> = None;
    let mut val: Option<Types> = None;
    // Skip spaces up to the opening `{`; anything else is malformed.
    loop {
        match chars.next() {
            Some(' ') => (),
            Some('{') => break,
            _ => {
                return Err(String::from(
                    "Entity map should start with `{` and end with `}`",
                ))
            }
        }
    }
    loop {
        match chars.next() {
            Some('}') => return Ok(res),
            // `{` after a key starts a nested map value.
            Some('{') => {
                if key.is_some() {
                    val = Some(Types::Map(read_inner_map(chars)?));
                } else {
                    return Err(String::from("Key must be an alphanumeric value"));
                }
            }
            // `[` after a key starts a vector value.
            Some('[') => {
                if key.is_some() {
                    val = Some(Types::Vector(read_vec(chars)?));
                } else {
                    return Err(String::from("Key must be an alphanumeric value"));
                }
            }
            // Any other non-separator char begins a key (when none is
            // pending) or a scalar value (when a key is pending).
            Some(c) if !c.is_whitespace() && c != ',' => {
                if key.is_some() {
                    val = Some(parse_value(c, chars)?);
                } else {
                    key = Some(parse_key(c, chars));
                }
            }
            Some(c) if c.is_whitespace() || c == ',' => (),
            _ => return Err(String::from("Entity HashMap could not be created")),
        }
        // A completed pair is committed and the slots reset.
        if key.is_some() && val.is_some() {
            res.insert(key.unwrap().to_string(), val.unwrap());
            key = None;
            val = None;
        }
    }
}
#[allow(clippy::redundant_pub_crate)]
/// Like `read_map`, but keeps each value in its raw string form (used by the
/// CHECK clause). Nested maps/vectors are not supported here.
pub(crate) fn read_map_as_str(
    chars: &mut std::str::Chars,
) -> Result<HashMap<String, String>, String> {
    let mut res: HashMap<String, String> = HashMap::new();
    // Slots for the key/value pair currently being parsed.
    let mut key: Option<String> = None;
    let mut val: Option<String> = None;
    // Skip spaces up to the opening `{`; anything else is malformed.
    loop {
        match chars.next() {
            Some(' ') => (),
            Some('{') => break,
            _ => {
                return Err(String::from(
                    "Entity map should start with `{` and end with `}`",
                ))
            }
        }
    }
    loop {
        match chars.next() {
            Some('}') => return Ok(res),
            // Non-separator char: starts a key if none pending, else a value.
            Some(c) if !c.is_whitespace() && c != ',' => {
                if key.is_some() {
                    val = Some(parse_str_value(c, chars));
                } else {
                    key = Some(parse_key(c, chars));
                }
            }
            Some(c) if c.is_whitespace() || c == ',' => (),
            _ => return Err(String::from("Entity HashMap could not be created")),
        }
        // A completed pair is committed and the slots reset.
        if key.is_some() && val.is_some() {
            res.insert(key.unwrap().to_string(), val.unwrap());
            key = None;
            val = None;
        }
    }
}
#[allow(clippy::redundant_pub_crate)]
/// Parses a nested map whose opening `{` has already been consumed by the
/// caller; otherwise identical in structure to `read_map`'s main loop.
pub(crate) fn read_inner_map(
    chars: &mut std::str::Chars,
) -> Result<HashMap<String, Types>, String> {
    let mut res: HashMap<String, Types> = HashMap::new();
    // Slots for the key/value pair currently being parsed.
    let mut key: Option<String> = None;
    let mut val: Option<Types> = None;
    loop {
        match chars.next() {
            Some('}') => return Ok(res),
            // `{` after a key recurses into a deeper nested map.
            Some('{') => {
                if key.is_some() {
                    val = Some(Types::Map(read_inner_map(chars)?));
                } else {
                    return Err(String::from("Key must be an alphanumeric value"));
                }
            }
            // `[` after a key starts a vector value.
            Some('[') => {
                if key.is_some() {
                    val = Some(Types::Vector(read_vec(chars)?));
                } else {
                    return Err(String::from("Key must be an alphanumeric value"));
                }
            }
            // Non-separator char: starts a key if none pending, else a scalar value.
            Some(c) if !c.is_whitespace() && c != ',' => {
                if key.is_some() {
                    val = Some(parse_value(c, chars)?);
                } else {
                    key = Some(parse_key(c, chars));
                }
            }
            Some(c) if c.is_whitespace() || c == ',' => (),
            _ => return Err(String::from("Entity HashMap could not be created")),
        }
        // A completed pair is committed and the slots reset.
        if key.is_some() && val.is_some() {
            res.insert(key.unwrap().to_string(), val.unwrap());
            key = None;
            val = None;
        }
    }
}
/// Parses a `[...]` vector whose opening `[` has already been consumed;
/// elements may be scalars, nested vectors, or nested maps.
fn read_vec(chars: &mut std::str::Chars) -> Result<Vec<Types>, String> {
    let mut res: Vec<Types> = vec![];
    loop {
        match chars.next() {
            Some(']') => return Ok(res),
            Some('[') => res.push(Types::Vector(read_vec(chars)?)),
            Some('{') => res.push(Types::Map(read_inner_map(chars)?)),
            // Any non-separator char begins a scalar element.
            Some(c) if !c.is_whitespace() && c != ',' => {
                res.push(parse_value(c, chars)?);
            }
            Some(c) if c.is_whitespace() || c == ',' => (),
            // `None` (input exhausted before `]`) lands here too.
            err => return Err(format!("{:?} could not be parsed at char", err)),
        }
    }
}
#[allow(clippy::redundant_pub_crate)]
/// Parses the `#{arg1, arg2,}` argument set of a SELECT (the leading `#` has
/// already been consumed by the caller).
pub(crate) fn read_select_args(chars: &mut std::str::Chars) -> Result<Vec<String>, String> {
    let mut res = Vec::new();
    if chars.next() != Some('{') {
        return Err(String::from(
            "SELECT arguments set should start with `#{` and end with `}`",
        ));
    }
    loop {
        match chars.next() {
            Some('}') => return Ok(res),
            // `c` is the first char of a key; the rest is the following
            // alphanumeric/underscore run.
            Some(c) if !c.is_whitespace() && c != ',' => {
                let key_rest = chars
                    .take_while(|c| c.is_alphanumeric() || c == &'_')
                    .collect::<String>();
                let key = format!("{}{}", c, key_rest);
                res.push(key);
            }
            Some(c) if c.is_whitespace() || c == ',' => (),
            // `None` (input exhausted before `}`) lands here too.
            err => return Err(format!("{:?} could not be parsed at char", err)),
        }
    }
}
#[allow(clippy::redundant_pub_crate)]
/// Parses a generic `#{arg1, arg2,}` argument set (the leading `#` has
/// already been consumed by the caller).
pub(crate) fn read_args(chars: &mut std::str::Chars) -> Result<Vec<String>, String> {
    let mut res = Vec::new();
    if chars.next() != Some('{') {
        return Err(String::from(
            "Arguments set should start with `#{` and end with `}`",
        ));
    }
    loop {
        match chars.next() {
            Some('}') => return Ok(res),
            // `c` is the first char of a key; unlike `read_select_args`, this
            // variant also skips whitespace before collecting the rest.
            Some(c) if !c.is_whitespace() && c != ',' => {
                let key_rest = chars
                    .skip_while(|c| c.is_whitespace())
                    .take_while(|c| c.is_alphanumeric() || c == &'_')
                    .collect::<String>()
                    .trim()
                    .to_owned();
                let key = format!("{}{}", c, key_rest);
                res.push(key);
            }
            Some(c) if c.is_whitespace() || c == ',' => (),
            // `None` (input exhausted before `}`) lands here too.
            err => return Err(format!("{:?} could not be parsed at char", err)),
        }
    }
}
#[allow(clippy::redundant_pub_crate)]
/// Builds a map key from its first character `c` plus the following run of
/// alphanumeric/underscore characters (the terminating character, typically
/// `:` or a space, is consumed as well).
pub(crate) fn parse_key(c: char, chars: &mut std::str::Chars) -> String {
    let mut key = String::new();
    key.push(c);
    key.extend(chars.take_while(|ch| ch.is_alphanumeric() || ch == &'_'));
    key
}
/// Parses one scalar token into a `Types` value.
///
/// Type inference order (first match wins): quoted string, `<float>P` precise
/// number, integer, float, Uuid, boolean, `nil`, single-quoted one-byte char,
/// datetime (via chrono's `DateTime<Utc>` `FromStr`). Anything else is an error.
pub fn parse_value(c: char, chars: &mut std::str::Chars) -> Result<Types, String> {
    // A leading `"` delegates to the dedicated string parser (handles escapes).
    if c == '"' {
        return read_str(chars);
    }
    // The token runs until whitespace or comma (the terminator is consumed).
    let value = format!(
        "{}{}",
        c,
        chars
            .take_while(|c| !c.is_whitespace() && c != &',')
            .collect::<String>()
    );
    if value.ends_with('P') && value[..value.len() - 1].parse::<f64>().is_ok() {
        // `123.45P`: precise number kept as its string form (suffix stripped).
        Ok(Types::Precise(value[..value.len() - 1].to_string()))
    } else if value.parse::<isize>().is_ok() {
        Ok(Types::Integer(value.parse().unwrap()))
    } else if value.parse::<f64>().is_ok() {
        Ok(Types::Float(value.parse().unwrap()))
    } else if uuid::Uuid::from_str(&value).is_ok() {
        Ok(Types::Uuid(uuid::Uuid::from_str(&value).unwrap()))
    } else if value.parse::<bool>().is_ok() {
        Ok(Types::Boolean(value.parse().unwrap()))
    } else if &value.to_lowercase() == "nil" {
        Ok(Types::Nil)
    } else if value.starts_with('\'') && value.ends_with('\'') && value.len() == 3 {
        // Exactly 'x' — byte length 3 restricts this to one-byte chars.
        Ok(Types::Char(value.chars().nth(1).unwrap()))
    } else if value.parse::<DateTime<Utc>>().is_ok() {
        Ok(Types::DateTime(value.parse::<DateTime<Utc>>().unwrap()))
    } else {
        Err(format!("Value Type could not be created from {}", value))
    }
}
#[allow(clippy::redundant_pub_crate)]
/// Reads the raw (string) form of a value: `c` plus everything up to the next
/// whitespace or comma, with every double quote stripped from the result.
pub(crate) fn parse_str_value(c: char, chars: &mut std::str::Chars) -> String {
    let mut out = String::new();
    if c != '"' {
        out.push(c);
    }
    for ch in chars.take_while(|ch| !ch.is_whitespace() && ch != &',') {
        if ch != '"' {
            out.push(ch);
        }
    }
    out
}
#[allow(clippy::redundant_pub_crate)]
/// Reads a double-quoted string (the opening quote is already consumed),
/// supporting the edn escape set `\t \r \n \\ \"`.
///
/// Implementation note: `try_fold` is used for early exit. The accumulator is
/// `(last_was_escape, buffer)`; the closing quote breaks the fold with
/// `Err(Ok(buffer))` and a bad escape breaks with `Err(Err(msg))`. The fold
/// completing normally (`Ok`) therefore means the closing quote was never
/// seen, i.e. the string is unterminated.
pub(crate) fn read_str(chars: &mut std::str::Chars) -> Result<Types, String> {
    let result = chars.try_fold((false, String::new()), |(last_was_escape, mut s), c| {
        if last_was_escape {
            // Supported escape characters, per https://github.com/edn-format/edn#strings
            match c {
                't' => s.push('\t'),
                'r' => s.push('\r'),
                'n' => s.push('\n'),
                '\\' => s.push('\\'),
                '\"' => s.push('\"'),
                _ => return Err(Err(format!("Invalid escape sequence \\{}", c))),
            };
            Ok((false, s))
        } else if c == '\"' {
            // Unescaped quote means we're done
            Err(Ok(s))
        } else if c == '\\' {
            Ok((true, s))
        } else {
            s.push(c);
            Ok((false, s))
        }
    });
    match result {
        // An Ok means we actually finished parsing *without* seeing the end of the string, so that's
        // an error.
        Ok(_) => Err("Unterminated string".to_string()),
        Err(Err(e)) => Err(e),
        Err(Ok(string)) => Ok(Types::String(string)),
    }
}
/// Parses a `#{uuid1, uuid2,}` set of Uuids.
///
/// The `#{` wrapper, spaces and commas are separators; a trailing comma is
/// optional (a pending id is flushed when `}` is reached — previously
/// `#{<id>}` without the trailing comma silently dropped the id). Any other
/// character is an error.
#[allow(clippy::redundant_pub_crate)]
pub(crate) fn read_uuids(chars: &mut std::str::Chars) -> Result<Vec<Uuid>, String> {
    // Converts the accumulated token into a Uuid with a descriptive error.
    fn to_uuid(uuid: &str) -> Result<Uuid, String> {
        Uuid::from_str(uuid)
            .map_err(|e| format!("Couldn't create an Uuid from {:?}. Error {:?}", uuid, e))
    }

    let mut uuids = Vec::new();
    let mut uuid = String::new();
    loop {
        match chars.next() {
            Some(' ') | Some('#') | Some('{') => (),
            Some(l) if l.is_alphanumeric() => uuid.push(l),
            Some('-') => uuid.push('-'),
            Some(',') => {
                uuids.push(to_uuid(&uuid)?);
                uuid.clear();
            }
            Some('}') => {
                // Flush a final id written without a trailing comma.
                if !uuid.is_empty() {
                    uuids.push(to_uuid(&uuid)?);
                }
                return Ok(uuids);
            }
            _ => {
                // Typo fixed: "reuired" -> "required".
                return Err(String::from(
                    "Uuids in `IDS IN` are required to be inside a `#{` and `}`",
                ))
            }
        }
    }
}
// Maps an f64 to its raw IEEE-754 bit pattern so floats can be treated by
// their exact representation (presumably for hashing/equality — confirm at
// call sites). `f64::to_bits` is a safe, total operation; the previous
// "UNSAFE" note predates it.
#[allow(clippy::redundant_pub_crate)]
pub(crate) fn integer_decode(val: f64) -> u64 {
    val.to_bits()
}
| rust | MIT | bcef59acbf1bb1509a8b3d1b2f914e45921ba026 | 2026-01-04T20:17:21.692961Z | false |
naomijub/wooridb | https://github.com/naomijub/wooridb/blob/bcef59acbf1bb1509a8b3d1b2f914e45921ba026/woori-db/src/main.rs | woori-db/src/main.rs | use actix_web::{
middleware::{DefaultHeaders, Logger},
web, App, HttpResponse, HttpServer,
};
mod actors;
#[allow(dead_code)]
mod auth;
mod controllers;
mod core;
mod http;
mod io;
mod model;
mod repository;
mod schemas;
use http::{ping, readiness, routes};
#[actix_web::main]
async fn main() -> std::io::Result<()> {
    // Force actix_web request logging regardless of the caller's environment.
    std::env::set_var("RUST_LOG", "actix_web=info");
    env_logger::init();
    // Server tunables, all overridable via environment variables.
    let env_port = std::env::var("PORT").unwrap_or_else(|_| "1438".to_owned());
    let port = env_port.parse::<u16>().expect("PORT must be a u16");
    let addr = format!("0.0.0.0:{}", port);
    let env_max_connections =
        std::env::var("MAX_CONNECTIONS").unwrap_or_else(|_| "1000".to_owned());
    let max_connections = env_max_connections
        .parse::<usize>()
        .expect("MAX_CONNECTIONS must be a usize");
    let env_client_shutdown =
        std::env::var("CLIENT_SHUTDOWN").unwrap_or_else(|_| "5000".to_owned());
    let client_shutdown = env_client_shutdown
        .parse::<u64>()
        .expect("CLIENT_SHUTDOWN must be a u64");
    HttpServer::new(move || {
        App::new()
            .wrap(Logger::default())
            // NOTE(review): this header value is computed once when the App is
            // built (per worker), not per request — every request served by a
            // worker will carry the same x-request-id. Confirm whether a
            // per-request id was intended.
            .wrap(DefaultHeaders::new().header("x-request-id", uuid::Uuid::new_v4().to_string()))
            .wrap(Logger::new("IP:%a DATETIME:%t REQUEST:\"%r\" STATUS: %s DURATION:%T X-REQUEST-ID:%{x-request-id}o"))
            .service(ping)
            .service(readiness)
            .configure(routes)
            // NOTE(review): empty-path GET route used as a NotFound fallback —
            // confirm `default_service` wasn't intended here.
            .route("", web::get().to(HttpResponse::NotFound))
    })
    .max_connections(max_connections)
    .client_shutdown(client_shutdown)
    // Single worker only — NOTE(review): presumably because state is held in
    // in-process structures; confirm before raising.
    .workers(1)
    .bind(addr)?
    .run()
    .await
}
| rust | MIT | bcef59acbf1bb1509a8b3d1b2f914e45921ba026 | 2026-01-04T20:17:21.692961Z | false |
naomijub/wooridb | https://github.com/naomijub/wooridb/blob/bcef59acbf1bb1509a8b3d1b2f914e45921ba026/woori-db/src/auth/middlewares.rs | woori-db/src/auth/middlewares.rs | use crate::repository::local::{SessionContext, SessionInfo};
use actix_web::{dev::ServiceRequest, web, Error};
use actix_web_httpauth::extractors::bearer::BearerAuth;
use std::{
collections::BTreeMap,
sync::{Arc, Mutex},
};
use super::schemas::Role;
/// Bearer-token guard for the WQL endpoints.
///
/// `/wql/tx` requires a session holding `Role::Write` or `Role::User`;
/// `/wql/query` requires `Role::Read` or `Role::User`; any other path passes
/// through. A missing, expired or under-privileged session is rejected with
/// `AuthorizationBadRequest`.
pub async fn wql_validator(
    req: ServiceRequest,
    credentials: BearerAuth,
) -> Result<ServiceRequest, Error> {
    if req.path().starts_with("/wql/tx") {
        // Session store comes from app data; `None` (store missing or token
        // invalid) falls through to the rejection branch below.
        let allow = req
            .app_data::<web::Data<Arc<Mutex<SessionContext>>>>()
            .and_then(|db| {
                validate_token(
                    &db,
                    Some(credentials.token()),
                    vec![Role::Write, Role::User],
                )
            });
        if let Some(true) = allow {
            Ok(req)
        } else {
            Err(crate::model::error::Error::AuthorizationBadRequest.into())
        }
    } else if req.path().starts_with("/wql/query") {
        let allow = req
            .app_data::<web::Data<Arc<Mutex<SessionContext>>>>()
            .and_then(|db| {
                validate_token(&db, Some(credentials.token()), vec![Role::Read, Role::User])
            });
        if let Some(true) = allow {
            Ok(req)
        } else {
            Err(crate::model::error::Error::AuthorizationBadRequest.into())
        }
    } else {
        Ok(req)
    }
}
/// Bearer-token guard for `/entity-history`: requires a session holding
/// `Role::History` or `Role::User`; other paths pass through. Rejections are
/// `AuthorizationBadRequest`.
pub async fn history_validator(
    req: ServiceRequest,
    credentials: BearerAuth,
) -> Result<ServiceRequest, Error> {
    if req.path().starts_with("/entity-history") {
        let allow = req
            .app_data::<web::Data<Arc<Mutex<SessionContext>>>>()
            .and_then(|db| {
                validate_token(
                    &db,
                    Some(credentials.token()),
                    vec![Role::History, Role::User],
                )
            });
        if let Some(true) = allow {
            Ok(req)
        } else {
            Err(crate::model::error::Error::AuthorizationBadRequest.into())
        }
    } else {
        Ok(req)
    }
}
/// Checks whether `token` maps to a live session in `db` that holds at least
/// one of `roles`.
///
/// Returns `None` when the lock is poisoned, the token is absent, or no
/// session exists for it; otherwise `Some(valid)` combining the date and role
/// checks.
fn validate_token(
    db: &Arc<Mutex<BTreeMap<String, SessionInfo>>>,
    token: Option<&str>,
    roles: Vec<Role>,
) -> Option<bool> {
    let sessions = db.lock().ok()?;
    let session = sessions.get(token?)?;
    Some(session.is_valid_date() && session.is_valid_role(roles))
}
| rust | MIT | bcef59acbf1bb1509a8b3d1b2f914e45921ba026 | 2026-01-04T20:17:21.692961Z | false |
naomijub/wooridb | https://github.com/naomijub/wooridb/blob/bcef59acbf1bb1509a8b3d1b2f914e45921ba026/woori-db/src/auth/io.rs | woori-db/src/auth/io.rs | use std::{
fs::OpenOptions,
io::{BufRead, BufReader, Seek, SeekFrom, Write},
};
use crate::model::error::Error;
use bcrypt::{hash, DEFAULT_COST};
use chrono::{DateTime, Utc};
use ron::from_str;
use uuid::Uuid;
use super::models::{AdminInfo, User, UserRegistry};
use super::schemas;
pub fn read_admin_info() -> Result<AdminInfo, Error> {
#[cfg(test)]
let admin = std::env::var("ADMIN").unwrap_or("your_admin".to_string());
#[cfg(not(test))]
let admin = std::env::var("ADMIN").map_err(|_| Error::AdminNotConfigured)?;
#[cfg(test)]
let admin_password = std::env::var("ADMIN_PASSWORD").unwrap_or("your_password".to_string());
#[cfg(not(test))]
let admin_password = std::env::var("ADMIN_PASSWORD").map_err(|_| Error::AdminNotConfigured)?;
#[cfg(test)]
let auth_hashing_cost = std::env::var("AUTH_HASHING_COST").unwrap_or("4".to_string());
#[cfg(not(test))]
let auth_hashing_cost =
std::env::var("AUTH_HASHING_COST").map_err(|_| Error::AdminNotConfigured)?;
let cost = auth_hashing_cost.parse::<u32>().unwrap_or(DEFAULT_COST);
let pswd_hash = match hash(&admin_password, cost) {
Ok(hash) => hash,
Err(_) => return Err(Error::AdminNotConfigured),
};
Ok(AdminInfo::new(admin, pswd_hash, cost))
}
/// Appends `user` (timestamped with the current UTC time) to the
/// `data/users_info.log` registry, creating the file on first use.
pub fn to_users_log(user: &User) -> Result<(), Error> {
    let utc: DateTime<Utc> = Utc::now();
    let users_info_log = "data/users_info.log";
    let mut file = OpenOptions::new()
        .append(true)
        .create(true)
        .open(users_info_log)?;
    // The exact line format is produced by `User::format_user_log` (models).
    let log = user.format_user_log(utc)?;
    file.write_all(log.as_bytes())?;
    Ok(())
}
/// Rewrites `data/users_info.log` without any line that mentions one of the
/// given user ids, i.e. deletes those users from the registry.
pub fn remove_users_from_log(users: &[Uuid]) -> Result<(), Error> {
    let users_info_log = "data/users_info.log";
    // Render the ids once, instead of re-formatting every Uuid per log line.
    let ids: Vec<String> = users.iter().map(|user| user.to_string()).collect();

    let file = OpenOptions::new().read(true).open(users_info_log)?;
    let lines = BufReader::new(file)
        .lines()
        // Unreadable lines are dropped, matching the previous behavior.
        .filter_map(Result::ok)
        .filter(|line| !ids.iter().any(|id| line.contains(id)))
        .collect::<Vec<String>>()
        .join("\r\n");

    // `truncate(true)` replaces the old remove-then-recreate workaround:
    // `write(true)` alone does not clear a longer pre-existing file.
    let mut file = OpenOptions::new()
        .create(true)
        .write(true)
        .truncate(true)
        .open(users_info_log)?;
    // No-op after truncate; kept for parity with the previous write path.
    file.seek(SeekFrom::Start(0))?;
    file.write_all(lines.as_bytes())?;
    Ok(())
}
pub async fn find_user(user: schemas::User) -> Result<UserRegistry, Error> {
let users_info_log = "data/users_info.log";
let file = OpenOptions::new().read(true).open(users_info_log)?;
let buffer = BufReader::new(file);
let uuid = user.id;
let user_content = buffer
.lines()
.find(|l| (l.as_ref().unwrap_or(&String::new())).contains(&uuid.to_string()))
.ok_or(Error::Unknown)??;
let user: Result<UserRegistry, Error> = match from_str(&user_content) {
Ok(u) => Ok(u),
Err(_) => Err(Error::Unknown),
};
user
}
/// Test helper: asserts that the users log contains `pat`.
#[cfg(test)]
pub fn assert_users_content(pat: &str) {
    use std::io::Read;

    // Fixed path. The previous code passed it through chrono's `format`,
    // which was a no-op because the string contains no `%` specifiers.
    let user_log = "data/users_info.log";
    let mut file = OpenOptions::new().read(true).open(user_log).unwrap();
    let mut s = String::new();
    file.read_to_string(&mut s).unwrap();
    assert!(s.contains(pat));
}
/// Test helper: asserts that the users log does NOT contain `pat`.
#[cfg(test)]
pub fn assert_users_not_content(pat: &str) {
    use std::io::Read;

    // Fixed path. The previous code passed it through chrono's `format`,
    // which was a no-op because the string contains no `%` specifiers.
    let user_log = "data/users_info.log";
    let mut file = OpenOptions::new().read(true).open(user_log).unwrap();
    let mut s = String::new();
    file.read_to_string(&mut s).unwrap();
    assert!(!s.contains(pat));
}
| rust | MIT | bcef59acbf1bb1509a8b3d1b2f914e45921ba026 | 2026-01-04T20:17:21.692961Z | false |
naomijub/wooridb | https://github.com/naomijub/wooridb/blob/bcef59acbf1bb1509a8b3d1b2f914e45921ba026/woori-db/src/auth/mod.rs | woori-db/src/auth/mod.rs | pub(crate) mod controllers;
pub(crate) mod io;
#[cfg(not(debug_assertions))]
pub(crate) mod middlewares;
pub(crate) mod models;
pub(crate) mod schemas;
| rust | MIT | bcef59acbf1bb1509a8b3d1b2f914e45921ba026 | 2026-01-04T20:17:21.692961Z | false |
naomijub/wooridb | https://github.com/naomijub/wooridb/blob/bcef59acbf1bb1509a8b3d1b2f914e45921ba026/woori-db/src/auth/controllers.rs | woori-db/src/auth/controllers.rs | use std::sync::{Arc, Mutex};
use actix_web::{web, HttpResponse, Responder};
use bcrypt::hash;
use chrono::Utc;
use ron::de::from_str;
use uuid::Uuid;
use crate::{
core::pretty_config_output,
model::{
error::{error_to_http, Error},
DataI64,
},
repository::local::{SessionContext, SessionInfo},
};
use super::{
io,
models::{AdminInfo, User},
schemas::{CreateUserWithAdmin, DeleteUsersWithAdmin, UserId},
};
/// POST handler: creates a new user; the request body must carry valid admin
/// credentials alongside the new user's info.
///
/// The created `UserId` is serialized as JSON or RON depending on the `json`
/// feature flag; serialization failure maps to 503 with `FailedToCreateUser`.
pub async fn create_user(body: String, admin: web::Data<AdminInfo>) -> impl Responder {
    match create_user_controller(body, admin).await {
        Err(e) => error_to_http(&e),
        Ok(body) => {
            #[cfg(feature = "json")]
            match serde_json::to_string(&body) {
                Ok(ron) => HttpResponse::Created().body(ron),
                Err(_) => {
                    HttpResponse::ServiceUnavailable().body(Error::FailedToCreateUser.to_string())
                }
            }
            #[cfg(not(feature = "json"))]
            match ron::ser::to_string_pretty(&body, pretty_config_output()) {
                Ok(ron) => HttpResponse::Created().body(ron),
                Err(_) => {
                    HttpResponse::ServiceUnavailable().body(Error::FailedToCreateUser.to_string())
                }
            }
        }
    }
}
/// Validates the admin credentials in `body` and, if valid, registers a new
/// user (random Uuid, bcrypt-hashed password) in the users log.
///
/// Errors: unparsable body -> `AuthenticationBadRequestBody`; wrong admin
/// credentials -> `AuthenticationBadRequest`; hashing or log-write failure ->
/// `FailedToCreateUser`.
pub async fn create_user_controller(
    body: String,
    admin: web::Data<AdminInfo>,
) -> Result<UserId, Error> {
    // Body format depends on the `json` feature flag (JSON vs RON).
    #[cfg(feature = "json")]
    let credentials: Result<CreateUserWithAdmin, Error> = match serde_json::from_str(&body) {
        Ok(x) => Ok(x),
        Err(e) => Err(Error::SerdeJson(e)),
    };
    #[cfg(not(feature = "json"))]
    let credentials: Result<CreateUserWithAdmin, Error> = match from_str(&body) {
        Ok(x) => Ok(x),
        Err(e) => Err(Error::Ron(e.into())),
    };
    if let Ok(cred) = credentials {
        // Admin id + password must match the configured admin hash.
        if admin.is_valid_hash(&cred.admin_password, &cred.admin_id) {
            let new_user_id = Uuid::new_v4();
            // Hash the new user's password at the admin-configured cost.
            if let Ok(new_user_hash) = hash(&cred.user_info.user_password, admin.cost()) {
                let user = User::new(new_user_id, new_user_hash, cred.user_info.role);
                if io::to_users_log(&user).is_ok() {
                    Ok(UserId {
                        user_id: new_user_id,
                    })
                } else {
                    Err(Error::FailedToCreateUser)
                }
            } else {
                Err(Error::FailedToCreateUser)
            }
        } else {
            Err(Error::AuthenticationBadRequest)
        }
    } else {
        // `credentials` was not moved on this path (the `if let` only moves
        // when the pattern matches), so `.err()` is valid here.
        Err(Error::AuthenticationBadRequestBody(
            credentials.err().unwrap().to_string(),
        ))
    }
}
/// POST handler: deletes users by id; the request body must carry valid admin
/// credentials alongside the ids.
///
/// The removed ids are serialized as JSON or RON depending on the `json`
/// feature flag; serialization failure maps to 503 with `FailedToDeleteUsers`.
pub async fn delete_users(body: String, admin: web::Data<AdminInfo>) -> impl Responder {
    match delete_users_controller(body, admin).await {
        Err(e) => error_to_http(&e),
        Ok(body) => {
            #[cfg(feature = "json")]
            match serde_json::to_string(&body) {
                Ok(ron) => HttpResponse::Created().body(ron),
                Err(_) => {
                    HttpResponse::ServiceUnavailable().body(Error::FailedToDeleteUsers.to_string())
                }
            }
            #[cfg(not(feature = "json"))]
            match ron::ser::to_string_pretty(&body, pretty_config_output()) {
                Ok(ron) => HttpResponse::Created().body(ron),
                Err(_) => {
                    HttpResponse::ServiceUnavailable().body(Error::FailedToDeleteUsers.to_string())
                }
            }
        }
    }
}
/// Validates the admin credentials in `body` and removes the listed user ids
/// from the users log. Returns the ids that were requested for removal.
///
/// Errors: unparsable body -> `AuthenticationBadRequestBody`; wrong admin
/// credentials -> `AuthenticationBadRequest`; log rewrite failure ->
/// `FailedToDeleteUsers`.
pub async fn delete_users_controller(
    body: String,
    admin: web::Data<AdminInfo>,
) -> Result<Vec<Uuid>, Error> {
    // Body format depends on the `json` feature flag (JSON vs RON).
    #[cfg(feature = "json")]
    let credentials: Result<DeleteUsersWithAdmin, Error> = match serde_json::from_str(&body) {
        Ok(x) => Ok(x),
        Err(e) => Err(Error::SerdeJson(e)),
    };
    #[cfg(not(feature = "json"))]
    let credentials: Result<DeleteUsersWithAdmin, Error> = match from_str(&body) {
        Ok(x) => Ok(x),
        Err(e) => Err(Error::Ron(e.into())),
    };
    if let Ok(cred) = credentials {
        // Admin id + password must match the configured admin hash.
        if admin.is_valid_hash(&cred.admin_password, &cred.admin_id) {
            if io::remove_users_from_log(&cred.users_ids).is_ok() {
                Ok(cred.users_ids)
            } else {
                Err(Error::FailedToDeleteUsers)
            }
        } else {
            Err(Error::AuthenticationBadRequest)
        }
    } else {
        // `credentials` was not moved on this path, so `.err()` is valid here.
        Err(Error::AuthenticationBadRequestBody(
            credentials.err().unwrap().to_string(),
        ))
    }
}
/// PUT handler: authenticates a user from the request body and returns a
/// fresh session token (plain text, 201) with the configured expiration.
pub async fn put_user_session(
    body: String,
    session_context: web::Data<Arc<Mutex<SessionContext>>>,
    expiration_time: DataI64,
) -> impl Responder {
    match put_user_session_controller(body, session_context, expiration_time).await {
        Err(e) => error_to_http(&e),
        Ok(token) => HttpResponse::Created().body(token),
    }
}
/// Authenticates the user id/password in `body` against the users log and, on
/// success, stores a new session token expiring after `expiration_time`
/// seconds. Returns the token.
///
/// Unknown user, wrong password and lock failure all collapse into
/// `Error::Unknown` (no detail is leaked about which check failed).
pub async fn put_user_session_controller(
    body: String,
    session_context: web::Data<Arc<Mutex<SessionContext>>>,
    expiration_time: DataI64,
) -> Result<String, Error> {
    let exp_time: i64 = *expiration_time.into_inner();
    // Body format depends on the `json` feature flag (JSON vs RON).
    #[cfg(feature = "json")]
    let ok_user: Result<super::schemas::User, Error> = match serde_json::from_str(&body) {
        Ok(x) => Ok(x),
        // NOTE(review): `e` is discarded (unused-variable warning under the
        // `json` feature); `Error::SerdeJson(e)` would match the other
        // endpoints — confirm whether that was intended.
        Err(e) => Err(Error::Unknown),
    };
    #[cfg(not(feature = "json"))]
    let ok_user: Result<super::schemas::User, Error> = match ron::de::from_str(&body) {
        Ok(u) => Ok(u),
        Err(_) => Err(Error::Unknown),
    };
    if let Ok(user) = ok_user {
        let user_registry = io::find_user(user.clone()).await;
        if let Ok(reg) = user_registry {
            let (hash, roles) = reg.context();
            // A wrong password falls through to the generic Unknown below.
            match bcrypt::verify(&(user.user_password), &hash) {
                Err(_) | Ok(false) => (),
                Ok(true) => {
                    if let Ok(mut session) = session_context.lock() {
                        // Token: bcrypt hash (cost 4) of a random Uuid; falls
                        // back to the raw Uuid string if hashing fails.
                        let token = bcrypt::hash(&Uuid::new_v4().to_string(), 4)
                            .unwrap_or_else(|_| Uuid::new_v4().to_string());
                        let expiration = Utc::now() + chrono::Duration::seconds(exp_time);
                        session.insert(token.clone(), SessionInfo::new(expiration, roles));
                        return Ok(token);
                    }
                }
            };
        }
        Err(Error::Unknown)
    } else {
        // `ok_user` was not moved on this path, so `.err()` is valid here.
        Err(Error::AuthenticationBadRequestBody(
            ok_user.err().unwrap().to_string(),
        ))
    }
}
#[cfg(test)]
mod test {
use crate::{
auth::{
io::{assert_users_content, assert_users_not_content},
schemas::UserId,
},
http::routes,
};
use actix_http::body::ResponseBody;
use actix_web::{body::Body, test, App};
#[ignore]
#[actix_rt::test]
async fn create_new_user_ok() {
let mut app = test::init_service(App::new().configure(routes)).await;
let req = test::TestRequest::post()
.set_payload("(admin_id: \"your_admin\",admin_password: \"your_password\",user_info: (user_password: \"my_password\",role: [User,],),)")
.uri("/auth/createUser")
.to_request();
let mut resp = test::call_service(&mut app, req).await;
let body = resp.take_body().as_str().to_string();
assert!(resp.status().is_success());
assert!(body.contains("user_id"));
assert_users_content("roles: [User,],date:");
assert_users_content("hash: ");
assert_users_content("id: ");
}
#[ignore]
#[actix_rt::test]
async fn delete_user_ok() {
let mut app = test::init_service(App::new().configure(routes)).await;
let req = test::TestRequest::post()
.set_payload("(admin_id: \"your_admin\",admin_password: \"your_password\",user_info: (user_password: \"my_password\",role: [User,],),)")
.uri("/auth/createUser")
.to_request();
let _ = test::call_service(&mut app, req).await;
let req = test::TestRequest::post()
.set_payload("(admin_id: \"your_admin\",admin_password: \"your_password\",user_info: (user_password: \"my_password\",role: [User,],),)")
.uri("/auth/createUser")
.to_request();
let mut resp = test::call_service(&mut app, req).await;
let body = resp.take_body().as_str().to_string();
let user: UserId = ron::de::from_str(&body).unwrap();
assert!(resp.status().is_success());
assert!(body.contains("user_id"));
assert_users_content(&user.user_id.to_string());
assert_users_content("id: ");
let req = test::TestRequest::post()
.set_payload(format!("(admin_id: \"your_admin\",admin_password: \"your_password\", users_ids: [\"{}\",],)", user.user_id))
.uri("/auth/deleteUsers")
.to_request();
let mut resp = test::call_service(&mut app, req).await;
let body = resp.take_body().as_str().to_string();
assert!(body.contains(&user.user_id.to_string()));
assert!(resp.status().is_success());
assert_users_not_content(&user.user_id.to_string());
}
#[ignore]
#[actix_rt::test]
async fn create_new_user_wrong_admin() {
let mut app = test::init_service(App::new().configure(routes)).await;
let req = test::TestRequest::post()
.set_payload("(admin_id: \"your_admin\",admin_password: \"nice_password\",user_info: (user_password: \"my_password\",role: [User,],),)")
.uri("/auth/createUser")
.to_request();
let mut resp = test::call_service(&mut app, req).await;
let body = resp.take_body().as_str().to_string();
assert!(resp.status().is_client_error());
assert_eq!(body, "(\n error_type: \"AuthenticationBadRequest\",\n error_message: \"Bad request at authenticating endpoint\",\n)");
}
// Ignored: requires a running instance with the default admin credentials.
#[ignore]
#[actix_rt::test]
async fn get_token_test() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    // Create a user through the admin endpoint and read its id from the body.
    let req = test::TestRequest::post()
        .set_payload("(admin_id: \"your_admin\",admin_password: \"your_password\",user_info: (user_password: \"my_password\",role: [User,],),)")
        .uri("/auth/createUser")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    let body = resp.take_body().as_str().to_string();
    let uuid: UserId = ron::de::from_str(&body).unwrap();
    // Exchange id + password for a session token.
    let payload = format!(
        "(id: \"{}\", user_password: \"my_password\",)",
        uuid.user_id
    );
    let req = test::TestRequest::put()
        .set_payload(payload)
        .uri("/auth/putUserSession")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    let body = resp.take_body().as_str().to_string();
    assert!(resp.status().is_success());
    // The token is opaque; only check it is non-trivially long.
    assert!(body.len() > 20);
}
// Ignored: requires a running instance with the default admin credentials.
#[ignore]
#[actix_rt::test]
async fn bad_request_if_user_password_is_wrong() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    // Create a user whose password is "my_password".
    let req = test::TestRequest::post()
        .set_payload("(admin_id: \"your_admin\",admin_password: \"your_password\",user_info: (user_password: \"my_password\",role: [User,],),)")
        .uri("/auth/createUser")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    let body = resp.take_body().as_str().to_string();
    let uuid: UserId = ron::de::from_str(&body).unwrap();
    // Request a session with the WRONG password for that user.
    let payload = format!(
        "(id: \"{}\", user_password: \"another_pswd\",)",
        uuid.user_id
    );
    let req = test::TestRequest::put()
        .set_payload(payload)
        .uri("/auth/putUserSession")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    // Wrong credentials must be rejected with a 4xx and a generic error body.
    assert!(resp.status().is_client_error());
    let body = resp.take_body().as_str().to_string();
    assert_eq!(
        body,
        "(\n error_type: \"Unknown\",\n error_message: \"Request credentials failed\",\n)"
    );
}
// Test-only convenience: view an actix response body as a &str.
trait BodyTest {
    fn as_str(&self) -> &str;
}
impl BodyTest for ResponseBody<Body> {
    /// Interprets the response bytes as UTF-8.
    ///
    /// # Panics
    /// Panics when the body is not `Body::Bytes` or is not valid UTF-8 —
    /// acceptable in tests, where any such case is a test failure.
    fn as_str(&self) -> &str {
        // The two outer variants carry the same payload shape, so a single
        // or-pattern replaces the previously duplicated match arms.
        match self {
            ResponseBody::Body(Body::Bytes(by)) | ResponseBody::Other(Body::Bytes(by)) => {
                std::str::from_utf8(by).unwrap()
            }
            _ => panic!("response body is not Body::Bytes"),
        }
    }
}
}
#[cfg(test)]
mod routes_test_with_auth {
use crate::{auth::schemas::UserId, http::routes};
use actix_http::body::ResponseBody;
use actix_web::{body::Body, test, App};
use uuid::Uuid;
// Ignored: requires a running instance with the default admin credentials.
#[ignore]
#[actix_rt::test]
async fn query_and_tx_with_token() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    // Create a user via the admin endpoint and parse its id from the body.
    let req = test::TestRequest::post()
        .set_payload("(admin_id: \"your_admin\",admin_password: \"your_password\",user_info: (user_password: \"my_password\",role: [User,],),)")
        .uri("/auth/createUser")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    let body = resp.take_body().as_str().to_string();
    let uuid: UserId = ron::de::from_str(&body).unwrap();
    // Exchange the user credentials for a session token.
    let payload = format!(
        "(id: \"{}\", user_password: \"my_password\",)",
        uuid.user_id
    );
    let req = test::TestRequest::put()
        .set_payload(payload)
        .uri("/auth/putUserSession")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    let token = resp.take_body().as_str().to_string();
    let token = format!("Bearer {}", token);
    // Authenticated transactions: create an entity, then insert a row.
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .header("Authorization", token.clone())
        .set_payload("CREATE ENTITY token_test_ok")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .header("Authorization", token.clone())
        .set_payload("INSERT {a: 123,} INTO token_test_ok")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    // An authenticated query must succeed and return the inserted value.
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .header("Authorization", token.clone())
        .set_payload("Select * FROM token_test_ok")
        .uri("/wql/query")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    let body = resp.take_body().as_str().to_string();
    assert!(resp.status().is_success());
    assert!(body.contains("\"a\": Integer(123)"))
}
// Ignored: requires a running instance with the default admin credentials.
#[ignore]
#[actix_rt::test]
async fn history_with_token() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    // Create a user and obtain a bearer token (same flow as the other tests).
    let req = test::TestRequest::post()
        .set_payload("(admin_id: \"your_admin\",admin_password: \"your_password\",user_info: (user_password: \"my_password\",role: [User,],),)")
        .uri("/auth/createUser")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    let body = resp.take_body().as_str().to_string();
    let uuid: UserId = ron::de::from_str(&body).unwrap();
    let payload = format!(
        "(id: \"{}\", user_password: \"my_password\",)",
        uuid.user_id
    );
    let req = test::TestRequest::put()
        .set_payload(payload)
        .uri("/auth/putUserSession")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    let token = resp.take_body().as_str().to_string();
    let token = format!("Bearer {}", token);
    // Create an entity and insert a row with an explicit, known uuid so the
    // history endpoint can be queried for that exact instance.
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .header("Authorization", token.clone())
        .set_payload("CREATE ENTITY token_history_ok")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let uuid = Uuid::new_v4();
    let payload = format!("INSERT {{a: 123,}} INTO token_history_ok with {}", uuid);
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .header("Authorization", token.clone())
        .set_payload(payload)
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    // The authenticated history request must include the inserted value.
    let payload = format!(
        "(entity_key: \"token_history_ok\", entity_id: \"{}\",)",
        uuid
    );
    let req = test::TestRequest::post()
        .header("Authorization", token.clone())
        .set_payload(payload)
        .uri("/entity-history")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    let body = resp.take_body().as_str().to_string();
    assert!(body.contains("\"a\": Integer(123)"));
}
// Test-only convenience: view an actix response body as a &str.
trait BodyTest {
    fn as_str(&self) -> &str;
}
impl BodyTest for ResponseBody<Body> {
    /// Interprets the response bytes as UTF-8.
    ///
    /// # Panics
    /// Panics when the body is not `Body::Bytes` or is not valid UTF-8 —
    /// acceptable in tests, where any such case is a test failure.
    fn as_str(&self) -> &str {
        // The two outer variants carry the same payload shape, so a single
        // or-pattern replaces the previously duplicated match arms.
        match self {
            ResponseBody::Body(Body::Bytes(by)) | ResponseBody::Other(Body::Bytes(by)) => {
                std::str::from_utf8(by).unwrap()
            }
            _ => panic!("response body is not Body::Bytes"),
        }
    }
}
}
| rust | MIT | bcef59acbf1bb1509a8b3d1b2f914e45921ba026 | 2026-01-04T20:17:21.692961Z | false |
naomijub/wooridb | https://github.com/naomijub/wooridb/blob/bcef59acbf1bb1509a8b3d1b2f914e45921ba026/woori-db/src/auth/schemas.rs | woori-db/src/auth/schemas.rs | use serde::{Deserialize, Serialize};
use uuid::Uuid;
/// Admin-authenticated request payload for creating a new user.
#[derive(Serialize, Deserialize)]
pub struct CreateUserWithAdmin {
    pub admin_id: String,
    pub admin_password: String,
    pub user_info: UserInfo,
}

/// Admin-authenticated request payload for deleting a batch of users by id.
#[derive(Serialize, Deserialize)]
pub struct DeleteUsersWithAdmin {
    pub admin_id: String,
    pub admin_password: String,
    pub users_ids: Vec<Uuid>,
}

/// Password and roles for the user being created.
#[derive(Serialize, Deserialize)]
pub struct UserInfo {
    pub user_password: String,
    pub role: Vec<Role>,
}

/// Access roles a user may hold.
#[derive(Serialize, Deserialize, Debug, PartialEq, Clone)]
pub enum Role {
    // Admin,
    User,
    Read,
    Write,
    History,
}

/// Response payload carrying the id of a newly created user.
#[derive(Serialize, Deserialize)]
pub struct UserId {
    pub user_id: Uuid,
}

/// User credentials pair (id + password) — presumably the body of the
/// `putUserSession` request; confirm against the auth controller.
#[derive(Serialize, Deserialize, Clone)]
pub struct User {
    pub id: Uuid,
    pub user_password: String,
}
| rust | MIT | bcef59acbf1bb1509a8b3d1b2f914e45921ba026 | 2026-01-04T20:17:21.692961Z | false |
naomijub/wooridb | https://github.com/naomijub/wooridb/blob/bcef59acbf1bb1509a8b3d1b2f914e45921ba026/woori-db/src/auth/models.rs | woori-db/src/auth/models.rs | use bcrypt::verify;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use crate::{core::pretty_config_inner, model::error::Error};
use super::schemas::Role;
/// Admin credentials: id, bcrypt hash of the admin password, and the bcrypt
/// cost reused when hashing new user passwords.
#[derive(Serialize, Deserialize)]
pub struct AdminInfo {
    admin_id: String,
    admin_hash: String,
    cost: u32,
}
impl AdminInfo {
pub fn new(id: String, hash: String, cost: u32) -> Self {
Self {
admin_id: id,
admin_hash: hash,
cost,
}
}
pub fn is_valid_hash(&self, pswd: &str, id: &str) -> bool {
match verify(pswd, &self.admin_hash) {
Ok(b) => b && id == self.admin_id,
Err(_) => false,
}
}
pub fn cost(&self) -> u32 {
self.cost
}
}
/// A database user: id, password hash and granted roles.
#[derive(Serialize, Deserialize)]
pub struct User {
    id: Uuid,
    user_password: String,
    roles: Vec<Role>,
}

/// On-disk registry entry for a user, produced by `User::format_user_log`.
#[derive(Serialize, Deserialize)]
pub struct UserRegistry {
    id: Uuid,
    hash: String,
    roles: Vec<Role>,
    date: DateTime<Utc>,
}

impl UserRegistry {
    // Consumes the entry, yielding the password hash and the roles.
    pub fn context(self) -> (String, Vec<Role>) {
        (self.hash, self.roles)
    }
}

impl User {
    pub fn new(id: Uuid, user_password: String, roles: Vec<Role>) -> Self {
        Self {
            id,
            user_password,
            roles,
        }
    }

    // Serializes this user as a pretty-RON `UserRegistry` record followed by
    // CRLF, ready to append to the users log file.
    pub fn format_user_log(&self, date: DateTime<Utc>) -> Result<String, Error> {
        let value = UserRegistry {
            id: self.id,
            hash: self.user_password.clone(),
            roles: self.roles.clone(),
            date,
        };
        Ok(format!(
            "{}\r\n",
            ron::ser::to_string_pretty(&value, pretty_config_inner())?
        ))
    }
}
| rust | MIT | bcef59acbf1bb1509a8b3d1b2f914e45921ba026 | 2026-01-04T20:17:21.692961Z | false |
naomijub/wooridb | https://github.com/naomijub/wooridb/blob/bcef59acbf1bb1509a8b3d1b2f914e45921ba026/woori-db/src/schemas/tx.rs | woori-db/src/schemas/tx.rs | #[cfg(not(feature = "json"))]
use crate::core::pretty_config_output;
use serde::{Deserialize, Serialize};
use uuid::Uuid;
/// Kind of write transaction, echoed back to the client in `TxResponse`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum TxType {
    Create,
    Insert,
    UpdateSet,
    UpdateContent,
    Delete,
    EvictEntity,
    EvictEntityTree,
}
/// Uniform transaction reply: operation kind, target entity, optional row
/// uuid, resulting state (empty when not applicable) and a message.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TxResponse {
    tx_type: TxType,
    entity: String,
    pub(crate) uuid: Option<Uuid>,
    state: String,
    message: String,
}

impl TxResponse {
    // Serializes the reply for the wire: JSON when the `json` feature is
    // enabled, pretty RON otherwise. Serialization failures degrade to the
    // literal "SERVER ERROR" string instead of panicking.
    pub fn write(&self) -> String {
        #[cfg(feature = "json")]
        return serde_json::to_string(self).unwrap_or_else(|_| "SERVER ERROR".to_string());
        #[cfg(not(feature = "json"))]
        ron::ser::to_string_pretty(self, pretty_config_output())
            .unwrap_or_else(|_| "SERVER ERROR".to_string())
    }
}
/// Reply body for a `CREATE ENTITY` transaction.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CreateEntityResponse {
    entity: String,
    message: String,
}

impl CreateEntityResponse {
    pub fn new(entity: String, message: String) -> Self {
        Self { entity, message }
    }
}

// Lifts the create-specific reply into the uniform `TxResponse` shape
// (no uuid, empty state).
impl From<CreateEntityResponse> for TxResponse {
    fn from(tx: CreateEntityResponse) -> Self {
        Self {
            tx_type: TxType::Create,
            entity: tx.entity,
            uuid: None,
            state: String::new(),
            message: tx.message,
        }
    }
}
/// Reply body for an `INSERT` transaction; `uuid` is the new row's id.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct InsertEntityResponse {
    entity: String,
    pub(crate) uuid: Uuid,
    message: String,
}

// Lifts the insert-specific reply into the uniform `TxResponse` shape.
impl From<InsertEntityResponse> for TxResponse {
    fn from(tx: InsertEntityResponse) -> Self {
        Self {
            tx_type: TxType::Insert,
            entity: tx.entity,
            uuid: Some(tx.uuid),
            state: String::new(),
            message: tx.message,
        }
    }
}

impl InsertEntityResponse {
    pub fn new(entity: String, uuid: Uuid, message: String) -> Self {
        Self {
            entity,
            uuid,
            message,
        }
    }
}
/// Reply body shared by `DELETE` and `EVICT` transactions; the concrete
/// operation is carried in `tx_type`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DeleteOrEvictEntityResponse {
    entity: String,
    uuid: Option<Uuid>,
    message: String,
    tx_type: TxType,
}

// Lifts the delete/evict reply into the uniform `TxResponse` shape.
impl From<DeleteOrEvictEntityResponse> for TxResponse {
    fn from(tx: DeleteOrEvictEntityResponse) -> Self {
        Self {
            tx_type: tx.tx_type,
            entity: tx.entity,
            uuid: tx.uuid,
            state: String::new(),
            message: tx.message,
        }
    }
}

impl DeleteOrEvictEntityResponse {
    pub fn new(entity: String, uuid: Option<Uuid>, message: String, tx_type: TxType) -> Self {
        Self {
            entity,
            uuid,
            message,
            tx_type,
        }
    }
}
/// Reply body for `UPDATE SET` / `UPDATE CONTENT`; `state` carries the row's
/// resulting state and `tx_type` distinguishes the two update kinds.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UpdateEntityResponse {
    entity: String,
    uuid: Uuid,
    state: String,
    message: String,
    tx_type: TxType,
}

// Lifts the update-specific reply into the uniform `TxResponse` shape.
impl From<UpdateEntityResponse> for TxResponse {
    fn from(tx: UpdateEntityResponse) -> Self {
        Self {
            tx_type: tx.tx_type,
            entity: tx.entity,
            uuid: Some(tx.uuid),
            state: tx.state,
            message: tx.message,
        }
    }
}

impl UpdateEntityResponse {
    pub fn new(
        entity: String,
        uuid: Uuid,
        state: String,
        message: String,
        tx_type: TxType,
    ) -> Self {
        Self {
            entity,
            uuid,
            state,
            message,
            tx_type,
        }
    }
}
| rust | MIT | bcef59acbf1bb1509a8b3d1b2f914e45921ba026 | 2026-01-04T20:17:21.692961Z | false |
naomijub/wooridb | https://github.com/naomijub/wooridb/blob/bcef59acbf1bb1509a8b3d1b2f914e45921ba026/woori-db/src/schemas/error.rs | woori-db/src/schemas/error.rs | use crate::core::pretty_config_output;
use serde::Serialize;
use std::fmt;
/// Error payload shown to clients, rendered as pretty RON.
#[derive(Debug, Clone, Serialize)]
pub struct Response {
    error_type: String,
    error_message: String,
}

impl Response {
    pub fn new(error_type: String, error_message: String) -> Self {
        Self {
            error_type,
            error_message,
        }
    }

    // Writes the RON rendering into `f`; serialization failures degrade to
    // the literal "SERVER ERROR" string instead of panicking.
    pub fn write(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let output = ron::ser::to_string_pretty(self, pretty_config_output())
            .unwrap_or_else(|_| "SERVER ERROR".to_owned());
        write!(f, "{}", output)
    }
}
| rust | MIT | bcef59acbf1bb1509a8b3d1b2f914e45921ba026 | 2026-01-04T20:17:21.692961Z | false |
naomijub/wooridb | https://github.com/naomijub/wooridb/blob/bcef59acbf1bb1509a8b3d1b2f914e45921ba026/woori-db/src/schemas/history.rs | woori-db/src/schemas/history.rs | use chrono::{DateTime, Utc};
use serde::Deserialize;
use uuid::Uuid;
/// Request payload for the entity-history endpoint: which entity instance to
/// inspect plus an optional datetime window.
#[derive(Debug, Clone, Deserialize)]
pub struct EntityHistoryInfo {
    pub entity_key: String,
    pub entity_id: Uuid,
    pub start_datetime: Option<DateTime<Utc>>,
    pub end_datetime: Option<DateTime<Utc>>,
}
| rust | MIT | bcef59acbf1bb1509a8b3d1b2f914e45921ba026 | 2026-01-04T20:17:21.692961Z | false |
naomijub/wooridb | https://github.com/naomijub/wooridb/blob/bcef59acbf1bb1509a8b3d1b2f914e45921ba026/woori-db/src/schemas/mod.rs | woori-db/src/schemas/mod.rs | pub mod error;
pub mod history;
pub mod query;
pub mod tx;
| rust | MIT | bcef59acbf1bb1509a8b3d1b2f914e45921ba026 | 2026-01-04T20:17:21.692961Z | false |
naomijub/wooridb | https://github.com/naomijub/wooridb/blob/bcef59acbf1bb1509a8b3d1b2f914e45921ba026/woori-db/src/schemas/query.rs | woori-db/src/schemas/query.rs | use std::collections::{BTreeMap, HashMap};
use crate::{core::pretty_config_output, model::error::Error};
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use wql::Types;
/// A query response paired with its row count (`SELECT ... WITH COUNT`).
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct CountResponse {
    response: Box<Response>,
    count: usize,
}

// The `Count*` structs below are serialization envelopes: one per supported
// `Response` variant, pairing that variant's concrete payload with `count`.

/// Envelope for `Response::Id`.
#[derive(Serialize)]
pub struct CountId {
    response: HashMap<String, Types>,
    count: usize,
}

/// Envelope for `Response::All`.
#[derive(Serialize)]
pub struct CountAll {
    response: BTreeMap<Uuid, HashMap<String, Types>>,
    count: usize,
}

/// Envelope for `Response::Order`.
#[derive(Serialize)]
pub struct CountOrder {
    response: Vec<(Uuid, HashMap<String, Types>)>,
    count: usize,
}

/// Envelope for `Response::GroupBy`.
#[derive(Serialize)]
pub struct CountGroupBy {
    response: HashMap<String, BTreeMap<Uuid, HashMap<String, Types>>>,
    count: usize,
}

/// Envelope for `Response::OrderedGroupBy`.
#[derive(Serialize)]
pub struct CountOrderedGroupBy {
    response: HashMap<String, Vec<(Uuid, HashMap<String, Types>)>>,
    count: usize,
}

/// Envelope for `Response::OptionOrder`.
#[derive(Serialize)]
pub struct CountOptionOrder {
    response: Vec<(Uuid, Option<HashMap<String, Types>>)>,
    count: usize,
}

/// Envelope for `Response::OptionGroupBy`.
#[derive(Serialize)]
pub struct CountOptionGroupBy {
    response: HashMap<String, BTreeMap<Uuid, Option<HashMap<String, Types>>>>,
    count: usize,
}

/// Envelope for `Response::OptionSelect`.
#[derive(Serialize)]
pub struct CountOptionSelect {
    response: BTreeMap<Uuid, Option<HashMap<String, Types>>>,
    count: usize,
}

/// Envelope for `Response::CheckValues`.
#[derive(Serialize)]
pub struct CountCheckValues {
    response: HashMap<String, bool>,
    count: usize,
}

/// Envelope for `Response::TimeRange`.
#[derive(Serialize)]
pub struct CountTimeRange {
    response: BTreeMap<DateTime<Utc>, HashMap<String, Types>>,
    count: usize,
}

/// Envelope for `Response::DateSelect`.
#[derive(Serialize)]
pub struct CountDateSelect {
    response: HashMap<String, HashMap<String, Types>>,
    count: usize,
}
impl CountResponse {
pub fn new(count: usize, response: Response) -> Self {
Self {
count,
response: Box::new(response),
}
}
pub fn to_response(&self) -> Result<String, Error> {
let count = self.count;
match &*self.response {
Response::Id(state) => {
let resp = CountId {
count,
response: state.to_owned(),
};
#[cfg(feature = "json")]
return Ok(serde_json::to_string(&resp)?);
#[cfg(not(feature = "json"))]
Ok(ron::ser::to_string_pretty(&resp, pretty_config_output())?)
}
Response::All(state) => {
let resp = CountAll {
count,
response: state.to_owned(),
};
#[cfg(feature = "json")]
return Ok(serde_json::to_string(&resp)?);
#[cfg(not(feature = "json"))]
Ok(ron::ser::to_string_pretty(&resp, pretty_config_output())?)
}
Response::Order(state) => {
let resp = CountOrder {
count,
response: state.to_owned(),
};
#[cfg(feature = "json")]
return Ok(serde_json::to_string(&resp)?);
#[cfg(not(feature = "json"))]
Ok(ron::ser::to_string_pretty(&resp, pretty_config_output())?)
}
Response::GroupBy(state) => {
let resp = CountGroupBy {
count,
response: state.to_owned(),
};
#[cfg(feature = "json")]
return Ok(serde_json::to_string(&resp)?);
#[cfg(not(feature = "json"))]
Ok(ron::ser::to_string_pretty(&resp, pretty_config_output())?)
}
Response::OrderedGroupBy(state) => {
let resp = CountOrderedGroupBy {
count,
response: state.to_owned(),
};
#[cfg(feature = "json")]
return Ok(serde_json::to_string(&resp)?);
#[cfg(not(feature = "json"))]
Ok(ron::ser::to_string_pretty(&resp, pretty_config_output())?)
}
Response::OptionOrder(state) => {
let resp = CountOptionOrder {
count,
response: state.to_owned(),
};
#[cfg(feature = "json")]
return Ok(serde_json::to_string(&resp)?);
#[cfg(not(feature = "json"))]
Ok(ron::ser::to_string_pretty(&resp, pretty_config_output())?)
}
Response::OptionGroupBy(state) => {
let resp = CountOptionGroupBy {
count,
response: state.to_owned(),
};
#[cfg(feature = "json")]
return Ok(serde_json::to_string(&resp)?);
#[cfg(not(feature = "json"))]
Ok(ron::ser::to_string_pretty(&resp, pretty_config_output())?)
}
Response::OptionSelect(state) => {
let resp = CountOptionSelect {
count,
response: state.to_owned(),
};
#[cfg(feature = "json")]
return Ok(serde_json::to_string(&resp)?);
#[cfg(not(feature = "json"))]
Ok(ron::ser::to_string_pretty(&resp, pretty_config_output())?)
}
Response::CheckValues(state) => {
let resp = CountCheckValues {
count,
response: state.to_owned(),
};
#[cfg(feature = "json")]
return Ok(serde_json::to_string(&resp)?);
#[cfg(not(feature = "json"))]
Ok(ron::ser::to_string_pretty(&resp, pretty_config_output())?)
}
Response::TimeRange(state) => {
let resp = CountTimeRange {
count,
response: state.to_owned(),
};
#[cfg(feature = "json")]
return Ok(serde_json::to_string(&resp)?);
#[cfg(not(feature = "json"))]
Ok(ron::ser::to_string_pretty(&resp, pretty_config_output())?)
}
Response::DateSelect(state) => {
let resp = CountDateSelect {
count,
response: state.to_owned(),
};
Ok(ron::ser::to_string_pretty(&resp, pretty_config_output())?)
}
_ => Err(Error::Unknown),
}
}
}
/// Every shape a WQL query result can take; each variant carries the concrete
/// collection produced by the corresponding SELECT form.
#[derive(Debug, Clone, Deserialize, Serialize)]
pub enum Response {
    Id(HashMap<String, Types>),
    Intersect(HashMap<String, Types>),
    Difference(HashMap<String, Types>),
    Union(HashMap<String, Types>),
    All(BTreeMap<Uuid, HashMap<String, Types>>),
    Order(Vec<(Uuid, HashMap<String, Types>)>),
    GroupBy(HashMap<String, BTreeMap<Uuid, HashMap<String, Types>>>),
    OrderedGroupBy(HashMap<String, Vec<(Uuid, HashMap<String, Types>)>>),
    OptionOrder(Vec<(Uuid, Option<HashMap<String, Types>>)>),
    OptionGroupBy(HashMap<String, BTreeMap<Uuid, Option<HashMap<String, Types>>>>),
    OptionSelect(BTreeMap<Uuid, Option<HashMap<String, Types>>>),
    CheckValues(HashMap<String, bool>),
    TimeRange(BTreeMap<DateTime<Utc>, HashMap<String, Types>>),
    WithCount(CountResponse),
    DateSelect(HashMap<String, HashMap<String, Types>>),
    Join(Vec<HashMap<String, Types>>),
}
// Ergonomic conversions: wrap each raw query-result collection into its
// corresponding `Response` variant.

impl From<CountResponse> for Response {
    fn from(map: CountResponse) -> Self {
        Self::WithCount(map)
    }
}

impl From<HashMap<String, HashMap<String, Types>>> for Response {
    fn from(map: HashMap<String, HashMap<String, Types>>) -> Self {
        Self::DateSelect(map)
    }
}

impl From<BTreeMap<Uuid, Option<HashMap<String, Types>>>> for Response {
    fn from(map: BTreeMap<Uuid, Option<HashMap<String, Types>>>) -> Self {
        Self::OptionSelect(map)
    }
}

impl From<HashMap<String, Types>> for Response {
    fn from(map: HashMap<String, Types>) -> Self {
        Self::Id(map)
    }
}

impl From<HashMap<String, bool>> for Response {
    fn from(map: HashMap<String, bool>) -> Self {
        Self::CheckValues(map)
    }
}

impl From<BTreeMap<DateTime<Utc>, HashMap<String, Types>>> for Response {
    fn from(map: BTreeMap<DateTime<Utc>, HashMap<String, Types>>) -> Self {
        Self::TimeRange(map)
    }
}

impl From<BTreeMap<Uuid, HashMap<String, Types>>> for Response {
    fn from(map: BTreeMap<Uuid, HashMap<String, Types>>) -> Self {
        Self::All(map)
    }
}

impl From<Vec<(Uuid, HashMap<String, Types>)>> for Response {
    fn from(map: Vec<(Uuid, HashMap<String, Types>)>) -> Self {
        Self::Order(map)
    }
}

impl From<HashMap<String, BTreeMap<Uuid, HashMap<String, Types>>>> for Response {
    fn from(map: HashMap<String, BTreeMap<Uuid, HashMap<String, Types>>>) -> Self {
        Self::GroupBy(map)
    }
}

impl From<HashMap<String, Vec<(Uuid, HashMap<String, Types>)>>> for Response {
    fn from(map: HashMap<String, Vec<(Uuid, HashMap<String, Types>)>>) -> Self {
        Self::OrderedGroupBy(map)
    }
}

impl From<Vec<(Uuid, Option<HashMap<String, Types>>)>> for Response {
    fn from(map: Vec<(Uuid, Option<HashMap<String, Types>>)>) -> Self {
        Self::OptionOrder(map)
    }
}

impl From<HashMap<String, BTreeMap<Uuid, Option<HashMap<String, Types>>>>> for Response {
    fn from(map: HashMap<String, BTreeMap<Uuid, Option<HashMap<String, Types>>>>) -> Self {
        Self::OptionGroupBy(map)
    }
}
impl Response {
pub fn parse(
self,
key: String,
ent_b: &(String, String),
vec: &mut Vec<HashMap<String, Types>>,
b_hash: HashMap<Types, Vec<HashMap<String, Types>>>,
) -> bool {
match self {
Response::OptionGroupBy(_)
| Response::CheckValues(_)
| Response::TimeRange(_)
| Response::WithCount(_)
| Response::Id(_)
| Response::Intersect(_)
| Response::Difference(_)
| Response::Union(_)
| Response::GroupBy(_)
| Response::OrderedGroupBy(_)
| Response::Join(_)
| Response::DateSelect(_) => {
return false;
}
Response::All(state) => {
state.into_iter().for_each(|(_, s)| {
let type_key = s.get(&key).unwrap_or(&Types::Nil);
let entities = b_hash.get(type_key);
if let Some(v) = entities {
for ent in v {
let mut s = s.clone();
for entry in ent
.iter()
.filter(|(k, _)| *k != "tx_time" && *k != &ent_b.1)
{
let entry_name = if s.contains_key(entry.0) {
format!("{}:{}", entry.0, ent_b.0)
} else {
entry.0.to_owned()
};
s.insert(entry_name, entry.1.to_owned());
}
vec.push(s.to_owned());
}
}
});
}
Response::Order(state) => {
state.into_iter().for_each(|(_, s)| {
let type_key = s.get(&key).unwrap_or(&Types::Nil);
let entities = b_hash.get(type_key);
if let Some(v) = entities {
for ent in v {
let mut s = s.clone();
for entry in ent
.iter()
.filter(|(k, _)| *k != "tx_time" && *k != &ent_b.1)
{
let entry_name = if s.contains_key(entry.0) {
format!("{}:{}", entry.0, ent_b.0)
} else {
entry.0.to_owned()
};
s.insert(entry_name, entry.1.to_owned());
}
vec.push(s.to_owned());
}
}
});
}
Response::OptionOrder(state) => {
state.into_iter().for_each(|(_, s)| {
if let Some(s) = s {
let type_key = s.get(&key).unwrap_or(&Types::Nil);
let entities = b_hash.get(type_key);
if let Some(v) = entities {
for ent in v {
let mut s = s.clone();
for entry in ent
.iter()
.filter(|(k, _)| *k != "tx_time" && *k != &ent_b.1)
{
let entry_name = if s.contains_key(entry.0) {
format!("{}:{}", entry.0, ent_b.0)
} else {
entry.0.to_owned()
};
s.insert(entry_name, entry.1.to_owned());
}
vec.push(s.to_owned());
}
}
}
});
}
Response::OptionSelect(state) => {
state.into_iter().for_each(|(_, s)| {
if let Some(s) = s {
let type_key = s.get(&key).unwrap_or(&Types::Nil);
let entities = b_hash.get(type_key);
if let Some(v) = entities {
for ent in v {
let mut s = s.clone();
for entry in ent
.iter()
.filter(|(k, _)| *k != "tx_time" && *k != &ent_b.1)
{
let entry_name = if s.contains_key(entry.0) {
entry.0.to_owned()
} else {
format!("{}:{}", entry.0, ent_b.0)
};
s.insert(entry_name, entry.1.to_owned());
}
vec.push(s.to_owned());
}
}
}
});
}
}
true
}
pub fn hash(self, key: &str) -> Option<HashMap<Types, Vec<HashMap<String, Types>>>> {
let mut hm = HashMap::new();
match self {
Response::All(state) => {
state.into_iter().for_each(|(_, s)| {
let entry = hm
.entry(s.get(key).unwrap_or(&Types::Nil).to_owned())
.or_insert(Vec::new());
(*entry).push(s);
});
}
Response::Order(state) => {
state.into_iter().for_each(|(_, s)| {
let entry = hm
.entry(s.get(key).unwrap_or(&Types::Nil).to_owned())
.or_insert(Vec::new());
(*entry).push(s);
});
}
Response::OptionOrder(state) => {
state.into_iter().for_each(|(_, s)| {
if let Some(s) = s {
let entry = hm
.entry(s.get(key).unwrap_or(&Types::Nil).to_owned())
.or_insert(Vec::new());
(*entry).push(s);
}
});
}
Response::OptionSelect(state) => {
state.into_iter().for_each(|(_, s)| {
if let Some(s) = s {
let entry = hm
.entry(s.get(key).unwrap_or(&Types::Nil).to_owned())
.or_insert(Vec::new());
(*entry).push(s);
}
});
}
Response::OptionGroupBy(_)
| Response::CheckValues(_)
| Response::Join(_)
| Response::TimeRange(_)
| Response::WithCount(_)
| Response::Id(_)
| Response::Intersect(_)
| Response::Difference(_)
| Response::Union(_)
| Response::GroupBy(_)
| Response::OrderedGroupBy(_)
| Response::DateSelect(_) => {
return None;
}
}
Some(hm)
}
pub fn to_string(&self) -> Result<String, Error> {
match self {
Response::Id(state) => Ok(ron::ser::to_string_pretty(&state, pretty_config_output())?),
Response::Intersect(state) => {
Ok(ron::ser::to_string_pretty(&state, pretty_config_output())?)
}
Response::Difference(state) => {
Ok(ron::ser::to_string_pretty(&state, pretty_config_output())?)
}
Response::Union(state) => {
Ok(ron::ser::to_string_pretty(&state, pretty_config_output())?)
}
Response::CheckValues(state) => {
Ok(ron::ser::to_string_pretty(&state, pretty_config_output())?)
}
Response::TimeRange(state) => {
Ok(ron::ser::to_string_pretty(&state, pretty_config_output())?)
}
Response::All(state) => Ok(ron::ser::to_string_pretty(&state, pretty_config_output())?),
Response::Order(state) => {
Ok(ron::ser::to_string_pretty(&state, pretty_config_output())?)
}
Response::GroupBy(state) => {
Ok(ron::ser::to_string_pretty(&state, pretty_config_output())?)
}
Response::OrderedGroupBy(state) => {
Ok(ron::ser::to_string_pretty(&state, pretty_config_output())?)
}
Response::OptionOrder(state) => {
Ok(ron::ser::to_string_pretty(&state, pretty_config_output())?)
}
Response::OptionGroupBy(state) => {
Ok(ron::ser::to_string_pretty(&state, pretty_config_output())?)
}
Response::WithCount(state) => state.to_response(),
Response::OptionSelect(state) => {
Ok(ron::ser::to_string_pretty(&state, pretty_config_output())?)
}
Response::DateSelect(state) => {
Ok(ron::ser::to_string_pretty(&state, pretty_config_output())?)
}
Response::Join(state) => {
Ok(ron::ser::to_string_pretty(&state, pretty_config_output())?)
}
}
}
}
| rust | MIT | bcef59acbf1bb1509a8b3d1b2f914e45921ba026 | 2026-01-04T20:17:21.692961Z | false |
naomijub/wooridb | https://github.com/naomijub/wooridb/blob/bcef59acbf1bb1509a8b3d1b2f914e45921ba026/woori-db/src/model/wql.rs | woori-db/src/model/wql.rs | use std::collections::HashMap;
use uuid::Uuid;
use wql::{MatchCondition, Types};
/// WQL operations as recorded in the transaction log.
#[derive(Debug, PartialEq)]
pub enum Action {
    CreateEntity,
    Insert,
    Read,
    UpdateSet,
    UpdateContent,
    Delete,
    EvictEntity,
    EvictEntityId,
    // Fallback for log tokens that match no known operation.
    Error,
}
impl std::fmt::Display for Action {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
match *self {
Action::Read => write!(f, "READ"),
Action::CreateEntity => write!(f, "CREATE_ENTITY"),
Action::Insert => write!(f, "INSERT"),
Action::UpdateSet => write!(f, "UPDATE_SET"),
Action::UpdateContent => write!(f, "UPDATE_CONTENT"),
Action::Delete => write!(f, "DELETE"),
Action::EvictEntity => write!(f, "EVICT_ENTITY"),
Action::EvictEntityId => write!(f, "EVICT_ENTITY_ID"),
Action::Error => write!(f, "Error"),
}
}
}
// Inverse of `Display` for the uppercase log tokens: parses a token back into
// an `Action`, mapping anything unrecognized to `Action::Error`.
impl From<String> for Action {
    fn from(val: String) -> Self {
        match val.as_str() {
            "READ" => Action::Read,
            "CREATE_ENTITY" => Action::CreateEntity,
            "INSERT" => Action::Insert,
            "DELETE" => Action::Delete,
            "UPDATE_SET" => Action::UpdateSet,
            "UPDATE_CONTENT" => Action::UpdateContent,
            "EVICT_ENTITY" => Action::EvictEntity,
            "EVICT_ENTITY_ID" => Action::EvictEntityId,
            _ => Action::Error,
        }
    }
}
/// Arguments for an `UPDATE ... MATCH` transaction: the target entity/row,
/// the new content, and the preconditions that must hold.
pub struct MatchUpdateArgs {
    pub entity: String,
    pub content: HashMap<String, Types>,
    pub id: uuid::Uuid,
    pub conditions: MatchCondition,
}

impl MatchUpdateArgs {
    pub fn new(
        entity: String,
        content: HashMap<String, Types>,
        id: uuid::Uuid,
        conditions: MatchCondition,
    ) -> Self {
        Self {
            entity,
            content,
            id,
            conditions,
        }
    }
}

/// Arguments for a plain `UPDATE` transaction.
pub struct UpdateArgs {
    pub entity: String,
    pub content: HashMap<String, Types>,
    pub id: Uuid,
}

impl UpdateArgs {
    pub fn new(entity: String, content: HashMap<String, Types>, id: Uuid) -> Self {
        Self {
            entity,
            content,
            id,
        }
    }
}

/// Arguments for an `INSERT` transaction; `uuid` is `Some` when the client
/// supplied an explicit row id (`INSERT ... with <uuid>`).
pub struct InsertArgs {
    pub entity: String,
    pub content: HashMap<String, Types>,
    pub uuid: Option<Uuid>,
}

impl InsertArgs {
    pub fn new(entity: String, content: HashMap<String, Types>, uuid: Option<Uuid>) -> Self {
        Self {
            entity,
            content,
            uuid,
        }
    }
}
#[cfg(test)]
mod test {
    use super::*;

    /// Exercises every string form accepted by `From<String> for Action`.
    /// Previously the EVICT variants and the `Error` fallback were untested.
    #[test]
    fn from_str() {
        assert_eq!(Action::from(String::from("READ")), Action::Read);
        assert_eq!(
            Action::from(String::from("CREATE_ENTITY")),
            Action::CreateEntity
        );
        assert_eq!(Action::from(String::from("INSERT")), Action::Insert);
        assert_eq!(Action::from(String::from("DELETE")), Action::Delete);
        assert_eq!(Action::from(String::from("UPDATE_SET")), Action::UpdateSet);
        assert_eq!(
            Action::from(String::from("UPDATE_CONTENT")),
            Action::UpdateContent
        );
        assert_eq!(
            Action::from(String::from("EVICT_ENTITY")),
            Action::EvictEntity
        );
        assert_eq!(
            Action::from(String::from("EVICT_ENTITY_ID")),
            Action::EvictEntityId
        );
        // Unrecognized tokens map to the Error sentinel.
        assert_eq!(Action::from(String::from("bogus")), Action::Error);
    }
}
| rust | MIT | bcef59acbf1bb1509a8b3d1b2f914e45921ba026 | 2026-01-04T20:17:21.692961Z | false |
naomijub/wooridb | https://github.com/naomijub/wooridb/blob/bcef59acbf1bb1509a8b3d1b2f914e45921ba026/woori-db/src/model/error.rs | woori-db/src/model/error.rs | use actix::MailboxError;
use actix_web::{error, HttpResponse};
use std::io;
use uuid::Uuid;
use wql::Types;
use crate::schemas::error::Response;
/// Domain error type for the whole service; `error_to_http` maps each variant
/// to an HTTP status and `Display` (below) renders it as a RON `Response`.
#[derive(Debug)]
pub enum Error {
    Io(io::Error),
    QueryFormat(String),
    InvalidQuery,
    EntityAlreadyCreated(String),
    EntityNotCreated(String),
    EntityNotCreatedWithUniqueness(String),
    Serialization(ron::Error),
    // Only exists when the `json` output feature is compiled in.
    #[cfg(feature = "json")]
    SerdeJson(serde_json::Error),
    UuidNotCreatedForEntity(String, Uuid),
    FailedToParseState,
    FailedToParseRegistry,
    UnknownCondition,
    FailedMatchCondition,
    DuplicatedUnique(String, String, Types),
    SelectBadRequest,
    NonSelectQuery,
    ActixMailbox(MailboxError),
    LockData,
    KeyTxTimeNotAllowed,
    Ron(ron::Error),
    InvalidUuid(uuid::Error),
    UpdateContentEncryptKeys(Vec<String>),
    CheckNonEncryptedKeys(Vec<String>),
    DateTimeParse(chrono::ParseError),
    FailedToParseDate,
    AdminNotConfigured,
    #[allow(dead_code)]
    AuthorizationBadRequest,
    AuthenticationBadRequest,
    AuthenticationBadRequestBody(String),
    FailedToCreateUser,
    FailedToDeleteUsers,
    Unknown,
}
// Maps each domain error to its HTTP response, with the error rendered into
// the body via `Display`: internal/parse/serialization failures -> 500,
// malformed requests -> 400, duplicate entity -> 422, failed MATCH
// precondition -> 412, select misuse -> 405, lock contention -> 503,
// authorization failures -> 401, authentication failures -> 403.
pub fn error_to_http(e: &Error) -> HttpResponse {
    match e {
        Error::Io(_)
        | Error::Serialization(_)
        | Error::FailedToParseState
        | Error::FailedToParseRegistry
        | Error::UnknownCondition
        | Error::ActixMailbox(_)
        | Error::Ron(_)
        | Error::DateTimeParse(_)
        | Error::FailedToParseDate => HttpResponse::InternalServerError().body(e.to_string()),
        Error::QueryFormat(_)
        | Error::InvalidQuery
        | Error::DuplicatedUnique(_, _, _)
        | Error::EntityNotCreated(_)
        | Error::EntityNotCreatedWithUniqueness(_)
        | Error::UuidNotCreatedForEntity(_, _)
        | Error::InvalidUuid(_)
        | Error::UpdateContentEncryptKeys(_)
        | Error::CheckNonEncryptedKeys(_)
        | Error::FailedToCreateUser
        | Error::FailedToDeleteUsers
        | Error::KeyTxTimeNotAllowed => HttpResponse::BadRequest().body(e.to_string()),
        Error::EntityAlreadyCreated(_) => HttpResponse::UnprocessableEntity().body(e.to_string()),
        #[cfg(feature = "json")]
        Error::SerdeJson(_) => HttpResponse::InternalServerError().body(e.to_string()),
        Error::FailedMatchCondition => HttpResponse::PreconditionFailed().body(e.to_string()),
        Error::NonSelectQuery => HttpResponse::MethodNotAllowed().body(e.to_string()),
        Error::LockData => HttpResponse::ServiceUnavailable().body(e.to_string()),
        Error::AdminNotConfigured | Error::AuthorizationBadRequest | Error::Unknown => {
            HttpResponse::Unauthorized().body(e.to_string())
        }
        Error::AuthenticationBadRequest | Error::AuthenticationBadRequestBody(_) => {
            HttpResponse::Forbidden().body(e.to_string())
        }
        Error::SelectBadRequest => HttpResponse::MethodNotAllowed().body(e.to_string()),
    }
}
/// Renders every [`Error`] variant through [`Response`] as an
/// `(error_type, error_message)` pair — this is also the text used as the
/// HTTP body by `error_to_http`.
impl std::fmt::Display for Error {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Error::QueryFormat(s) => {
                Response::new(String::from("QueryFormat"), format!("{:?}", s)).write(f)
            }
            Error::InvalidQuery => {
                Response::new(
                    String::from("InvalidQuery"),
                    "Only single value queries are allowed, so key `ID` is required and keys `WHEN AT` are optional".to_string()
                ).write(f)
            }
            Error::Io(e) => Response::new(String::from("IO"), format!("{:?}", e)).write(f),
            Error::EntityAlreadyCreated(e) => Response::new(
                String::from("EntityAlreadyCreated"),
                format!("Entity `{}` already created", e),
            )
            .write(f),
            Error::EntityNotCreated(e) => Response::new(
                String::from("EntityNotCreated"),
                format!("Entity `{}` not created", e),
            )
            .write(f),
            Error::EntityNotCreatedWithUniqueness(e) => Response::new(
                String::from("EntityNotCreatedWithUniqueness"),
                format!("Entity `{}` not created", e),
            )
            .write(f),
            Error::Serialization(e) => {
                Response::new(String::from("Serialization"), format!("{:?}", e)).write(f)
            }
            #[cfg(feature = "json")]
            Error::SerdeJson(e) => {
                Response::new(String::from("SerdeJson"), format!("{:?}", e)).write(f)
            }
            Error::UuidNotCreatedForEntity(s, id) => Response::new(
                String::from("UuidNotCreatedForEntity"),
                format!("Uuid {:?} not created for entity {}", id, s),
            )
            .write(f),
            Error::FailedToParseState => Response::new(
                String::from("FailedToParseState"),
                "Failed to parse state".to_string(),
            )
            .write(f),
            Error::FailedToParseRegistry => Response::new(
                String::from("FailedToParseRegistry"),
                "Failed to parse registry".to_string(),
            )
            .write(f),
            Error::DuplicatedUnique(entity, key, t) => Response::new(
                String::from("DuplicatedUnique"),
                format!(
                    "key `{}` in entity `{}` already contains value `{:?}`",
                    key, entity, t
                ),
            )
            .write(f),
            Error::UnknownCondition => Response::new(
                String::from("UnknownCondition"),
                "UNKNOWN MATCH CONDITION".to_string(),
            )
            .write(f),
            Error::FailedMatchCondition => Response::new(
                String::from("FailedMatchCondition"),
                "One or more MATCH CONDITIONS failed".to_string(),
            )
            .write(f),
            Error::SelectBadRequest => Response::new(
                String::from("SelectBadRequest"),
                "SELECT expressions are handled by `/wql/query` endpoint".to_string(),
            )
            .write(f),
            Error::NonSelectQuery => Response::new(
                String::from("NonSelectQuery"),
                "Non-SELECT expressions are handled by `/wql/tx` endpoint".to_string(),
            )
            .write(f),
            Error::ActixMailbox(r) => {
                Response::new(String::from("ActixMailbox"), format!("{:?}", r)).write(f)
            }
            Error::LockData => Response::new(
                String::from("LockData"),
                "System was not able to get a lock on data".to_string(),
            )
            .write(f),
            Error::Ron(e) => Response::new(String::from("Ron"), format!("{:?}", e)).write(f),
            Error::InvalidUuid(e) => {
                Response::new(String::from("InvalidUuid"), format!("{:?}", e)).write(f)
            }
            // Fixed typo: "cannont" -> "cannot".
            Error::UpdateContentEncryptKeys(keys) => Response::new(
                String::from("UpdateContentEncryptKeys"),
                format!(
                    "Encrypted keys cannot be updated with UPDATE CONTENT: {:?}",
                    keys
                ),
            )
            .write(f),
            Error::CheckNonEncryptedKeys(keys) => Response::new(
                String::from("CheckNonEncryptedKeys"),
                format!("CHECK can only verify encrypted keys: {:?}", keys),
            )
            .write(f),
            Error::DateTimeParse(e) => Response::new(
                String::from("DateTimeParse"),
                format!("Date parse error: {:?}", e),
            )
            .write(f),
            Error::FailedToParseDate => Response::new(
                String::from("FailedToParseDate"),
                "Log date parse error".to_string(),
            )
            .write(f),
            Error::AdminNotConfigured => Response::new(
                String::from("AdminNotConfigured"),
                "Admin credentials not configured".to_string(),
            )
            .write(f),
            Error::AuthorizationBadRequest => Response::new(
                String::from("AuthorizationBadRequest"),
                "Bad request at authorizing endpoint".to_string(),
            )
            .write(f),
            Error::AuthenticationBadRequest => Response::new(
                String::from("AuthenticationBadRequest"),
                "Bad request at authenticating endpoint".to_string(),
            )
            .write(f),
            // NOTE(review): this variant reuses error_type "AuthenticationBadRequest"
            // (not "...Body"); looks like a copy-paste, but clients may depend on the
            // string — confirm before renaming. Left unchanged.
            Error::AuthenticationBadRequestBody(error) => Response::new(
                String::from("AuthenticationBadRequest"),
                format!("Bad request: {}", error),
            )
            .write(f),
            Error::FailedToCreateUser => Response::new(
                String::from("FailedToCreateUser"),
                "Failed to create requested user".to_string(),
            )
            .write(f),
            Error::FailedToDeleteUsers => Response::new(
                String::from("FailedToDeleteUsers"),
                "Failed to delete requested users".to_string(),
            )
            .write(f),
            Error::Unknown => Response::new(
                String::from("Unknown"),
                "Request credentials failed".to_string(),
            )
            .write(f),
            Error::KeyTxTimeNotAllowed => Response::new(
                String::from("KeyTxTimeNotAllowed"),
                "Key `tx_time` is not allowed".to_string(),
            )
            .write(f),
        }
    }
}
#[cfg(feature = "json")]
impl From<serde_json::Error> for Error {
fn from(error: serde_json::Error) -> Self {
Error::SerdeJson(error)
}
}
impl From<io::Error> for Error {
fn from(error: io::Error) -> Self {
Error::Io(error)
}
}
impl From<MailboxError> for Error {
fn from(error: MailboxError) -> Self {
Error::ActixMailbox(error)
}
}
impl From<ron::Error> for Error {
fn from(error: ron::Error) -> Self {
Error::Ron(error)
}
}
impl From<uuid::Error> for Error {
fn from(error: uuid::Error) -> Self {
Error::InvalidUuid(error)
}
}
impl error::ResponseError for Error {}
| rust | MIT | bcef59acbf1bb1509a8b3d1b2f914e45921ba026 | 2026-01-04T20:17:21.692961Z | false |
naomijub/wooridb | https://github.com/naomijub/wooridb/blob/bcef59acbf1bb1509a8b3d1b2f914e45921ba026/woori-db/src/model/mod.rs | woori-db/src/model/mod.rs | pub(crate) mod error;
pub(crate) mod wql;
use actix::prelude::*;
use actix_web::web;
use serde::{Deserialize, Serialize};
use std::{
io::Error,
sync::{atomic::AtomicUsize, Arc, Mutex},
};
use crate::{
actors::wql::Executor,
repository::local::{EncryptContext, LocalContext, UniquenessContext},
};
// Handler-injected shared state: each context lives behind `Arc<Mutex<_>>` so
// all actix workers share (and can mutate) one instance.
pub type DataLocalContext = web::Data<Arc<Mutex<LocalContext>>>;
pub type DataUniquenessContext = web::Data<Arc<Mutex<UniquenessContext>>>;
pub type DataEncryptContext = web::Data<Arc<Mutex<EncryptContext>>>;
// Plain shared values; what each counter/number means is decided at the
// injection site (not visible in this file).
pub type DataAtomicUsize = web::Data<AtomicUsize>;
pub type DataU32 = web::Data<u32>;
pub type DataI64 = web::Data<i64>;
// Address of the WQL `Executor` actor.
pub type DataExecutor = web::Data<Addr<Executor>>;
/// Pointer to one serialized record inside a log file.
///
/// NOTE(review): the span semantics (read `bytes_length` bytes starting at
/// `offset` of `file_name`) are inferred from the `read_log` handler below —
/// confirm against `io::read::read_log`.
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct DataRegister {
    // Log file that holds the record.
    pub file_name: String,
    // Byte offset of the record within that file.
    pub offset: usize,
    // Length in bytes of the serialized record.
    pub bytes_length: usize,
}
// Sending a `DataRegister` to an actor yields the raw record text (or an I/O
// error) — see the `Handler` impl below.
impl Message for DataRegister {
    type Result = Result<String, Error>;
}
impl Handler<DataRegister> for Executor {
    type Result = Result<String, Error>;

    /// Resolves a `DataRegister` by reading the referenced span from the log
    /// file on disk.
    fn handle(&mut self, msg: DataRegister, _ctx: &mut Self::Context) -> Self::Result {
        crate::io::read::read_log(msg)
    }
}
| rust | MIT | bcef59acbf1bb1509a8b3d1b2f914e45921ba026 | 2026-01-04T20:17:21.692961Z | false |
naomijub/wooridb | https://github.com/naomijub/wooridb/blob/bcef59acbf1bb1509a8b3d1b2f914e45921ba026/woori-db/src/controllers/clauses.rs | woori-db/src/controllers/clauses.rs | use std::collections::{BTreeMap, HashMap};
use rayon::prelude::*;
use uuid::Uuid;
use wql::{Algebra, Clause, ToSelect, Types, Value};
use crate::{
core::registry::get_registries,
model::{error::Error, DataLocalContext, DataRegister},
schemas::query::Response as QueryResponse,
};
use crate::core::query::{dedup_states, get_limit_offset_count, get_result_after_manipulation};
/// Entry point for `SELECT ... WHERE`: evaluates the clauses and then applies
/// the post-processing functions (COUNT, LIMIT/OFFSET handled downstream).
pub async fn select_where_controller(
    entity: String,
    args_to_select: ToSelect,
    clauses: Vec<Clause>,
    local_data: DataLocalContext,
    functions: HashMap<String, wql::Algebra>,
) -> Result<QueryResponse, Error> {
    // COUNT is applied to the result set after filtering.
    let wants_count = matches!(functions.get("COUNT"), Some(Algebra::Count));
    let states = select_where(entity, args_to_select, clauses, local_data, &functions).await?;
    Ok(get_result_after_manipulation(states, &functions, wants_count))
}
/// Runs a `SELECT ... WHERE` over `entity`: deserializes the registry states,
/// filters them through the WHERE clauses, applies OFFSET/LIMIT, and finally
/// dedups per the post-processing functions.
pub async fn select_where(
    entity: String,
    args_to_select: ToSelect,
    clauses: Vec<Clause>,
    local_data: DataLocalContext,
    functions: &HashMap<String, wql::Algebra>,
) -> Result<BTreeMap<Uuid, HashMap<String, Types>>, Error> {
    let (limit, offset, _) = get_limit_offset_count(functions);
    // Collect `ValueAttribution` bindings (value-name -> entity key).
    // Iterating by reference and cloning only the matched pairs avoids the
    // previous full clone of the clause list; the pass is tiny, so the rayon
    // parallelism was dropped as pure overhead.
    let args_to_key = clauses
        .iter()
        .filter_map(|clause| {
            if let Clause::ValueAttribution(_, key, Value(arg)) = clause {
                Some((arg.clone(), key.clone()))
            } else {
                None
            }
        })
        .collect::<HashMap<String, String>>();
    let registries = get_registries(&entity, &local_data)?;
    let states = generate_state(&registries, args_to_select).await?;
    let states = filter_where_clauses(states, args_to_key, &clauses)
        .await
        .into_iter()
        .skip(offset)
        .take(limit)
        .collect();
    // `functions` is already a reference; no extra borrow needed.
    Ok(dedup_states(states, functions))
}
/// Applies every WHERE clause to `states`, removing entities that fail a
/// clause; clauses combine with AND semantics (each pass can only shrink the
/// map). `args_to_key` maps value-names bound via `ValueAttribution` back to
/// the entity keys they refer to.
async fn filter_where_clauses(
    states: BTreeMap<Uuid, HashMap<String, Types>>,
    args_to_key: HashMap<String, String>,
    clauses: &[Clause],
) -> BTreeMap<Uuid, HashMap<String, Types>> {
    // Fallback key when a value-name has no binding (lookups then miss).
    let default = String::new();
    let mut states = states.clone();
    for clause in clauses {
        match clause {
            // Attributions only feed `args_to_key`; nothing to filter here.
            Clause::ValueAttribution(_, _, _) => {}
            // OR keeps an entity if at least one inner clause matches.
            Clause::Or(_, inner_clauses) => {
                for (id, state) in states.clone() {
                    if !or_clauses(&state, &args_to_key, &inner_clauses) {
                        states.remove(&id);
                    }
                }
            }
            // NOTE(review): unlike `or_clauses`, the key here is used as-is and
            // NOT translated through `args_to_key` — confirm this asymmetry is
            // intentional.
            Clause::ContainsKeyValue(_, key, value) => {
                for (id, state) in states.clone() {
                    if !state.get(key).map_or(false, |v| value == v) {
                        states.remove(&id);
                    }
                }
            }
            Clause::SimpleComparisonFunction(f, key, value) => {
                let key = args_to_key.get(key).unwrap_or(&default);
                for (id, state) in states.clone() {
                    // NOTE(review): `map` (result discarded) means entities that
                    // LACK `key` are KEPT here, whereas `ContainsKeyValue` drops
                    // them — confirm this is the intended semantics.
                    state.get(key).map(|v| match f {
                        wql::Function::Eq => {
                            if !(v == value) {
                                states.remove(&id);
                            }
                        }
                        wql::Function::NotEq => {
                            if !(v != value) {
                                states.remove(&id);
                            }
                        }
                        wql::Function::GEq => {
                            if !(v >= &value) {
                                states.remove(&id);
                            }
                        }
                        wql::Function::G => {
                            if !(v > &value) {
                                states.remove(&id);
                            }
                        }
                        wql::Function::LEq => {
                            if !(v <= &value) {
                                states.remove(&id);
                            }
                        }
                        wql::Function::L => {
                            if !(v < &value) {
                                states.remove(&id);
                            }
                        }
                        // `%` behaves like SQL LIKE: `%x%` contains, `%x`
                        // ends-with, `x%` starts-with; otherwise substring.
                        wql::Function::Like => {
                            if let (Types::String(content), Types::String(regex)) = (v, value) {
                                let pattern = regex.replace("%", "");
                                if (regex.starts_with('%')
                                    && regex.ends_with('%')
                                    && content.contains(&pattern))
                                    || (regex.starts_with('%') && content.ends_with(&pattern))
                                    || (regex.ends_with('%') && content.starts_with(&pattern))
                                    || content.contains(&pattern)
                                {
                                    // Match — keep the entity.
                                    ()
                                } else {
                                    states.remove(&id);
                                }
                            } else {
                                // LIKE only applies to string values.
                                states.remove(&id);
                            }
                        }
                        _ => {}
                    });
                }
            }
            Clause::ComplexComparisonFunctions(wql::Function::In, key, set) => {
                let key = args_to_key.get(key).unwrap_or(&default);
                for (id, state) in states.clone() {
                    if !state.get(key).map_or(false, |v| set.contains(v)) {
                        states.remove(&id);
                    }
                }
            }
            // BETWEEN is inclusive on both ends: start_end = [start, end].
            Clause::ComplexComparisonFunctions(wql::Function::Between, key, start_end) => {
                let key = args_to_key.get(key).unwrap_or(&default);
                for (id, state) in states.clone() {
                    if !state
                        .get(key)
                        .map_or(false, |v| v >= &start_end[0] && v <= &start_end[1])
                    {
                        states.remove(&id);
                    }
                }
            }
            _ => (),
        }
    }
    states
}
/// Returns `true` when at least one inner clause of an `OR` matches `state`.
/// Evaluation runs in parallel and short-circuits on the first match.
fn or_clauses(
    state: &HashMap<std::string::String, wql::Types>,
    args_to_key: &HashMap<String, String>,
    inner_clauses: &[Clause],
) -> bool {
    // Fallback key when a value-name has no binding (lookups then miss).
    let default = String::new();
    // `.map(pred).any(|f| f)` collapsed to `.any(pred)` — same short-circuit
    // semantics; also dropped the needless `&args_to_key` re-borrow.
    inner_clauses.par_iter().any(|clause| match clause {
        // Attributions always "match": they never constrain anything.
        Clause::ValueAttribution(_, _, _) => true,
        // Nested OR recurses.
        Clause::Or(_, or_inner_clauses) => or_clauses(state, args_to_key, or_inner_clauses),
        Clause::ContainsKeyValue(_, key, value) => {
            let key = args_to_key.get(key).unwrap_or(&default);
            state.get(key).map_or(false, |v| value == v)
        }
        Clause::SimpleComparisonFunction(f, key, value) => {
            let key = args_to_key.get(key).unwrap_or(&default);
            state.get(key).map_or(false, |v| match f {
                wql::Function::Eq => v == value,
                wql::Function::NotEq => v != value,
                wql::Function::GEq => v >= value,
                wql::Function::G => v > value,
                wql::Function::LEq => v <= value,
                wql::Function::L => v < value,
                // `%` behaves like SQL LIKE: `%x%` contains, `%x` ends-with,
                // `x%` starts-with; otherwise substring match.
                wql::Function::Like => {
                    if let (Types::String(content), Types::String(regex)) = (v, value) {
                        if regex.starts_with('%') && regex.ends_with('%') {
                            let regex = regex.replace("%", "");
                            content.contains(&regex)
                        } else if regex.starts_with('%') {
                            let regex = regex.replace("%", "");
                            content.ends_with(&regex)
                        } else if regex.ends_with('%') {
                            let regex = regex.replace("%", "");
                            content.starts_with(&regex)
                        } else {
                            content.contains(&regex[..])
                        }
                    } else {
                        false
                    }
                }
                _ => false,
            })
        }
        Clause::ComplexComparisonFunctions(wql::Function::In, key, set) => {
            let key = args_to_key.get(key).unwrap_or(&default);
            state.get(key).map_or(false, |v| set.contains(v))
        }
        // BETWEEN is inclusive on both ends: start_end = [start, end].
        Clause::ComplexComparisonFunctions(wql::Function::Between, key, start_end) => {
            let key = args_to_key.get(key).unwrap_or(&default);
            state
                .get(key)
                .map_or(false, |v| v >= &start_end[0] && v <= &start_end[1])
        }
        _ => false,
    })
}
/// Deserializes every registry entry into its `HashMap<String, Types>` state,
/// dropping encrypted (hash) values and — when `args_to_select` is
/// `ToSelect::Keys` — any key not requested.
async fn generate_state(
    registries: &BTreeMap<Uuid, (DataRegister, Vec<u8>)>,
    args_to_select: ToSelect,
) -> Result<BTreeMap<Uuid, HashMap<String, Types>>, Error> {
    let mut states: BTreeMap<Uuid, HashMap<String, Types>> = BTreeMap::new();
    for (uuid, (_, bytes)) in registries {
        // NOTE(review): a corrupt record panics here; consider mapping into
        // `Error::Serialization` if its payload is a bincode error — confirm.
        let state: HashMap<String, Types> = bincode::deserialize(bytes).unwrap();
        // Encrypted values are never exposed through SELECT. The previous
        // `.map(|(k, v)| (k.to_owned(), v.to_owned()))` cloned values that
        // `into_par_iter` already yields owned, so it was removed.
        let visible = state.into_par_iter().filter(|(_, v)| !v.is_hash());
        let filtered = if let ToSelect::Keys(ref keys) = args_to_select {
            visible
                .filter(|(k, _)| keys.contains(k))
                .collect::<HashMap<String, Types>>()
        } else {
            visible.collect::<HashMap<String, Types>>()
        };
        states.insert(uuid.to_owned(), filtered);
    }
    Ok(states)
}
| rust | MIT | bcef59acbf1bb1509a8b3d1b2f914e45921ba026 | 2026-01-04T20:17:21.692961Z | false |
naomijub/wooridb | https://github.com/naomijub/wooridb/blob/bcef59acbf1bb1509a8b3d1b2f914e45921ba026/woori-db/src/controllers/query_test.rs | woori-db/src/controllers/query_test.rs | use crate::{http::routes, schemas::tx::TxResponse};
use actix_http::body::ResponseBody;
use actix_web::{body::Body, test, App};
// SELECT * ... ID <uuid>: insert, select, UPDATE SET (adding key `c`), and
// re-select to verify the update merged into the previous state.
// NOTE(review): #[ignore] — presumably because these tests write to the shared
// on-disk log; confirm before enabling in CI.
#[ignore]
#[actix_rt::test]
async fn test_select_all_id_post_ok() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("CREATE ENTITY test_select_all_id")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("INSERT {a: 123, b: 12.3,} INTO test_select_all_id")
        .uri("/wql/tx")
        .to_request();
    let mut resp_insert = test::call_service(&mut app, req).await;
    let body = resp_insert.take_body().as_str().to_string();
    // The tx response body is RON; extract the new entity's uuid.
    let response: TxResponse = ron::de::from_str(&body).unwrap();
    let uuid = response.uuid.unwrap();
    let payload = format!("Select * FROM test_select_all_id ID {}", uuid);
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload(payload)
        .uri("/wql/query")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    assert!(resp.status().is_success());
    let body = resp.take_body().as_str().to_string();
    assert!(body.contains("\"a\": Integer(123)"));
    assert!(body.contains("\"b\": Float(12.3)"));
    // UPDATE SET overwrites `a` and adds `c`; `b` must survive the merge.
    let payload = format!(
        "UPDATE test_select_all_id SET {{a: 12, c: Nil,}} INTO {}",
        uuid
    );
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload(payload)
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let payload = format!("Select * FROM test_select_all_id ID {}", uuid);
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload(payload)
        .uri("/wql/query")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    assert!(resp.status().is_success());
    let body = resp.take_body().as_str().to_string();
    assert!(body.contains("\"a\": Integer(12)"));
    assert!(body.contains("\"b\": Float(12.3)"));
    assert!(body.contains("\"c\": Nil"));
}

// SELECT #{keys} ... ID <uuid>: only the requested keys (`a`, `b`, `e_f`) are
// projected; `c` is inserted but never asserted on.
#[ignore]
#[actix_rt::test]
async fn test_select_args_id_post_ok() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("CREATE ENTITY test_select_all_id")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("INSERT {a: 123, b: 12.3, c: 'd', e_f: \"hello\"} INTO test_select_all_id")
        .uri("/wql/tx")
        .to_request();
    let mut resp_insert = test::call_service(&mut app, req).await;
    let body = resp_insert.take_body().as_str().to_string();
    let response: TxResponse = ron::de::from_str(&body).unwrap();
    let uuid = response.uuid.unwrap();
    let payload = format!("Select #{{a, b, e_f,}} FROM test_select_all_id ID {}", uuid);
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload(payload)
        .uri("/wql/query")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    assert!(resp.status().is_success());
    let body = resp.take_body().as_str().to_string();
    assert!(body.contains("\"a\": Integer(123)"));
    assert!(body.contains("\"b\": Float(12.3)"));
    assert!(body.contains("\"e_f\": String(\"hello\")"));
}

// Non-SELECT statements sent to `/wql/query` must be rejected with the exact
// `NonSelectQuery` RON error body (matches the `Display` impl in model/error).
#[actix_rt::test]
async fn test_create_on_query_endpoint() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("CREATE ENTITY test_select_all_id")
        .uri("/wql/query")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    assert!(resp.status().is_client_error());
    let body = resp.take_body().as_str().to_string();
    assert_eq!(
        body,
        "(\n error_type: \"NonSelectQuery\",\n error_message: \"Non-SELECT expressions are handled by `/wql/tx` endpoint\",\n)"
    );
}
// SELECT * with no ID: all three inserted entities come back with every key.
// NOTE(review): #[ignore] — presumably shared on-disk state; confirm.
#[ignore]
#[actix_rt::test]
async fn test_select_all_post_ok() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("CREATE ENTITY test_select_all_id")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("INSERT {a: 4365, b: 76.3,} INTO test_select_all_id")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("INSERT {a: 7654, b: 98.4, c: \"hello\",} INTO test_select_all_id")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("INSERT {a: 123, b: 12.3,} INTO test_select_all_id")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("Select * FROM test_select_all_id")
        .uri("/wql/query")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    assert!(resp.status().is_success());
    let body = resp.take_body().as_str().to_string();
    assert!(body.contains("Integer(7654)"));
    assert!(body.contains("Float(98.4)"));
    assert!(body.contains("String(\"hello\")"));
    assert!(body.contains("Float(76.3)"));
    assert!(body.contains("Integer(4365)"));
    assert!(body.contains("Float(12.3)"));
    assert!(body.contains("Integer(123)"));
}

// SELECT #{b,c,} with no ID: key `a` must be projected away for every entity
// (its values are asserted absent), while `b`/`c` values survive.
#[ignore]
#[actix_rt::test]
async fn test_select_keys_post_ok() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("CREATE ENTITY test_select_all_id")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("INSERT {a: 4365, b: 76.3,} INTO test_select_all_id")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("INSERT {a: 7654, b: 98.4, c: \"hello\",} INTO test_select_all_id")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("INSERT {a: 123, b: 12.3,} INTO test_select_all_id")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("Select #{b,c,} FROM test_select_all_id")
        .uri("/wql/query")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    assert!(resp.status().is_success());
    let body = resp.take_body().as_str().to_string();
    assert!(!body.contains("Integer(7654)"));
    assert!(body.contains("Float(98.4)"));
    assert!(body.contains("String(\"hello\")"));
    assert!(body.contains("Float(76.3)"));
    assert!(!body.contains("Integer(4365)"));
    assert!(body.contains("Float(12.3)"));
    assert!(!body.contains("Integer(123)"));
}
// SELECT * ... IDS IN #{..}: all three uuids must appear with their full
// states. NOTE(review): #[ignore] — presumably shared on-disk state; confirm.
#[ignore]
#[actix_rt::test]
async fn test_select_all_ids_post_ok() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("CREATE ENTITY test_select_all_id")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    // Three inserts; each tx response carries the uuid of the new entity.
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("INSERT {a: 123, b: 12.3,} INTO test_select_all_id")
        .uri("/wql/tx")
        .to_request();
    let mut resp_insert = test::call_service(&mut app, req).await;
    let body = resp_insert.take_body().as_str().to_string();
    let response: TxResponse = ron::de::from_str(&body).unwrap();
    let uuid1 = response.uuid.unwrap();
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("INSERT {a: 456, b: 52.3,} INTO test_select_all_id")
        .uri("/wql/tx")
        .to_request();
    let mut resp_insert = test::call_service(&mut app, req).await;
    let body = resp_insert.take_body().as_str().to_string();
    let response: TxResponse = ron::de::from_str(&body).unwrap();
    let uuid2 = response.uuid.unwrap();
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("INSERT {a: 543, b: 32.3,} INTO test_select_all_id")
        .uri("/wql/tx")
        .to_request();
    let mut resp_insert = test::call_service(&mut app, req).await;
    let body = resp_insert.take_body().as_str().to_string();
    let response: TxResponse = ron::de::from_str(&body).unwrap();
    let uuid3 = response.uuid.unwrap();
    let payload = format!(
        "Select * FROM test_select_all_id IDS IN #{{ {}, {}, {}, }}",
        uuid1, uuid2, uuid3
    );
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload(payload)
        .uri("/wql/query")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    assert!(resp.status().is_success());
    let body = resp.take_body().as_str().to_string();
    assert!(body.contains("\"a\": Integer(123)"));
    assert!(body.contains("\"b\": Float(52.3)"));
    assert!(body.contains("\"b\": Float(32.3)"));
    assert!(body.contains(&uuid1.to_string()));
    assert!(body.contains(&uuid2.to_string()));
    assert!(body.contains(&uuid3.to_string()));
}

// SELECT #{a,} ... IDS IN #{..}: only key `a` is projected — the `b` values
// must be absent while all three uuids still appear.
#[ignore]
#[actix_rt::test]
async fn test_select_keys_ids_post_ok() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("CREATE ENTITY test_select_all_id")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("INSERT {a: 123, b: 12.3,} INTO test_select_all_id")
        .uri("/wql/tx")
        .to_request();
    let mut resp_insert = test::call_service(&mut app, req).await;
    let body = resp_insert.take_body().as_str().to_string();
    let response: TxResponse = ron::de::from_str(&body).unwrap();
    let uuid1 = response.uuid.unwrap();
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("INSERT {a: 456, b: 52.3,} INTO test_select_all_id")
        .uri("/wql/tx")
        .to_request();
    let mut resp_insert = test::call_service(&mut app, req).await;
    let body = resp_insert.take_body().as_str().to_string();
    let response: TxResponse = ron::de::from_str(&body).unwrap();
    let uuid2 = response.uuid.unwrap();
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("INSERT {a: 543, b: 32.3,} INTO test_select_all_id")
        .uri("/wql/tx")
        .to_request();
    let mut resp_insert = test::call_service(&mut app, req).await;
    let body = resp_insert.take_body().as_str().to_string();
    let response: TxResponse = ron::de::from_str(&body).unwrap();
    let uuid3 = response.uuid.unwrap();
    let payload = format!(
        "Select #{{a,}} FROM test_select_all_id IDS IN #{{ {}, {}, {}, }}",
        uuid1, uuid2, uuid3
    );
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload(payload)
        .uri("/wql/query")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    assert!(resp.status().is_success());
    let body = resp.take_body().as_str().to_string();
    assert!(body.contains("\"a\": Integer(123)"));
    assert!(!body.contains("\"b\": Float(52.3)"));
    assert!(!body.contains("\"b\": Float(32.3)"));
    assert!(body.contains(&uuid1.to_string()));
    assert!(body.contains(&uuid2.to_string()));
    assert!(body.contains(&uuid3.to_string()));
}
// ENCRYPT'd keys must never be returned by SELECT: `pswd` is inserted but must
// be absent from the result. NOTE(review): #[ignore] — presumably shared
// on-disk state; confirm.
#[ignore]
#[actix_rt::test]
async fn test_select_all_without_encrypts_post_ok() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("CREATE ENTITY test_select_all_encrypt ENCRYPT #{pswd,}")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload(
            "INSERT {a: 123, b: 12.3, pswd: \"my-password\",} INTO test_select_all_encrypt",
        )
        .uri("/wql/tx")
        .to_request();
    let mut resp_insert = test::call_service(&mut app, req).await;
    let body = resp_insert.take_body().as_str().to_string();
    let response: TxResponse = ron::de::from_str(&body).unwrap();
    let uuid = response.uuid.unwrap();
    let payload = format!("Select * FROM test_select_all_encrypt ID {}", uuid);
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload(payload)
        .uri("/wql/query")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    assert!(resp.status().is_success());
    let body = resp.take_body().as_str().to_string();
    assert!(body.contains("\"a\": Integer(123)"));
    assert!(body.contains("\"b\": Float(12.3)"));
    assert!(!body.contains("\"pswd\""));
}

// The WHEN AT tests below query time-travel snapshots. They rely on fixture
// data (fixed uuids and timestamps) pre-seeded in the data directory — they
// are NOT #[ignore]d, so that fixture must exist wherever they run.
#[actix_rt::test]
async fn test_select_when_all_post_ok() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    let payload = format!(
        "Select * FROM test_ok WHEN AT {}",
        "2021-01-08T12:00:00+03:00"
    );
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload(payload)
        .uri("/wql/query")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    assert!(resp.status().is_success());
    let body = resp.take_body().as_str().to_string();
    assert!(body.contains("\"a\": Integer(123)"));
    assert!(body.contains("30d2b740-e791-4ff6-8471-215d38b1ff5c"))
;
    assert!(body.contains("bcab53d9-1ef0-4eb3-9b99-f00259d8725b"));
}

// WHEN AT with a key projection: only `g` survives for every entity.
#[actix_rt::test]
async fn test_select_when_args_post_ok() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    let payload = format!(
        "Select #{{g,}} FROM test_update WHEN AT {}",
        "2021-01-08T12:00:00+03:00"
    );
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload(payload)
        .uri("/wql/query")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    assert!(resp.status().is_success());
    let body = resp.take_body().as_str().to_string();
    assert!(!body.contains("\"a\": Integer(123)"));
    assert!(body.contains("\"g\": Nil"));
    assert!(body.contains("0a1b16ed-886c-4c99-97c9-0b977778ec13"));
    assert!(body.contains("41ede07f-e98b-41dd-9ff2-8dce99af4e96"));
}

// WHEN AT + projection + single ID: the single-entity response body does not
// repeat the uuid, hence the negated uuid assertions.
#[actix_rt::test]
async fn test_select_when_args_id_post_ok() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    let payload = format!(
        "Select #{{g,}} FROM test_update ID 0a1b16ed-886c-4c99-97c9-0b977778ec13 WHEN AT {}",
        "2021-01-08T12:00:00+03:00"
    );
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload(payload)
        .uri("/wql/query")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    assert!(resp.status().is_success());
    let body = resp.take_body().as_str().to_string();
    assert!(!body.contains("\"a\": Integer(123)"));
    assert!(body.contains("\"g\": Nil"));
    assert!(!body.contains("0a1b16ed-886c-4c99-97c9-0b977778ec13"));
    assert!(!body.contains("41ede07f-e98b-41dd-9ff2-8dce99af4e96"));
}

// WHEN AT + single ID with full projection: all keys of that snapshot.
#[actix_rt::test]
async fn test_select_when_all_id_post_ok() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    let payload = format!(
        "Select * FROM test_update ID 0a1b16ed-886c-4c99-97c9-0b977778ec13 WHEN AT {}",
        "2021-01-08T12:00:00+03:00"
    );
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload(payload)
        .uri("/wql/query")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    assert!(resp.status().is_success());
    let body = resp.take_body().as_str().to_string();
    assert!(body.contains("\"a\": Integer(123)"));
    assert!(body.contains("\"g\": Nil"));
    assert!(!body.contains("0a1b16ed-886c-4c99-97c9-0b977778ec13"));
    assert!(!body.contains("41ede07f-e98b-41dd-9ff2-8dce99af4e96"));
}

// WHEN START..END: the response maps each tx timestamp in the range to the
// entity's state at that instant.
#[actix_rt::test]
async fn test_select_when_range_all_post_ok() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    let payload = format!(
        "Select * FROM test_update ID fb1ccddb-2465-4504-a4a4-e28ee75c7981 WHEN START {} END {}",
        "2021-02-09T16:30:00Z", "2021-02-09T17:00:00Z"
    );
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload(payload)
        .uri("/wql/query")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    assert!(resp.status().is_success());
    let body = resp.take_body().as_str().to_string();
    assert!(body.contains("{\n \"2021-02-09T16:44:03.236333Z\":"));
    assert!(body.contains("\"f\": String(\"hello\")"));
    assert!(body.contains("\"2021-02-09T16:54:06.237774Z\":"));
    assert!(body.contains("\"f\": String(\"helloworld\")"));
    assert!(body.contains("\"2021-02-09T16:57:06.237774Z\":"));
    assert!(body.contains("\"f\": String(\"JULIA\")"));
}
// CHECK verifies candidate values against encrypted keys without revealing
// them: the correct password reports true, the wrong ssn reports false.
// NOTE(review): #[ignore] — presumably shared on-disk state; confirm.
#[ignore]
#[actix_rt::test]
async fn test_check_encrypt_post_ok() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("CREATE ENTITY test_check_ok ENCRYPT #{pswd, ssn,}")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("INSERT {a: 123, pswd: \"my_password\", ssn: 63432,} INTO test_check_ok")
        .uri("/wql/tx")
        .to_request();
    let mut resp_insert = test::call_service(&mut app, req).await;
    let body = resp_insert.take_body().as_str().to_string();
    let response: TxResponse = ron::de::from_str(&body).unwrap();
    let uuid = response.uuid.unwrap();
    // ssn 63434 deliberately differs from the inserted 63432.
    let payload = format!(
        "CHECK {{pswd: \"my_password\", ssn: 63434,}} FROM test_check_ok ID {}",
        uuid
    );
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload(payload)
        .uri("/wql/query")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    let body = resp.take_body().as_str().to_string();
    assert!(resp.status().is_success());
    assert!(body.contains("\"pswd\": true"));
    assert!(body.contains("\"ssn\": false"));
}
/// Test helper: views an actix response body as `&str`.
trait BodyTest {
    fn as_str(&self) -> &str;
}

impl BodyTest for ResponseBody<Body> {
    /// Panics on non-UTF-8 or non-byte bodies — acceptable inside tests.
    fn as_str(&self) -> &str {
        // Both wrapper variants carry the same `Body`; the previously
        // duplicated inner matches are collapsed into one or-pattern.
        match self {
            ResponseBody::Body(Body::Bytes(by)) | ResponseBody::Other(Body::Bytes(by)) => {
                std::str::from_utf8(by).unwrap()
            }
            _ => panic!(),
        }
    }
}
| rust | MIT | bcef59acbf1bb1509a8b3d1b2f914e45921ba026 | 2026-01-04T20:17:21.692961Z | false |
naomijub/wooridb | https://github.com/naomijub/wooridb/blob/bcef59acbf1bb1509a8b3d1b2f914e45921ba026/woori-db/src/controllers/json_history_test.rs | woori-db/src/controllers/json_history_test.rs | use crate::{auth::schemas::UserId, http::routes, schemas::tx::InsertEntityResponse};
use actix_http::body::ResponseBody;
use actix_web::{body::Body, test, App};
use uuid::Uuid;
#[actix_rt::test]
// Integration test for the JSON `/entity-history` endpoint: exercises the full
// lifecycle of one entity instance (insert → update → delete → two more
// updates) and asserts that values from every revision appear in the history.
async fn test_history_ok() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("CREATE ENTITY test_history")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    // Revision 1: initial insert.
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("INSERT {a: 123, b: 12.3,} INTO test_history")
        .uri("/wql/tx")
        .to_request();
    let mut resp_insert = test::call_service(&mut app, req).await;
    let body = resp_insert.take_body().as_str().to_string();
    // This endpoint family speaks JSON, so the insert response is JSON too.
    let response: InsertEntityResponse = serde_json::from_str(&body).unwrap();
    let uuid = response.uuid;
    // Revision 2: update overriding `a`, adding `c`.
    let payload = format!("UPDATE test_history SET {{a: 12, c: Nil,}} INTO {}", uuid);
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload(payload)
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    // Revision 3: soft delete (rolls the state back one registry).
    let payload = format!("Delete {} FROM test_history", uuid);
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload(payload)
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    // Revisions 4 and 5: further updates with different types for `c`.
    let payload = format!("UPDATE test_history SET {{a: 34, c: true,}} INTO {}", uuid);
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload(payload)
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let payload = format!("UPDATE test_history SET {{a: 321, c: 'h',}} INTO {}", uuid);
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload(payload)
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    // Request the full history as JSON.
    let payload = format!(
        "{{\"entity_key\": \"test_history\", \"entity_id\": \"{}\"}}",
        uuid
    );
    let req = test::TestRequest::post()
        .set_payload(payload)
        .uri("/entity-history")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    assert!(resp.status().is_success());
    let body = resp.take_body().as_str().to_string();
    // Values from distinct revisions must all be present in the history.
    assert!(body.contains("\"a\":{\"Integer\":123}"));
    assert!(body.contains("\"b\":{\"Float\":12.3}"));
    assert!(body.contains("\"c\":{\"Boolean\":true}"));
    assert!(body.contains("\"c\":{\"Char\":\"h\"}"));
    clear();
}
#[ignore]
#[actix_rt::test]
// Integration test of the auth flow: create a user (with admin credentials),
// open a session to obtain a bearer token, then run tx and query requests
// carrying that token. Ignored by default — presumably because it requires
// auth to be enabled in the server configuration (TODO confirm).
async fn query_and_tx_with_token() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    // Create a user via admin credentials; the response carries the user id.
    let req = test::TestRequest::post()
        .set_payload("{\"admin_id\": \"your_admin\", \"admin_password\": \"your_password\", \"user_info\": {\"user_password\": \"my_password\",\"role\": [\"User\"]}}")
        .uri("/auth/createUser")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    let body = resp.take_body().as_str().to_string();
    let uuid: UserId = serde_json::from_str(&body).unwrap();
    // Open a session for that user to obtain the raw token.
    let payload = format!(
        "{{\"id\": \"{}\", \"user_password\": \"my_password\"}}",
        uuid.user_id
    );
    let req = test::TestRequest::put()
        .set_payload(payload)
        .uri("/auth/putUserSession")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    let token = resp.take_body().as_str().to_string();
    let token = format!("Bearer {}", token);
    // Authorized transaction: create entity.
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .header("Authorization", token.clone())
        .set_payload("CREATE ENTITY token_test_ok")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    // Authorized transaction: insert.
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .header("Authorization", token.clone())
        .set_payload("INSERT {a: 123,} INTO token_test_ok")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    // Authorized query must see the inserted value.
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .header("Authorization", token.clone())
        .set_payload("Select * FROM token_test_ok")
        .uri("/wql/query")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    let body = resp.take_body().as_str().to_string();
    println!("{}", body);
    assert!(resp.status().is_success());
    assert!(body.contains("\"a\": Integer(123)"))
}
/// Test helper: borrow an actix response body as UTF-8 text.
trait BodyTest {
    // Panics (in the impl) when the body is not an in-memory byte buffer.
    fn as_str(&self) -> &str;
}
impl BodyTest for ResponseBody<Body> {
    /// Views the response body as `&str`.
    ///
    /// Both `Body` and `Other` wrappers carry the same inner `Body`; only
    /// in-memory byte bodies are expected in these tests, so any other
    /// variant (or invalid UTF-8) is a test failure and panics.
    fn as_str(&self) -> &str {
        let inner = match self {
            ResponseBody::Body(ref b) | ResponseBody::Other(ref b) => b,
        };
        match inner {
            Body::Bytes(ref by) => std::str::from_utf8(&by).unwrap(),
            _ => panic!(),
        }
    }
}
/// Removes every `*.log` file under the `data/` directory, resetting the
/// on-disk transaction log between tests.
///
/// Bug fixed: the previous implementation spawned `rm -rf data/*.log` via
/// `std::process::Command`, which does NOT go through a shell — the glob
/// `data/*.log` was passed literally, matched nothing, and no files were
/// ever removed. This version walks the directory with `std::fs` instead.
/// Missing directory or individual removal failures are ignored (best-effort,
/// matching the original's silent behavior).
pub fn clear() {
    if let Ok(entries) = std::fs::read_dir("data") {
        for entry in entries.flatten() {
            let path = entry.path();
            if path.extension().map_or(false, |ext| ext == "log") {
                let _ = std::fs::remove_file(&path);
            }
        }
    }
}
| rust | MIT | bcef59acbf1bb1509a8b3d1b2f914e45921ba026 | 2026-01-04T20:17:21.692961Z | false |
naomijub/wooridb | https://github.com/naomijub/wooridb/blob/bcef59acbf1bb1509a8b3d1b2f914e45921ba026/woori-db/src/controllers/relation.rs | woori-db/src/controllers/relation.rs | use std::collections::HashMap;
use wql::{RelationType, ToSelect, Wql};
use crate::{
model::{error::Error, DataExecutor, DataLocalContext},
schemas::query::Response,
};
use super::{
clauses::select_where_controller,
query::{
select_all, select_all_id_when_controller, select_all_with_id, select_all_with_ids,
select_args, select_keys_id_when_controller, select_keys_with_id, select_keys_with_ids,
},
};
/// Error message for relation sub-queries (`INTERSECT`/`DIFFERENCE`/`UNION`),
/// which must each resolve to exactly one entity instance (`ID` required).
const ERROR: &str = "Only single value queries are allowed, so key `ID` is required and keys `WHEN AT` are optional";
/// Error message for `JOIN` sub-queries, which must be multi-entry queries.
const ERROR_JOIN: &str =
    "Only multiple values queries are allowed, so key `ID` and `WHEN AT` are not allowed";
pub async fn intersect(
queries: Vec<Wql>,
relation_type: RelationType,
local_data: DataLocalContext,
actor: DataExecutor,
) -> Result<Response, Error> {
let first = queries[0].clone();
let second = queries[1].clone();
let first = get_query_value(first, local_data.clone(), actor.clone()).await?;
let second = get_query_value(second, local_data, actor).await?;
match (first, second, relation_type) {
(Response::Id(f), Response::Id(s), RelationType::Key) => {
let mut state = HashMap::new();
for (k, _) in s {
if f.contains_key(&k) {
let v = f.get(&k).unwrap().to_owned();
state.insert(k, v);
}
}
Ok(Response::Intersect(state))
}
(Response::Id(f), Response::Id(s), RelationType::KeyValue) => {
let mut state = HashMap::new();
for (k, v) in s.clone() {
if f.contains_key(&k) && f.get(&k) == s.get(&k) {
state.insert(k, v);
}
}
Ok(Response::Intersect(state))
}
_ => Err(Error::InvalidQuery),
}
}
pub async fn difference(
queries: Vec<Wql>,
relation_type: RelationType,
local_data: DataLocalContext,
actor: DataExecutor,
) -> Result<Response, Error> {
let first = queries[0].clone();
let second = queries[1].clone();
let first = get_query_value(first, local_data.clone(), actor.clone()).await?;
let second = get_query_value(second, local_data, actor).await?;
match (first, second, relation_type) {
(Response::Id(f), Response::Id(s), RelationType::Key) => {
let mut state = f.clone();
for (k, _) in s {
if f.contains_key(&k) {
state.remove(&k);
}
}
Ok(Response::Difference(state))
}
(Response::Id(f), Response::Id(s), RelationType::KeyValue) => {
let mut state = f.clone();
for (k, _) in s.clone() {
if f.contains_key(&k) && f.get(&k) == s.get(&k) {
state.remove(&k);
}
}
Ok(Response::Difference(state))
}
_ => Err(Error::InvalidQuery),
}
}
pub async fn union(
queries: Vec<Wql>,
relation_type: RelationType,
local_data: DataLocalContext,
actor: DataExecutor,
) -> Result<Response, Error> {
let first = queries[0].clone();
let second = queries[1].clone();
let first = get_query_value(first, local_data.clone(), actor.clone()).await?;
let second = get_query_value(second, local_data, actor).await?;
match (first, second, relation_type) {
(Response::Id(f), Response::Id(s), RelationType::Key) => {
let mut state = f.clone();
for (k, v) in s {
if !f.contains_key(&k) {
state.insert(k, v);
}
}
Ok(Response::Union(state))
}
(Response::Id(f), Response::Id(s), RelationType::KeyValue) => {
let mut state = f.clone();
for (k, v) in s.clone() {
if f.get(&k) != s.get(&k) && f.get(&k).is_some() {
let key = format!("{}:duplicated", k);
state.insert(key, v);
} else if f.get(&k) != s.get(&k) && f.get(&k).is_none() {
let value = s.get(&k).unwrap().to_owned();
state.insert(k, value);
}
}
Ok(Response::Union(state))
}
_ => Err(Error::InvalidQuery),
}
}
pub async fn join(
entity_a: (String, String),
entity_b: (String, String),
queries: Vec<Wql>,
local_data: DataLocalContext,
) -> Result<Response, Error> {
let mut result = Vec::new();
let a = get_join_query_value(queries[0].clone(), local_data.clone()).await?;
let b = get_join_query_value(queries[1].clone(), local_data).await?;
let b_hash = b
.hash(&entity_b.1)
.ok_or_else(|| Error::QueryFormat("Join query not supported".to_string()))?;
let ok = a.parse(entity_a.1, &entity_b, &mut result, b_hash);
if ok {
Ok(Response::Join(result))
} else {
Err(Error::QueryFormat("Join query not supported".to_string()))
}
}
/// Resolves one relation sub-query to its `Response`.
///
/// Only single-instance query forms are accepted: `SELECT ... ID <uuid>`
/// with an optional `WHEN AT <date>`; anything else yields
/// `Error::QueryFormat(ERROR)`.
async fn get_query_value(
    query: Wql,
    local_data: DataLocalContext,
    actor: DataExecutor,
) -> Result<Response, Error> {
    match query {
        Wql::Select(name, ToSelect::Keys(keys), Some(id), _) => {
            select_keys_with_id(name, id, keys, local_data).await
        }
        Wql::Select(name, ToSelect::All, Some(id), _) => {
            select_all_with_id(name, id, local_data).await
        }
        Wql::SelectWhen(name, ToSelect::Keys(keys), Some(id), at) => {
            select_keys_id_when_controller(name, at, keys, id, actor).await
        }
        Wql::SelectWhen(name, ToSelect::All, Some(id), at) => {
            select_all_id_when_controller(name, at, id, actor).await
        }
        _ => Err(Error::QueryFormat(String::from(ERROR))),
    }
}
/// Resolves one `JOIN` sub-query to its `Response`.
///
/// Only multi-entry query forms are accepted (`SELECT` without `ID`,
/// `SELECT ... IDS IN`, `SELECT ... WHERE`); anything else yields
/// `Error::QueryFormat(ERROR_JOIN)`.
async fn get_join_query_value(query: Wql, local_data: DataLocalContext) -> Result<Response, Error> {
    match query {
        Wql::Select(name, ToSelect::Keys(keys), None, fns) => {
            select_args(name, keys, local_data, fns).await
        }
        Wql::Select(name, ToSelect::All, None, fns) => select_all(name, local_data, fns).await,
        Wql::SelectIds(name, ToSelect::Keys(keys), ids, fns) => {
            select_keys_with_ids(name, keys, ids, local_data, fns).await
        }
        Wql::SelectIds(name, ToSelect::All, ids, fns) => {
            select_all_with_ids(name, ids, local_data, fns).await
        }
        Wql::SelectWhere(name, to_select, clauses, fns) => {
            select_where_controller(name, to_select, clauses, local_data, fns).await
        }
        _ => Err(Error::QueryFormat(String::from(ERROR_JOIN))),
    }
}
| rust | MIT | bcef59acbf1bb1509a8b3d1b2f914e45921ba026 | 2026-01-04T20:17:21.692961Z | false |
naomijub/wooridb | https://github.com/naomijub/wooridb/blob/bcef59acbf1bb1509a8b3d1b2f914e45921ba026/woori-db/src/controllers/tx.rs | woori-db/src/controllers/tx.rs | use crate::core::tx_time;
use crate::schemas::tx::{TxResponse, TxType};
use crate::{
actors::{
encrypts::{CreateWithEncryption, EncryptContent, WriteWithEncryption},
recovery::{LocalData, OffsetCounter},
state::{MatchUpdate, PreviousRegistry, State},
uniques::{CreateWithUniqueKeys, WriteWithUniqueKeys},
wql::{DeleteId, InsertEntityContent, UpdateContentEntityContent, UpdateSetEntityContent},
},
core::{pretty_config_inner, wql::update_content_state},
model::{
error::error_to_http,
wql::{InsertArgs, MatchUpdateArgs, UpdateArgs},
DataAtomicUsize, DataEncryptContext, DataExecutor, DataLocalContext, DataU32,
DataUniquenessContext,
},
};
use crate::{
actors::{
uniques::CheckForUniqueKeys,
wql::{CreateEntity, EvictEntity, EvictEntityId},
},
schemas::tx::CreateEntityResponse,
};
use crate::{
model::{error::Error, DataRegister},
schemas::tx::InsertEntityResponse,
};
use crate::{
repository::local::LocalContext,
schemas::tx::{DeleteOrEvictEntityResponse, UpdateEntityResponse},
};
use actix_web::{HttpResponse, Responder};
use rayon::prelude::*;
use ron::ser::to_string_pretty;
use std::{
collections::{BTreeMap, HashMap},
str::FromStr,
sync::{atomic::Ordering, Arc, Mutex},
};
use uuid::Uuid;
use wql::{Types, Wql};
/// HTTP handler for the WQL transaction endpoint (`/wql/tx`).
///
/// Parses the raw request body as a WQL statement and dispatches it to the
/// matching transaction controller. Query statements posted here are
/// rejected with `Error::SelectBadRequest`; parse failures become
/// `Error::QueryFormat`. Controller errors are mapped to HTTP error
/// responses; successes become `200 OK` with the serialized tx response.
pub async fn wql_handler(
    body: String,
    local_data: DataLocalContext,
    uniqueness: DataUniquenessContext,
    encryption: DataEncryptContext,
    bytes_counter: DataAtomicUsize,
    hashing_cost: DataU32,
    actor: DataExecutor,
) -> impl Responder {
    let query = wql::Wql::from_str(&body);
    let response = match query {
        Ok(Wql::CreateEntity(entity, uniques, encrypts)) => {
            // Constraint registration results are deliberately discarded:
            // entity creation proceeds even if registering uniques/encrypts
            // fails. NOTE(review): verify this best-effort behavior is
            // intended rather than an oversight.
            let _ = create_unique_controller(&entity, uniques, uniqueness, &actor).await;
            let _ = create_encrypts_controller(&entity, encrypts, encryption, &actor).await;
            create_controller(entity, local_data.into_inner(), bytes_counter, actor).await
        }
        Ok(Wql::Delete(entity, uuid)) => {
            delete_controller(entity, uuid, local_data.into_inner(), bytes_counter, actor).await
        }
        Ok(Wql::Insert(entity, content, uuid)) => {
            insert_controller(
                InsertArgs::new(entity, content, uuid),
                local_data.into_inner(),
                bytes_counter,
                uniqueness,
                encryption,
                hashing_cost,
                actor,
            )
            .await
        }
        Ok(Wql::UpdateContent(entity, content, uuid)) => {
            update_content_controller(
                UpdateArgs::new(entity, content, uuid),
                local_data.into_inner(),
                bytes_counter,
                uniqueness,
                encryption,
                actor,
            )
            .await
        }
        Ok(Wql::UpdateSet(entity, content, uuid)) => {
            update_set_controller(
                UpdateArgs::new(entity, content, uuid),
                local_data.into_inner(),
                bytes_counter,
                uniqueness,
                encryption,
                hashing_cost,
                actor,
            )
            .await
        }
        Ok(Wql::MatchUpdate(entity, content, uuid, conditions)) => {
            match_update_set_controller(
                MatchUpdateArgs::new(entity, content, uuid, conditions),
                local_data.into_inner(),
                bytes_counter,
                uniqueness,
                encryption,
                hashing_cost,
                actor,
            )
            .await
        }
        Ok(Wql::Evict(entity, uuid)) => {
            evict_controller(entity, uuid, local_data.into_inner(), bytes_counter, actor).await
        }
        // Any other parsed statement (SELECT family) is not a transaction.
        Ok(_) => Err(Error::SelectBadRequest),
        Err(e) => Err(Error::QueryFormat(e)),
    };
    match response {
        Err(e) => error_to_http(&e),
        Ok(resp) => HttpResponse::Ok().body(resp.write()),
    }
}
pub async fn create_controller(
entity: String,
local_data: Arc<Arc<Mutex<LocalContext>>>,
bytes_counter: DataAtomicUsize,
actor: DataExecutor,
) -> Result<TxResponse, Error> {
let local_data = {
let mut local_data = if let Ok(guard) = local_data.lock() {
guard
} else {
return Err(Error::LockData);
};
if local_data.contains_key(&entity) {
return Err(Error::EntityAlreadyCreated(entity));
}
local_data.insert(entity.clone(), BTreeMap::new());
local_data.clone()
};
actor.send(LocalData::new(local_data)).await??;
let message = format!("Entity `{}` created", &entity);
let (offset, is_empty) = actor.send(CreateEntity::new(&entity)).await??;
if is_empty {
bytes_counter.store(0, Ordering::SeqCst);
}
bytes_counter.fetch_add(offset, Ordering::SeqCst);
actor
.send(OffsetCounter::new(bytes_counter.load(Ordering::SeqCst)))
.await??;
Ok(CreateEntityResponse::new(entity, message).into())
}
pub async fn evict_controller(
entity: String,
uuid: Option<Uuid>,
local_data: Arc<Arc<Mutex<LocalContext>>>,
bytes_counter: DataAtomicUsize,
actor: DataExecutor,
) -> Result<TxResponse, Error> {
if uuid.is_none() {
let message = format!("Entity {} evicted", &entity);
let (offset, is_empty) = actor.send(EvictEntity::new(&entity)).await??;
if is_empty {
bytes_counter.store(0, Ordering::SeqCst);
}
bytes_counter.fetch_add(offset, Ordering::SeqCst);
actor
.send(OffsetCounter::new(bytes_counter.load(Ordering::SeqCst)))
.await??;
let local_data = {
let mut local_data = if let Ok(guard) = local_data.lock() {
guard
} else {
return Err(Error::LockData);
};
local_data.remove(&entity);
local_data.clone()
};
actor.send(LocalData::new(local_data)).await??;
Ok(DeleteOrEvictEntityResponse::new(entity, None, message, TxType::EvictEntityTree).into())
} else {
let id = uuid.unwrap();
let (offset, is_empty) = actor.send(EvictEntityId::new(&entity, id)).await??;
if is_empty {
bytes_counter.store(0, Ordering::SeqCst);
}
bytes_counter.fetch_add(offset, Ordering::SeqCst);
actor
.send(OffsetCounter::new(bytes_counter.load(Ordering::SeqCst)))
.await??;
let local_data = {
let mut local_data = if let Ok(guard) = local_data.lock() {
guard
} else {
return Err(Error::LockData);
};
if let Some(d) = local_data.get_mut(&entity) {
d.remove(&id);
}
local_data.clone()
};
actor.send(LocalData::new(local_data)).await??;
let message = format!("Entity {} with id {} evicted", &entity, &id);
Ok(DeleteOrEvictEntityResponse::new(entity, uuid, message, TxType::EvictEntity).into())
}
}
/// Registers unique-key constraints for an entity: persists them to the
/// uniqueness log, then loads them into the in-memory uniqueness context.
/// A no-op when `uniques` is empty.
pub async fn create_unique_controller(
    entity: &str,
    uniques: Vec<String>,
    uniqueness: DataUniquenessContext,
    actor: &DataExecutor,
) -> Result<(), Error> {
    // Guard clause: nothing to register.
    if uniques.is_empty() {
        return Ok(());
    }
    let uniqueness_data = uniqueness.into_inner();
    // Persist first, then hydrate the in-memory context.
    actor
        .send(WriteWithUniqueKeys {
            entity: entity.to_owned(),
            uniques: uniques.clone(),
        })
        .await??;
    actor
        .send(CreateWithUniqueKeys {
            entity: entity.to_owned(),
            uniques,
            data: uniqueness_data,
        })
        .await??;
    Ok(())
}
/// Registers encrypted-key configuration for an entity: persists it to the
/// encryption log, then loads it into the in-memory encryption context.
/// A no-op when `encrypts` is empty.
pub async fn create_encrypts_controller(
    entity: &str,
    encrypts: Vec<String>,
    encryption: DataEncryptContext,
    actor: &DataExecutor,
) -> Result<(), Error> {
    // Guard clause: nothing to register.
    if encrypts.is_empty() {
        return Ok(());
    }
    let encryption_data = encryption.into_inner();
    // Persist first, then hydrate the in-memory context.
    actor
        .send(WriteWithEncryption {
            entity: entity.to_owned(),
            encrypts: encrypts.clone(),
        })
        .await??;
    actor
        .send(CreateWithEncryption {
            entity: entity.to_owned(),
            encrypts,
            data: encryption_data,
        })
        .await??;
    Ok(())
}
/// Inserts a new instance of an entity.
///
/// Flow: encrypt the configured keys → serialize to RON → verify the entity
/// exists → enforce unique-key constraints → append the insert record to the
/// log → register the new instance (log position + bincode-encoded state) in
/// the in-memory context → resync the byte-offset counter.
///
/// # Errors
/// `Error::EntityNotCreated`, uniqueness violations, serialization failures,
/// `Error::LockData` on a poisoned mutex, or any actor error.
pub async fn insert_controller(
    args: InsertArgs,
    local_data: Arc<Arc<Mutex<LocalContext>>>,
    bytes_counter: DataAtomicUsize,
    uniqueness: DataUniquenessContext,
    encryption: DataEncryptContext,
    hashing_cost: DataU32,
    actor: DataExecutor,
) -> Result<TxResponse, Error> {
    let datetime = tx_time(&args.content)?;
    let mut offset = bytes_counter.load(Ordering::SeqCst);
    // Hash/encrypt the keys configured as ENCRYPT for this entity.
    let encrypted_content = actor
        .send(EncryptContent::new(
            &args.entity,
            args.content,
            encryption.into_inner(),
            *hashing_cost.into_inner(),
            datetime,
        ))
        .await??;
    let content_log = to_string_pretty(&encrypted_content, pretty_config_inner())
        .map_err(Error::Serialization)?;
    // Scoped so the guard is dropped before the next await point.
    {
        let local_data = if let Ok(guard) = local_data.lock() {
            guard
        } else {
            return Err(Error::LockData);
        };
        if !local_data.contains_key(&args.entity) {
            return Err(Error::EntityNotCreated(args.entity));
        }
    }
    let uniqueness = uniqueness.into_inner();
    actor
        .send(CheckForUniqueKeys::new(
            args.entity.to_owned(),
            &encrypted_content,
            uniqueness,
        ))
        .await??;
    // content_value appears to be (date, uuid, bytes_written, log_was_empty)
    // based on usage below — TODO confirm against InsertEntityContent.
    let content_value = actor
        .send(InsertEntityContent::new(
            &args.entity,
            &content_log,
            args.uuid,
            datetime,
        ))
        .await??;
    if content_value.3 {
        bytes_counter.store(0, Ordering::SeqCst);
        offset = 0;
    }
    // Where this record lives in the day-partitioned log file.
    let local_data_register = DataRegister {
        offset,
        bytes_length: content_value.2,
        file_name: content_value.0.format("data/%Y_%m_%d.log").to_string(),
    };
    let local_data = {
        let mut local_data = if let Ok(guard) = local_data.lock() {
            guard
        } else {
            return Err(Error::LockData);
        };
        if let Some(map) = local_data.get_mut(&args.entity) {
            let encoded: Vec<u8> = bincode::serialize(&encrypted_content).unwrap();
            map.insert(content_value.1, (local_data_register, encoded));
        }
        local_data.clone()
    };
    actor.send(LocalData::new(local_data)).await??;
    bytes_counter.fetch_add(content_value.2, Ordering::SeqCst);
    actor
        .send(OffsetCounter::new(bytes_counter.load(Ordering::SeqCst)))
        .await??;
    let message = format!(
        "Entity {} inserted with Uuid {}",
        &args.entity, &content_value.1
    );
    Ok(InsertEntityResponse::new(args.entity, content_value.1, message).into())
}
/// `UPDATE ... SET`: overwrites the given keys on an existing instance
/// (new keys are added, existing keys replaced; untouched keys are kept).
///
/// Flow: encrypt configured keys → verify entity and instance exist →
/// enforce uniqueness → merge the new keys into the previous state → append
/// the update record → replace the instance's in-memory registry entry →
/// resync the byte-offset counter.
pub async fn update_set_controller(
    args: UpdateArgs,
    local_data: Arc<Arc<Mutex<LocalContext>>>,
    bytes_counter: DataAtomicUsize,
    uniqueness: DataUniquenessContext,
    encryption: DataEncryptContext,
    hashing_cost: DataU32,
    actor: DataExecutor,
) -> Result<TxResponse, Error> {
    let datetime = tx_time(&args.content)?;
    let mut offset = bytes_counter.load(Ordering::SeqCst);
    let encrypted_content = actor
        .send(EncryptContent::new(
            &args.entity,
            args.content,
            encryption.into_inner(),
            *hashing_cost.into_inner(),
            datetime,
        ))
        .await??;
    let content_log = to_string_pretty(&encrypted_content, pretty_config_inner())
        .map_err(Error::Serialization)?;
    // Existence checks; scoped so the guard is released before awaiting.
    {
        let local_data = if let Ok(guard) = local_data.lock() {
            guard
        } else {
            return Err(Error::LockData);
        };
        if !local_data.contains_key(&args.entity) {
            return Err(Error::EntityNotCreated(args.entity));
        } else if local_data.contains_key(&args.entity)
            && !local_data.get(&args.entity).unwrap().contains_key(&args.id)
        {
            return Err(Error::UuidNotCreatedForEntity(args.entity, args.id));
        }
    }
    let uniqueness = uniqueness.into_inner();
    actor
        .send(CheckForUniqueKeys {
            entity: args.entity.to_owned(),
            content: encrypted_content.to_owned(),
            uniqueness,
        })
        .await??;
    // (log position, bincode-encoded previous state) for this instance.
    let previous_entry = {
        let local_data = if let Ok(guard) = local_data.lock() {
            guard
        } else {
            return Err(Error::LockData);
        };
        let previous_entry = local_data.get(&args.entity).unwrap().get(&args.id).unwrap();
        previous_entry.clone()
    };
    let mut previous_state: HashMap<String, Types> =
        bincode::deserialize(&previous_entry.1.clone()).unwrap();
    let encrypted_content_clone = encrypted_content.clone();
    // SET semantics: every provided key overwrites (or adds to) the state.
    encrypted_content.into_iter().for_each(|(k, v)| {
        let local_state = previous_state.entry(k).or_insert_with(|| v.clone());
        *local_state = v;
    });
    let state_log =
        to_string_pretty(&previous_state, pretty_config_inner()).map_err(Error::Serialization)?;
    let content_value = actor
        .send(UpdateSetEntityContent::new(
            &args.entity,
            &state_log,
            &content_log,
            args.id,
            datetime,
            &to_string_pretty(&previous_entry, pretty_config_inner())
                .map_err(Error::Serialization)?,
        ))
        .await??;
    // content_value appears to be (date, bytes_written, log_was_empty)
    // based on usage — TODO confirm against UpdateSetEntityContent.
    if content_value.2 {
        bytes_counter.store(0, Ordering::SeqCst);
        offset = 0;
    }
    let local_data_register = DataRegister {
        offset,
        bytes_length: content_value.1,
        file_name: content_value.0.format("data/%Y_%m_%d.log").to_string(),
    };
    let local_data = {
        let mut local_data = if let Ok(guard) = local_data.lock() {
            guard
        } else {
            return Err(Error::LockData);
        };
        if let Some(map) = local_data.get_mut(&args.entity) {
            if let Some(reg) = map.get_mut(&args.id) {
                // NOTE(review): the registry caches only the SET payload
                // (`encrypted_content_clone`), not the merged state — verify
                // this asymmetry with update_content_controller is intended.
                let encoded: Vec<u8> = bincode::serialize(&encrypted_content_clone).unwrap();
                *reg = (local_data_register, encoded);
            }
        }
        local_data.clone()
    };
    actor.send(LocalData::new(local_data)).await??;
    bytes_counter.fetch_add(content_value.1, Ordering::SeqCst);
    actor
        .send(OffsetCounter::new(bytes_counter.load(Ordering::SeqCst)))
        .await??;
    let message = format!("Entity {} with Uuid {} updated", &args.entity, &args.id);
    Ok(
        UpdateEntityResponse::new(args.entity, args.id, state_log, message, TxType::UpdateSet)
            .into(),
    )
}
/// `UPDATE ... CONTENT`: merges the given keys into an existing instance
/// using content semantics (see `update_content_state`, e.g. numeric keys
/// are combined rather than replaced). Rejected for entities with encrypted
/// keys.
pub async fn update_content_controller(
    args: UpdateArgs,
    local_data: Arc<Arc<Mutex<LocalContext>>>,
    bytes_counter: DataAtomicUsize,
    uniqueness: DataUniquenessContext,
    encryption: DataEncryptContext,
    actor: DataExecutor,
) -> Result<TxResponse, Error> {
    let datetime = tx_time(&args.content)?;
    let mut offset = bytes_counter.load(Ordering::SeqCst);
    // Content updates cannot touch encrypted entities: report the offending
    // keys. NOTE(review): the error is returned whenever the entity has ANY
    // encrypt config, even if `keys` ends up empty — confirm that is intended.
    if let Ok(guard) = encryption.lock() {
        if guard.contains_key(&args.entity) {
            let keys = args
                .content
                .par_iter()
                .filter(|(k, _)| guard.get(&args.entity).unwrap().contains(k.to_owned()))
                .map(|(k, _)| k.to_owned())
                .collect::<Vec<String>>();
            return Err(Error::UpdateContentEncryptKeys(keys));
        }
    } else {
        return Err(Error::LockData);
    };
    let mut content = args.content;
    // Stamp the transaction time into the stored content.
    content.insert("tx_time".to_owned(), Types::DateTime(datetime));
    let content_log =
        to_string_pretty(&content, pretty_config_inner()).map_err(Error::Serialization)?;
    // Existence checks; scoped so the guard is released before awaiting.
    {
        let local_data = if let Ok(guard) = local_data.lock() {
            guard
        } else {
            return Err(Error::LockData);
        };
        if !local_data.contains_key(&args.entity) {
            return Err(Error::EntityNotCreated(args.entity));
        } else if local_data.contains_key(&args.entity)
            && !local_data.get(&args.entity).unwrap().contains_key(&args.id)
        {
            return Err(Error::UuidNotCreatedForEntity(args.entity, args.id));
        }
    }
    let uniqueness = uniqueness.into_inner();
    actor
        .send(CheckForUniqueKeys {
            entity: args.entity.to_owned(),
            content: content.to_owned(),
            uniqueness,
        })
        .await??;
    // (log position, bincode-encoded previous state) for this instance.
    let previous_entry = {
        let local_data = if let Ok(guard) = local_data.lock() {
            guard
        } else {
            return Err(Error::LockData);
        };
        let previous_entry = local_data.get(&args.entity).unwrap().get(&args.id).unwrap();
        previous_entry.clone()
    };
    let mut previous_state: HashMap<String, Types> =
        bincode::deserialize(&previous_entry.1.clone()).unwrap();
    // CONTENT semantics: combine each provided key into the previous state.
    content
        .into_iter()
        .for_each(|(k, v)| update_content_state(&mut previous_state, k, v));
    let state_log =
        to_string_pretty(&previous_state, pretty_config_inner()).map_err(Error::Serialization)?;
    let content_value = actor
        .send(UpdateContentEntityContent::new(
            &args.entity,
            &state_log,
            &content_log,
            args.id,
            &to_string_pretty(&previous_entry, pretty_config_inner())
                .map_err(Error::Serialization)?,
        ))
        .await??;
    // content_value appears to be (date, bytes_written, log_was_empty)
    // based on usage — TODO confirm against UpdateContentEntityContent.
    if content_value.2 {
        bytes_counter.store(0, Ordering::SeqCst);
        offset = 0;
    }
    let local_data_register = DataRegister {
        offset,
        bytes_length: content_value.1,
        file_name: content_value.0.format("data/%Y_%m_%d.log").to_string(),
    };
    let local_data = {
        let mut local_data = if let Ok(guard) = local_data.lock() {
            guard
        } else {
            return Err(Error::LockData);
        };
        if let Some(map) = local_data.get_mut(&args.entity) {
            if let Some(reg) = map.get_mut(&args.id) {
                // Cache the fully merged state for this instance.
                let encoded: Vec<u8> = bincode::serialize(&previous_state).unwrap();
                *reg = (local_data_register, encoded);
            }
        }
        local_data.clone()
    };
    actor.send(LocalData::new(local_data)).await??;
    bytes_counter.fetch_add(content_value.1, Ordering::SeqCst);
    actor
        .send(OffsetCounter::new(bytes_counter.load(Ordering::SeqCst)))
        .await??;
    let message = format!("Entity {} with Uuid {} updated", &args.entity, &args.id);
    Ok(UpdateEntityResponse::new(
        args.entity,
        args.id,
        state_log,
        message,
        TxType::UpdateContent,
    )
    .into())
}
/// `DELETE`: soft-deletes the latest revision of an instance by rolling its
/// state back to the previous registry entry (or to an empty state when no
/// previous registry exists — i.e. the insert itself). The deletion is
/// appended to the log; history is preserved.
pub async fn delete_controller(
    entity: String,
    id: String,
    local_data: Arc<Arc<Mutex<LocalContext>>>,
    bytes_counter: DataAtomicUsize,
    actor: DataExecutor,
) -> Result<TxResponse, Error> {
    let uuid = Uuid::from_str(&id)?;
    let message = format!("Entity {} with Uuid {} deleted", &entity, id);
    let mut offset = bytes_counter.load(Ordering::SeqCst);
    // Existence checks + grab the current registry pointer for the instance.
    let previous_entry = {
        let local_data = if let Ok(guard) = local_data.lock() {
            guard
        } else {
            return Err(Error::LockData);
        };
        if !local_data.contains_key(&entity) {
            return Err(Error::EntityNotCreated(entity));
        } else if local_data.contains_key(&entity)
            && !local_data.get(&entity).unwrap().contains_key(&uuid)
        {
            return Err(Error::UuidNotCreatedForEntity(entity, uuid));
        }
        let previous_entry = local_data.get(&entity).unwrap().get(&uuid).unwrap();
        previous_entry.clone().0
    };
    // Read the current registry from disk, then locate the one before it.
    let previous_state_str = actor.send(previous_entry.to_owned()).await??;
    let two_registries_ago = actor.send(PreviousRegistry(previous_state_str)).await??;
    // Roll back: either the state two registries ago, or an empty state
    // pointing at the original insert registry.
    let state_to_be = if let Some((reg, _)) = two_registries_ago {
        let state_str = actor.send(reg.to_owned()).await??;
        (actor.send(State(state_str)).await??, reg.to_owned())
    } else {
        let local_data = if let Ok(guard) = local_data.lock() {
            guard
        } else {
            return Err(Error::LockData);
        };
        let insert_reg = local_data.get(&entity).unwrap().get(&uuid).unwrap();
        (HashMap::new(), insert_reg.0.to_owned())
    };
    let content_log =
        to_string_pretty(&state_to_be.0, pretty_config_inner()).map_err(Error::Serialization)?;
    let previous_register_log =
        to_string_pretty(&state_to_be.1, pretty_config_inner()).map_err(Error::Serialization)?;
    let content_value = actor
        .send(DeleteId::new(
            &entity,
            &content_log,
            uuid,
            &previous_register_log,
        ))
        .await??;
    // content_value appears to be (date, bytes_written, log_was_empty)
    // based on usage — TODO confirm against DeleteId.
    if content_value.2 {
        bytes_counter.store(0, Ordering::SeqCst);
        offset = 0;
    }
    let local_data_register = DataRegister {
        offset,
        bytes_length: content_value.1,
        file_name: content_value.0.format("data/%Y_%m_%d.log").to_string(),
    };
    let local_data = {
        let mut local_data = if let Ok(guard) = local_data.lock() {
            guard
        } else {
            return Err(Error::LockData);
        };
        if let Some(map) = local_data.get_mut(&entity) {
            if let Some(reg) = map.get_mut(&uuid) {
                // Cache the rolled-back state for this instance.
                let encoded: Vec<u8> = bincode::serialize(&state_to_be.0).unwrap();
                *reg = (local_data_register, encoded);
            }
        }
        local_data.clone()
    };
    actor.send(LocalData::new(local_data)).await??;
    bytes_counter.fetch_add(content_value.1, Ordering::SeqCst);
    actor
        .send(OffsetCounter::new(bytes_counter.load(Ordering::SeqCst)))
        .await??;
    Ok(DeleteOrEvictEntityResponse::new(entity, Some(uuid), message, TxType::Delete).into())
}
/// `MATCH UPDATE`: applies SET semantics only if the instance's current
/// state satisfies the given conditions (checked by the `MatchUpdate` actor
/// message, which errors out when a condition fails).
pub async fn match_update_set_controller(
    args: MatchUpdateArgs,
    local_data: Arc<Arc<Mutex<LocalContext>>>,
    bytes_counter: DataAtomicUsize,
    uniqueness: DataUniquenessContext,
    encryption: DataEncryptContext,
    hashing_cost: DataU32,
    actor: DataExecutor,
) -> Result<TxResponse, Error> {
    let datetime = tx_time(&args.content)?;
    // Existence checks + snapshot of the instance's registry entry.
    let previous_entry = {
        let local_data = if let Ok(guard) = local_data.lock() {
            guard
        } else {
            return Err(Error::LockData);
        };
        if !local_data.contains_key(&args.entity) {
            return Err(Error::EntityNotCreated(args.entity));
        } else if local_data.contains_key(&args.entity)
            && !local_data.get(&args.entity).unwrap().contains_key(&args.id)
        {
            return Err(Error::UuidNotCreatedForEntity(args.entity, args.id));
        }
        let previous_entry = local_data.get(&args.entity).unwrap().get(&args.id).unwrap();
        previous_entry.clone()
    };
    let previous_entry: HashMap<String, Types> =
        bincode::deserialize(&previous_entry.1.clone()).unwrap();
    let mut previous_state = previous_entry.clone();
    // Evaluate the MATCH conditions against the current state; a failed
    // condition aborts the whole update via the propagated error.
    actor
        .send(MatchUpdate {
            conditions: args.conditions,
            previous_state: previous_entry.clone(),
        })
        .await??;
    let mut offset = bytes_counter.load(Ordering::SeqCst);
    let encrypted_content = actor
        .send(EncryptContent::new(
            &args.entity,
            args.content.clone(),
            encryption.into_inner(),
            *hashing_cost.into_inner(),
            datetime,
        ))
        .await??;
    let content_log = to_string_pretty(&encrypted_content, pretty_config_inner())
        .map_err(Error::Serialization)?;
    let uniqueness = uniqueness.into_inner();
    // NOTE(review): unlike update_set_controller, the uniqueness check here
    // runs on the raw (pre-encryption) content — confirm that is intended.
    actor
        .send(CheckForUniqueKeys {
            entity: args.entity.to_owned(),
            content: args.content.to_owned(),
            uniqueness,
        })
        .await??;
    // SET semantics: every provided key overwrites (or adds to) the state.
    args.content.into_iter().for_each(|(k, v)| {
        let local_state = previous_state.entry(k).or_insert_with(|| v.clone());
        *local_state = v;
    });
    let state_log =
        to_string_pretty(&previous_state, pretty_config_inner()).map_err(Error::Serialization)?;
    let content_value = actor
        .send(UpdateSetEntityContent {
            name: args.entity.clone(),
            current_state: state_log.clone(),
            content_log,
            id: args.id,
            datetime,
            previous_registry: to_string_pretty(&previous_entry, pretty_config_inner())
                .map_err(Error::Serialization)?,
        })
        .await??;
    // content_value appears to be (date, bytes_written, log_was_empty)
    // based on usage — TODO confirm against UpdateSetEntityContent.
    if content_value.2 {
        bytes_counter.store(0, Ordering::SeqCst);
        offset = 0;
    }
    let local_data_register = DataRegister {
        offset,
        bytes_length: content_value.1,
        file_name: content_value.0.format("data/%Y_%m_%d.log").to_string(),
    };
    let local_data = {
        let mut local_data = if let Ok(guard) = local_data.lock() {
            guard
        } else {
            return Err(Error::LockData);
        };
        if let Some(map) = local_data.get_mut(&args.entity) {
            if let Some(reg) = map.get_mut(&args.id) {
                // NOTE(review): caches only the update payload, not the
                // merged state — same asymmetry as update_set_controller.
                let encoded: Vec<u8> = bincode::serialize(&encrypted_content).unwrap();
                *reg = (local_data_register, encoded);
            }
        }
        local_data.clone()
    };
    actor.send(LocalData::new(local_data)).await??;
    bytes_counter.fetch_add(content_value.1, Ordering::SeqCst);
    actor
        .send(OffsetCounter::new(bytes_counter.load(Ordering::SeqCst)))
        .await??;
    let message = format!("Entity {} with Uuid {} updated", &args.entity, &args.id);
    Ok(
        UpdateEntityResponse::new(args.entity, args.id, state_log, message, TxType::UpdateSet)
            .into(),
    )
}
| rust | MIT | bcef59acbf1bb1509a8b3d1b2f914e45921ba026 | 2026-01-04T20:17:21.692961Z | false |
naomijub/wooridb | https://github.com/naomijub/wooridb/blob/bcef59acbf1bb1509a8b3d1b2f914e45921ba026/woori-db/src/controllers/entity_history_test.rs | woori-db/src/controllers/entity_history_test.rs | use crate::{http::routes, schemas::tx::TxResponse};
use actix_http::body::ResponseBody;
use actix_web::{body::Body, test, App};
use chrono::Utc;
use std::{thread, time};
#[actix_rt::test]
// Integration test for the RON `/entity-history` endpoint: same lifecycle as
// the JSON variant (insert → update → delete → two more updates), but the tx
// responses and the history request/response use RON instead of JSON.
async fn test_history_ok() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("CREATE ENTITY test_history")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    // Revision 1: initial insert; the RON response carries the uuid.
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("INSERT {a: 123, b: 12.3,} INTO test_history")
        .uri("/wql/tx")
        .to_request();
    let mut resp_insert = test::call_service(&mut app, req).await;
    let body = resp_insert.take_body().as_str().to_string();
    let response: TxResponse = ron::de::from_str(&body).unwrap();
    let uuid = response.uuid.unwrap();
    // Revision 2: update overriding `a`, adding `c`.
    let payload = format!("UPDATE test_history SET {{a: 12, c: Nil,}} INTO {}", uuid);
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload(payload)
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    // Revision 3: soft delete (rolls the state back one registry).
    let payload = format!("Delete {} FROM test_history", uuid);
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload(payload)
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    // Revisions 4 and 5: further updates with different types for `c`.
    let payload = format!("UPDATE test_history SET {{a: 34, c: true,}} INTO {}", uuid);
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload(payload)
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let payload = format!("UPDATE test_history SET {{a: 321, c: 'h',}} INTO {}", uuid);
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload(payload)
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    // Request the full history; the payload is a RON struct literal.
    let payload = format!("(entity_key: \"test_history\", entity_id: \"{}\",)", uuid);
    let req = test::TestRequest::post()
        .set_payload(payload)
        .uri("/entity-history")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    assert!(resp.status().is_success());
    let body = resp.take_body().as_str().to_string();
    // Values from distinct revisions must all be present in the history.
    assert!(body.contains("\"a\": Integer(123),"));
    assert!(body.contains("\"b\": Float(12.3)"));
    assert!(body.contains("\"c\": Boolean(true)"));
    assert!(body.contains("\"c\": Char(\'h\')"));
    clear();
}
/// Time-windowed history: performs one mutation per second, then queries
/// `/entity-history` with `start_datetime`/`end_datetime` bounds covering only
/// the last two updates, and checks only their values appear.
#[actix_rt::test]
async fn test_start_end_time_ok() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    let time_now = Utc::now();
    // One-second gaps give each transaction a distinct timestamp to filter on.
    let one_sec = time::Duration::from_secs(1);
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("CREATE ENTITY test_history")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("INSERT {a: 123, b: 12.3,} INTO test_history")
        .uri("/wql/tx")
        .to_request();
    let mut resp_insert = test::call_service(&mut app, req).await;
    let body = resp_insert.take_body().as_str().to_string();
    let response: TxResponse = ron::de::from_str(&body).unwrap();
    let uuid = response.uuid.unwrap();
    thread::sleep(one_sec);
    let payload = format!("UPDATE test_history SET {{a: 12, c: Nil,}} INTO {}", uuid);
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload(payload)
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    thread::sleep(one_sec);
    let payload = format!("Delete {} FROM test_history", uuid);
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload(payload)
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    thread::sleep(one_sec);
    let payload = format!("UPDATE test_history SET {{a: 34, c: true,}} INTO {}", uuid);
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload(payload)
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    thread::sleep(one_sec);
    let payload = format!("UPDATE test_history SET {{a: 321, c: 'h',}} INTO {}", uuid);
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload(payload)
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    thread::sleep(one_sec);
    // Window of seconds [3, 5] after `time_now` should cover only the two
    // updates performed after the delete.
    let payload = format!("(entity_key: \"test_history\", entity_id: \"{}\", start_datetime: Some(\"{}\"), end_datetime: Some(\"{}\"),)",
        uuid,
        time_now + chrono::Duration::seconds(3),
        time_now + chrono::Duration::seconds(5));
    println!("{}", payload);
    let req = test::TestRequest::post()
        .set_payload(payload)
        .uri("/entity-history")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    assert!(resp.status().is_success());
    let body = resp.take_body().as_str().to_string();
    assert!(body.contains("\"c\": Boolean(true)"));
    assert!(body.contains("\"c\": Char(\'h\')"));
    clear();
}
/// Test-only helper: view an HTTP response body as a `&str`.
trait BodyTest {
    fn as_str(&self) -> &str;
}
impl BodyTest for ResponseBody<Body> {
    /// Extracts the UTF-8 payload of a byte-backed response body.
    ///
    /// Panics for any non-`Bytes` body variant (streams, empty bodies, …),
    /// which is acceptable here because these tests always receive buffered
    /// byte payloads.
    fn as_str(&self) -> &str {
        let bytes = match self {
            ResponseBody::Body(Body::Bytes(bytes))
            | ResponseBody::Other(Body::Bytes(bytes)) => bytes,
            _ => panic!(),
        };
        std::str::from_utf8(bytes).unwrap()
    }
}
/// Best-effort cleanup of the test log files under `data/`.
///
/// BUG FIX: the original spawned `rm -rf` with the literal argument
/// `data/*.log`; without a shell the glob is never expanded, so the command
/// tried to delete a file literally named `data/*.log` and removed nothing.
/// This version walks the directory with `std::fs` and removes every `*.log`
/// file, which is also portable (no dependency on an external `rm` binary).
/// Errors are ignored: cleanup is best-effort, matching the original intent.
pub fn clear() {
    if let Ok(entries) = std::fs::read_dir("data") {
        for entry in entries.flatten() {
            let path = entry.path();
            // Only remove log files; leave any other artifacts untouched.
            if path.extension().map_or(false, |ext| ext == "log") {
                let _ = std::fs::remove_file(path);
            }
        }
    }
}
| rust | MIT | bcef59acbf1bb1509a8b3d1b2f914e45921ba026 | 2026-01-04T20:17:21.692961Z | false |
naomijub/wooridb | https://github.com/naomijub/wooridb/blob/bcef59acbf1bb1509a8b3d1b2f914e45921ba026/woori-db/src/controllers/clauses_test.rs | woori-db/src/controllers/clauses_test.rs | use std::collections::{BTreeMap, HashMap};
use crate::http::routes;
use actix_http::body::ResponseBody;
use actix_web::{body::Body, test, App};
use uuid::Uuid;
use wql::Types;
use super::tx_test::clear;
/// WHERE with an exact-value clause: only the instance whose `a == 123`
/// should be selected.
#[ignore]
#[actix_rt::test]
async fn simple_where_clause_eq() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("CREATE ENTITY test_simple_where_eq")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    // Seed four instances; only one has a == 123.
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("INSERT {a: 3, b: \"hello world\", c: 45.6,} INTO test_simple_where_eq")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("INSERT {a: 43, b: \"Julia Naomi\", c: 57.6,} INTO test_simple_where_eq")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("INSERT {a: 948, b: \"Otavio Pace\", c: 5.6,} INTO test_simple_where_eq")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("INSERT {a: 123, b: \"hello johnny\", c: 4345.6,} INTO test_simple_where_eq")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload(
            "Select * From test_simple_where_eq WHERE {
            ?* test_simple_where_eq:a 123,
        }",
        )
        .uri("/wql/query")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    let body = resp.take_body().as_str().to_string();
    let result: BTreeMap<Uuid, HashMap<String, Types>> = ron::de::from_str(&body).unwrap();
    assert!(result.iter().count() == 1);
    if let Some((_, map)) = result.iter().last() {
        assert_eq!(map["a"], Types::Integer(123));
    } else {
        assert!(false);
    }
    clear();
}
/// WHERE with a `between` clause: selects the two instances whose `a` falls
/// in [0, 100].
#[ignore]
#[actix_rt::test]
async fn clause_between() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("CREATE ENTITY test_where_between")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    // Seed four instances; a == 3 and a == 43 are within [0, 100].
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("INSERT {a: 3, b: \"hello world\", c: 45.6,} INTO test_where_between")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("INSERT {a: 43, b: \"Julia Naomi\", c: 57.6,} INTO test_where_between")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("INSERT {a: 948, b: \"Otavio Pace\", c: 5.6,} INTO test_where_between")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("INSERT {a: 123, b: \"hello johnny\", c: 4345.6,} INTO test_where_between")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload(
            "Select * From test_where_between WHERE {
            ?* test_where_between:a ?a,
            (between ?a 0 100),
        }",
        )
        .uri("/wql/query")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    let body = resp.take_body().as_str().to_string();
    let result: BTreeMap<Uuid, HashMap<String, Types>> = ron::de::from_str(&body).unwrap();
    assert!(result.iter().count() == 2);
    if let Some((_, map)) = result.iter().last() {
        assert!(map["a"] <= Types::Integer(100) || map["a"] >= Types::Integer(0));
    } else {
        assert!(false);
    }
    clear();
}
/// WHERE with an `in` clause: selects the three instances whose `c` is one of
/// the listed float values.
#[ignore]
#[actix_rt::test]
async fn clause_in() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("CREATE ENTITY test_where_in")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("INSERT {a: 3, b: \"hello world\", c: 45.6,} INTO test_where_in")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("INSERT {a: 43, b: \"Julia Naomi\", c: 57.6,} INTO test_where_in")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("INSERT {a: 948, b: \"Otavio Pace\", c: 5.6,} INTO test_where_in")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("INSERT {a: 123, b: \"hello johnny\", c: 4345.6,} INTO test_where_in")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload(
            "Select * From test_where_in WHERE {
            ?* test_where_in:c ?c,
            (in ?c 57.6 4345.6 5.6),
        }",
        )
        .uri("/wql/query")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    let body = resp.take_body().as_str().to_string();
    let result: BTreeMap<Uuid, HashMap<String, Types>> = ron::de::from_str(&body).unwrap();
    assert!(result.iter().count() == 3);
    if let Some((_, map)) = result.iter().last() {
        assert!(
            map["c"] == Types::Float(57.6)
                || map["c"] == Types::Float(4345.6)
                || map["c"] == Types::Float(5.6)
        );
    } else {
        assert!(false);
    }
    clear();
}
/// WHERE with two comparison clauses combined (implicit AND): selects the
/// instances whose `c` is `>= 10` and `< 60.0`.
/// NOTE(review): this test reuses the entity name `test_where_in` from
/// `clause_in` above — presumably intentional, but it couples the two tests
/// via shared on-disk state; confirm before changing either.
#[ignore]
#[actix_rt::test]
async fn clause_ge_le() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("CREATE ENTITY test_where_in")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("INSERT {a: 3, b: \"hello world\", c: 45.6,} INTO test_where_in")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("INSERT {a: 43, b: \"Julia Naomi\", c: 57.6,} INTO test_where_in")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("INSERT {a: 948, b: \"Otavio Pace\", c: 5.6,} INTO test_where_in")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("INSERT {a: 123, b: \"hello johnny\", c: 4345.6,} INTO test_where_in")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload(
            "Select * From test_where_in WHERE {
            ?* test_where_in:c ?c,
            (>= ?c 10),
            (< ?c 60.0),
        }",
        )
        .uri("/wql/query")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    let body = resp.take_body().as_str().to_string();
    println!("{}", body);
    let result: BTreeMap<Uuid, HashMap<String, Types>> = ron::de::from_str(&body).unwrap();
    assert!(result.iter().count() == 2);
    if let Some((_, map)) = result.iter().last() {
        assert!(map["c"] == Types::Float(57.6) || map["c"] == Types::Float(45.6));
    } else {
        assert!(false);
    }
    clear();
}
/// WHERE with an `or` clause combining comparisons and a `like` pattern:
/// an instance matches if any of the three sub-clauses holds.
#[ignore]
#[actix_rt::test]
async fn clause_or() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("CREATE ENTITY test_or")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("INSERT {a: 3, b: \"hello world\", c: 45.6,} INTO test_or")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("INSERT {a: 123, b: \"Julia Naomi\", c: 57.6,} INTO test_or")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("INSERT {a: 123, b: \"Otavio Pace\", c: 5.6,} INTO test_or")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("INSERT {a: 123, b: \"hello johnny\", c: 4345.6,} INTO test_or")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    // Matches: c >= 4300.0 (4345.6), c < 6.9 (5.6), or b ends with "Naomi".
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload(
            "Select * From test_or WHERE {
            ?* test_or:b ?b,
            ?* test_or:c ?c,
            (or
                (>= ?c 4300.0)
                (< ?c 6.9)
                (like ?b \"%Naomi\")
            ),
        }",
        )
        .uri("/wql/query")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    let body = resp.take_body().as_str().to_string();
    let result: BTreeMap<Uuid, HashMap<String, Types>> = ron::de::from_str(&body).unwrap();
    assert!(result.iter().count() == 3);
    if let Some((_, map)) = result.iter().last() {
        assert!(
            map["c"] == Types::Float(5.6)
                || map["c"] == Types::Float(4345.6)
                || map["c"] == Types::Float(57.6)
        );
    } else {
        assert!(false);
    }
    // Exactly one matched instance must be the "Julia Naomi" row.
    assert!(
        result
            .iter()
            .filter(|(_, c)| if let Some(Types::String(s)) = c.get("b") {
                s.starts_with("Julia")
            } else {
                false
            })
            .count()
            == 1
    );
    clear();
}
/// WHERE with a `like` clause using a trailing wildcard: selects the two
/// instances whose `b` starts with "hello".
#[ignore]
#[actix_rt::test]
async fn clause_like() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("CREATE ENTITY test_like")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("INSERT {a: 3, b: \"hello world\", c: 45.6,} INTO test_like")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("INSERT {a: 123, b: \"Julia Naomi\", c: 57.6,} INTO test_like")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("INSERT {a: 123, b: \"Otavio Pace\", c: 5.6,} INTO test_like")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("INSERT {a: 123, b: \"hello johnny\", c: 4345.6,} INTO test_like")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload(
            "Select * From test_like WHERE {
            ?* test_like:b ?b,
            (like ?b \"hello%\"),
        }",
        )
        .uri("/wql/query")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    let body = resp.take_body().as_str().to_string();
    let result: BTreeMap<Uuid, HashMap<String, Types>> = ron::de::from_str(&body).unwrap();
    assert!(result.iter().count() == 2);
    if let Some((_, map)) = result.iter().last() {
        assert!(match map["b"].clone() {
            Types::String(s) => s.starts_with("hello"),
            _ => false,
        })
    } else {
        assert!(false);
    }
    clear();
}
/// Test-only helper: view an HTTP response body as a `&str`.
trait BodyTest {
    fn as_str(&self) -> &str;
}
impl BodyTest for ResponseBody<Body> {
    /// Extracts the UTF-8 payload of a byte-backed response body.
    ///
    /// Panics for any non-`Bytes` body variant (streams, empty bodies, …),
    /// which is acceptable here because these tests always receive buffered
    /// byte payloads.
    fn as_str(&self) -> &str {
        let bytes = match self {
            ResponseBody::Body(Body::Bytes(bytes))
            | ResponseBody::Other(Body::Bytes(bytes)) => bytes,
            _ => panic!(),
        };
        std::str::from_utf8(bytes).unwrap()
    }
}
| rust | MIT | bcef59acbf1bb1509a8b3d1b2f914e45921ba026 | 2026-01-04T20:17:21.692961Z | false |
naomijub/wooridb | https://github.com/naomijub/wooridb/blob/bcef59acbf1bb1509a8b3d1b2f914e45921ba026/woori-db/src/controllers/mod.rs | woori-db/src/controllers/mod.rs | #[cfg(test)]
pub mod algebra_test;
pub mod clauses;
#[cfg(test)]
pub mod clauses_test;
pub(crate) mod entity_history;
#[cfg(all(test, feature = "history"))]
pub mod entity_history_test;
#[cfg(all(test, feature = "history", feature = "json"))]
pub mod json_history_test;
pub(crate) mod query;
#[cfg(test)]
pub mod query_test;
pub(crate) mod relation;
#[cfg(test)]
pub mod relation_test;
pub(crate) mod tx;
#[cfg(test)]
pub mod tx_test;
| rust | MIT | bcef59acbf1bb1509a8b3d1b2f914e45921ba026 | 2026-01-04T20:17:21.692961Z | false |
naomijub/wooridb | https://github.com/naomijub/wooridb/blob/bcef59acbf1bb1509a8b3d1b2f914e45921ba026/woori-db/src/controllers/query.rs | woori-db/src/controllers/query.rs | use std::{
collections::{BTreeMap, HashMap, HashSet},
str::FromStr,
};
use actix_web::{HttpResponse, Responder};
use rayon::prelude::*;
use uuid::Uuid;
use wql::{ToSelect, Types, Wql};
use crate::{
actors::{
encrypts::VerifyEncryption,
state::State,
when::{ReadEntitiesAt, ReadEntityIdAt, ReadEntityRange},
},
core::query::{
dedup_option_states, dedup_states, filter_keys_and_hash, get_limit_offset_count,
get_result_after_manipulation, get_result_after_manipulation_for_options,
registries_to_states,
},
model::{
error::{error_to_http, Error},
DataEncryptContext, DataExecutor, DataLocalContext, DataRegister,
},
schemas::query::Response as QueryResponse,
};
use super::{
clauses::select_where_controller,
relation::{difference, intersect, join, union},
};
/// Entry point for `/wql/query`: parses the request body as WQL and dispatches
/// to the controller matching the query shape. Any non-select statement is
/// rejected with `Error::NonSelectQuery`; parse failures map to
/// `Error::QueryFormat`. The controller result is serialized via
/// `QueryResponse::to_string` and returned as the HTTP body.
pub async fn wql_handler(
    body: String,
    local_data: DataLocalContext,
    encryption: DataEncryptContext,
    actor: DataExecutor,
) -> impl Responder {
    let query = Wql::from_str(&body);
    let response = match query {
        // SELECT variants: all keys vs. selected keys, optionally scoped to
        // one id or a list of ids.
        Ok(Wql::Select(entity, ToSelect::All, Some(uuid), _)) => {
            select_all_with_id(entity, uuid, local_data).await
        }
        Ok(Wql::Select(entity, ToSelect::Keys(keys), Some(uuid), _)) => {
            select_keys_with_id(entity, uuid, keys, local_data).await
        }
        Ok(Wql::Select(entity, ToSelect::All, None, functions)) => {
            select_all(entity, local_data, functions).await
        }
        Ok(Wql::Select(entity, ToSelect::Keys(keys), None, functions)) => {
            select_args(entity, keys, local_data, functions).await
        }
        Ok(Wql::SelectIds(entity, ToSelect::All, uuids, functions)) => {
            select_all_with_ids(entity, uuids, local_data, functions).await
        }
        Ok(Wql::SelectIds(entity, ToSelect::Keys(keys), uuids, functions)) => {
            select_keys_with_ids(entity, keys, uuids, local_data, functions).await
        }
        // Time-travel variants (state at a point in time / over a range).
        Ok(Wql::SelectWhen(entity, ToSelect::All, None, date)) => {
            select_all_when_controller(entity, date, actor).await
        }
        Ok(Wql::SelectWhen(entity, ToSelect::Keys(keys), None, date)) => {
            select_keys_when_controller(entity, date, keys, actor).await
        }
        Ok(Wql::SelectWhen(entity, ToSelect::All, Some(uuid), date)) => {
            select_all_id_when_controller(entity, date, uuid, actor).await
        }
        Ok(Wql::SelectWhen(entity, ToSelect::Keys(keys), Some(uuid), date)) => {
            select_keys_id_when_controller(entity, date, keys, uuid, actor).await
        }
        Ok(Wql::SelectWhenRange(entity_name, uuid, start_date, end_date)) => {
            select_all_when_range_controller(entity_name, uuid, start_date, end_date, actor).await
        }
        // WHERE-clause selection.
        Ok(Wql::SelectWhere(entity_name, args_to_select, clauses, functions)) => {
            select_where_controller(entity_name, args_to_select, clauses, local_data, functions)
                .await
        }
        // Verification of encrypted values.
        Ok(Wql::CheckValue(entity, uuid, content)) => {
            check_value_controller(entity, uuid, content, local_data, encryption, actor).await
        }
        // Set-algebra over two sub-queries, and relational join.
        Ok(Wql::RelationQuery(queries, wql::Relation::Intersect, relation_type)) => {
            intersect(queries, relation_type, local_data, actor).await
        }
        Ok(Wql::RelationQuery(queries, wql::Relation::Difference, relation_type)) => {
            difference(queries, relation_type, local_data, actor).await
        }
        Ok(Wql::RelationQuery(queries, wql::Relation::Union, relation_type)) => {
            union(queries, relation_type, local_data, actor).await
        }
        Ok(Wql::Join(entity_a, entity_b, queries)) => {
            join(entity_a, entity_b, queries, local_data).await
        }
        Ok(_) => Err(Error::NonSelectQuery),
        Err(e) => Err(Error::QueryFormat(e)),
    };
    match response {
        Err(e) => error_to_http(&e),
        Ok(resp) => match resp.to_string() {
            Ok(body) => HttpResponse::Ok().body(body),
            Err(e) => error_to_http(&e),
        },
    }
}
pub async fn check_value_controller(
entity: String,
uuid: Uuid,
content: HashMap<String, String>,
local_data: DataLocalContext,
encryption: DataEncryptContext,
actor: DataExecutor,
) -> Result<QueryResponse, Error> {
if let Ok(guard) = encryption.lock() {
if guard.contains_key(&entity) {
let encrypts = guard.get(&entity).unwrap();
let non_encrypt_keys = content
.par_iter()
.filter(|(k, _)| !encrypts.contains(&(*k).to_string()))
.map(|(k, _)| k.to_owned())
.collect::<Vec<String>>();
if !non_encrypt_keys.is_empty() {
return Err(Error::CheckNonEncryptedKeys(non_encrypt_keys));
}
}
};
let local_data = {
let local_data = if let Ok(guard) = local_data.lock() {
guard
} else {
return Err(Error::LockData);
};
if !local_data.contains_key(&entity) {
return Err(Error::EntityNotCreated(entity));
}
local_data.clone()
};
let previous_entry = local_data.get(&entity).unwrap().get(&uuid).unwrap();
let previous_state_str = actor.send(previous_entry.0.to_owned()).await??;
let state = actor.send(State(previous_state_str)).await??;
let keys = content
.keys()
.map(ToOwned::to_owned)
.collect::<HashSet<String>>();
let filtered_state: HashMap<String, Types> = state
.into_par_iter()
.filter(|(k, _)| keys.contains(k))
.collect();
let results = actor
.send(VerifyEncryption::new(filtered_state, content))
.await??;
Ok(results)
}
/// Time-range query: replays the log for one instance between two RFC 3339
/// datetimes and returns every state in that window.
///
/// Both bounds must parse as `DateTime<Utc>`; otherwise
/// `Error::DateTimeParse` is returned.
async fn select_all_when_range_controller(
    entity: String,
    uuid: Uuid,
    start_date: String,
    end_date: String,
    actor: DataExecutor,
) -> Result<QueryResponse, Error> {
    use chrono::{DateTime, Utc};
    let start_date: DateTime<Utc> = start_date
        .parse::<DateTime<Utc>>()
        .map_err(Error::DateTimeParse)?;
    let end_date: DateTime<Utc> = end_date
        .parse::<DateTime<Utc>>()
        .map_err(Error::DateTimeParse)?;
    // The log file name is derived from the START date only — presumably the
    // range is expected to stay within one day's log; confirm with the actor.
    // Test builds read `.txt` logs instead of `.log`.
    #[cfg(test)]
    let date_log = start_date.format("data/%Y_%m_%d.txt").to_string();
    #[cfg(not(test))]
    let date_log = start_date.format("data/%Y_%m_%d.log").to_string();
    let result = actor
        .send(ReadEntityRange::new(
            &entity, uuid, start_date, end_date, date_log,
        ))
        .await??;
    Ok(result.into())
}
/// Point-in-time query: returns the state of every instance of `entity` as it
/// was at `date` (RFC 3339). Parse failures map to `Error::DateTimeParse`.
async fn select_all_when_controller(
    entity: String,
    date: String,
    actor: DataExecutor,
) -> Result<QueryResponse, Error> {
    use chrono::{DateTime, Utc};
    let date = date
        .parse::<DateTime<Utc>>()
        .map_err(Error::DateTimeParse)?;
    // Test builds read `.txt` logs instead of `.log` (isolates test data).
    #[cfg(test)]
    let date_log = date.format("data/%Y_%m_%d.txt").to_string();
    #[cfg(not(test))]
    let date_log = date.format("data/%Y_%m_%d.log").to_string();
    // `None` key filter: all attributes are returned.
    let result = actor
        .send(ReadEntitiesAt::new(&entity, date_log, None))
        .await??;
    Ok(result.into())
}
/// Point-in-time query for a single instance: returns all of its attributes
/// as they were at `date` (RFC 3339).
pub async fn select_all_id_when_controller(
    entity: String,
    date: String,
    uuid: Uuid,
    actor: DataExecutor,
) -> Result<QueryResponse, Error> {
    use chrono::{DateTime, Utc};
    let date = date
        .parse::<DateTime<Utc>>()
        .map_err(Error::DateTimeParse)?;
    // Test builds read `.txt` logs instead of `.log` (isolates test data).
    #[cfg(test)]
    let date_log = date.format("data/%Y_%m_%d.txt").to_string();
    #[cfg(not(test))]
    let date_log = date.format("data/%Y_%m_%d.log").to_string();
    let result = actor
        .send(ReadEntityIdAt::new(&entity, uuid, date_log))
        .await??;
    // `None` key filter: keep every attribute.
    let result = filter_keys_and_hash(result, None);
    Ok(result.into())
}
/// Point-in-time query for a single instance, restricted to `keys`: returns
/// only the selected attributes as they were at `date` (RFC 3339).
pub async fn select_keys_id_when_controller(
    entity: String,
    date: String,
    keys: Vec<String>,
    uuid: Uuid,
    actor: DataExecutor,
) -> Result<QueryResponse, Error> {
    use chrono::{DateTime, Utc};
    let keys = keys.into_par_iter().collect::<HashSet<String>>();
    let date = date
        .parse::<DateTime<Utc>>()
        .map_err(Error::DateTimeParse)?;
    // Test builds read `.txt` logs instead of `.log` (isolates test data).
    #[cfg(test)]
    let date_log = date.format("data/%Y_%m_%d.txt").to_string();
    #[cfg(not(test))]
    let date_log = date.format("data/%Y_%m_%d.log").to_string();
    let result = actor
        .send(ReadEntityIdAt::new(&entity, uuid, date_log))
        .await??;
    // Drop attributes the caller did not ask for.
    let result = filter_keys_and_hash(result, Some(keys));
    Ok(result.into())
}
/// Point-in-time query over all instances of `entity`, restricted to `keys`.
async fn select_keys_when_controller(
    entity: String,
    date: String,
    keys: Vec<String>,
    actor: DataExecutor,
) -> Result<QueryResponse, Error> {
    use chrono::{DateTime, Utc};
    let keys = keys.into_par_iter().collect::<HashSet<String>>();
    let date = date
        .parse::<DateTime<Utc>>()
        .map_err(Error::DateTimeParse)?;
    // Test builds read `.txt` logs instead of `.log` (isolates test data).
    #[cfg(test)]
    let date_log = date.format("data/%Y_%m_%d.txt").to_string();
    #[cfg(not(test))]
    let date_log = date.format("data/%Y_%m_%d.log").to_string();
    let result = actor
        .send(ReadEntitiesAt::new(&entity, date_log, Some(keys)))
        .await??;
    Ok(result.into())
}
pub async fn select_all_with_id(
entity: String,
uuid: Uuid,
local_data: DataLocalContext,
) -> Result<QueryResponse, Error> {
let registry = {
let local_data = if let Ok(guard) = local_data.lock() {
guard
} else {
return Err(Error::LockData);
};
let registry = if let Some(id_to_registry) = local_data.get(&entity) {
if let Some(reg) = id_to_registry.get(&uuid) {
reg
} else {
return Err(Error::UuidNotCreatedForEntity(entity, uuid));
}
} else {
return Err(Error::EntityNotCreated(entity));
}
.to_owned();
registry
};
let state: HashMap<String, Types> = bincode::deserialize(®istry.1).unwrap();
let filtered_state = filter_keys_and_hash(state, None);
Ok(filtered_state.into())
}
/// Returns the full current state of several instances of `entity`.
///
/// Ids that do not exist for the entity are silently dropped from the result
/// (the `filter(|(_id, reg)| reg.is_some())` below removes them) rather than
/// failing the whole query. Pagination (`offset`/`limit`), dedup and any
/// other result manipulation come from the parsed `functions` map.
pub async fn select_all_with_ids(
    entity: String,
    uuids: Vec<Uuid>,
    local_data: DataLocalContext,
    functions: HashMap<String, wql::Algebra>,
) -> Result<QueryResponse, Error> {
    let (limit, offset, count) = get_limit_offset_count(&functions);
    // Snapshot the per-id registry entries under the lock, then release it.
    let registries = {
        let local_data = if let Ok(guard) = local_data.lock() {
            guard
        } else {
            return Err(Error::LockData);
        };
        let registries = if let Some(id_to_registry) = local_data.get(&entity) {
            uuids
                .into_par_iter()
                // NOTE(review): the `Err` built here is immediately discarded
                // by `.ok()` and the `filter` — missing ids never surface as
                // errors; confirm this lenient behavior is intended.
                .filter_map(|id| {
                    Some((
                        id,
                        id_to_registry
                            .get(&id)
                            .ok_or_else(|| Error::UuidNotCreatedForEntity(entity.clone(), id))
                            .ok(),
                    ))
                    .filter(|(_id, reg)| reg.is_some())
                })
                .map(|(uuid, reg)| (uuid, reg.map(ToOwned::to_owned)))
                .collect::<Vec<(Uuid, Option<(DataRegister, Vec<u8>)>)>>()
        } else {
            return Err(Error::EntityNotCreated(entity));
        };
        registries
    };
    // Deserialize only the paginated window of entries.
    let mut states: BTreeMap<Uuid, Option<HashMap<String, Types>>> = BTreeMap::new();
    for (uuid, registry) in registries.into_iter().skip(offset).take(limit) {
        if let Some((_, state)) = registry {
            let state: HashMap<String, Types> = bincode::deserialize(&state).unwrap();
            let filtered = filter_keys_and_hash(state, None);
            states.insert(uuid, Some(filtered));
        } else {
            states.insert(uuid, None);
        }
    }
    let states = dedup_option_states(states, &functions);
    Ok(get_result_after_manipulation_for_options(
        states, &functions, count,
    ))
}
/// Returns the selected attributes (`keys`) of one entity instance.
///
/// Errors with `Error::LockData` on a poisoned lock, `Error::EntityNotCreated`
/// for an unknown entity and `Error::UuidNotCreatedForEntity` for an unknown
/// id.
pub async fn select_keys_with_id(
    entity: String,
    uuid: Uuid,
    keys: Vec<String>,
    local_data: DataLocalContext,
) -> Result<QueryResponse, Error> {
    let keys = keys.into_par_iter().collect::<HashSet<String>>();
    // Clone the registry entry out of the shared map so the lock is released
    // before deserialization.
    let registry = {
        let local_data = if let Ok(guard) = local_data.lock() {
            guard
        } else {
            return Err(Error::LockData);
        };
        let registry = if let Some(id_to_registry) = local_data.get(&entity) {
            if let Some(reg) = id_to_registry.get(&uuid) {
                reg
            } else {
                return Err(Error::UuidNotCreatedForEntity(entity, uuid));
            }
        } else {
            return Err(Error::EntityNotCreated(entity));
        }
        .to_owned();
        registry
    };
    let state: HashMap<String, Types> = bincode::deserialize(&registry.1).unwrap();
    // Keep only the requested attributes.
    let filtered = filter_keys_and_hash(state, Some(keys));
    Ok(filtered.into())
}
/// Returns the selected attributes (`keys`) of several instances of `entity`.
///
/// Like `select_all_with_ids`, unknown ids are silently dropped from the
/// result instead of failing the query; pagination and result manipulation
/// come from `functions`.
pub async fn select_keys_with_ids(
    entity: String,
    keys: Vec<String>,
    uuids: Vec<Uuid>,
    local_data: DataLocalContext,
    functions: HashMap<String, wql::Algebra>,
) -> Result<QueryResponse, Error> {
    let keys = keys.into_par_iter().collect::<HashSet<String>>();
    let (limit, offset, count) = get_limit_offset_count(&functions);
    // Snapshot the per-id registry entries under the lock, then release it.
    let registries = {
        let local_data = if let Ok(guard) = local_data.lock() {
            guard
        } else {
            return Err(Error::LockData);
        };
        let registries = if let Some(id_to_registry) = local_data.get(&entity) {
            uuids
                .into_par_iter()
                // NOTE(review): the `Err` built here is immediately discarded
                // by `.ok()` and the `filter` — missing ids never surface as
                // errors; confirm this lenient behavior is intended.
                .filter_map(|id| {
                    Some((
                        id,
                        id_to_registry
                            .get(&id)
                            .ok_or_else(|| Error::UuidNotCreatedForEntity(entity.clone(), id))
                            .ok(),
                    ))
                    .filter(|(_id, reg)| reg.is_some())
                })
                .map(|(uuid, reg)| (uuid, reg.map(ToOwned::to_owned)))
                .collect::<Vec<(Uuid, Option<(DataRegister, Vec<u8>)>)>>()
        } else {
            return Err(Error::EntityNotCreated(entity));
        };
        registries
    };
    // Deserialize only the paginated window, keeping only requested keys.
    let mut states: BTreeMap<Uuid, Option<HashMap<String, Types>>> = BTreeMap::new();
    for (uuid, registry) in registries.into_iter().skip(offset).take(limit) {
        if let Some((_, state)) = registry {
            let state: HashMap<String, Types> = bincode::deserialize(&state).unwrap();
            let filtered = filter_keys_and_hash(state, Some(keys.clone()));
            states.insert(uuid, Some(filtered));
        } else {
            states.insert(uuid, None);
        }
    }
    let states = dedup_option_states(states, &functions);
    Ok(get_result_after_manipulation_for_options(
        states, &functions, count,
    ))
}
pub async fn select_all(
entity: String,
local_data: DataLocalContext,
functions: HashMap<String, wql::Algebra>,
) -> Result<QueryResponse, Error> {
let (limit, offset, count) = get_limit_offset_count(&functions);
let registries = {
let local_data = if let Ok(guard) = local_data.lock() {
guard
} else {
return Err(Error::LockData);
};
let registries = if let Some(id_to_registries) = local_data.get(&entity) {
id_to_registries
} else {
return Err(Error::EntityNotCreated(entity));
}
.to_owned();
registries
};
let states = registries_to_states(registries, None, offset, limit);
let states = dedup_states(states, &functions);
Ok(get_result_after_manipulation(states, &functions, count))
}
pub async fn select_args(
entity: String,
keys: Vec<String>,
local_data: DataLocalContext,
functions: HashMap<String, wql::Algebra>,
) -> Result<QueryResponse, Error> {
let (limit, offset, count) = get_limit_offset_count(&functions);
let keys = keys.into_par_iter().collect::<HashSet<String>>();
let registries = {
let local_data = if let Ok(guard) = local_data.lock() {
guard
} else {
return Err(Error::LockData);
};
let registries = if let Some(id_to_registries) = local_data.get(&entity) {
id_to_registries
} else {
return Err(Error::EntityNotCreated(entity));
}
.to_owned();
registries
};
let states = registries_to_states(registries, Some(keys), offset, limit);
let states = dedup_states(states, &functions);
Ok(get_result_after_manipulation(states, &functions, count))
}
| rust | MIT | bcef59acbf1bb1509a8b3d1b2f914e45921ba026 | 2026-01-04T20:17:21.692961Z | false |
naomijub/wooridb | https://github.com/naomijub/wooridb/blob/bcef59acbf1bb1509a8b3d1b2f914e45921ba026/woori-db/src/controllers/entity_history.rs | woori-db/src/controllers/entity_history.rs | use crate::model::error::{error_to_http, Error};
use crate::{
actors::history::History,
core::pretty_config_output,
model::{DataExecutor, DataLocalContext},
schemas::history::EntityHistoryInfo,
};
use actix_web::{HttpResponse, Responder};
use chrono::{DateTime, Utc};
use rayon::prelude::*;
use std::collections::{BTreeMap, HashMap};
use wql::Types;
pub async fn history_handler(
body: String,
local_data: DataLocalContext,
actor: DataExecutor,
) -> impl Responder {
let response = history_controller(body, local_data, actor).await;
match response {
Err(e) => error_to_http(&e),
Ok(resp) => {
#[cfg(feature = "json")]
let response = serde_json::to_string(&resp).map_err(|e| Error::SerdeJson(e));
#[cfg(not(feature = "json"))]
let response = ron::ser::to_string_pretty(&resp, pretty_config_output())
.map_err(|e| Error::Ron(e));
match response {
Err(e) => error_to_http(&e),
Ok(resp) => HttpResponse::Ok().body(resp),
}
}
}
}
/// Replays the write history of one entity instance and returns a map of
/// transaction time -> entity state, restricted to the optional
/// `start_datetime`/`end_datetime` window from the request body.
///
/// # Errors
/// Fails when the body cannot be deserialized, the local-data mutex is
/// poisoned, or the entity key / entity id was never created.
pub async fn history_controller(
    body: String,
    local_data: DataLocalContext,
    actor: DataExecutor,
) -> Result<BTreeMap<chrono::DateTime<Utc>, HashMap<std::string::String, Types>>, Error> {
    #[cfg(feature = "json")]
    let info: EntityHistoryInfo = serde_json::from_str(&body)?;
    #[cfg(not(feature = "json"))]
    let info: EntityHistoryInfo = ron::de::from_str(&body).map_err(|e| Error::Ron(e.into()))?;
    // Clone the registry for (entity_key, entity_id) while the lock is held;
    // the rest of the function runs on the owned copy.
    let registry = {
        let local_data = if let Ok(guard) = local_data.lock() {
            guard
        } else {
            return Err(Error::LockData);
        };
        let registry = if let Some(id_to_registry) = local_data.get(&info.entity_key) {
            if let Some(reg) = id_to_registry.get(&info.entity_id) {
                reg
            } else {
                return Err(Error::UuidNotCreatedForEntity(
                    info.entity_key,
                    info.entity_id,
                ));
            }
        } else {
            return Err(Error::EntityNotCreated(info.entity_key));
        }
        .to_owned();
        registry
    };
    let mut content = actor.send(registry.0).await??;
    let mut btree: BTreeMap<DateTime<Utc>, HashMap<String, Types>> = BTreeMap::new();
    // Follow the registry chain (each state links to its previous registry),
    // inserting every state whose tx date falls inside the requested window.
    // NOTE(review): the `date > end` breaks assume a particular traversal
    // order of the chain — confirm against the History actor's semantics.
    loop {
        let (entity_map, date, previous_registry) = actor.send(History(content.clone())).await??;
        if let (Some(start), Some(end)) = (info.start_datetime, info.end_datetime) {
            if date >= start && date <= end {
                btree.insert(date, entity_map);
            } else if date > end {
                break;
            }
        } else if let (Some(start), None) = (info.start_datetime, info.end_datetime) {
            if date >= start {
                btree.insert(date, entity_map);
            }
        } else if let (None, Some(end)) = (info.start_datetime, info.end_datetime) {
            if date <= end {
                btree.insert(date, entity_map);
            } else if date > end {
                break;
            }
        } else {
            // No window: keep the full history.
            btree.insert(date, entity_map);
        }
        if previous_registry.is_none() {
            break;
        }
        content = actor.send(previous_registry.unwrap()).await??;
    }
    // Strip hash-typed values (presumably encrypted attributes) from every
    // state before returning.
    let filtered_tree = btree
        .into_par_iter()
        .map(|(date, content)| {
            (
                date,
                content
                    .into_iter()
                    .filter(|(_, v)| !v.is_hash())
                    .collect::<HashMap<String, Types>>(),
            )
        })
        .collect::<BTreeMap<DateTime<Utc>, HashMap<String, Types>>>();
    Ok(filtered_tree)
}
| rust | MIT | bcef59acbf1bb1509a8b3d1b2f914e45921ba026 | 2026-01-04T20:17:21.692961Z | false |
naomijub/wooridb | https://github.com/naomijub/wooridb/blob/bcef59acbf1bb1509a8b3d1b2f914e45921ba026/woori-db/src/controllers/algebra_test.rs | woori-db/src/controllers/algebra_test.rs | use std::collections::BTreeMap;
use crate::http::routes;
use actix_http::{body::ResponseBody, Request};
use actix_web::{body::Body, test, App};
use std::collections::HashMap;
use uuid::Uuid;
use wql::Types;
/// `LIMIT 3 OFFSET 2` over six inserted registries must return exactly 3.
#[ignore]
#[actix_rt::test]
async fn test_select_all_limit_offset_ok() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    for req in inserts("limit_offset") {
        let _ = test::call_service(&mut app, req).await;
    }
    // The payload had no interpolation, so no `format!` is needed.
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("Select * FROM limit_offset LIMIT 3 OFFSET 2")
        .uri("/wql/query")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    assert!(resp.status().is_success());
    let body = resp.take_body().as_str().to_string();
    // Deserialize so the assertion is structural, and fail loudly on a
    // malformed body instead of `assert!(false)` scaffolding.
    let state: BTreeMap<Uuid, HashMap<String, Types>> =
        ron::de::from_str(&body).unwrap_or_else(|e| panic!("invalid RON body: {:?}", e));
    assert_eq!(state.len(), 3);
}
/// `LIMIT 3 OFFSET 2 COUNT` must report a count of 3.
#[ignore]
#[actix_rt::test]
async fn test_select_all_limit_count_ok() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    for req in inserts("limit_offset_count") {
        let _ = test::call_service(&mut app, req).await;
    }
    // Plain literal; the previous `format!` had no arguments.
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("Select * FROM limit_offset_count LIMIT 3 OFFSET 2 COUNT")
        .uri("/wql/query")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    assert!(resp.status().is_success());
    let body = resp.take_body().as_str().to_string();
    assert!(body.contains("count: 3"));
}
/// `COUNT` with no limit must report all six seeded registries.
#[ignore]
#[actix_rt::test]
async fn test_select_all_count_ok() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    for req in inserts("select_count") {
        let _ = test::call_service(&mut app, req).await;
    }
    // Plain literal; the previous `format!` had no arguments.
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("Select * FROM select_count COUNT")
        .uri("/wql/query")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    assert!(resp.status().is_success());
    let body = resp.take_body().as_str().to_string();
    assert!(body.contains("count: 6"));
}
/// `DEDUP a` must collapse the six registries to five distinct `a` values.
#[ignore]
#[actix_rt::test]
async fn test_select_all_dedup_ok() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    for req in inserts("dedup_test") {
        let _ = test::call_service(&mut app, req).await;
    }
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("Select * FROM dedup_test DEDUP a")
        .uri("/wql/query")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    assert!(resp.status().is_success());
    let body = resp.take_body().as_str().to_string();
    let state: BTreeMap<Uuid, HashMap<String, Types>> =
        ron::de::from_str(&body).unwrap_or_else(|e| panic!("invalid RON body: {:?}", e));
    assert_eq!(state.len(), 5);
}
/// `DEDUP NIL(c)` must drop registries whose `c` attribute is Nil/absent.
#[ignore]
#[actix_rt::test]
async fn test_select_all_dedup_nil_ok() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    for req in inserts("dedup_nil_test") {
        let _ = test::call_service(&mut app, req).await;
    }
    // One extra registry with an explicit Nil `c`.
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("INSERT {a: 235, b: 12.3, c: Nil,} INTO dedup_nil_test")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("Select * FROM dedup_nil_test DEDUP NIL(c)")
        .uri("/wql/query")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    assert!(resp.status().is_success());
    let body = resp.take_body().as_str().to_string();
    let state: BTreeMap<Uuid, HashMap<String, Types>> =
        ron::de::from_str(&body).unwrap_or_else(|e| panic!("invalid RON body: {:?}", e));
    assert_eq!(state.len(), 3);
    // The registry inserted with a: 123 (no `c`) must not appear.
    assert!(!body.contains("\"a\": Integer(123)"));
}
/// `DEDUP a COUNT` must report five registries after deduplication.
#[ignore]
#[actix_rt::test]
async fn test_select_all_dedup_count_ok() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    for req in inserts("dedup_test_count") {
        let _ = test::call_service(&mut app, req).await;
    }
    // Plain literal; the previous `format!` had no arguments.
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("Select * FROM dedup_test_count DEDUP a COUNT")
        .uri("/wql/query")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    assert!(resp.status().is_success());
    let body = resp.take_body().as_str().to_string();
    assert!(body.contains("count: 5"));
}
/// `GROUP BY c` must bucket results under each distinct `c` value
/// (including `Nil` for registries that lack `c`).
#[ignore]
#[actix_rt::test]
async fn test_select_all_group_by_ok() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    for req in inserts("group_by_test") {
        let _ = test::call_service(&mut app, req).await;
    }
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("Select * FROM group_by_test GROUP BY c")
        .uri("/wql/query")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    assert!(resp.status().is_success());
    let body = resp.take_body().as_str().to_string();
    // `contains_key` avoids materializing the key list the old code built.
    let groups: HashMap<String, BTreeMap<Uuid, HashMap<String, Types>>> =
        ron::de::from_str(&body).unwrap_or_else(|e| panic!("invalid RON body: {:?}", e));
    assert!(groups.contains_key("Char('r')"));
    assert!(groups.contains_key("Char('d')"));
    assert!(groups.contains_key("Char('c')"));
    assert!(groups.contains_key("Nil"));
}
/// `GROUP BY c COUNT` must report four groups.
#[ignore]
#[actix_rt::test]
async fn test_select_all_group_by_count_ok() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    for req in inserts("group_by_count") {
        let _ = test::call_service(&mut app, req).await;
    }
    // Plain literal; the previous `format!` had no arguments.
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("Select * FROM group_by_count GROUP BY c COUNT")
        .uri("/wql/query")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    assert!(resp.status().is_success());
    let body = resp.take_body().as_str().to_string();
    assert!(body.contains("count: 4"));
}
/// A WHERE clause restricting `c` to {'c', 'd'} combined with `GROUP BY c`
/// must keep only those two groups.
#[ignore]
#[actix_rt::test]
async fn test_select_where_group_by_ok() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    for req in inserts("where_group_by_test") {
        let _ = test::call_service(&mut app, req).await;
    }
    // Plain literal (the previous `format!` had no arguments); `{{`/`}}`
    // escapes become plain braces.
    let payload = "Select * FROM where_group_by_test WHERE {
    ?* where_group_by_test:c ?c,
    (in ?c 'c' 'd'),
    } GROUP BY c";
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload(payload)
        .uri("/wql/query")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    assert!(resp.status().is_success());
    let body = resp.take_body().as_str().to_string();
    let groups: HashMap<String, BTreeMap<Uuid, HashMap<String, Types>>> =
        ron::de::from_str(&body).unwrap_or_else(|e| panic!("invalid RON body: {:?}", e));
    assert!(!groups.contains_key("Char('r')"));
    assert!(groups.contains_key("Char('d')"));
    assert!(groups.contains_key("Char('c')"));
    assert!(!groups.contains_key("Nil"));
}
/// `GROUP BY c ORDER BY a :desc` must sort each group's rows by `a`
/// descending (235 before 25 in the 'c' group).
#[ignore]
#[actix_rt::test]
async fn test_select_all_group_by_with_order_ok() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    for req in inserts("group_by_with_order") {
        let _ = test::call_service(&mut app, req).await;
    }
    // Plain literal; the previous `format!` had no arguments.
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("Select * FROM group_by_with_order GROUP BY c ORDER BY a :desc")
        .uri("/wql/query")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    assert!(resp.status().is_success());
    let body = resp.take_body().as_str().to_string();
    let groups: HashMap<String, Vec<(Uuid, HashMap<String, Types>)>> =
        ron::de::from_str(&body).unwrap_or_else(|e| panic!("invalid RON body: {:?}", e));
    // Borrow the entries directly instead of cloning each row's map.
    let c_group = groups.get("Char('c')").unwrap();
    assert_eq!(c_group[0].1.get("a"), Some(&Types::Integer(235)));
    assert_eq!(c_group[1].1.get("a"), Some(&Types::Integer(25)));
}
/// `ORDER BY a :asc` must return rows sorted by `a` ascending (25 … 475).
#[ignore]
#[actix_rt::test]
async fn test_select_all_order_ok() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    for req in inserts("order_by_test") {
        let _ = test::call_service(&mut app, req).await;
    }
    // Plain literal; the previous `format!` had no arguments.
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("Select * FROM order_by_test ORDER BY a :asc")
        .uri("/wql/query")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    assert!(resp.status().is_success());
    let body = resp.take_body().as_str().to_string();
    let rows: Vec<(Uuid, HashMap<String, Types>)> =
        ron::de::from_str(&body).unwrap_or_else(|e| panic!("invalid RON body: {:?}", e));
    assert_eq!(rows.first().unwrap().1.get("a"), Some(&Types::Integer(25)));
    assert_eq!(rows.last().unwrap().1.get("a"), Some(&Types::Integer(475)));
}
/// Builds the seed transactions for `entity_name`: one CREATE ENTITY plus
/// six INSERTs (values chosen so the algebra tests have duplicates on `a`,
/// three distinct `c` chars, and one registry with no `c`).
fn inserts(entity_name: &str) -> Vec<Request> {
    let payloads = vec![
        format!("CREATE ENTITY {}", entity_name),
        format!("INSERT {{a: 123, b: 12.3,}} INTO {}", entity_name),
        format!("INSERT {{a: 235, b: 12.3, c: 'c',}} INTO {}", entity_name),
        format!("INSERT {{a: 235, b: 12.3, c: 'd',}} INTO {}", entity_name),
        format!("INSERT {{a: 25, b: 12.3, c: 'c',}} INTO {}", entity_name),
        format!("INSERT {{a: 475, b: 12.3, c: 'd',}} INTO {}", entity_name),
        format!("INSERT {{a: 295, b: 12.3, c: 'r',}} INTO {}", entity_name),
    ];
    // All requests share the same builder chain; only the payload varies.
    payloads
        .into_iter()
        .map(|payload| {
            test::TestRequest::post()
                .header("Content-Type", "application/wql")
                .set_payload(payload)
                .uri("/wql/tx")
                .to_request()
        })
        .collect()
}
/// Test helper to view a response body as UTF-8 text.
trait BodyTest {
    /// Borrows the body as `&str`; panics if the body is not `Bytes`.
    fn as_str(&self) -> &str;
}
impl BodyTest for ResponseBody<Body> {
    fn as_str(&self) -> &str {
        // Both variants wrap the same `Body` payload, so a single
        // or-pattern replaces the two identical match arms.
        match self {
            ResponseBody::Body(ref b) | ResponseBody::Other(ref b) => match b {
                Body::Bytes(ref by) => std::str::from_utf8(by).unwrap(),
                _ => panic!("response body is not Bytes"),
            },
        }
    }
}
| rust | MIT | bcef59acbf1bb1509a8b3d1b2f914e45921ba026 | 2026-01-04T20:17:21.692961Z | false |
naomijub/wooridb | https://github.com/naomijub/wooridb/blob/bcef59acbf1bb1509a8b3d1b2f914e45921ba026/woori-db/src/controllers/relation_test.rs | woori-db/src/controllers/relation_test.rs | use crate::http::routes;
use actix_http::{body::ResponseBody, Request};
use actix_web::{body::Body, test, App};
use uuid::Uuid;
/// INTERSECT KEY: keys present in both entities, values from the first query.
#[actix_rt::test]
async fn test_intersect_key() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    for entity in &["intersect_ok1", "intersect_ok2"] {
        let req = test::TestRequest::post()
            .header("Content-Type", "application/wql")
            .set_payload(format!("CREATE ENTITY {}", entity))
            .uri("/wql/tx")
            .to_request();
        let _ = test::call_service(&mut app, req).await;
    }
    let uuid1 = Uuid::new_v4().to_string();
    let uuid2 = Uuid::new_v4().to_string();
    let payloads = vec![
        format!(
            "INSERT {{a: 123, b: 234, c: true,}} INTO intersect_ok1 WITH {}",
            uuid1
        ),
        format!(
            "INSERT {{a: 123, b: 432, d: false,}} INTO intersect_ok2 WITH {}",
            uuid2
        ),
    ];
    for payload in payloads {
        let req = test::TestRequest::post()
            .header("Content-Type", "application/wql")
            .set_payload(payload)
            .uri("/wql/tx")
            .to_request();
        let _ = test::call_service(&mut app, req).await;
    }
    let query = format!(
        "INTERSECT KEY Select * FROM intersect_ok1 ID {} | Select * FROM intersect_ok2 ID {}",
        uuid1, uuid2
    );
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload(query)
        .uri("/wql/query")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    let body = resp.take_body().as_str().to_string();
    // Shared keys survive with first-query values; `c` exists only on the left.
    assert!(body.contains("\"b\": Integer(234)"));
    assert!(body.contains("\"a\": Integer(123)"));
    assert!(!body.contains("\"c\":"));
    clear();
}
/// INTERSECT KEY-VALUE: only key/value pairs equal in both entities survive.
#[actix_rt::test]
async fn test_intersect_key_value() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    for entity in &["intersect_ok1", "intersect_ok2"] {
        let req = test::TestRequest::post()
            .header("Content-Type", "application/wql")
            .set_payload(format!("CREATE ENTITY {}", entity))
            .uri("/wql/tx")
            .to_request();
        let _ = test::call_service(&mut app, req).await;
    }
    let uuid1 = Uuid::new_v4().to_string();
    let uuid2 = Uuid::new_v4().to_string();
    let payloads = vec![
        format!(
            "INSERT {{a: 123, b: 234, c: true,}} INTO intersect_ok1 WITH {}",
            uuid1
        ),
        format!(
            "INSERT {{a: 123, b: 432, d: false,}} INTO intersect_ok2 WITH {}",
            uuid2
        ),
    ];
    for payload in payloads {
        let req = test::TestRequest::post()
            .header("Content-Type", "application/wql")
            .set_payload(payload)
            .uri("/wql/tx")
            .to_request();
        let _ = test::call_service(&mut app, req).await;
    }
    let query = format!(
        "INTERSECT KEY-VALUE Select * FROM intersect_ok1 ID {} | Select * FROM intersect_ok2 ID {}",
        uuid1, uuid2
    );
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload(query)
        .uri("/wql/query")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    let body = resp.take_body().as_str().to_string();
    // `b` differs between the two entities, so it must not survive.
    assert!(!body.contains("\"b\": Integer(234)"));
    assert!(body.contains("\"a\": Integer(123)"));
    assert!(!body.contains("\"c\":"));
    clear();
}
/// DIFFERENCE KEY: keys of the first query that are absent from the second.
#[actix_rt::test]
async fn test_diff_key() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    for entity in &["intersect_ok1", "intersect_ok2"] {
        let req = test::TestRequest::post()
            .header("Content-Type", "application/wql")
            .set_payload(format!("CREATE ENTITY {}", entity))
            .uri("/wql/tx")
            .to_request();
        let _ = test::call_service(&mut app, req).await;
    }
    let uuid1 = Uuid::new_v4().to_string();
    let uuid2 = Uuid::new_v4().to_string();
    let payloads = vec![
        format!(
            "INSERT {{a: 123, b: 234, c: true,}} INTO intersect_ok1 WITH {}",
            uuid1
        ),
        format!(
            "INSERT {{a: 123, b: 432, d: false,}} INTO intersect_ok2 WITH {}",
            uuid2
        ),
    ];
    for payload in payloads {
        let req = test::TestRequest::post()
            .header("Content-Type", "application/wql")
            .set_payload(payload)
            .uri("/wql/tx")
            .to_request();
        let _ = test::call_service(&mut app, req).await;
    }
    let query = format!(
        "DIFFERENCE KEY Select * FROM intersect_ok1 ID {} | Select * FROM intersect_ok2 ID {}",
        uuid1, uuid2
    );
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload(query)
        .uri("/wql/query")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    let body = resp.take_body().as_str().to_string();
    // Only `c` is exclusive to the first entity.
    assert!(!body.contains("\"b\": Integer(234)"));
    assert!(!body.contains("\"a\": Integer(123)"));
    assert!(body.contains("\"c\": Boolean(true)"));
    clear();
}
/// DIFFERENCE KEY-VALUE: pairs of the first query not matched exactly in the
/// second (so `b` survives because its values differ).
#[actix_rt::test]
async fn test_diff_key_value() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    for entity in &["intersect_ok1", "intersect_ok2"] {
        let req = test::TestRequest::post()
            .header("Content-Type", "application/wql")
            .set_payload(format!("CREATE ENTITY {}", entity))
            .uri("/wql/tx")
            .to_request();
        let _ = test::call_service(&mut app, req).await;
    }
    let uuid1 = Uuid::new_v4().to_string();
    let uuid2 = Uuid::new_v4().to_string();
    let payloads = vec![
        format!(
            "INSERT {{a: 123, b: 234, c: true,}} INTO intersect_ok1 WITH {}",
            uuid1
        ),
        format!(
            "INSERT {{a: 123, b: 432, d: false,}} INTO intersect_ok2 WITH {}",
            uuid2
        ),
    ];
    for payload in payloads {
        let req = test::TestRequest::post()
            .header("Content-Type", "application/wql")
            .set_payload(payload)
            .uri("/wql/tx")
            .to_request();
        let _ = test::call_service(&mut app, req).await;
    }
    let query = format!(
        "DIFFERENCE KEY-VALUE Select * FROM intersect_ok1 ID {} | Select * FROM intersect_ok2 ID {}",
        uuid1, uuid2
    );
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload(query)
        .uri("/wql/query")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    let body = resp.take_body().as_str().to_string();
    assert!(body.contains("\"b\": Integer(234)"));
    assert!(!body.contains("\"a\": Integer(123)"));
    assert!(body.contains("\"c\": Boolean(true)"));
    clear();
}
/// UNION KEY: all keys from both queries; first-query values win on clashes.
#[actix_rt::test]
async fn test_union_key() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    for entity in &["intersect_ok1", "intersect_ok2"] {
        let req = test::TestRequest::post()
            .header("Content-Type", "application/wql")
            .set_payload(format!("CREATE ENTITY {}", entity))
            .uri("/wql/tx")
            .to_request();
        let _ = test::call_service(&mut app, req).await;
    }
    let uuid1 = Uuid::new_v4().to_string();
    let uuid2 = Uuid::new_v4().to_string();
    let payloads = vec![
        format!(
            "INSERT {{a: 123, b: 234, c: true,}} INTO intersect_ok1 WITH {}",
            uuid1
        ),
        format!(
            "INSERT {{a: 123, b: 432, d: false,}} INTO intersect_ok2 WITH {}",
            uuid2
        ),
    ];
    for payload in payloads {
        let req = test::TestRequest::post()
            .header("Content-Type", "application/wql")
            .set_payload(payload)
            .uri("/wql/tx")
            .to_request();
        let _ = test::call_service(&mut app, req).await;
    }
    let query = format!(
        "UNION KEY Select * FROM intersect_ok1 ID {} | Select * FROM intersect_ok2 ID {}",
        uuid1, uuid2
    );
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload(query)
        .uri("/wql/query")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    let body = resp.take_body().as_str().to_string();
    assert!(body.contains("\"b\": Integer(234)"));
    assert!(body.contains("\"a\": Integer(123)"));
    assert!(body.contains("\"c\": Boolean(true)"));
    assert!(body.contains("\"d\": Boolean(false)"));
    clear();
}
/// UNION KEY-VALUE: all pairs from both queries; a conflicting key keeps both
/// values, the second one under a `:duplicated` suffix.
#[actix_rt::test]
async fn test_union_key_value() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    for entity in &["intersect_ok1", "intersect_ok2"] {
        let req = test::TestRequest::post()
            .header("Content-Type", "application/wql")
            .set_payload(format!("CREATE ENTITY {}", entity))
            .uri("/wql/tx")
            .to_request();
        let _ = test::call_service(&mut app, req).await;
    }
    let uuid1 = Uuid::new_v4().to_string();
    let uuid2 = Uuid::new_v4().to_string();
    let payloads = vec![
        format!(
            "INSERT {{a: 123, b: 234, c: true,}} INTO intersect_ok1 WITH {}",
            uuid1
        ),
        format!(
            "INSERT {{a: 123, b: 432, d: false,}} INTO intersect_ok2 WITH {}",
            uuid2
        ),
    ];
    for payload in payloads {
        let req = test::TestRequest::post()
            .header("Content-Type", "application/wql")
            .set_payload(payload)
            .uri("/wql/tx")
            .to_request();
        let _ = test::call_service(&mut app, req).await;
    }
    let query = format!(
        "UNION KEY-VALUE Select * FROM intersect_ok1 ID {} | Select * FROM intersect_ok2 ID {}",
        uuid1, uuid2
    );
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload(query)
        .uri("/wql/query")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    let body = resp.take_body().as_str().to_string();
    assert!(body.contains("\"b\": Integer(234)"));
    assert!(body.contains("\"a\": Integer(123)"));
    assert!(body.contains("\"c\": Boolean(true)"));
    assert!(body.contains("\"d\": Boolean(false)"));
    assert!(body.contains("\"b:duplicated\": Integer(432)"));
    clear();
}
/// JOIN on `entity_A:c = entity_B:c`: matching rows are merged and the
/// right-hand columns are suffixed with `:entity_B` (the join key and
/// tx_time of the right side are dropped).
#[ignore]
#[actix_rt::test]
async fn test_join() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    for req in inserts() {
        let _ = test::call_service(&mut app, req).await;
    }
    // The `format!` carried no arguments, so a plain literal suffices.
    let payload = "JOIN (entity_A:c, entity_B:c) Select * FROM entity_A | Select * FROM entity_B";
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload(payload)
        .uri("/wql/query")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    assert!(resp.status().is_success());
    let body = resp.take_body().as_str().to_string();
    assert!(body.contains("\"a:entity_B\": Integer(25),"));
    assert!(body.contains("\"b:entity_B\": Float(12.3),"));
    assert!(body.contains("\"c\": Char('c'),"));
    assert!(body.contains("\"b\": Float(12.3)"));
    assert!(body.contains("\"a\": Integer(235)"));
    assert!(!body.contains("\"c:entity_B\""));
    assert!(!body.contains("\"tx_time:entity_B\""));
}
/// JOIN on differently named keys (`entity_AA:c` vs `entity_BB:o`) with an
/// ordered left side and a projected (`#{g, f, o, b,}`) right side.
#[ignore]
#[actix_rt::test]
async fn test_join2() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    for req in inserts2() {
        let _ = test::call_service(&mut app, req).await;
    }
    // The `format!` carried no arguments; note the trailing space in the
    // original payload is preserved.
    let payload = "JOIN (entity_AA:c, entity_BB:o) Select * FROM entity_AA order by c :asc | Select #{g, f, o, b,} FROM entity_BB ";
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload(payload)
        .uri("/wql/query")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    assert!(resp.status().is_success());
    let body = resp.take_body().as_str().to_string();
    assert!(body.contains("b:entity_BB"));
    assert!(!body.contains("g:entity_BB"));
    assert!(body.contains("\"g\": Integer(475)"));
    assert_eq!(body.matches("Char('d')").count(), 4);
}
/// Seed transactions for the simple JOIN test: creates `entity_A`/`entity_B`
/// and inserts rows with overlapping `c` values.
/// The entity names were literals, so the `format!` calls are inlined.
fn inserts() -> Vec<Request> {
    let payloads = [
        "CREATE ENTITY entity_A",
        "CREATE ENTITY entity_B",
        "INSERT {a: 123, b: 12.3,} INTO entity_A",
        "INSERT {a: 235, b: 12.3, c: 'c',} INTO entity_A",
        "INSERT {a: 235, b: 12.3, c: 'd',} INTO entity_A",
        "INSERT {a: 25, b: 12.3, c: 'c',} INTO entity_B",
        "INSERT {a: 475, b: 12.3, c: 'd',} INTO entity_B",
        "INSERT {a: 295, b: 12.3, c: 'r',} INTO entity_B",
    ];
    // All requests share the same builder chain; only the payload varies.
    payloads
        .iter()
        .map(|payload| {
            test::TestRequest::post()
                .header("Content-Type", "application/wql")
                .set_payload(*payload)
                .uri("/wql/tx")
                .to_request()
        })
        .collect()
}
/// Seed transactions for the second JOIN test: `entity_AA` joins on `c`,
/// `entity_BB` on `o`, with heterogeneous attribute sets (including a Nil `o`).
/// The entity names were literals, so the `format!` calls are inlined.
fn inserts2() -> Vec<Request> {
    let payloads = [
        "CREATE ENTITY entity_AA",
        "CREATE ENTITY entity_BB",
        "INSERT {a: 123, b: 12.3,} INTO entity_AA",
        "INSERT {a: 235, b: 17.3, c: 'c',} INTO entity_AA",
        "INSERT {a: 476, b: 312.3, c: 'd',} INTO entity_AA",
        "INSERT {a: 857, c: 'd',} INTO entity_AA",
        "INSERT {a: 66, b: 66.3, c: 'r',} INTO entity_BB",
        "INSERT {g: 25, f: 12.3, a: 'c',} INTO entity_BB",
        "INSERT {g: 475, b: 12.3, f: 'h', o: 'd',} INTO entity_BB",
        "INSERT {g: 756, b: 142.3, f: 'h', o: 'c',} INTO entity_BB",
        "INSERT {g: 76, b: 12.3, f: 't', o: 'd',} INTO entity_BB",
        "INSERT {t: 295, b: 12.3, o: 'r',} INTO entity_BB",
        "INSERT {t: 295, f: 12.3, o: Nil,} INTO entity_BB",
    ];
    // All requests share the same builder chain; only the payload varies.
    payloads
        .iter()
        .map(|payload| {
            test::TestRequest::post()
                .header("Content-Type", "application/wql")
                .set_payload(*payload)
                .uri("/wql/tx")
                .to_request()
        })
        .collect()
}
/// Test helper to view a response body as UTF-8 text.
trait BodyTest {
    /// Borrows the body as `&str`; panics if the body is not `Bytes`.
    fn as_str(&self) -> &str;
}
impl BodyTest for ResponseBody<Body> {
    fn as_str(&self) -> &str {
        // Both variants wrap the same `Body` payload, so a single
        // or-pattern replaces the two identical match arms.
        match self {
            ResponseBody::Body(ref b) | ResponseBody::Other(ref b) => match b {
                Body::Bytes(ref by) => std::str::from_utf8(by).unwrap(),
                _ => panic!("response body is not Bytes"),
            },
        }
    }
}
/// Best-effort removal of every `.log` file under `data/`.
///
/// The previous implementation spawned `rm -rf data/*.log`, but `Command`
/// does not go through a shell, so the glob was passed literally and
/// nothing was ever deleted. Expanding the pattern in Rust makes the
/// cleanup actually work (errors are still ignored, as before).
pub fn clear() {
    if let Ok(entries) = std::fs::read_dir("data") {
        for entry in entries.flatten() {
            let path = entry.path();
            if path.extension().map_or(false, |ext| ext == "log") {
                let _ = std::fs::remove_file(path);
            }
        }
    }
}
| rust | MIT | bcef59acbf1bb1509a8b3d1b2f914e45921ba026 | 2026-01-04T20:17:21.692961Z | false |
naomijub/wooridb | https://github.com/naomijub/wooridb/blob/bcef59acbf1bb1509a8b3d1b2f914e45921ba026/woori-db/src/controllers/tx_test.rs | woori-db/src/controllers/tx_test.rs | use crate::io::read;
use crate::{http::routes, schemas::tx::TxResponse};
use actix_http::body::ResponseBody;
use actix_web::{body::Body, test, App};
use uuid::Uuid;
/// CREATE ENTITY succeeds and returns the exact RON-serialized tx response,
/// and the command is appended to the transaction log.
#[actix_rt::test]
async fn test_create_post_ok() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("CREATE ENTITY crete_test_ok")
        .uri("/wql/tx")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    assert!(resp.status().is_success());
    let body = resp.take_body();
    let body = body.as_ref().unwrap();
    // Byte-for-byte comparison against the expected RON body.
    assert_eq!(
        &Body::from(
            "(\n tx_type: Create,\n entity: \"crete_test_ok\",\n uuid: None,\n state: \"\",\n message: \"Entity `crete_test_ok` created\",\n)"
        ),
        body
    );
    read::assert_content("CREATE_ENTITY|crete_test_ok;");
    clear();
}
// SELECT statements must be rejected by the transaction endpoint: queries
// belong to /wql/query, so /wql/tx answers with a 4xx and an explanation.
#[actix_rt::test]
async fn test_select_post_err() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("SELECT * FROM test_ok")
        .uri("/wql/tx")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    assert!(resp.status().is_client_error());
    let body = resp.take_body();
    let body = body.as_ref().unwrap();
    assert_eq!(
        &Body::from("(\n error_type: \"SelectBadRequest\",\n error_message: \"SELECT expressions are handled by `/wql/query` endpoint\",\n)"),
        body
    );
    clear();
}
// CREATE ENTITY ... UNIQUES should succeed and persist both the entity and
// its unique-key names to the uniques log.
#[actix_rt::test]
async fn test_create_uniques_post_ok() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("CREATE ENTITY test_uniques UNIQUES #{name, ssn, id,}")
        .uri("/wql/tx")
        .to_request();
    let resp = test::call_service(&mut app, req).await;
    assert!(resp.status().is_success());
    read::assert_content("CREATE_ENTITY|test_uniques;");
    read::assert_uniques("test_uniques");
    read::assert_uniques("uniques: [\"name\",\"ssn\",\"id\",]");
    clear();
}
// CREATE ENTITY ... ENCRYPT should succeed and persist the entity and its
// encrypted-key names to the encrypts log.
#[actix_rt::test]
async fn test_create_encrypts_post_ok() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("CREATE ENTITY test_encrypt ENCRYPT #{name, ssn, id,}")
        .uri("/wql/tx")
        .to_request();
    let resp = test::call_service(&mut app, req).await;
    assert!(resp.status().is_success());
    read::assert_content("CREATE_ENTITY|test_encrypt;");
    read::assert_encrypt("test_encrypt");
    read::assert_encrypt("encrypts: [\"name\",\"ssn\",\"id\",]");
    clear();
}
// Creating the same entity twice must fail the second time with
// `EntityAlreadyCreated`.
#[actix_rt::test]
async fn test_create_post_duplicated_err() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("CREATE ENTITY test_ok")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let duplicated_req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("CREATE ENTITY test_ok")
        .uri("/wql/tx")
        .to_request();
    let mut resp = test::call_service(&mut app, duplicated_req).await;
    assert!(resp.status().is_client_error());
    let body = resp.take_body();
    let body = body.as_ref().unwrap();
    assert_eq!(&Body::from("(\n error_type: \"EntityAlreadyCreated\",\n error_message: \"Entity `test_ok` already created\",\n)"), body);
    clear();
}
// Requests without the `application/wql` content type are rejected with a
// client error even when the payload itself is valid WQL.
#[actix_rt::test]
async fn test_create_post_bad_request() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "text/plain")
        .set_payload("CREATE ENTITY test_ok")
        .uri("/wql/tx")
        .to_request();
    let resp = test::call_service(&mut app, req).await;
    assert!(resp.status().is_client_error());
    clear();
}
// An unrecognized WQL keyword must produce a `QueryFormat` error naming the
// offending symbol.
// NOTE(review): fn name has a typo (`unkwon` -> `unknown`); left unchanged
// to keep the test's identity stable.
#[actix_rt::test]
async fn test_unkwon_wql_post() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("DO SOMETHIG weird")
        .uri("/wql/tx")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    assert!(resp.status().is_client_error());
    let body = resp.take_body();
    let body = body.as_ref().unwrap();
    assert_eq!(
        &Body::from("(\n error_type: \"QueryFormat\",\n error_message: \"\\\"Symbol `DO` not implemented\\\"\",\n)"),
        body);
    clear();
}
// INSERT into an existing entity succeeds and logs the operation, the
// entity name, the typed content, and a transaction timestamp.
#[actix_rt::test]
async fn test_insert_post_ok() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("CREATE ENTITY test_ok")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("INSERT {a: 123,} INTO test_ok")
        .uri("/wql/tx")
        .to_request();
    let resp = test::call_service(&mut app, req).await;
    assert!(resp.status().is_success());
    read::assert_content("INSERT|");
    read::assert_content("|test_ok|");
    read::assert_content("\"a\": Integer(123)");
    read::assert_content("\"tx_time\":");
    clear();
}
// INSERT ... WITH <uuid> lets the client choose the entity instance id;
// the supplied uuid must appear in the write log.
#[actix_rt::test]
async fn test_insert_with_id_post_ok() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("CREATE ENTITY test_ok_with_id")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let uuid = Uuid::new_v4().to_string();
    let payload = format!("INSERT {{a: 123,}} INTO test_ok_with_id WITH {}", uuid);
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload(payload)
        .uri("/wql/tx")
        .to_request();
    let resp = test::call_service(&mut app, req).await;
    assert!(resp.status().is_success());
    read::assert_content("INSERT|");
    read::assert_content(&uuid);
    read::assert_content("|test_ok_with_id|");
    read::assert_content("\"tx_time\":");
    read::assert_content("\"a\": Integer(123)");
    clear();
}
// `tx_time` is a reserved key managed by the server; inserting it
// explicitly must be rejected with a client error.
#[actix_rt::test]
async fn test_insert_with_tx_time_err() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("CREATE ENTITY test_insert_err")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let uuid = Uuid::new_v4().to_string();
    let payload = format!(
        "INSERT {{a: 123, tx_time: 4}} INTO test_insert_err WITH {}",
        uuid
    );
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload(payload)
        .uri("/wql/tx")
        .to_request();
    let resp = test::call_service(&mut app, req).await;
    assert!(resp.status().is_client_error());
    clear();
}
// Uniqueness enforcement: a second INSERT reusing a unique key's value is
// rejected with `DuplicatedUnique`, while a fresh value succeeds again.
#[actix_rt::test]
async fn test_insert_unique_post_ok() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("CREATE ENTITY test_insert_unique UNIQUES #{id,}")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("INSERT {id: 123, a: \"hello\",} INTO test_insert_unique")
        .uri("/wql/tx")
        .to_request();
    let resp = test::call_service(&mut app, req).await;
    assert!(resp.status().is_success());
    // Same `id`, different payload: must be refused.
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("INSERT {id: 123, a: \"world\",} INTO test_insert_unique")
        .uri("/wql/tx")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    assert!(resp.status().is_client_error());
    let body = resp.take_body();
    let body = body.as_ref().unwrap();
    assert_eq!(
        &Body::from(
            "(\n error_type: \"DuplicatedUnique\",\n error_message: \"key `id` in entity `test_insert_unique` already contains value `Integer(123)`\",\n)"
        ),
        body
    );
    // New `id` value: accepted.
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("INSERT {id: 234, a: \"hello\",} INTO test_insert_unique")
        .uri("/wql/tx")
        .to_request();
    let resp = test::call_service(&mut app, req).await;
    assert!(resp.status().is_success());
    clear();
}
// INSERT into an entity that was never created must fail with
// `EntityNotCreated`.
#[actix_rt::test]
async fn test_insert_entity_not_created() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("INSERT {a: 123,} INTO missing")
        .uri("/wql/tx")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    assert!(resp.status().is_client_error());
    let body = resp.take_body();
    let body = body.as_ref().unwrap();
    assert_eq!(&Body::from("(\n error_type: \"EntityNotCreated\",\n error_message: \"Entity `missing` not created\",\n)"), body);
    clear();
}
// UPDATE ... SET overwrites the listed keys and keeps the rest; the log
// must contain the merged state. (#[ignore]d in CI — presumably flaky due
// to shared on-disk state between tests; run explicitly when needed.)
#[ignore]
#[actix_rt::test]
async fn test_update_set_post_ok() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("CREATE ENTITY test_update")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("INSERT {a: 123, b: 12.3,} INTO test_update")
        .uri("/wql/tx")
        .to_request();
    let mut resp_insert = test::call_service(&mut app, req).await;
    // The insert response is RON; parse it to recover the generated uuid.
    let body = resp_insert.take_body().as_str().to_string();
    let response: TxResponse = ron::de::from_str(&body).unwrap();
    let uuid = response.uuid;
    let payload = format!(
        "UPDATE test_update SET {{a: 12, c: Nil,}} INTO {}",
        uuid.unwrap()
    );
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload(payload)
        .uri("/wql/tx")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    let body = resp.take_body().as_str().to_string();
    assert!(resp.status().is_success());
    assert!(body.contains("entity: \"test_update\""));
    read::assert_content("UPDATE_SET|");
    read::assert_content(&uuid.unwrap().to_string());
    read::assert_content("|test_update|");
    read::assert_content("\"a\": Integer(12),");
    read::assert_content("\"b\": Float(12.3),");
    read::assert_content("\"c\": Nil,");
    read::assert_content("\"tx_time\":");
    clear();
}
// UPDATE ... SET must also respect uniqueness: setting key `a` to a value
// already held by another instance is rejected with `DuplicatedUnique`.
#[ignore]
#[actix_rt::test]
async fn test_update_uniqueness_set_post_ok() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("CREATE ENTITY test_unique_set_update UNIQUES #{a,}")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("INSERT {a: 123, b: 12.3,} INTO test_unique_set_update")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("INSERT {a: 321, b: 12.3,} INTO test_unique_set_update")
        .uri("/wql/tx")
        .to_request();
    let mut resp_insert = test::call_service(&mut app, req).await;
    let body = resp_insert.take_body().as_str().to_string();
    let response: TxResponse = ron::de::from_str(&body).unwrap();
    let uuid = response.uuid;
    // `a: 123` already belongs to the first instance.
    let payload = format!(
        "UPDATE test_unique_set_update SET {{a: 123, c: Nil,}} INTO {}",
        uuid.unwrap()
    );
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload(payload)
        .uri("/wql/tx")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    assert!(resp.status().is_client_error());
    let body = resp.take_body();
    let body = body.as_ref().unwrap();
    assert_eq!(
        &Body::from(
            "(\n error_type: \"DuplicatedUnique\",\n error_message: \"key `a` in entity `test_unique_set_update` already contains value `Integer(123)`\",\n)"
        ),
        body
    );
    clear();
}
// UPDATE ... CONTENT merges values with the existing state (numbers are
// added, strings concatenated, other types replaced) — see the expected
// log lines: 123+12 -> 135, "hello"+"world" -> "helloworld", etc.
#[ignore]
#[actix_rt::test]
async fn test_update_content_post_ok() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("CREATE ENTITY test_update")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload(
            "INSERT {
a: 123,
b: 12.3,
c: 'd' ,
d: true ,
e: 43.21,
f: \"hello\",
g: NiL,
h: 7,}
INTO test_update",
        )
        .uri("/wql/tx")
        .to_request();
    let mut resp_insert = test::call_service(&mut app, req).await;
    let body = resp_insert.take_body().as_str().to_string();
    let response: TxResponse = ron::de::from_str(&body).unwrap();
    let uuid = response.uuid;
    let payload = format!(
        "UPDATE test_update CONTENT {{
a: 12,
b: -1.3,
c: 'd' ,
d: false ,
e: 4,
f: \"world\",
g: true,
h: 3.6,}} INTO {}",
        uuid.unwrap()
    );
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload(payload)
        .uri("/wql/tx")
        .to_request();
    let resp = test::call_service(&mut app, req).await;
    assert!(resp.status().is_success());
    read::assert_content("UPDATE_CONTENT|");
    read::assert_content(&uuid.unwrap().to_string());
    read::assert_content("|test_update|");
    read::assert_content("\"a\": Integer(135),");
    read::assert_content("\"c\": Char('d'),");
    read::assert_content("\"d\": Boolean(false),");
    read::assert_content("\"e\": Float(47.21)");
    read::assert_content("\"f\": String(\"helloworld\"),");
    read::assert_content("\"g\": Boolean(true),");
    read::assert_content("\"h\": Float(10.6)");
    clear();
}
// Updating an entity name that was never created must fail with
// `EntityNotCreated`, even when the uuid belongs to another entity.
#[actix_rt::test]
async fn test_update_wrong_entity() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("CREATE ENTITY test_update")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload(
            "INSERT {
a: 123,
g: NiL,}
INTO test_update",
        )
        .uri("/wql/tx")
        .to_request();
    let mut resp_insert = test::call_service(&mut app, req).await;
    let body = resp_insert.take_body().as_str().to_string();
    let response: TxResponse = ron::de::from_str(&body).unwrap();
    let uuid = response.uuid;
    // Valid uuid, but the entity name does not exist.
    let payload = format!(
        "UPDATE test_anything CONTENT {{
a: 12,
g: true,}} INTO {}",
        uuid.unwrap()
    );
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload(payload)
        .uri("/wql/tx")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    assert!(resp.status().is_client_error());
    let body = resp.take_body();
    let body = body.as_ref().unwrap();
    assert_eq!(&Body::from("(\n error_type: \"EntityNotCreated\",\n error_message: \"Entity `test_anything` not created\",\n)"), body);
    clear();
}
// Updating an existing entity with a uuid that was never inserted into it
// must fail, and the error body should name both the uuid and the entity.
#[actix_rt::test]
async fn test_update_any_uuid() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("CREATE ENTITY test_update")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload(
            "INSERT {
a: 123,
g: NiL,}
INTO test_update",
        )
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    // Random uuid that no instance of test_update carries.
    let payload = format!(
        "UPDATE test_update CONTENT {{
a: 12,
g: true,}} INTO {}",
        uuid::Uuid::new_v4()
    );
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload(payload)
        .uri("/wql/tx")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    assert!(resp.status().is_client_error());
    let body = resp.take_body();
    let body = body.as_str();
    assert!(body.contains("not created for entity test_update"));
    assert!(body.contains("Uuid"));
    clear();
}
// DELETE after an update should succeed and be recorded in the write log.
// In this datastore DELETE rolls the instance back to its previous state.
#[ignore]
#[actix_rt::test]
async fn test_delete_post_ok() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("CREATE ENTITY test_delete")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("INSERT {a: 123, b: 12.3,} INTO test_delete")
        .uri("/wql/tx")
        .to_request();
    let mut resp_insert = test::call_service(&mut app, req).await;
    let body = resp_insert.take_body().as_str().to_string();
    let response: TxResponse = ron::de::from_str(&body).unwrap();
    let uuid = response.uuid;
    let payload = format!(
        "UPDATE test_delete SET {{a: 12, c: Nil,}} INTO {}",
        uuid.unwrap()
    );
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload(payload)
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let payload = format!("Delete {} FROM test_delete", uuid.unwrap());
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload(payload)
        .uri("/wql/tx")
        .to_request();
    let resp = test::call_service(&mut app, req).await;
    assert!(resp.status().is_success());
    read::assert_content("DELETE");
    clear();
}
// DELETE directly after an INSERT (no intermediate update) still succeeds
// and returns a Delete TxResponse naming the uuid.
#[ignore]
#[actix_rt::test]
async fn test_delete_without_update() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("CREATE ENTITY test_delete")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("INSERT {a: 123, b: 12.3,} INTO test_delete")
        .uri("/wql/tx")
        .to_request();
    let mut resp_insert = test::call_service(&mut app, req).await;
    let body = resp_insert.take_body().as_str().to_string();
    let response: TxResponse = ron::de::from_str(&body).unwrap();
    let uuid = response.uuid;
    let payload = format!("Delete {} FROM test_delete", uuid.unwrap());
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload(payload)
        .uri("/wql/tx")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    let body = resp.take_body().as_str().to_string();
    assert_eq!(body, format!("(\n tx_type: Delete,\n entity: \"test_delete\",\n uuid: Some(\"{}\"),\n state: \"\",\n message: \"Entity test_delete with Uuid {} deleted\",\n)", uuid.unwrap(), uuid.unwrap()));
    assert!(resp.status().is_success());
    read::assert_content("DELETE");
    // NOTE(review): "|{}|" looks like an un-interpolated format placeholder —
    // it checks for the literal characters `|{}|` in the log, which may be
    // intentional (deleted state is an empty map) or a leftover bug; confirm.
    read::assert_content("|{}|");
    clear();
}
// MATCH ALL(...) UPDATE succeeds when every condition holds for the
// current state (a=123 > 100 and b=12.3 <= 20.0).
#[ignore]
#[actix_rt::test]
async fn test_match_all_update_post_ok() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("CREATE ENTITY test_match_all")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("INSERT {a: 123, b: 12.3,} INTO test_match_all")
        .uri("/wql/tx")
        .to_request();
    let mut resp_insert = test::call_service(&mut app, req).await;
    let body = resp_insert.take_body().as_str().to_string();
    let response: TxResponse = ron::de::from_str(&body).unwrap();
    let uuid = response.uuid;
    let payload = format!(
        "MATCH ALL(a > 100, b <= 20.0) UPDATE test_match_all SET {{a: 43, c: Nil,}} INTO {}",
        uuid.unwrap()
    );
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload(payload)
        .uri("/wql/tx")
        .to_request();
    let resp = test::call_service(&mut app, req).await;
    assert!(resp.status().is_success());
    clear();
}
// MATCH ANY(...) UPDATE succeeds when at least one condition holds
// (a=123 > 100 is true even though b=12.3 <= 10.0 is false).
#[ignore]
#[actix_rt::test]
async fn test_match_any_update_post_ok() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("CREATE ENTITY test_match_all")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("INSERT {a: 123, b: 12.3,} INTO test_match_all")
        .uri("/wql/tx")
        .to_request();
    let mut resp_insert = test::call_service(&mut app, req).await;
    let body = resp_insert.take_body().as_str().to_string();
    let response: TxResponse = ron::de::from_str(&body).unwrap();
    let uuid = response.uuid;
    let payload = format!(
        "MATCH ANY(a > 100, b <= 10.0) UPDATE test_match_all SET {{a: 43, c: Nil,}} INTO {}",
        uuid.unwrap()
    );
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload(payload)
        .uri("/wql/tx")
        .to_request();
    let resp = test::call_service(&mut app, req).await;
    assert!(resp.status().is_success());
    clear();
}
// MATCH ANY(...) UPDATE fails with `FailedMatchCondition` when no
// condition holds (a=123 is not > 200 and b=12.3 is not <= 10.0).
#[ignore]
#[actix_rt::test]
async fn test_match_any_update_fail() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("CREATE ENTITY test_match_all")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("INSERT {a: 123, b: 12.3,} INTO test_match_all")
        .uri("/wql/tx")
        .to_request();
    let mut resp_insert = test::call_service(&mut app, req).await;
    let body = resp_insert.take_body().as_str().to_string();
    let response: TxResponse = ron::de::from_str(&body).unwrap();
    let uuid = response.uuid;
    let payload = format!(
        "MATCH ANY(a > 200, b <= 10.0) UPDATE test_match_all SET {{a: 43, c: Nil,}} INTO {}",
        uuid.unwrap()
    );
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload(payload)
        .uri("/wql/tx")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    assert!(resp.status().is_client_error());
    let body = resp.take_body();
    let body = body.as_ref().unwrap();
    assert_eq!(&Body::from("(\n error_type: \"FailedMatchCondition\",\n error_message: \"One or more MATCH CONDITIONS failed\",\n)"), body);
    clear();
}
// MATCH ANY with a key absent from the state (`g`) still succeeds as long
// as another condition holds (b <= 20.0).
#[ignore]
#[actix_rt::test]
async fn test_match_any_update_fake_key() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("CREATE ENTITY test_match_all")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("INSERT {a: 123, b: 12.3,} INTO test_match_all")
        .uri("/wql/tx")
        .to_request();
    let mut resp_insert = test::call_service(&mut app, req).await;
    let body = resp_insert.take_body().as_str().to_string();
    let response: TxResponse = ron::de::from_str(&body).unwrap();
    let uuid = response.uuid;
    let payload = format!(
        "MATCH ANY(g > 100, b <= 20.0) UPDATE test_match_all SET {{a: 43, c: Nil,}} INTO {}",
        uuid.unwrap()
    );
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload(payload)
        .uri("/wql/tx")
        .to_request();
    let resp = test::call_service(&mut app, req).await;
    assert!(resp.status().is_success());
    clear();
}
// MATCH ALL with a key absent from the state (`g`) must fail: a missing
// key cannot satisfy its condition, so ALL cannot hold.
#[ignore]
#[actix_rt::test]
async fn test_match_all_update_fake_key() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("CREATE ENTITY test_match_all")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("INSERT {a: 123, b: 12.3,} INTO test_match_all")
        .uri("/wql/tx")
        .to_request();
    let mut resp_insert = test::call_service(&mut app, req).await;
    let body = resp_insert.take_body().as_str().to_string();
    let response: TxResponse = ron::de::from_str(&body).unwrap();
    let uuid = response.uuid;
    let payload = format!(
        "MATCH ALL(g > 100, b <= 20.0) UPDATE test_match_all SET {{a: 43, c: Nil,}} INTO {}",
        uuid.unwrap()
    );
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload(payload)
        .uri("/wql/tx")
        .to_request();
    let resp = test::call_service(&mut app, req).await;
    assert!(resp.status().is_client_error());
    clear();
}
// EVICT <entity> removes the whole entity: a subsequent INSERT behaves as
// if the entity had never been created.
#[actix_rt::test]
async fn test_evict_entity_post_ok() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("CREATE ENTITY test_evict")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("Evict test_evict")
        .uri("/wql/tx")
        .to_request();
    let resp = test::call_service(&mut app, req).await;
    assert!(resp.status().is_success());
    read::assert_content("EVICT_ENTITY|");
    read::assert_content("|test_evict;");
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("INSERT {a: 123, b: 12.3,} INTO test_evict")
        .uri("/wql/tx")
        .to_request();
    let mut resp_insert = test::call_service(&mut app, req).await;
    let body = resp_insert.take_body().as_str().to_string();
    assert!(resp_insert.status().is_client_error());
    assert_eq!("(\n error_type: \"EntityNotCreated\",\n error_message: \"Entity `test_evict` not created\",\n)", body);
    clear();
}
// EVICT <uuid> FROM <entity> removes one instance: later updates with that
// uuid fail with `UuidNotCreatedForEntity` while the entity itself remains.
#[actix_rt::test]
async fn test_evict_entity_id_post_ok() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("CREATE ENTITY test_evict_id")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("INSERT {a: 123, b: 12.3,} INTO test_evict_id")
        .uri("/wql/tx")
        .to_request();
    let mut resp_insert = test::call_service(&mut app, req).await;
    let body = resp_insert.take_body().as_str().to_string();
    let response: TxResponse = ron::de::from_str(&body).unwrap();
    let uuid = response.uuid;
    assert!(resp_insert.status().is_success());
    let evict = format!("Evict {} from test_evict_id", uuid.unwrap());
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload(evict)
        .uri("/wql/tx")
        .to_request();
    let resp = test::call_service(&mut app, req).await;
    assert!(resp.status().is_success());
    read::assert_content("EVICT_ENTITY_ID|");
    read::assert_content("|test_evict_id;");
    let payload = format!(
        "UPDATE test_evict_id SET {{a: 12, c: Nil,}} INTO {}",
        uuid.unwrap()
    );
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload(payload)
        .uri("/wql/tx")
        .to_request();
    let mut resp = test::call_service(&mut app, req).await;
    assert!(resp.status().is_client_error());
    let body = resp.take_body().as_str().to_string();
    assert_eq!(
        body,
        format!("(\n error_type: \"UuidNotCreatedForEntity\",\n error_message: \"Uuid {} not created for entity test_evict_id\",\n)", uuid.unwrap())
    );
    clear();
}
// Keys declared with ENCRYPT must never reach the log in plaintext: the
// non-encrypted key is logged normally, the password plaintext is absent.
#[actix_rt::test]
async fn test_insert_encrypt_post_ok() {
    let mut app = test::init_service(App::new().configure(routes)).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("CREATE ENTITY test_ok ENCRYPT #{pswd,}")
        .uri("/wql/tx")
        .to_request();
    let _ = test::call_service(&mut app, req).await;
    let req = test::TestRequest::post()
        .header("Content-Type", "application/wql")
        .set_payload("INSERT {a: 123, pswd: \"my_password\",} INTO test_ok")
        .uri("/wql/tx")
        .to_request();
    let resp = test::call_service(&mut app, req).await;
    assert!(resp.status().is_success());
    read::assert_content("INSERT|");
    read::assert_content("|test_ok|");
    read::assert_content("\"a\": Integer(123)");
    read::assert_not_content("my_password");
    clear();
}
#[ignore]
#[actix_rt::test]
async fn test_update_set_encrypt_post_ok() {
let mut app = test::init_service(App::new().configure(routes)).await;
let req = test::TestRequest::post()
.header("Content-Type", "application/wql")
.set_payload("CREATE ENTITY test_ok_encrypt ENCRYPT #{pswd,}")
.uri("/wql/tx")
.to_request();
let _ = test::call_service(&mut app, req).await;
let req = test::TestRequest::post()
.header("Content-Type", "application/wql")
.set_payload("INSERT {a: 123, pswd: \"my_password\",} INTO test_ok_encrypt")
.uri("/wql/tx")
.to_request();
let mut resp_insert = test::call_service(&mut app, req).await;
let body = resp_insert.take_body().as_str().to_string();
let response: TxResponse = ron::de::from_str(&body).unwrap();
let uuid = response.uuid;
let payload = format!(
| rust | MIT | bcef59acbf1bb1509a8b3d1b2f914e45921ba026 | 2026-01-04T20:17:21.692961Z | true |
naomijub/wooridb | https://github.com/naomijub/wooridb/blob/bcef59acbf1bb1509a8b3d1b2f914e45921ba026/woori-db/src/actors/uniques.rs | woori-db/src/actors/uniques.rs | use std::{
collections::{HashMap, HashSet},
sync::{Arc, Mutex},
};
use actix::prelude::*;
use ron::ser::to_string_pretty;
use serde::Serialize;
use wql::Types;
use crate::{
actors::wql::Executor, core::pretty_config_inner, io::write::unique_data, model::error::Error,
repository::local::UniquenessContext,
};
/// Actor message: append the unique-key declaration for `entity` to the
/// uniques log on disk.
#[derive(Serialize)]
pub struct WriteWithUniqueKeys {
    pub entity: String,
    // Names of the keys whose values must be unique for this entity.
    pub uniques: Vec<String>,
}
// No payload on success; errors surface serialization or I/O failures.
impl Message for WriteWithUniqueKeys {
    type Result = Result<(), Error>;
}
impl Handler<WriteWithUniqueKeys> for Executor {
    type Result = Result<(), Error>;

    /// Serializes the message as pretty RON and appends it to the uniques log.
    fn handle(&mut self, msg: WriteWithUniqueKeys, _: &mut Self::Context) -> Self::Result {
        use crate::io::write::write_to_uniques;
        let unique_log =
            to_string_pretty(&msg, pretty_config_inner()).map_err(Error::Serialization)?;
        Ok(write_to_uniques(&unique_log)?)
    }
}
/// Actor message: register unique-key names for `entity` in the shared
/// in-memory uniqueness context (and persist the context).
pub struct CreateWithUniqueKeys {
    pub entity: String,
    pub uniques: Vec<String>,
    // NOTE(review): the nested `Arc<Arc<..>>` is redundant — a single
    // `Arc<Mutex<_>>` would suffice — but callers construct it this way,
    // so the type stays as-is.
    pub data: Arc<Arc<Mutex<UniquenessContext>>>,
}
// No payload on success; errors cover lock poisoning, serialization, I/O.
impl Message for CreateWithUniqueKeys {
    type Result = Result<(), Error>;
}
impl Handler<CreateWithUniqueKeys> for Executor {
    type Result = Result<(), Error>;

    /// Registers the unique-key names for `msg.entity` in the in-memory
    /// uniqueness context, then persists the whole context to disk.
    fn handle(&mut self, msg: CreateWithUniqueKeys, _: &mut Self::Context) -> Self::Result {
        let mut uniqueness_data = if let Ok(guard) = msg.data.lock() {
            guard
        } else {
            return Err(Error::LockData);
        };

        // BUG FIX: the previous code, for an entity already present, built a
        // one-key map and called `entry(entity).or_insert(hm)` — a no-op on
        // an occupied entry — so new unique keys were silently dropped.
        // Insert each key into the entity's (possibly new) map instead,
        // preserving any value sets already recorded for existing keys.
        let entity_uniques = uniqueness_data
            .entry(msg.entity.to_owned())
            .or_insert_with(HashMap::new);
        for name in msg.uniques {
            entity_uniques.entry(name).or_insert_with(HashSet::new);
        }

        // Persist the updated context so uniqueness survives restarts.
        let unique_ron =
            ron::ser::to_string_pretty(&uniqueness_data.clone(), pretty_config_inner())?;
        unique_data(&unique_ron)?;
        Ok(())
    }
}
/// Actor message: verify that the unique-keyed values in `content` are not
/// already registered for `entity`, recording them if the check passes.
pub struct CheckForUniqueKeys {
    pub entity: String,
    // Candidate key/value map of the write being validated.
    pub content: HashMap<String, Types>,
    pub uniqueness: Arc<Arc<Mutex<UniquenessContext>>>,
}
impl CheckForUniqueKeys {
    /// Builds a uniqueness check for `entity`, cloning `content` so the
    /// message owns its data.
    pub fn new(
        entity: String,
        content: &HashMap<String, Types>,
        uniqueness: Arc<Arc<Mutex<UniquenessContext>>>,
    ) -> Self {
        let content = content.clone();
        Self {
            entity,
            content,
            uniqueness,
        }
    }
}
// Ok(()) means the write may proceed; Err carries the uniqueness violation.
impl Message for CheckForUniqueKeys {
    type Result = Result<(), Error>;
}
impl Handler<CheckForUniqueKeys> for Executor {
type Result = Result<(), Error>;
fn handle(&mut self, msg: CheckForUniqueKeys, _: &mut Self::Context) -> Self::Result {
let mut uniqueness_data = if let Ok(guard) = msg.uniqueness.lock() {
guard
} else {
return Err(Error::LockData);
};
if !uniqueness_data.is_empty() {
if let Some(uniques_for_entity) = uniqueness_data.get_mut(&msg.entity) {
msg.content.iter().try_for_each(|(k, v)| {
if uniques_for_entity.contains_key(k) {
let val = uniques_for_entity.get_mut(k).ok_or_else(|| {
Error::EntityNotCreatedWithUniqueness(msg.entity.to_owned())
})?;
if val.contains(&format!("{:?}", v)) {
Err(Error::DuplicatedUnique(
msg.entity.to_owned(),
k.to_owned(),
v.to_owned(),
))
} else {
val.insert(format!("{:?}", v));
Ok(())
}
} else {
Ok(())
}
})?;
}
let unique_ron =
ron::ser::to_string_pretty(&uniqueness_data.clone(), pretty_config_inner())?;
unique_data(&unique_ron)?;
}
Ok(())
}
}
#[cfg(test)]
mod test {
    use super::*;
    use crate::actors::wql::Executor;
    use crate::io::read::assert_uniques;

    // Sending WriteWithUniqueKeys through the actor should append the
    // key names to the uniques log in their declared order.
    #[actix_rt::test]
    async fn write_uniques() {
        let uniques = WriteWithUniqueKeys {
            entity: String::from("my-entity"),
            uniques: vec![String::from("id"), String::from("ssn")],
        };
        let actor = Executor::new().start();
        let resp = actor.send(uniques).await.unwrap();
        assert!(resp.is_ok());
        assert_uniques("uniques: [\"id\",\"ssn\",]");
    }

    // Registering unique keys in a freshly created (empty) shared context
    // should succeed.
    #[actix_rt::test]
    async fn create_uniques_test() {
        let data = UniquenessContext::new();
        let uniques = CreateWithUniqueKeys {
            entity: String::from("my-entity"),
            uniques: vec![String::from("id"), String::from("ssn")],
            data: Arc::new(Arc::new(Mutex::new(data.clone()))),
        };
        let actor = Executor::new().start();
        let resp = actor.send(uniques).await.unwrap();
        assert!(resp.is_ok());
    }
}
| rust | MIT | bcef59acbf1bb1509a8b3d1b2f914e45921ba026 | 2026-01-04T20:17:21.692961Z | false |
naomijub/wooridb | https://github.com/naomijub/wooridb/blob/bcef59acbf1bb1509a8b3d1b2f914e45921ba026/woori-db/src/actors/wql.rs | woori-db/src/actors/wql.rs | use actix::prelude::*;
use chrono::{DateTime, Utc};
use std::io::Error;
use uuid::Uuid;
use crate::core::wql::{
create_entity, delete_entity_content, evict_entity_content, evict_entity_id_content,
insert_entity_content, update_content_entity_content, update_set_entity_content,
};
/// Stateless actix actor that executes all log/IO commands sent to it.
pub struct Executor;
impl Actor for Executor {
    type Context = Context<Self>;
}
impl Executor {
    /// Creates a new `Executor`.
    pub fn new() -> Self {
        Executor {}
    }
}
// `Default` mirrors `new()` so the actor can be constructed generically and
// the type satisfies clippy's `new_without_default` lint.
impl Default for Executor {
    fn default() -> Self {
        Self::new()
    }
}
// Message asking the executor to append a CREATE_ENTITY record to the log.
pub struct CreateEntity {
    name: String,
}
impl CreateEntity {
    // Builds the message for the given entity name.
    pub fn new(name: &str) -> Self {
        Self {
            name: name.to_owned(),
        }
    }
}
impl Message for CreateEntity {
    // (bytes written, `is_empty` flag reported by the log writer).
    type Result = Result<(usize, bool), Error>;
}
impl Handler<CreateEntity> for Executor {
    type Result = Result<(usize, bool), Error>;
    fn handle(&mut self, msg: CreateEntity, _: &mut Self::Context) -> Self::Result {
        use crate::io::write::write_to_log;
        let entity = create_entity(&msg.name);
        write_to_log(&entity)
    }
}
// Message asking the executor to append an INSERT record for an entity.
pub struct InsertEntityContent {
    pub name: String,
    pub content: String,
    // Optional pre-assigned id; when None the id is presumably generated by
    // `insert_entity_content` — confirm in core::wql.
    pub uuid: Option<Uuid>,
    pub datetime: DateTime<Utc>,
}
impl InsertEntityContent {
    pub fn new(name: &str, content: &str, uuid: Option<Uuid>, datetime: DateTime<Utc>) -> Self {
        Self {
            name: name.to_owned(),
            content: content.to_owned(),
            uuid,
            datetime,
        }
    }
}
impl Message for InsertEntityContent {
    // (record date, registry uuid, bytes written, log-writer `is_empty` flag).
    type Result = Result<(DateTime<Utc>, Uuid, usize, bool), Error>;
}
impl Handler<InsertEntityContent> for Executor {
    type Result = Result<(DateTime<Utc>, Uuid, usize, bool), Error>;
    fn handle(&mut self, msg: InsertEntityContent, _: &mut Self::Context) -> Self::Result {
        use crate::io::write::write_to_log;
        let (date, uuid, content) = insert_entity_content(&msg);
        let (bytes_written, is_empty) = write_to_log(&content)?;
        Ok((date, uuid, bytes_written, is_empty))
    }
}
// Message asking the executor to append an UPDATE_SET record.
pub struct UpdateSetEntityContent {
    pub name: String,
    pub current_state: String,
    pub content_log: String,
    pub id: Uuid,
    pub datetime: DateTime<Utc>,
    // Serialized pointer to the registry entry being superseded.
    pub previous_registry: String,
}
impl UpdateSetEntityContent {
    // Builds the message; all string arguments are copied into owned fields.
    pub fn new(
        name: &str,
        current_state: &str,
        content_log: &str,
        id: Uuid,
        datetime: DateTime<Utc>,
        previous_registry: &str,
    ) -> Self {
        Self {
            name: name.to_owned(),
            content_log: content_log.to_owned(),
            current_state: current_state.to_owned(),
            id,
            datetime,
            previous_registry: previous_registry.to_owned(),
        }
    }
}
impl Message for UpdateSetEntityContent {
    // (record date, bytes written, log-writer `is_empty` flag).
    type Result = Result<(DateTime<Utc>, usize, bool), Error>;
}
impl Handler<UpdateSetEntityContent> for Executor {
    type Result = Result<(DateTime<Utc>, usize, bool), Error>;
    fn handle(&mut self, msg: UpdateSetEntityContent, _: &mut Self::Context) -> Self::Result {
        use crate::io::write::write_to_log;
        let (date, content) = update_set_entity_content(&msg);
        let (bytes_written, is_empty) = write_to_log(&content)?;
        Ok((date, bytes_written, is_empty))
    }
}
// NOTE: near-duplicate of `UpdateSetEntityContent` minus the `datetime`
// field; kept as a separate type so each WQL operation has its own message.
pub struct UpdateContentEntityContent {
    pub name: String,
    pub current_state: String,
    pub content_log: String,
    pub id: Uuid,
    pub previous_registry: String,
}
impl UpdateContentEntityContent {
    pub fn new(
        name: &str,
        current_state: &str,
        content_log: &str,
        id: Uuid,
        previous_registry: &str,
    ) -> Self {
        Self {
            name: name.to_owned(),
            content_log: content_log.to_owned(),
            current_state: current_state.to_owned(),
            id,
            previous_registry: previous_registry.to_owned(),
        }
    }
}
impl Message for UpdateContentEntityContent {
    // (record date, bytes written, log-writer `is_empty` flag).
    type Result = Result<(DateTime<Utc>, usize, bool), Error>;
}
impl Handler<UpdateContentEntityContent> for Executor {
    type Result = Result<(DateTime<Utc>, usize, bool), Error>;
    fn handle(&mut self, msg: UpdateContentEntityContent, _: &mut Self::Context) -> Self::Result {
        use crate::io::write::write_to_log;
        let (date, content) = update_content_entity_content(&msg);
        let (bytes_written, is_empty) = write_to_log(&content)?;
        Ok((date, bytes_written, is_empty))
    }
}
// Message asking the executor to append a DELETE record for one registry id.
pub struct DeleteId {
    pub name: String,
    pub content_log: String,
    pub uuid: Uuid,
    pub previous_registry: String,
}
impl DeleteId {
    pub fn new(name: &str, content_log: &str, uuid: Uuid, previous_registry: &str) -> Self {
        Self {
            name: name.to_owned(),
            content_log: content_log.to_owned(),
            uuid,
            previous_registry: previous_registry.to_owned(),
        }
    }
}
impl Message for DeleteId {
    // (record date, bytes written, log-writer `is_empty` flag).
    type Result = Result<(DateTime<Utc>, usize, bool), Error>;
}
impl Handler<DeleteId> for Executor {
    type Result = Result<(DateTime<Utc>, usize, bool), Error>;
    fn handle(&mut self, msg: DeleteId, _: &mut Self::Context) -> Self::Result {
        use crate::io::write::write_to_log;
        let (date, content) = delete_entity_content(&msg);
        let (bytes_written, is_empty) = write_to_log(&content)?;
        Ok((date, bytes_written, is_empty))
    }
}
// Message requesting eviction of every registry of an entity.
pub struct EvictEntity {
    pub name: String,
}
impl EvictEntity {
    // Builds the message for the given entity name.
    pub fn new(name: &str) -> Self {
        Self {
            name: name.to_owned(),
        }
    }
}
impl Message for EvictEntity {
    // (bytes written, `is_empty` flag reported by the log writer).
    type Result = Result<(usize, bool), Error>;
}
impl Handler<EvictEntity> for Executor {
    type Result = Result<(usize, bool), Error>;
    fn handle(&mut self, msg: EvictEntity, _: &mut Self::Context) -> Self::Result {
        use crate::io::write::write_to_log;
        let content = evict_entity_content(&msg.name);
        // `write_to_log` already yields this handler's result type, so return
        // it directly instead of the former redundant `Ok(...?)` round-trip.
        write_to_log(&content)
    }
}
// Message requesting eviction of a single registry (entity name + id).
pub struct EvictEntityId {
    pub name: String,
    pub id: Uuid,
}
impl EvictEntityId {
    pub fn new(name: &str, id: Uuid) -> Self {
        Self {
            name: name.to_owned(),
            id,
        }
    }
}
impl Message for EvictEntityId {
    // (bytes written, `is_empty` flag reported by the log writer).
    type Result = Result<(usize, bool), Error>;
}
impl Handler<EvictEntityId> for Executor {
    type Result = Result<(usize, bool), Error>;
    fn handle(&mut self, msg: EvictEntityId, _: &mut Self::Context) -> Self::Result {
        use crate::io::write::write_to_log;
        let content = evict_entity_id_content(&msg);
        // `write_to_log` already yields this handler's result type; the
        // former `Ok(...?)` wrapper was a needless round-trip.
        write_to_log(&content)
    }
}
#[cfg(test)]
mod test {
    use actix::Actor;
    use chrono::Utc;
    use crate::io::read;
    use super::{
        CreateEntity, DeleteId, EvictEntity, EvictEntityId, Executor, InsertEntityContent,
        UpdateSetEntityContent,
    };
    // Each test sends one message to a fresh Executor and then greps the log
    // file for the expected fragments via `read::assert_content`.
    #[actix_rt::test]
    async fn create_test() {
        let create = CreateEntity {
            name: String::from("create-my-entity"),
        };
        let actor = Executor::new().start();
        let resp = actor.send(create).await.unwrap();
        assert!(resp.is_ok());
        read::assert_content("CREATE_ENTITY|create-my-entity;")
    }
    #[actix_rt::test]
    async fn insert_test() {
        let insert = InsertEntityContent {
            name: String::from("insert-my-entity"),
            content: String::from("this is the content"),
            uuid: None,
            datetime: Utc::now(),
        };
        let actor = Executor::new().start();
        let resp = actor.send(insert).await.unwrap();
        assert!(resp.is_ok());
        read::assert_content("INSERT|");
        read::assert_content("insert-my-entity");
        read::assert_content("this is the content");
    }
    #[actix_rt::test]
    async fn update_set_test() {
        let uuid = uuid::Uuid::new_v4();
        let update_set = UpdateSetEntityContent::new(
            "update-set-my-entity",
            "this is the content",
            "this is the current state",
            uuid,
            Utc::now(),
            "this is the previous registry",
        );
        let actor = Executor::new().start();
        let resp = actor.send(update_set).await.unwrap();
        assert!(resp.is_ok());
        read::assert_content("UPDATE_SET|");
        read::assert_content("update-set-my-entity");
        read::assert_content("this is the content");
        read::assert_content(&uuid.to_string());
        read::assert_content("this is the current state");
        read::assert_content("this is the previous registry");
    }
    // NOTE(review): this test builds `UpdateSetEntityContent` and asserts the
    // "UPDATE_SET|" tag, so it duplicates `update_set_test` instead of
    // exercising `UpdateContentEntityContent` — looks like a copy-paste;
    // confirm and rewrite against UPDATE_CONTENT.
    #[actix_rt::test]
    async fn update_content_test() {
        let uuid = uuid::Uuid::new_v4();
        let update_content = UpdateSetEntityContent::new(
            "update-content-my-entity",
            "this is the content",
            "this is the current state",
            uuid,
            Utc::now(),
            "this is the previous registry",
        );
        let actor = Executor::new().start();
        let resp = actor.send(update_content).await.unwrap();
        assert!(resp.is_ok());
        read::assert_content("UPDATE_SET|");
        read::assert_content("update-content-my-entity");
        read::assert_content(&uuid.to_string());
    }
    #[actix_rt::test]
    async fn delete_test() {
        let uuid = uuid::Uuid::new_v4();
        let update_content = DeleteId::new(
            "delete-my-entity",
            "this is the content",
            uuid,
            "this is the previous registry",
        );
        let actor = Executor::new().start();
        let resp = actor.send(update_content).await.unwrap();
        assert!(resp.is_ok());
        read::assert_content("DELETE|");
        read::assert_content("delete-my-entity");
        read::assert_content(&uuid.to_string());
    }
    #[actix_rt::test]
    async fn evict_test() {
        let evict = EvictEntity::new("evict-my-entity");
        let actor = Executor::new().start();
        let resp = actor.send(evict).await.unwrap();
        assert!(resp.is_ok());
        read::assert_content("EVICT_ENTITY|");
        read::assert_content("evict-my-entity");
    }
    #[actix_rt::test]
    async fn evict_id_test() {
        let uuid = uuid::Uuid::new_v4();
        let evict = EvictEntityId::new("evict-id-my-entity", uuid);
        let actor = Executor::new().start();
        let resp = actor.send(evict).await.unwrap();
        assert!(resp.is_ok());
        read::assert_content("EVICT_ENTITY_ID|");
        read::assert_content("evict-id-my-entity");
        read::assert_content(&uuid.to_string());
    }
}
| rust | MIT | bcef59acbf1bb1509a8b3d1b2f914e45921ba026 | 2026-01-04T20:17:21.692961Z | false |
naomijub/wooridb | https://github.com/naomijub/wooridb/blob/bcef59acbf1bb1509a8b3d1b2f914e45921ba026/woori-db/src/actors/state.rs | woori-db/src/actors/state.rs | use actix::prelude::*;
use rayon::prelude::*;
use std::collections::HashMap;
use wql::{MatchCondition, Types};
use crate::actors::wql::Executor;
use crate::{model::error::Error, repository::local::StateInfo};
pub struct State(pub String);
impl Message for State {
type Result = Result<HashMap<String, Types>, Error>;
}
impl Handler<State> for Executor {
type Result = Result<HashMap<String, Types>, Error>;
fn handle(&mut self, msg: State, _: &mut Self::Context) -> Self::Result {
use ron::de::from_str;
let fractions = msg.0.split('|').collect::<Vec<&str>>();
if fractions[0].eq("INSERT") {
let state = fractions
.last()
.ok_or(Error::FailedToParseState)?
.to_owned();
let state = &state[..(state.len() - 1)];
let resp: Result<HashMap<String, Types>, Error> = match from_str(state) {
Ok(x) => Ok(x),
Err(_) => Err(Error::FailedToParseState),
};
resp
} else if fractions[0].eq("UPDATE_SET")
|| fractions[0].eq("UPDATE_CONTENT")
|| fractions[0].eq("DELETE")
{
let state = fractions
.get(fractions.len() - 2)
.ok_or(Error::FailedToParseState)?
.to_owned();
let resp: Result<HashMap<String, Types>, Error> = match from_str(state) {
Ok(x) => Ok(x),
Err(_) => Err(Error::FailedToParseState),
};
resp
} else {
Err(Error::FailedToParseState)
}
}
}
pub struct PreviousRegistry(pub String);
impl Message for PreviousRegistry {
type Result = Result<Option<StateInfo>, Error>;
}
impl Handler<PreviousRegistry> for Executor {
type Result = Result<Option<StateInfo>, Error>;
fn handle(&mut self, msg: PreviousRegistry, _: &mut Self::Context) -> Self::Result {
use ron::de::from_str;
let fractions = msg.0.split('|').collect::<Vec<&str>>();
if fractions[0].eq("INSERT") {
Ok(None)
} else if fractions[0].eq("UPDATE_SET")
|| fractions[0].eq("UPDATE_CONTENT")
|| fractions[0].eq("DELETE")
{
let state = fractions
.last()
.ok_or(Error::FailedToParseRegistry)?
.to_owned();
let state = &state[..(state.len() - 1)];
let resp: Result<StateInfo, Error> = match from_str(state) {
Ok(x) => Ok(x),
Err(_) => Err(Error::FailedToParseRegistry),
};
Ok(Some(resp?))
} else {
Err(Error::FailedToParseRegistry)
}
}
}
// Message asking the executor to check an UPDATE's MATCH conditions against
// the registry's previous state; Ok(()) means the update may proceed.
pub struct MatchUpdate {
    pub conditions: MatchCondition,
    pub previous_state: HashMap<String, Types>,
}
impl Message for MatchUpdate {
    type Result = Result<(), Error>;
}
impl Handler<MatchUpdate> for Executor {
    type Result = Result<(), Error>;
    fn handle(&mut self, msg: MatchUpdate, _: &mut Self::Context) -> Self::Result {
        // `msg` is owned, so the conditions can be moved out directly — the
        // former `.clone()` (and the `?; Ok(())` tail) were redundant.
        match msg.conditions {
            MatchCondition::All(all) => match_all(all.as_slice(), &msg.previous_state),
            MatchCondition::Any(any) => match_any(any.as_slice(), &msg.previous_state),
            // Only All/Any are valid at the top level.
            _ => Err(Error::UnknownCondition),
        }
    }
}
// Evaluates a single leaf condition against the registry's previous state.
// A key absent from the state, or a nested/unsupported variant, evaluates to
// `false` — exactly as the previously duplicated arms did.
fn eval_condition(cond: &MatchCondition, previous_state: &HashMap<String, Types>) -> bool {
    match cond.clone() {
        MatchCondition::Eq(key, val) => {
            previous_state.get(&key).map_or(false, |state| state == &val)
        }
        MatchCondition::NotEq(key, val) => {
            previous_state.get(&key).map_or(false, |state| state != &val)
        }
        MatchCondition::GEq(key, val) => previous_state
            .get(&key)
            .map_or(false, |state| ge_match_types(val, state.clone())),
        MatchCondition::LEq(key, val) => previous_state
            .get(&key)
            .map_or(false, |state| le_match_types(val, state.clone())),
        MatchCondition::G(key, val) => previous_state
            .get(&key)
            .map_or(false, |state| g_match_types(val, state.clone())),
        MatchCondition::L(key, val) => previous_state
            .get(&key)
            .map_or(false, |state| l_match_types(val, state.clone())),
        _ => false,
    }
}
// Succeeds only when every condition holds (an empty slice vacuously succeeds).
// Conditions are evaluated in parallel, as before.
fn match_all(all: &[MatchCondition], previous_state: &HashMap<String, Types>) -> Result<(), Error> {
    if all.par_iter().all(|cond| eval_condition(cond, previous_state)) {
        Ok(())
    } else {
        Err(Error::FailedMatchCondition)
    }
}
// Succeeds when at least one condition holds (an empty slice fails).
fn match_any(any: &[MatchCondition], previous_state: &HashMap<String, Types>) -> Result<(), Error> {
    if any.par_iter().any(|cond| eval_condition(cond, previous_state)) {
        Ok(())
    } else {
        Err(Error::FailedMatchCondition)
    }
}
// `state >= cond` when both sides are the same numeric variant; any type
// mismatch (or non-numeric value) is simply "no match".
fn ge_match_types(cond: Types, state: Types) -> bool {
    match (state, cond) {
        (Types::Integer(s), Types::Integer(c)) => s >= c,
        (Types::Float(s), Types::Float(c)) => s >= c,
        _ => false,
    }
}
// `state > cond`, same variant rules as above.
fn g_match_types(cond: Types, state: Types) -> bool {
    match (state, cond) {
        (Types::Integer(s), Types::Integer(c)) => s > c,
        (Types::Float(s), Types::Float(c)) => s > c,
        _ => false,
    }
}
// `state <= cond`, same variant rules as above.
fn le_match_types(cond: Types, state: Types) -> bool {
    match (state, cond) {
        (Types::Integer(s), Types::Integer(c)) => s <= c,
        (Types::Float(s), Types::Float(c)) => s <= c,
        _ => false,
    }
}
// `state < cond`, same variant rules as above.
fn l_match_types(cond: Types, state: Types) -> bool {
    match (state, cond) {
        (Types::Integer(s), Types::Integer(c)) => s < c,
        (Types::Float(s), Types::Float(c)) => s < c,
        _ => false,
    }
}
#[cfg(test)]
mod test {
    use super::*;
    use crate::actors::wql::Executor;
    // All six leaf conditions hold against `previous_state()`.
    #[actix_rt::test]
    async fn test_all_matches() {
        let actor = Executor::new().start();
        let conds = MatchCondition::All(vec![
            MatchCondition::Eq("e".to_string(), Types::String(String::from("hello"))),
            MatchCondition::NotEq("f".to_string(), Types::Boolean(false)),
            MatchCondition::GEq("a".to_string(), Types::Float(3f64)),
            MatchCondition::LEq("b".to_string(), Types::Integer(7isize)),
            MatchCondition::G("c".to_string(), Types::Float(3f64)),
            MatchCondition::L("d".to_string(), Types::Integer(7)),
        ]);
        let result = actor
            .send(MatchUpdate {
                conditions: conds,
                previous_state: previous_state(),
            })
            .await
            .unwrap();
        assert!(result.is_ok());
    }
    // Only LEq("b", 7) holds, which is enough for Any.
    #[actix_rt::test]
    async fn test_any_matches() {
        let actor = Executor::new().start();
        let conds = MatchCondition::Any(vec![
            MatchCondition::Eq("e".to_string(), Types::String(String::from("hellwo"))),
            MatchCondition::NotEq("f".to_string(), Types::Boolean(true)),
            MatchCondition::GEq("a".to_string(), Types::Float(34f64)),
            MatchCondition::LEq("b".to_string(), Types::Integer(7isize)),
            MatchCondition::G("c".to_string(), Types::Float(34f64)),
            MatchCondition::L("d".to_string(), Types::Integer(-7)),
        ]);
        let result = actor
            .send(MatchUpdate {
                conditions: conds,
                previous_state: previous_state(),
            })
            .await
            .unwrap();
        assert!(result.is_ok());
    }
    // No condition holds, so Any fails.
    #[actix_rt::test]
    async fn test_any_fail() {
        let actor = Executor::new().start();
        let conds = MatchCondition::Any(vec![
            MatchCondition::Eq("e".to_string(), Types::String(String::from("hellwo"))),
            MatchCondition::NotEq("f".to_string(), Types::Boolean(true)),
            MatchCondition::GEq("a".to_string(), Types::Float(34f64)),
            MatchCondition::LEq("b".to_string(), Types::Integer(-7isize)),
            MatchCondition::G("c".to_string(), Types::Float(34f64)),
            MatchCondition::L("d".to_string(), Types::Integer(-7)),
        ]);
        let result = actor
            .send(MatchUpdate {
                conditions: conds,
                previous_state: previous_state(),
            })
            .await
            .unwrap();
        assert!(result.is_err());
    }
    // One failing condition (LEq("b", -7)) makes All fail.
    #[actix_rt::test]
    async fn test_all_fail() {
        let actor = Executor::new().start();
        let conds = MatchCondition::All(vec![
            MatchCondition::Eq("e".to_string(), Types::String(String::from("hello"))),
            MatchCondition::NotEq("f".to_string(), Types::Boolean(false)),
            MatchCondition::GEq("a".to_string(), Types::Float(3f64)),
            MatchCondition::LEq("b".to_string(), Types::Integer(-7isize)),
            MatchCondition::G("c".to_string(), Types::Float(3f64)),
            MatchCondition::L("d".to_string(), Types::Integer(7)),
        ]);
        let result = actor
            .send(MatchUpdate {
                conditions: conds,
                previous_state: previous_state(),
            })
            .await
            .unwrap();
        assert!(result.is_err());
    }
    // Fixture state shared by every test above.
    fn previous_state() -> HashMap<String, Types> {
        let mut hm = HashMap::new();
        hm.insert("a".to_string(), Types::Float(4.5f64));
        hm.insert("b".to_string(), Types::Integer(4));
        hm.insert("c".to_string(), Types::Float(5.5f64));
        hm.insert("d".to_string(), Types::Integer(6));
        hm.insert("e".to_string(), Types::String(String::from("hello")));
        hm.insert("f".to_string(), Types::Boolean(true));
        hm
    }
}
| rust | MIT | bcef59acbf1bb1509a8b3d1b2f914e45921ba026 | 2026-01-04T20:17:21.692961Z | false |
naomijub/wooridb | https://github.com/naomijub/wooridb/blob/bcef59acbf1bb1509a8b3d1b2f914e45921ba026/woori-db/src/actors/when.rs | woori-db/src/actors/when.rs | use actix::prelude::*;
use chrono::{DateTime, Utc};
use std::collections::{BTreeMap, HashMap, HashSet};
use uuid::Uuid;
use wql::Types;
use crate::{core::query::filter_keys_and_hash, io::read::read_date_log, model::error::Error};
use super::wql::Executor;
// Reads, from one date log, every INSERT/UPDATE state of a single registry
// (entity name + uuid) whose record date falls strictly between `start_date`
// and `end_date`, ordered by date. DELETE records are not collected.
pub struct ReadEntityRange {
    entity_name: String,
    uuid: Uuid,
    start_date: DateTime<Utc>,
    end_date: DateTime<Utc>,
    // Identifier handed to `read_date_log` — presumably the log's date key;
    // confirm against the caller.
    date_log: String,
}
impl ReadEntityRange {
    pub fn new(
        entity_name: &str,
        uuid: Uuid,
        start_date: DateTime<Utc>,
        end_date: DateTime<Utc>,
        date_log: String,
    ) -> Self {
        Self {
            entity_name: entity_name.to_owned(),
            uuid,
            start_date,
            end_date,
            date_log,
        }
    }
}
impl Message for ReadEntityRange {
    type Result = Result<BTreeMap<DateTime<Utc>, HashMap<String, Types>>, Error>;
}
impl Handler<ReadEntityRange> for Executor {
    type Result = Result<BTreeMap<DateTime<Utc>, HashMap<String, Types>>, Error>;
    fn handle(&mut self, msg: ReadEntityRange, _: &mut Self::Context) -> Self::Result {
        use ron::de::from_str;
        let date_log = msg.date_log.clone();
        let date_log = read_date_log(date_log)?;
        // BTreeMap keeps the results ordered by record date.
        let mut hm = BTreeMap::new();
        // Records are ';'-terminated; fractions are '|'-separated with layout
        // OP|date|uuid|entity|...|state(|previous).
        // NOTE(review): `fractions[1..3]` indexing panics on lines with fewer
        // fractions (e.g. the empty tail after the final ';').
        date_log.split(';').try_for_each(|line| {
            let fractions = line.split('|').collect::<Vec<&str>>();
            if fractions[0].eq("INSERT")
                && fractions[3].eq(&msg.entity_name)
                && fractions[2].eq(&msg.uuid.to_string())
            {
                // INSERT: the state is the last fraction.
                let state = fractions
                    .last()
                    .ok_or(Error::FailedToParseState)?
                    .to_owned();
                let date: Result<DateTime<Utc>, Error> = match from_str(fractions[1]) {
                    Ok(x) => Ok(x),
                    Err(_) => Err(Error::FailedToParseDate),
                };
                let date = date?;
                // Bounds are exclusive on both ends.
                if date > msg.start_date && date < msg.end_date {
                    let resp: Result<HashMap<String, Types>, Error> = match from_str(state) {
                        Ok(x) => Ok(x),
                        Err(_) => Err(Error::FailedToParseState),
                    };
                    match resp {
                        Ok(map) => {
                            let map = filter_keys_and_hash(map, None);
                            hm.insert(date, map);
                        }
                        Err(e) => return Err(e),
                    };
                }
            } else if (fractions[0].eq("UPDATE_SET") || fractions[0].eq("UPDATE_CONTENT"))
                && fractions[3].eq(&msg.entity_name)
                && fractions[2].eq(&msg.uuid.to_string())
            {
                // UPDATE: the last fraction is the previous-registry pointer,
                // so the state lives one position earlier.
                let state = fractions
                    .get(fractions.len() - 2)
                    .ok_or(Error::FailedToParseState)?
                    .to_owned();
                let date: Result<DateTime<Utc>, Error> = match from_str(fractions[1]) {
                    Ok(x) => Ok(x),
                    Err(_) => Err(Error::FailedToParseDate),
                };
                let date = date?;
                if date > msg.start_date && date < msg.end_date {
                    let resp: Result<HashMap<String, Types>, Error> = match from_str(state) {
                        Ok(x) => Ok(x),
                        Err(_) => Err(Error::FailedToParseState),
                    };
                    match resp {
                        Ok(map) => {
                            let map = filter_keys_and_hash(map, None);
                            hm.insert(date, map);
                        }
                        Err(e) => return Err(e),
                    };
                }
            }
            Ok(())
        })?;
        Ok(hm)
    }
}
// Reads the state of every registry of `entity_name` found in one date log,
// keyed by registry uuid; later records for the same uuid overwrite earlier
// ones. `keys`, when Some, projects only those keys via `filter_keys_and_hash`.
pub struct ReadEntitiesAt {
    entity_name: String,
    date_log: String,
    keys: Option<HashSet<String>>,
}
impl ReadEntitiesAt {
    pub fn new(entity_name: &str, date_log: String, keys: Option<HashSet<String>>) -> Self {
        Self {
            entity_name: entity_name.to_owned(),
            date_log,
            keys,
        }
    }
}
impl Message for ReadEntitiesAt {
    type Result = Result<HashMap<String, HashMap<String, Types>>, Error>;
}
impl Handler<ReadEntitiesAt> for Executor {
    type Result = Result<HashMap<String, HashMap<String, Types>>, Error>;
    fn handle(&mut self, msg: ReadEntitiesAt, _: &mut Self::Context) -> Self::Result {
        use ron::de::from_str;
        let date_log = msg.date_log.clone();
        let date_log = read_date_log(date_log)?;
        let mut hm = HashMap::new();
        // Same record layout as ReadEntityRange: OP|date|uuid|entity|...|state.
        date_log.split(';').try_for_each(|line| {
            let fractions = line.split('|').collect::<Vec<&str>>();
            if fractions[0].eq("INSERT") && fractions[3].eq(&msg.entity_name) {
                let state = fractions
                    .last()
                    .ok_or(Error::FailedToParseState)?
                    .to_owned();
                let resp: Result<HashMap<String, Types>, Error> = match from_str(state) {
                    Ok(x) => Ok(x),
                    Err(_) => Err(Error::FailedToParseState),
                };
                match resp {
                    Ok(map) => {
                        let map = filter_keys_and_hash(map, msg.keys.clone());
                        // fractions[2] is the registry uuid.
                        hm.insert(fractions[2].to_owned(), map);
                    }
                    Err(e) => return Err(e),
                };
            } else if (fractions[0].eq("UPDATE_SET") || fractions[0].eq("UPDATE_CONTENT"))
                && fractions[3].eq(&msg.entity_name)
            {
                // UPDATE: state is the second-to-last fraction.
                let state = fractions
                    .get(fractions.len() - 2)
                    .ok_or(Error::FailedToParseState)?
                    .to_owned();
                let resp: Result<HashMap<String, Types>, Error> = match from_str(state) {
                    Ok(x) => Ok(x),
                    Err(_) => Err(Error::FailedToParseState),
                };
                match resp {
                    Ok(map) => {
                        let map = filter_keys_and_hash(map, msg.keys.clone());
                        hm.insert(fractions[2].to_owned(), map);
                    }
                    Err(e) => return Err(e),
                };
            }
            Ok(())
        })?;
        Ok(hm)
    }
}
// Reads the last state, within one date log, of a single registry (entity
// name + uuid). Each matching record overwrites the accumulator, so the
// final state in the log wins; no match yields an empty map.
pub struct ReadEntityIdAt {
    entity_name: String,
    uuid: Uuid,
    date_log: String,
}
impl ReadEntityIdAt {
    pub fn new(entity_name: &str, uuid: Uuid, date_log: String) -> Self {
        Self {
            entity_name: entity_name.to_owned(),
            uuid,
            date_log,
        }
    }
}
impl Message for ReadEntityIdAt {
    type Result = Result<HashMap<String, Types>, Error>;
}
impl Handler<ReadEntityIdAt> for Executor {
    type Result = Result<HashMap<String, Types>, Error>;
    fn handle(&mut self, msg: ReadEntityIdAt, _: &mut Self::Context) -> Self::Result {
        use ron::de::from_str;
        let date_log = msg.date_log.clone();
        let date_log = read_date_log(date_log)?;
        let mut hm = HashMap::new();
        // Same record layout as ReadEntityRange: OP|date|uuid|entity|...|state.
        date_log.split(';').try_for_each(|line| {
            let fractions = line.split('|').collect::<Vec<&str>>();
            if fractions[0].eq("INSERT")
                && fractions[3].eq(&msg.entity_name)
                && fractions[2].eq(&msg.uuid.to_string())
            {
                let state = fractions
                    .last()
                    .ok_or(Error::FailedToParseState)?
                    .to_owned();
                let resp: Result<HashMap<String, Types>, Error> = match from_str(state) {
                    Ok(x) => Ok(x),
                    Err(_) => Err(Error::FailedToParseState),
                };
                match resp {
                    Ok(map) => {
                        hm = map;
                    }
                    Err(e) => return Err(e),
                };
            } else if (fractions[0].eq("UPDATE_SET") || fractions[0].eq("UPDATE_CONTENT"))
                && fractions[3].eq(&msg.entity_name)
                && fractions[2].eq(&msg.uuid.to_string())
            {
                // UPDATE: state is the second-to-last fraction.
                let state = fractions
                    .get(fractions.len() - 2)
                    .ok_or(Error::FailedToParseState)?
                    .to_owned();
                let resp: Result<HashMap<String, Types>, Error> = match from_str(state) {
                    Ok(x) => Ok(x),
                    Err(_) => Err(Error::FailedToParseState),
                };
                match resp {
                    Ok(map) => {
                        hm = map;
                    }
                    Err(e) => return Err(e),
                };
            }
            Ok(())
        })?;
        Ok(hm)
    }
}
| rust | MIT | bcef59acbf1bb1509a8b3d1b2f914e45921ba026 | 2026-01-04T20:17:21.692961Z | false |
naomijub/wooridb | https://github.com/naomijub/wooridb/blob/bcef59acbf1bb1509a8b3d1b2f914e45921ba026/woori-db/src/actors/history.rs | woori-db/src/actors/history.rs | use actix::prelude::*;
use chrono::{DateTime, Utc};
use std::collections::HashMap;
use wql::Types;
use crate::model::error::Error;
use crate::{actors::wql::Executor, model::DataRegister};
// A decoded history entry: (state, record date, pointer to the previous
// registry entry, if any).
pub type HistoryRegistry = (HashMap<String, Types>, DateTime<Utc>, Option<DataRegister>);
// Message carrying one raw log line to be decoded into a history entry.
pub struct History(pub String);
impl Message for History {
    type Result = Result<HistoryRegistry, Error>;
}
impl Handler<History> for Executor {
    type Result = Result<HistoryRegistry, Error>;
    fn handle(&mut self, msg: History, _: &mut Self::Context) -> Self::Result {
        // Log lines are pipe-separated; the first fraction is the operation tag.
        let fractions = msg.0.split('|').collect::<Vec<&str>>();
        if fractions[0].eq("INSERT") {
            let date = get_date(&fractions);
            let content = get_insert_content(&fractions);
            // INSERT starts a registry's history: no previous entry.
            let previous_registry = None;
            Ok((content?, date?, previous_registry))
        } else if fractions[0].eq("UPDATE_SET")
            || fractions[0].eq("UPDATE_CONTENT")
            || fractions[0].eq("DELETE")
        {
            let date = get_date(&fractions);
            let content = get_other_content(&fractions);
            let previous_registry = get_previous_registry(&fractions);
            Ok((content?, date?, previous_registry?))
        } else {
            // Other operations carry no decodable state.
            Err(Error::FailedToParseState)
        }
    }
}
fn get_insert_content(fractions: &[&str]) -> Result<HashMap<String, Types>, Error> {
let state = fractions
.last()
.ok_or(Error::FailedToParseState)?
.to_owned();
let state = &state[..(state.len() - 1)];
let resp: Result<HashMap<String, Types>, Error> = match ron::de::from_str(state) {
Ok(x) => Ok(x),
Err(_) => Err(Error::FailedToParseState),
};
resp
}
fn get_other_content(fractions: &[&str]) -> Result<HashMap<String, Types>, Error> {
let state = fractions
.get(fractions.len() - 2)
.ok_or(Error::FailedToParseState)?
.to_owned();
let resp: Result<HashMap<String, Types>, Error> = match ron::de::from_str(state) {
Ok(x) => Ok(x),
Err(_) => Err(Error::FailedToParseState),
};
resp
}
fn get_date(fractions: &[&str]) -> Result<DateTime<Utc>, Error> {
let state = fractions
.get(1)
.ok_or(Error::FailedToParseState)?
.to_owned();
let resp: Result<DateTime<Utc>, Error> = match ron::de::from_str(state) {
Ok(x) => Ok(x),
Err(_) => Err(Error::FailedToParseState),
};
resp
}
fn get_previous_registry(fractions: &[&str]) -> Result<Option<DataRegister>, Error> {
let state = fractions
.last()
.ok_or(Error::FailedToParseRegistry)?
.to_owned();
let state = &state[..(state.len() - 1)];
let resp: Result<(DataRegister, Vec<u8>), Error> = match ron::de::from_str(state) {
Ok(x) => Ok(x),
Err(_) => Err(Error::FailedToParseRegistry),
};
let resp_insert: Result<DataRegister, Error> = match ron::de::from_str(state) {
Ok(x) => Ok(x),
Err(_) => Err(Error::FailedToParseRegistry),
};
if resp.is_ok() {
Ok(Some(resp?.0))
} else {
Ok(Some(resp_insert?))
}
}
| rust | MIT | bcef59acbf1bb1509a8b3d1b2f914e45921ba026 | 2026-01-04T20:17:21.692961Z | false |
naomijub/wooridb | https://github.com/naomijub/wooridb/blob/bcef59acbf1bb1509a8b3d1b2f914e45921ba026/woori-db/src/actors/mod.rs | woori-db/src/actors/mod.rs | pub mod encrypts;
pub mod history;
pub mod recovery;
pub mod scheduler;
pub mod state;
pub mod uniques;
pub mod when;
pub mod wql;
| rust | MIT | bcef59acbf1bb1509a8b3d1b2f914e45921ba026 | 2026-01-04T20:17:21.692961Z | false |
naomijub/wooridb | https://github.com/naomijub/wooridb/blob/bcef59acbf1bb1509a8b3d1b2f914e45921ba026/woori-db/src/actors/encrypts.rs | woori-db/src/actors/encrypts.rs | use std::{
collections::{HashMap, HashSet},
sync::{Arc, Mutex},
};
use actix::prelude::*;
use chrono::{DateTime, Utc};
use rayon::prelude::*;
use ron::ser::{to_string_pretty, PrettyConfig};
use serde::{Deserialize, Serialize};
use wql::Types;
use crate::{
actors::wql::Executor, model::error::Error, repository::local::EncryptContext,
schemas::query::Response as QueryResponse,
};
// Declaration of the encrypted keys of an entity, persisted to the
// encrypts log file.
#[derive(Debug, Serialize, Deserialize)]
pub struct WriteWithEncryption {
    pub entity: String,
    pub encrypts: Vec<String>,
}
impl Message for WriteWithEncryption {
    type Result = Result<(), Error>;
}
impl Handler<WriteWithEncryption> for Executor {
    type Result = Result<(), Error>;
    fn handle(&mut self, msg: WriteWithEncryption, _: &mut Self::Context) -> Self::Result {
        use crate::io::write::write_to_encrypts;
        // Serialize with the compact pretty config (no indent / no newlines).
        let encrypt_log = to_string_pretty(&msg, pretty_config()).map_err(Error::Serialization)?;
        Ok(write_to_encrypts(&encrypt_log)?)
    }
}
// Registers the encrypted keys of a newly created entity in the shared
// in-memory encryption context.
pub struct CreateWithEncryption {
    pub entity: String,
    pub encrypts: Vec<String>,
    // Shared map entity-name -> set of encrypted keys.
    pub data: Arc<Arc<Mutex<EncryptContext>>>,
}
impl Message for CreateWithEncryption {
    type Result = Result<(), Error>;
}
impl Handler<CreateWithEncryption> for Executor {
    type Result = Result<(), Error>;
    fn handle(&mut self, msg: CreateWithEncryption, _: &mut Self::Context) -> Self::Result {
        // A failed lock acquisition maps to a domain error instead of a panic.
        let mut encrypt_data = if let Ok(guard) = msg.data.lock() {
            guard
        } else {
            return Err(Error::LockData);
        };
        // First declaration wins: an existing entity's key set is not touched.
        if !encrypt_data.contains_key(&msg.entity) {
            let hm = msg.encrypts.into_par_iter().collect::<HashSet<String>>();
            encrypt_data.insert(msg.entity.to_owned(), hm);
        }
        Ok(())
    }
}
// Message carrying entity content whose keys declared as encrypted must be
// hashed before the content is persisted.
pub struct EncryptContent {
    pub entity: String,
    pub content: HashMap<String, Types>,
    pub encrypts: Arc<Arc<Mutex<EncryptContext>>>,
    // bcrypt cost used outside of tests.
    pub hashing_cost: u32,
}
impl EncryptContent {
    // Builds the message, stamping the content with a `tx_time` entry.
    pub fn new(
        entity: &str,
        mut content: HashMap<String, Types>,
        encrypts: Arc<Arc<Mutex<EncryptContext>>>,
        hashing_cost: u32,
        datetime: DateTime<Utc>,
    ) -> Self {
        content.insert("tx_time".to_owned(), Types::DateTime(datetime));
        Self {
            entity: entity.to_owned(),
            content,
            encrypts,
            hashing_cost,
        }
    }
}
impl Message for EncryptContent {
    type Result = Result<HashMap<String, Types>, Error>;
}
impl Handler<EncryptContent> for Executor {
    type Result = Result<HashMap<String, Types>, Error>;
    fn handle(&mut self, msg: EncryptContent, _: &mut Self::Context) -> Self::Result {
        let mut encrypts_data = if let Ok(guard) = msg.encrypts.lock() {
            guard
        } else {
            return Err(Error::LockData);
        };
        if encrypts_data.is_empty() {
            // No entity declared encrypted keys: pass the content through.
            Ok(msg.content)
        } else {
            encrypts_data.get_mut(&msg.entity).map_or(
                Ok(msg.content.clone()),
                |encrypts_for_entity| {
                    let mut new_content = HashMap::new();
                    msg.content.iter().for_each(|(k, v)| {
                        if encrypts_for_entity.contains(k) {
                            // Fixed low cost keeps bcrypt fast in tests.
                            #[cfg(test)]
                            let hashed_v = v.to_hash(Some(4)).unwrap();
                            // NOTE(review): `unwrap` panics if hashing fails —
                            // consider propagating an error instead.
                            #[cfg(not(test))]
                            let hashed_v = v.to_hash(Some(msg.hashing_cost)).unwrap();
                            new_content.insert(k.to_owned(), hashed_v);
                        } else {
                            new_content.insert(k.to_owned(), v.to_owned());
                        }
                    });
                    Ok(new_content)
                },
            )
        }
    }
}
// Message verifying clear-text values against previously stored hashes.
pub struct VerifyEncryption {
    // Stored (possibly hashed) state of the registry being checked.
    filtered: HashMap<String, Types>,
    // Clear-text values to verify, keyed by the same field names.
    content: HashMap<String, String>,
}
impl VerifyEncryption {
    pub fn new(filtered: HashMap<String, Types>, content: HashMap<String, String>) -> Self {
        Self { filtered, content }
    }
}
impl Message for VerifyEncryption {
    type Result = Result<QueryResponse, Error>;
}
impl Handler<VerifyEncryption> for Executor {
    type Result = Result<QueryResponse, Error>;
    fn handle(&mut self, msg: VerifyEncryption, _: &mut Self::Context) -> Self::Result {
        let type_nil = Types::Nil;
        let results = msg
            .content
            .clone()
            .into_par_iter()
            .map(|(k, v)| {
                // NOTE(review): cloning the whole `filtered` map once per key
                // is wasteful; a shared borrow would suffice.
                let original = msg.filtered.clone();
                let original_hash = original.get(&k).unwrap_or(&type_nil);
                // A key whose stored value is not a hash verifies as false.
                let result = if let Types::Hash(hash) = original_hash {
                    // NOTE(review): `unwrap` panics on a malformed hash string.
                    bcrypt::verify(v, hash).unwrap()
                } else {
                    false
                };
                (k, result)
            })
            .collect::<HashMap<String, bool>>();
        Ok(results.into())
    }
}
// Compact RON pretty-printer: no indentation and no newlines, so each
// serialized record stays on a single line.
fn pretty_config() -> PrettyConfig {
    PrettyConfig::new()
        .indentor(String::new())
        .new_line(String::new())
}
#[cfg(test)]
mod test {
    use super::*;
    use crate::{actors::wql::Executor, io::read::assert_encrypt};
    // NOTE(review): both test names say "uniques" but they exercise the
    // encryption messages — probably copied from uniques.rs; consider renaming.
    #[actix_rt::test]
    async fn write_uniques() {
        let encrypts = WriteWithEncryption {
            entity: String::from("my-entity"),
            encrypts: vec![String::from("id"), String::from("ssn")],
        };
        let actor = Executor::new().start();
        let resp = actor.send(encrypts).await.unwrap();
        assert!(resp.is_ok());
        assert_encrypt("encrypts: [\"id\",\"ssn\",]");
    }
    #[actix_rt::test]
    async fn create_uniques_test() {
        let data = EncryptContext::new();
        let encrypts = CreateWithEncryption {
            entity: String::from("my-entity"),
            encrypts: vec![String::from("id"), String::from("ssn")],
            data: Arc::new(Arc::new(Mutex::new(data.clone()))),
        };
        let actor = Executor::new().start();
        let resp = actor.send(encrypts).await.unwrap();
        assert!(resp.is_ok());
    }
}
| rust | MIT | bcef59acbf1bb1509a8b3d1b2f914e45921ba026 | 2026-01-04T20:17:21.692961Z | false |
naomijub/wooridb | https://github.com/naomijub/wooridb/blob/bcef59acbf1bb1509a8b3d1b2f914e45921ba026/woori-db/src/actors/recovery.rs | woori-db/src/actors/recovery.rs | use actix::prelude::*;
use crate::{
core::pretty_config_inner,
io::write::{local_data, offset_counter},
model::error::Error,
repository::local::LocalContext,
};
use super::wql::Executor;
/// Actor message carrying the number of bytes already consumed from today's
/// write-ahead log, so recovery can resume from the right position.
pub struct OffsetCounter {
    pub offset: usize,
}

impl OffsetCounter {
    /// Wraps a byte offset in a message ready to send to the executor.
    pub fn new(offset: usize) -> Self {
        OffsetCounter { offset }
    }
}
impl Message for OffsetCounter {
    type Result = Result<(), Error>;
}

impl Handler<OffsetCounter> for Executor {
    type Result = Result<(), Error>;

    // Persists the offset via `io::write::offset_counter` (writes
    // data/offset_counter.log) so recovery can resume reading today's WAL
    // from the correct byte position.
    fn handle(&mut self, msg: OffsetCounter, _: &mut Self::Context) -> Self::Result {
        Ok(offset_counter(msg.offset)?)
    }
}
/// Actor message carrying a full snapshot of the in-memory entity map, to be
/// serialized and persisted to disk.
pub struct LocalData {
    pub data: LocalContext,
}

impl LocalData {
    /// Builds the snapshot message from the given context.
    pub fn new(data: LocalContext) -> Self {
        LocalData { data }
    }
}
impl Message for LocalData {
    type Result = Result<(), Error>;
}

impl Handler<LocalData> for Executor {
    type Result = Result<(), Error>;

    // Serializes the snapshot to single-line RON and overwrites the on-disk
    // copy (data/local_data.log) via `io::write::local_data`.
    fn handle(&mut self, msg: LocalData, _: &mut Self::Context) -> Self::Result {
        let data_str = ron::ser::to_string_pretty(&msg.data, pretty_config_inner())?;
        Ok(local_data(&data_str)?)
    }
}
| rust | MIT | bcef59acbf1bb1509a8b3d1b2f914e45921ba026 | 2026-01-04T20:17:21.692961Z | false |
naomijub/wooridb | https://github.com/naomijub/wooridb/blob/bcef59acbf1bb1509a8b3d1b2f914e45921ba026/woori-db/src/actors/scheduler.rs | woori-db/src/actors/scheduler.rs | use actix::prelude::*;
use chrono::{Local, TimeZone, Utc};
use cron::Schedule;
use std::{
fs::OpenOptions,
io::{BufReader, BufWriter},
path::PathBuf,
process::Command,
str::FromStr,
time::Duration,
};
// Background actor that periodically compacts old WAL files (see
// `schedule_task`). Started once at application boot.
pub struct Scheduler;

impl Actor for Scheduler {
    type Context = Context<Self>;

    // Arm the first compaction run for the next cron tick (daily, midnight).
    fn started(&mut self, ctx: &mut Context<Self>) {
        log::debug!("Actor is alive");
        ctx.run_later(duration_until_next(), move |this, ctx| {
            this.schedule_task(ctx)
        });
    }

    fn stopped(&mut self, _ctx: &mut Context<Self>) {
        log::debug!("Actor is stopped");
    }
}
impl Scheduler {
    // Compacts WAL files older than 10 days: each matching data/*.log whose
    // name parses as a date (YYYY_MM_DD) is zstd-compressed (level 22) into a
    // sibling .zst file, and the original log is then removed. Non-dated
    // files (uniques.log, etc.) fail the date parse and are skipped.
    // Reschedules itself for the next daily cron tick when done.
    fn schedule_task(&self, ctx: &mut Context<Self>) {
        use glob::glob;
        log::debug!("schedule_task event - {:?}", Local::now());
        let date_to_clear = Utc::now() - chrono::Duration::days(10);
        let files: Vec<PathBuf> = glob("*data/*.log")
            .unwrap()
            .map(std::result::Result::unwrap)
            .collect();
        files.iter().for_each(|f| {
            if let Some(file_name) = f.to_str() {
                // File names encode their date: data/YYYY_MM_DD.log.
                let date = file_name.replace(".log", "").replace("data/", "");
                let file_date =
                    Utc.datetime_from_str(&format!("{} 00:00:00", date), "%Y_%m_%d %H:%M:%S");
                if file_date.is_ok() && file_date.unwrap() < date_to_clear {
                    use std::io;
                    let file_zip = file_name.replace(".log", ".zst");
                    let file = OpenOptions::new().read(true).open(file_name).unwrap();
                    let mut reader = BufReader::new(file);
                    let write = OpenOptions::new()
                        .create(true)
                        .write(true)
                        .append(false)
                        .open(&file_zip)
                        .unwrap();
                    let writer = BufWriter::new(write);
                    let mut encoder = zstd::stream::Encoder::new(writer, 22).unwrap();
                    io::copy(&mut reader, &mut encoder).unwrap();
                    // finish() flushes the zstd frame; dropping the encoder
                    // without it would truncate the archive.
                    encoder.finish().unwrap();
                    // NOTE(review): shells out to `rm`, which is Unix-only;
                    // std::fs::remove_file would be portable — confirm intent.
                    Command::new("rm")
                        .arg("-rf")
                        .arg(&file_name)
                        .output()
                        .expect("Couldn't remove file");
                }
            }
        });
        // Re-arm the timer for the next daily run.
        ctx.run_later(duration_until_next(), move |this, ctx| {
            this.schedule_task(ctx)
        });
    }
}
// Time remaining until the next daily cron tick (local midnight). Used to
// arm the scheduler's `run_later` timer.
pub fn duration_until_next() -> Duration {
    let cron_expression = "@daily"; // or "0 0 0 */1 * *"; every day at midnight
    let cron_schedule = Schedule::from_str(cron_expression).unwrap();
    let now = Local::now();
    // `upcoming` yields times strictly after `now`, so the difference is
    // non-negative and the cast to u64 below is safe.
    let next = cron_schedule.upcoming(Local).next().unwrap();
    let duration_until = next.signed_duration_since(now);
    Duration::from_millis(duration_until.num_milliseconds() as u64)
}
| rust | MIT | bcef59acbf1bb1509a8b3d1b2f914e45921ba026 | 2026-01-04T20:17:21.692961Z | false |
naomijub/wooridb | https://github.com/naomijub/wooridb/blob/bcef59acbf1bb1509a8b3d1b2f914e45921ba026/woori-db/src/repository/local.rs | woori-db/src/repository/local.rs | use std::collections::{BTreeMap, HashMap, HashSet};
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use crate::{auth::schemas::Role, model::DataRegister};
pub type StateInfo = (DataRegister, Vec<u8>);
pub type LocalContext = BTreeMap<String, BTreeMap<Uuid, StateInfo>>;
pub type UniquenessContext = BTreeMap<String, HashMap<String, HashSet<String>>>;
pub type EncryptContext = BTreeMap<String, HashSet<String>>;
pub type SessionContext = BTreeMap<String, SessionInfo>;
/// Authenticated session state: when it expires and which roles it carries.
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct SessionInfo {
    // Instant after which the session token is rejected.
    expiration: DateTime<Utc>,
    // Roles granted to the session's token.
    roles: Vec<Role>,
}

impl SessionInfo {
    /// Creates a session valid until `expiration` holding the given roles.
    pub fn new(expiration: DateTime<Utc>, roles: Vec<Role>) -> Self {
        Self { expiration, roles }
    }

    /// Returns true when the session holds at least one of the required
    /// `roles`. (Auth checks are compiled out of debug builds.)
    #[cfg(not(debug_assertions))]
    pub fn is_valid_role(&self, roles: Vec<Role>) -> bool {
        // `Vec::contains` takes `&Role`; `role` is already a `&Role` here, so
        // pass it directly (the previous `&role` produced a `&&Role`).
        roles.iter().any(|role| self.roles.contains(role))
    }

    /// Returns true while the session has not yet expired.
    #[cfg(not(debug_assertions))]
    pub fn is_valid_date(&self) -> bool {
        let now = Utc::now();
        self.expiration > now
    }
}
| rust | MIT | bcef59acbf1bb1509a8b3d1b2f914e45921ba026 | 2026-01-04T20:17:21.692961Z | false |
naomijub/wooridb | https://github.com/naomijub/wooridb/blob/bcef59acbf1bb1509a8b3d1b2f914e45921ba026/woori-db/src/repository/mod.rs | woori-db/src/repository/mod.rs | pub(crate) mod local;
| rust | MIT | bcef59acbf1bb1509a8b3d1b2f914e45921ba026 | 2026-01-04T20:17:21.692961Z | false |
naomijub/wooridb | https://github.com/naomijub/wooridb/blob/bcef59acbf1bb1509a8b3d1b2f914e45921ba026/woori-db/src/core/wql.rs | woori-db/src/core/wql.rs | use std::collections::HashMap;
use chrono::{DateTime, Utc};
use uuid::Uuid;
use wql::Types;
use crate::{
actors::wql::{
DeleteId, EvictEntityId, InsertEntityContent, UpdateContentEntityContent,
UpdateSetEntityContent,
},
core::pretty_config_inner,
model::wql::Action,
};
use ron::ser::to_string_pretty;
/// Builds the WAL line for a CREATE ENTITY operation.
pub fn create_entity(entity: &str) -> String {
    format!("{}|{};", Action::CreateEntity, entity)
}

/// Builds the WAL line evicting an entire entity tree, stamped with the
/// current UTC time serialized as single-line RON.
pub fn evict_entity_content(entity: &str) -> String {
    let date: DateTime<Utc> = Utc::now();
    let date = to_string_pretty(&date, pretty_config_inner()).unwrap();
    format!("{}|{}|{};", Action::EvictEntity, date, entity)
}

/// Builds the WAL line evicting a single entity id.
pub fn evict_entity_id_content(entity: &EvictEntityId) -> String {
    let date: DateTime<Utc> = Utc::now();
    let date = to_string_pretty(&date, pretty_config_inner()).unwrap();
    format!(
        "{}|{}|{}|{};",
        Action::EvictEntityId,
        date,
        entity.id,
        entity.name
    )
}
/// Builds the WAL line for an INSERT and returns `(tx_time, entity_id, log)`.
/// A fresh UUID is minted when the caller did not supply one; the timestamp
/// is taken from the message so it matches the registry's tx_time.
pub fn insert_entity_content(content: &InsertEntityContent) -> (DateTime<Utc>, Uuid, String) {
    // `unwrap_or_else` expresses "use the provided id or mint one" directly
    // (previously `map_or_else(Uuid::new_v4, |id| id)`).
    let uuid = content.uuid.unwrap_or_else(Uuid::new_v4);
    let date = content.datetime;
    let date_str = to_string_pretty(&date, pretty_config_inner()).unwrap();
    // `Uuid` implements Display, so no intermediate `.to_string()` is needed.
    let log = format!(
        "{}|{}|{}|{}|{};",
        Action::Insert,
        date_str,
        uuid,
        content.name,
        content.content
    );
    (date, uuid, log)
}
/// Builds the WAL line for UPDATE SET and returns `(tx_time, log)`.
/// The timestamp comes from the message itself so it matches the tx_time
/// recorded in the registry.
pub fn update_set_entity_content(content: &UpdateSetEntityContent) -> (DateTime<Utc>, String) {
    let uuid = content.id;
    let date = content.datetime;
    let date_str = to_string_pretty(&date, pretty_config_inner()).unwrap();
    let log = format!(
        "{}|{}|{}|{}|{}|{}|{};",
        Action::UpdateSet,
        date_str,
        uuid.to_string(),
        content.name,
        content.content_log,
        content.current_state,
        content.previous_registry
    );
    (date, log)
}

/// Builds the WAL line for UPDATE CONTENT and returns `(tx_time, log)`.
/// Unlike UPDATE SET, the timestamp is taken at call time — the message
/// carries no datetime field.
pub fn update_content_entity_content(
    content: &UpdateContentEntityContent,
) -> (DateTime<Utc>, String) {
    let uuid = content.id;
    let date: DateTime<Utc> = Utc::now();
    let date_str = to_string_pretty(&date, pretty_config_inner()).unwrap();
    let log = format!(
        "{}|{}|{}|{}|{}|{}|{};",
        Action::UpdateContent,
        date_str,
        uuid.to_string(),
        content.name,
        content.content_log,
        content.current_state,
        content.previous_registry
    );
    (date, log)
}

/// Builds the WAL line for DELETE (reverts an id to its previous registry)
/// and returns `(tx_time, log)`.
pub fn delete_entity_content(content: &DeleteId) -> (DateTime<Utc>, String) {
    let date: DateTime<Utc> = Utc::now();
    let date_str = to_string_pretty(&date, pretty_config_inner()).unwrap();
    let log = format!(
        "{}|{}|{}|{}|{}|{};",
        Action::Delete,
        date_str,
        content.uuid.to_string(),
        content.name,
        content.content_log,
        content.previous_registry
    );
    (date, log)
}
/// Merges one UPDATE CONTENT key/value into `previous_state` in place.
///
/// Merge semantics by incoming type: numbers are ADDED to the current value
/// (an Integer/Float mix promotes the result to Float), Strings are
/// concatenated, Vectors are appended, Maps are merged key-by-key (incoming
/// values win); every other type simply replaces the stored value. Hash
/// values are left untouched — hashes cannot be meaningfully combined.
pub fn update_content_state(previous_state: &mut HashMap<String, Types>, k: String, v: Types) {
    // A missing key starts from the incoming type's default so the merge
    // arms below always see a value of a compatible variant.
    let local_state = previous_state
        .entry(k)
        .or_insert_with(|| v.default_values());
    match v {
        Types::Char(c) => {
            *local_state = Types::Char(c);
        }
        Types::Integer(i) => {
            // Only one arm runs: the first rewrites the state to Integer, so
            // the Float check below no longer matches (and vice versa).
            if let Types::Integer(local) = *local_state {
                *local_state = Types::Integer(local + i);
            }
            if let Types::Float(local) = *local_state {
                *local_state = Types::Float(local + i as f64);
            }
        }
        Types::String(s) => {
            if let Types::String(local) = local_state {
                *local_state = Types::String(local.to_owned() + &s);
            }
        }
        Types::Uuid(uuid) => {
            *local_state = Types::Uuid(uuid);
        }
        Types::Float(f) => {
            if let Types::Float(local) = *local_state {
                *local_state = Types::Float(local + f);
            }
            if let Types::Integer(local) = *local_state {
                *local_state = Types::Float(local as f64 + f);
            }
        }
        Types::Boolean(b) => {
            *local_state = Types::Boolean(b);
        }
        // Hashed values are never merged or overwritten here.
        Types::Hash(_) => {}
        Types::Vector(mut v) => {
            if let Types::Vector(local) = local_state {
                local.append(&mut v);
                *local_state = Types::Vector(local.to_owned());
            }
        }
        Types::Map(m) => {
            if let Types::Map(local) = local_state {
                // Incoming entries overwrite existing keys and add new ones.
                m.iter().for_each(|(key, value)| {
                    local
                        .entry(key.to_owned())
                        .and_modify(|v| *v = value.to_owned())
                        .or_insert_with(|| value.to_owned());
                });
                *local_state = Types::Map(local.to_owned());
            }
        }
        Types::Nil => {
            *local_state = Types::Nil;
        }
        Types::Precise(p) => {
            *local_state = Types::Precise(p);
        }
        Types::DateTime(date) => {
            *local_state = Types::DateTime(date);
        }
    }
}
// Unit tests for the WAL line builders: each asserts that the serialized
// line carries the action tag and the message's fields.
#[cfg(test)]
mod test {
    use super::*;
    use crate::actors::wql::{
        DeleteId, InsertEntityContent, UpdateContentEntityContent, UpdateSetEntityContent,
    };

    #[test]
    fn create_entity_test() {
        let s = create_entity(&"my_entity".to_string());
        assert_eq!(s, "CREATE_ENTITY|my_entity;");
    }

    #[test]
    fn insert_entity_test() {
        let entity = InsertEntityContent {
            name: "my_entity".to_string(),
            content: "suppose this is a log".to_string(),
            uuid: None,
            datetime: Utc::now(),
        };
        let (_, _, s) = insert_entity_content(&entity);
        assert!(s.contains("INSERT"));
        assert!(s.contains("my_entity"));
        assert!(s.contains("suppose this is a log"));
    }

    // When the caller supplies a uuid it must appear verbatim in the log.
    #[test]
    fn insert_entitywith_uuid_test() {
        let uuid = Uuid::new_v4();
        let entity = InsertEntityContent {
            name: "my_entity".to_string(),
            content: "suppose this is a log".to_string(),
            uuid: Some(uuid),
            datetime: Utc::now(),
        };
        let (_, _, s) = insert_entity_content(&entity);
        assert!(s.contains("INSERT"));
        assert!(s.contains("my_entity"));
        assert!(s.contains(&uuid.to_string()));
        assert!(s.contains("suppose this is a log"));
    }

    #[test]
    fn update_set_entity_content_test() {
        let id = uuid::Uuid::new_v4();
        let entity = UpdateSetEntityContent {
            name: "my-entity".to_string(),
            current_state: "state".to_string(),
            content_log: "log".to_string(),
            id,
            datetime: Utc::now(),
            previous_registry: "reg".to_string(),
        };
        let (_, s) = update_set_entity_content(&entity);
        assert!(s.contains("UPDATE_SET"));
        assert!(s.contains("my-entity"));
        assert!(s.contains("state"));
        assert!(s.contains("log"));
        assert!(s.contains("reg"));
    }

    #[test]
    fn update_content_entity_content_test() {
        let id = uuid::Uuid::new_v4();
        let entity = UpdateContentEntityContent {
            name: "my-entity".to_string(),
            current_state: "state".to_string(),
            content_log: "log".to_string(),
            id,
            previous_registry: "reg".to_string(),
        };
        let (_, s) = update_content_entity_content(&entity);
        assert!(s.contains("UPDATE_CONTENT"));
        assert!(s.contains("my-entity"));
        assert!(s.contains("state"));
        assert!(s.contains("log"));
        assert!(s.contains("reg"));
    }

    #[test]
    fn delete_entity_test() {
        let id = uuid::Uuid::new_v4();
        let entity = DeleteId {
            name: "my-entity".to_string(),
            content_log: "log".to_string(),
            uuid: id,
            previous_registry: "reg".to_string(),
        };
        let (_, s) = delete_entity_content(&entity);
        assert!(s.contains("DELETE"));
        assert!(s.contains("my-entity"));
        assert!(s.contains("log"));
        assert!(s.contains("reg"));
    }

    #[test]
    fn evict_entity_test() {
        let entity = "hello";
        let actual = evict_entity_content(&entity);
        assert!(actual.starts_with("EVICT_ENTITY"));
        assert!(actual.contains("hello"))
    }

    #[test]
    fn evict_entity_id_test() {
        let uuid = Uuid::new_v4();
        let entity = EvictEntityId {
            name: "hello".to_string(),
            id: uuid,
        };
        let actual = evict_entity_id_content(&entity);
        assert!(actual.starts_with("EVICT_ENTITY_ID"));
        assert!(actual.contains("hello"));
        assert!(actual.contains(&uuid.to_string()));
    }
}
| rust | MIT | bcef59acbf1bb1509a8b3d1b2f914e45921ba026 | 2026-01-04T20:17:21.692961Z | false |
naomijub/wooridb | https://github.com/naomijub/wooridb/blob/bcef59acbf1bb1509a8b3d1b2f914e45921ba026/woori-db/src/core/registry.rs | woori-db/src/core/registry.rs | use std::collections::BTreeMap;
use uuid::Uuid;
use crate::{
model::{error::Error, DataLocalContext},
repository::local::StateInfo,
};
pub fn get_registries(
entity: &str,
local_data: &DataLocalContext,
) -> Result<BTreeMap<Uuid, StateInfo>, Error> {
let local_data = if let Ok(guard) = local_data.lock() {
guard
} else {
return Err(Error::LockData);
};
let registries = if let Some(id_to_registries) = local_data.get(entity) {
id_to_registries
} else {
return Err(Error::EntityNotCreated(entity.to_owned()));
}
.to_owned();
Ok(registries)
}
| rust | MIT | bcef59acbf1bb1509a8b3d1b2f914e45921ba026 | 2026-01-04T20:17:21.692961Z | false |
naomijub/wooridb | https://github.com/naomijub/wooridb/blob/bcef59acbf1bb1509a8b3d1b2f914e45921ba026/woori-db/src/core/mod.rs | woori-db/src/core/mod.rs | extern crate wql as ewql;
use std::collections::HashMap;
use chrono::{DateTime, Utc};
use ewql::Types;
use ron::ser::PrettyConfig;
use crate::model::error::Error;
pub(crate) mod query;
pub(crate) mod registry;
pub(crate) mod wql;
/// RON serializer settings for user-facing responses: tuple members on
/// separate lines, indented, with real newlines.
pub fn pretty_config_output() -> PrettyConfig {
    PrettyConfig::new()
        .separate_tuple_members(true)
        .indentor(" ".to_string())
        .new_line("\n".to_string())
}

/// RON serializer settings for on-disk logs: no indentation and no newlines,
/// keeping each serialized value on a single line.
pub fn pretty_config_inner() -> PrettyConfig {
    PrettyConfig::new()
        .indentor("".to_string())
        .new_line("".to_string())
}
pub fn tx_time(content: &HashMap<String, Types>) -> Result<DateTime<Utc>, Error> {
if content.contains_key("tx_time") {
return Err(Error::KeyTxTimeNotAllowed);
}
Ok(Utc::now())
}
| rust | MIT | bcef59acbf1bb1509a8b3d1b2f914e45921ba026 | 2026-01-04T20:17:21.692961Z | false |
naomijub/wooridb | https://github.com/naomijub/wooridb/blob/bcef59acbf1bb1509a8b3d1b2f914e45921ba026/woori-db/src/core/query.rs | woori-db/src/core/query.rs | use std::{
cmp::Ordering,
collections::{BTreeMap, HashMap, HashSet},
};
use rayon::prelude::*;
use uuid::Uuid;
use wql::{Algebra, Types};
use crate::{
model::DataRegister,
schemas::query::{CountResponse, Response as QueryResponse},
};
/// Strips `Types::Hash` values from a state (hashed fields are never
/// returned to clients) and, when a key set is given, keeps only the
/// selected keys.
pub(crate) fn filter_keys_and_hash(
    state: HashMap<String, Types>,
    keys: Option<HashSet<String>>,
) -> HashMap<String, Types> {
    let filtered = state.into_par_iter().filter(|(_, v)| !v.is_hash());
    if let Some(keys) = keys {
        filtered.filter(|(k, _)| keys.contains(k)).collect()
    } else {
        filtered.collect()
    }
}
/// Deserializes each registry's stored state bytes and applies key/hash
/// filtering plus pagination (`offset`/`limit`) over the id-ordered map.
pub fn registries_to_states(
    registries: BTreeMap<Uuid, (DataRegister, Vec<u8>)>,
    keys: Option<HashSet<String>>,
    offset: usize,
    limit: usize,
) -> BTreeMap<Uuid, HashMap<String, Types>> {
    let mut states: BTreeMap<Uuid, HashMap<String, Types>> = BTreeMap::new();
    for (uuid, (_, state)) in registries.into_iter().skip(offset).take(limit) {
        // NOTE(review): unwrap assumes the bytes were written by this same
        // bincode schema; a corrupt entry panics here — confirm intent.
        let state: HashMap<String, Types> = bincode::deserialize(&state).unwrap();
        let filtered = filter_keys_and_hash(state, keys.clone());
        states.insert(uuid, filtered);
    }
    states
}
/// Extracts pagination and counting directives from the parsed WQL function
/// map, returning `(limit, offset, count)`. Absent LIMIT means "no limit"
/// (`usize::MAX`); absent OFFSET means 0.
pub(crate) fn get_limit_offset_count(
    functions: &HashMap<String, wql::Algebra>,
) -> (usize, usize, bool) {
    let limit = match functions.get("LIMIT") {
        Some(Algebra::Limit(l)) => *l,
        _ => usize::MAX,
    };
    let offset = match functions.get("OFFSET") {
        Some(Algebra::Offset(o)) => *o,
        _ => 0,
    };
    let count = matches!(functions.get("COUNT"), Some(Algebra::Count));
    (limit, offset, count)
}
/// Removes entries whose DEDUP key value has already been seen, keeping the
/// first occurrence in BTreeMap iteration order (ascending Uuid).
///
/// `DEDUP NIL(key)` additionally drops entries whose key is missing or Nil;
/// plain `DEDUP key` treats a missing value as Nil and dedups on that.
pub(crate) fn dedup_states(
    states: BTreeMap<Uuid, HashMap<String, Types>>,
    functions: &HashMap<String, wql::Algebra>,
) -> BTreeMap<Uuid, HashMap<String, Types>> {
    if let Some(Algebra::Dedup(k)) = functions.get("DEDUP") {
        // Unwrap the inner key from "NIL(<key>)" when present.
        let key = if k.starts_with("NIL(") {
            String::from(&k[4..k.len() - 1])
        } else {
            k.to_owned()
        };
        // Seen-set keyed on the Debug form of the value (Types is not Hash).
        let mut set: HashSet<String> = HashSet::new();
        let mut new_states: BTreeMap<Uuid, HashMap<String, Types>> = BTreeMap::new();
        for (id, state) in states {
            let k_value = state.get(&key);
            if k.starts_with("NIL(")
                && k_value.is_some()
                && k_value != Some(&Types::Nil)
                && !set.contains(&format!("{:?}", k_value.unwrap()))
            {
                set.insert(format!("{:?}", k_value.unwrap()));
                new_states.insert(id.to_owned(), state.to_owned());
            } else if !k.starts_with("NIL(")
                && !set.contains(&format!("{:?}", state.get(k).unwrap_or(&Types::Nil)))
            {
                set.insert(format!("{:?}", state.get(k).unwrap_or(&Types::Nil)));
                new_states.insert(id, state);
            }
        }
        new_states
    } else {
        states
    }
}
/// Removes entries whose DEDUP key value has already been seen — the
/// `Option<state>` twin of `dedup_states`. Absent (`None`) states are
/// skipped entirely; `DEDUP NIL(key)` also drops entries whose key is
/// missing or Nil.
pub(crate) fn dedup_option_states(
    states: BTreeMap<Uuid, Option<HashMap<String, Types>>>,
    functions: &HashMap<String, wql::Algebra>,
) -> BTreeMap<Uuid, Option<HashMap<String, Types>>> {
    let dedup = functions.get("DEDUP");
    if let Some(Algebra::Dedup(k)) = dedup {
        // Unwrap the inner key from "NIL(<key>)" when present.
        let key = if k.starts_with("NIL(") {
            String::from(&k[4..k.len() - 1])
        } else {
            k.to_owned()
        };
        let mut set: HashSet<String> = HashSet::new();
        let mut new_states: BTreeMap<Uuid, Option<HashMap<String, Types>>> = BTreeMap::new();
        for (id, state) in states.iter().filter(|(_, s)| s.is_some()) {
            let some_state = state.clone().unwrap();
            let k_value = some_state.get(&key);
            if k.starts_with("NIL(")
                && k_value.is_some()
                && k_value != Some(&Types::Nil)
                // BUG FIX: this previously tested `!set.contains("")`, which
                // is independent of the value — the NIL branch kept only the
                // FIRST non-nil entry overall instead of one entry per
                // distinct key value (compare `dedup_states`).
                && !set.contains(&format!("{:?}", k_value.unwrap()))
            {
                set.insert(format!("{:?}", k_value.unwrap()));
                new_states.insert(id.to_owned(), state.to_owned());
            } else if !k.starts_with("NIL(")
                // The `!k.starts_with("NIL(")` guard (also present in
                // `dedup_states`) keeps NIL-mode rejects from leaking in
                // through this branch.
                && !set.contains(&format!("{:?}", k_value.unwrap_or(&Types::Nil)))
            {
                set.insert(format!("{:?}", k_value.unwrap_or(&Types::Nil)));
                new_states.insert(id.to_owned(), state.to_owned());
            }
        }
        new_states
    } else {
        states
    }
}
/// Applies the post-filter pipeline (ORDER BY, GROUP BY, COUNT) to query
/// states and packs the outcome into a `QueryResponse`.
///
/// Shapes produced: ORDER alone -> sorted `Vec<(Uuid, state)>`; GROUP (with
/// optional ORDER inside each group) -> map keyed by the grouping value's
/// Debug form; COUNT wraps whichever shape applies in a `CountResponse`.
pub(crate) fn get_result_after_manipulation(
    states: BTreeMap<Uuid, HashMap<String, Types>>,
    functions: &HashMap<String, wql::Algebra>,
    should_count: bool,
) -> QueryResponse {
    // ORDER without GROUP: flatten to a vector and sort by the key's value.
    // Incomparable values fall back to `Ordering::Less`.
    if let (Some(Algebra::OrderBy(k, ord)), None) = (functions.get("ORDER"), functions.get("GROUP"))
    {
        let mut states = states
            .into_par_iter()
            .map(|(id, state)| (id, state))
            .collect::<Vec<(Uuid, HashMap<String, Types>)>>();
        if ord == &wql::Order::Asc {
            states.sort_by(|a, b| {
                a.1.get(k)
                    .partial_cmp(&b.1.get(k))
                    .unwrap_or(Ordering::Less)
            });
        } else {
            states.sort_by(|a, b| {
                b.1.get(k)
                    .partial_cmp(&a.1.get(k))
                    .unwrap_or(Ordering::Less)
            });
        }
        if should_count {
            let size = states.len();
            CountResponse::new(size, states.into()).into()
        } else {
            states.into()
        }
    } else if let Some(Algebra::GroupBy(k)) = functions.get("GROUP") {
        // GROUP BY: bucket states by the Debug form of the grouping value
        // (missing keys group under Nil).
        let mut groups: HashMap<String, BTreeMap<Uuid, HashMap<String, Types>>> = HashMap::new();
        for (id, state) in states {
            let key = state.get(k).unwrap_or(&Types::Nil);
            let g = groups
                .entry(format!("{:?}", key))
                .or_insert_with(BTreeMap::new);
            (*g).insert(id, state);
        }
        // Optional ORDER applied independently inside each group.
        if let Some(Algebra::OrderBy(k, ord)) = functions.get("ORDER") {
            let mut group_states = groups
                .into_par_iter()
                .map(|(key, states)| {
                    (
                        key,
                        states
                            .into_iter()
                            .map(|(id, state)| (id, state))
                            .collect::<Vec<(Uuid, HashMap<String, Types>)>>(),
                    )
                })
                .collect::<HashMap<String, Vec<(Uuid, HashMap<String, Types>)>>>();
            if ord == &wql::Order::Asc {
                let group_states = group_states
                    .iter_mut()
                    .map(|(key, states)| {
                        states.sort_by(|a, b| {
                            a.1.get(k)
                                .partial_cmp(&b.1.get(k))
                                .unwrap_or(Ordering::Less)
                        });
                        (key.to_owned(), states.to_owned())
                    })
                    .collect::<HashMap<String, Vec<(Uuid, HashMap<String, Types>)>>>();
                group_states.into()
            } else {
                let group_states = group_states
                    .iter_mut()
                    .map(|(key, states)| {
                        states.sort_by(|a, b| {
                            b.1.get(k)
                                .partial_cmp(&a.1.get(k))
                                .unwrap_or(Ordering::Less)
                        });
                        (key.to_owned(), states.to_owned())
                    })
                    .collect::<HashMap<String, Vec<(Uuid, HashMap<String, Types>)>>>();
                group_states.into()
            }
        } else if should_count {
            // COUNT over groups counts the number of groups, not entities.
            let size = groups.keys().len();
            CountResponse::new(size, groups.into()).into()
        } else {
            groups.into()
        }
    } else if should_count {
        let size = states.keys().len();
        CountResponse::new(size, states.into()).into()
    } else {
        states.into()
    }
}
/// Same post-filter pipeline as `get_result_after_manipulation`, but for
/// queries whose states are `Option`al (e.g. WHEN/AT lookups).
///
/// `None` states are dropped when ORDER is involved; under plain GROUP BY
/// they are kept and bucketed under the Nil group.
pub(crate) fn get_result_after_manipulation_for_options(
    states: BTreeMap<Uuid, Option<HashMap<String, Types>>>,
    functions: &HashMap<String, wql::Algebra>,
    should_count: bool,
) -> QueryResponse {
    // ORDER without GROUP: drop absent states, flatten, sort by key value.
    if let (Some(Algebra::OrderBy(k, ord)), None) = (functions.get("ORDER"), functions.get("GROUP"))
    {
        let states = states
            .into_par_iter()
            .map(|(id, state)| (id, state))
            .collect::<Vec<(Uuid, Option<HashMap<String, Types>>)>>();
        let mut states = states
            .into_par_iter()
            .filter(|(_, s)| s.is_some())
            .map(|(id, s)| (id, s.unwrap()))
            .collect::<Vec<(Uuid, HashMap<String, Types>)>>();
        if ord == &wql::Order::Asc {
            states.sort_by(|a, b| {
                a.1.get(k)
                    .partial_cmp(&b.1.get(k))
                    .unwrap_or(Ordering::Less)
            });
        } else {
            states.sort_by(|a, b| {
                b.1.get(k)
                    .partial_cmp(&a.1.get(k))
                    .unwrap_or(Ordering::Less)
            });
        }
        if should_count {
            let size = states.len();
            CountResponse::new(size, states.into()).into()
        } else {
            states.into()
        }
    } else if let Some(Algebra::GroupBy(k)) = functions.get("GROUP") {
        // GROUP BY: present states bucket by their value's Debug form,
        // absent states bucket under Nil.
        let mut groups: HashMap<String, BTreeMap<Uuid, Option<HashMap<String, Types>>>> =
            HashMap::new();
        for (id, state) in states {
            if let Some(s) = state {
                let key = s.get(k).unwrap_or(&Types::Nil);
                let g = groups
                    .entry(format!("{:?}", key))
                    .or_insert_with(BTreeMap::new);
                (*g).insert(id, Some(s));
            } else {
                let key = &Types::Nil;
                let g = groups
                    .entry(format!("{:?}", key))
                    .or_insert_with(BTreeMap::new);
                (*g).insert(id, None);
            }
        }
        // Optional ORDER inside each group: absent states are dropped here.
        if let Some(Algebra::OrderBy(k, ord)) = functions.get("ORDER") {
            let mut group_states = groups
                .into_par_iter()
                .map(|(key, states)| {
                    (
                        key,
                        states
                            .into_iter()
                            .filter(|(_, state)| state.is_some())
                            .map(|(id, state)| (id, state.unwrap()))
                            .collect::<Vec<(Uuid, HashMap<String, Types>)>>(),
                    )
                })
                .collect::<HashMap<String, Vec<(Uuid, HashMap<String, Types>)>>>();
            if ord == &wql::Order::Asc {
                let group_states = group_states
                    .iter_mut()
                    .map(|(key, states)| {
                        states.sort_by(|a, b| {
                            a.1.get(k)
                                .partial_cmp(&b.1.get(k))
                                .unwrap_or(Ordering::Less)
                        });
                        (key.to_owned(), states.to_owned())
                    })
                    .collect::<HashMap<String, Vec<(Uuid, HashMap<String, Types>)>>>();
                group_states.into()
            } else {
                let group_states = group_states
                    .iter_mut()
                    .map(|(key, states)| {
                        states.sort_by(|a, b| {
                            b.1.get(k)
                                .partial_cmp(&a.1.get(k))
                                .unwrap_or(Ordering::Less)
                        });
                        (key.to_owned(), states.to_owned())
                    })
                    .collect::<HashMap<String, Vec<(Uuid, HashMap<String, Types>)>>>();
                group_states.into()
            }
        } else if should_count {
            // COUNT over groups counts the number of groups.
            let size = groups.keys().len();
            CountResponse::new(size, groups.into()).into()
        } else {
            groups.into()
        }
    } else if should_count {
        let size = states.keys().len();
        CountResponse::new(size, states.into()).into()
    } else {
        states.into()
    }
}
| rust | MIT | bcef59acbf1bb1509a8b3d1b2f914e45921ba026 | 2026-01-04T20:17:21.692961Z | false |
naomijub/wooridb | https://github.com/naomijub/wooridb/blob/bcef59acbf1bb1509a8b3d1b2f914e45921ba026/woori-db/src/io/write.rs | woori-db/src/io/write.rs | use chrono::{DateTime, Utc};
use std::io::{Error, Seek, SeekFrom};
use std::path::Path;
use std::{fs::OpenOptions, io::Write};
/// Appends `log` to today's dated WAL file (data/YYYY_MM_DD.log), creating
/// it if needed. Returns the number of bytes written and whether the file
/// did not exist before this call.
pub fn write_to_log(log: &str) -> Result<(usize, bool), Error> {
    let now: DateTime<Utc> = Utc::now();
    let path = now.format("data/%Y_%m_%d.log").to_string();
    // Checked before opening: `create(true)` below would make it exist.
    let was_missing = !Path::new(&path).exists();
    let mut file = OpenOptions::new().append(true).create(true).open(path)?;
    let bytes_written = file.write(log.as_bytes())?;
    file.flush()?;
    Ok((bytes_written, was_missing))
}
/// Appends `log` to the uniqueness-constraint registry (data/uniques.log),
/// creating the file if needed.
pub fn write_to_uniques(log: &str) -> Result<(), Error> {
    let mut options = OpenOptions::new();
    let mut file = options.append(true).create(true).open("data/uniques.log")?;
    let _bytes = file.write(log.as_bytes())?;
    file.flush()?;
    Ok(())
}
/// Replaces the contents of `path` with `log`.
///
/// BUG FIX / NOTE(review): the previous versions only seeked to offset 0 and
/// overwrote in place, so a snapshot SHORTER than the previous one left
/// stale trailing bytes in the file, corrupting the RON/number parse on
/// recovery. `truncate(true)` makes the write replace the file atomically
/// from the reader's point of view.
fn overwrite_file(path: &str, log: &str) -> Result<(), Error> {
    let mut file = OpenOptions::new()
        .write(true)
        .truncate(true)
        .create(true)
        .open(path)?;
    file.write_all(log.as_bytes())?;
    file.flush()?;
    Ok(())
}

/// Persists the serialized in-memory entity map snapshot.
pub fn local_data(log: &str) -> Result<(), Error> {
    overwrite_file("data/local_data.log", log)
}

/// Persists the serialized uniqueness-constraint snapshot.
pub fn unique_data(log: &str) -> Result<(), Error> {
    overwrite_file("data/unique_data.log", log)
}

/// Persists the current WAL byte offset as decimal text.
pub fn offset_counter(log: usize) -> Result<(), Error> {
    overwrite_file("data/offset_counter.log", &log.to_string())
}
/// Appends `log` to the encrypted-keys registry (data/encrypt.log),
/// creating the file if needed.
pub fn write_to_encrypts(log: &str) -> Result<(), Error> {
    let mut options = OpenOptions::new();
    let mut file = options.append(true).create(true).open("data/encrypt.log")?;
    let _bytes = file.write(log.as_bytes())?;
    file.flush()?;
    Ok(())
}
// Smoke tests for the writers: each writes a marker string and asserts it
// can be found back in the corresponding file under data/.
#[cfg(test)]
mod test {
    use super::*;
    use crate::io::read::{
        assert_content, assert_local_data, assert_offset, assert_unique_data, assert_uniques,
    };

    #[test]
    fn write_unique() {
        let _ = write_to_uniques("oh crazy unique log");
        assert_uniques("oh crazy unique log");
    }

    #[test]
    fn write_log() {
        let _ = write_to_log("oh crazy log");
        assert_content("oh crazy log");
    }

    #[test]
    fn offset_counter_test() {
        let _ = offset_counter(5_usize);
        assert_offset("5");
    }

    #[test]
    fn local_data_test() {
        let _ = local_data("some crazy date here");
        assert_local_data("some crazy date here");
    }

    #[test]
    fn unique_data_test() {
        let _ = unique_data("some crazy date here");
        assert_unique_data("some crazy date here");
    }
}
| rust | MIT | bcef59acbf1bb1509a8b3d1b2f914e45921ba026 | 2026-01-04T20:17:21.692961Z | false |
naomijub/wooridb | https://github.com/naomijub/wooridb/blob/bcef59acbf1bb1509a8b3d1b2f914e45921ba026/woori-db/src/io/mod.rs | woori-db/src/io/mod.rs | pub(crate) mod read;
pub(crate) mod write;
| rust | MIT | bcef59acbf1bb1509a8b3d1b2f914e45921ba026 | 2026-01-04T20:17:21.692961Z | false |
naomijub/wooridb | https://github.com/naomijub/wooridb/blob/bcef59acbf1bb1509a8b3d1b2f914e45921ba026/woori-db/src/io/read.rs | woori-db/src/io/read.rs | use std::{
collections::{BTreeMap, HashMap, HashSet},
fs::OpenOptions,
io::{BufReader, Error, Read, Seek, SeekFrom},
};
use rayon::prelude::*;
use crate::{actors::encrypts::WriteWithEncryption, model::DataRegister};
use crate::{model::error, repository::local::LocalContext};
// Test-only helpers: each opens the relevant file under data/ and asserts
// that (or that not) the given pattern appears in its contents.

// Asserts `pat` occurs in today's dated WAL file.
#[cfg(test)]
pub fn assert_content(pat: &str) {
    use chrono::prelude::*;
    let utc: DateTime<Utc> = Utc::now();
    let date_log = utc.format("data/%Y_%m_%d.log").to_string();
    let mut file = OpenOptions::new().read(true).open(date_log).unwrap();
    let mut s = String::new();
    file.read_to_string(&mut s).unwrap();
    assert!(s.contains(pat));
}

// Asserts `pat` does NOT occur in today's dated WAL file.
#[cfg(test)]
pub fn assert_not_content(pat: &str) {
    use chrono::prelude::*;
    let utc: DateTime<Utc> = Utc::now();
    let date_log = utc.format("data/%Y_%m_%d.log").to_string();
    let mut file = OpenOptions::new().read(true).open(date_log).unwrap();
    let mut s = String::new();
    file.read_to_string(&mut s).unwrap();
    assert!(!s.contains(pat));
}

// Asserts `pat` occurs in the uniqueness registry log.
#[cfg(test)]
pub fn assert_uniques(pat: &str) {
    let mut file = OpenOptions::new()
        .read(true)
        .open("data/uniques.log")
        .unwrap();
    let mut s = String::new();
    file.read_to_string(&mut s).unwrap();
    assert!(s.contains(pat));
}

// Asserts `pat` occurs in the persisted WAL offset file.
#[cfg(test)]
pub fn assert_offset(pat: &str) {
    let mut file = OpenOptions::new()
        .read(true)
        .open("data/offset_counter.log")
        .unwrap();
    let mut s = String::new();
    file.read_to_string(&mut s).unwrap();
    assert!(s.contains(pat));
}

// Asserts `pat` occurs in the entity-map snapshot file.
#[cfg(test)]
pub fn assert_local_data(pat: &str) {
    let mut file = OpenOptions::new()
        .read(true)
        .open("data/local_data.log")
        .unwrap();
    let mut s = String::new();
    file.read_to_string(&mut s).unwrap();
    assert!(s.contains(pat));
}

// Asserts `pat` occurs in the uniqueness snapshot file.
#[cfg(test)]
pub fn assert_unique_data(pat: &str) {
    let mut file = OpenOptions::new()
        .read(true)
        .open("data/unique_data.log")
        .unwrap();
    let mut s = String::new();
    file.read_to_string(&mut s).unwrap();
    assert!(s.contains(pat));
}

// Asserts `pat` occurs in the encrypted-keys registry file.
#[cfg(test)]
pub fn assert_encrypt(pat: &str) {
    let mut file = OpenOptions::new()
        .read(true)
        .open("data/encrypt.log")
        .unwrap();
    let mut s = String::new();
    file.read_to_string(&mut s).unwrap();
    assert!(s.contains(pat));
}
/// Reads back a single WAL record located by `registry` (file name, byte
/// offset, byte length). Falls back to the zstd-compressed `.zst` archive
/// when the plain log has already been compacted away.
pub fn read_log(registry: DataRegister) -> Result<String, Error> {
    let mut res = String::with_capacity(registry.bytes_length);
    let file_name = registry.file_name;
    if let Ok(mut file) = OpenOptions::new().read(true).open(&file_name) {
        file.seek(SeekFrom::Start(registry.offset as u64))?;
        file.take(registry.bytes_length as u64)
            .read_to_string(&mut res)?;
    } else {
        // Compacted path: decompress the whole archive, then slice out the
        // record. NOTE(review): this path skips/takes CHARS, while the plain
        // path above seeks BYTES — the two disagree for non-ASCII log
        // content; confirm which unit the recorded offsets use.
        let zst_date = file_name.replace(".log", ".zst");
        let file = OpenOptions::new().read(true).open(zst_date)?;
        let reader = BufReader::new(file);
        let utf8 = zstd::stream::decode_all(reader)?;
        res = std::str::from_utf8(&utf8)
            .unwrap_or("")
            .chars()
            .skip(registry.offset as usize)
            .take(registry.bytes_length as usize)
            .collect::<String>();
    };
    Ok(res)
}
/// Reads an entire dated log file, transparently falling back to its
/// zstd-compressed `.zst` sibling when the plain log has been compacted.
pub fn read_date_log(date_log: String) -> Result<String, Error> {
    let mut res = String::new();
    if let Ok(mut file) = OpenOptions::new().read(true).open(&date_log) {
        file.seek(SeekFrom::Start(0))?;
        file.read_to_string(&mut res)?;
    } else {
        let zst_date = date_log.replace(".log", ".zst");
        let file = OpenOptions::new().read(true).open(zst_date)?;
        let reader = BufReader::new(file);
        let utf8 = zstd::stream::decode_all(reader)?;
        // Invalid UTF-8 in the archive silently degrades to an empty string.
        res = std::str::from_utf8(&utf8).unwrap_or("").to_owned();
    };
    Ok(res)
}
pub fn offset() -> Result<usize, error::Error> {
#[cfg(not(feature = "test_read"))]
let path = "data/offset_counter.log";
#[cfg(feature = "test_read")]
let path = "data/offset_counter.txt";
let mut file = OpenOptions::new().read(true).open(path)?;
let mut s = String::new();
file.read_to_string(&mut s)?;
Ok(s.parse::<usize>()
.map_err(|_| error::Error::FailedToParseState)?)
}
pub fn local_data() -> Result<LocalContext, error::Error> {
#[cfg(not(feature = "test_read"))]
let path = "data/local_data.log";
#[cfg(feature = "test_read")]
let path = "data/local_data.txt";
let mut file = OpenOptions::new().read(true).open(path)?;
let mut s = String::new();
file.read_to_string(&mut s)?;
let data: Result<LocalContext, error::Error> = match ron::de::from_str(&s) {
Ok(x) => Ok(x),
Err(_) => Err(error::Error::FailedToParseState),
};
data
}
pub fn unique_data() -> Result<BTreeMap<String, HashMap<String, HashSet<String>>>, error::Error> {
#[cfg(not(feature = "test_read"))]
let path = "data/unique_data.log";
#[cfg(feature = "test_read")]
let path = "data/unique_data.txt";
let mut file = OpenOptions::new().read(true).open(path)?;
let mut s = String::new();
file.read_to_string(&mut s)?;
let data: Result<BTreeMap<String, HashMap<String, HashSet<String>>>, error::Error> =
match ron::de::from_str(&s) {
Ok(x) => Ok(x),
Err(_) => Err(error::Error::FailedToParseState),
};
data
}
/// Rebuilds the entity -> encrypted-key-set map from the encrypt log.
///
/// The log is an append-only sequence of RON tuples `(...)(...)`; it is
/// wrapped in `[`…`]` and commas are inserted between tuples so the whole
/// thing parses as a `Vec<WriteWithEncryption>`.
pub fn encryption() -> Result<BTreeMap<String, HashSet<String>>, error::Error> {
    #[cfg(not(feature = "test_read"))]
    let path = "data/encrypt.log";
    #[cfg(feature = "test_read")]
    let path = "data/encrypt.txt";
    let mut file = OpenOptions::new().read(true).open(path)?;
    // `read_to_string` appends, so `s` ends up as "[<file contents>".
    let mut s = String::from('[');
    file.read_to_string(&mut s)?;
    s.push(']');
    // Turn back-to-back tuples into a comma-separated RON list.
    let s = s.replace(")(", "),(");
    let data: Result<Vec<WriteWithEncryption>, error::Error> = match ron::de::from_str(&s) {
        Ok(x) => Ok(x),
        Err(_) => Err(error::Error::FailedToParseState),
    };
    // Later entries for the same entity replace earlier ones.
    let data = data?
        .into_par_iter()
        .map(|enc| {
            (
                enc.entity,
                enc.encrypts.into_iter().collect::<HashSet<String>>(),
            )
        })
        .collect::<BTreeMap<String, HashSet<String>>>();
    Ok(data)
}
// Fixture-based tests, gated by the `test_read` feature so the readers use
// pre-made .txt fixtures instead of the live data/ logs.
#[cfg(test)]
#[cfg(feature = "test_read")]
mod test {
    use std::{fs::OpenOptions, io::Write};

    use super::*;
    use crate::model::DataRegister;

    // A record addressed by byte offset/length should be sliced out exactly.
    #[test]
    fn read_log_range() {
        let log_size = write_new();
        let data = DataRegister {
            file_name: "data/read_test.log".to_string(),
            offset: 30,
            bytes_length: log_size - 58,
        };
        let log = read_log(data).unwrap();
        assert_eq!(log, "i am too lazy to create.");
    }

    // Appends a known sentence and returns the number of bytes written.
    fn write_new() -> usize {
        let log =
            "this is a very long text that i am too lazy to create. Guess it is enough already.";
        let mut file = OpenOptions::new()
            .create(true)
            .append(true)
            .open("data/read_test.log")
            .unwrap();
        file.write(log.as_bytes()).unwrap()
    }

    #[test]
    fn encryption_test() {
        let encrypt = encryption().unwrap();
        let s = format!("{:?}", encrypt);
        assert!(s.contains("encrypt_ent"));
        assert!(s.contains("encrypt_ent2"));
        assert!(s.contains("name"));
        assert!(s.contains("cpf"));
    }

    #[test]
    fn offset_test() {
        let offset = offset();
        assert_eq!(offset.unwrap(), 701);
    }

    #[test]
    fn local_data_test() {
        let local_data = local_data();
        assert!(local_data.is_ok());
        assert_eq!(
            format!("{:?}", local_data),
            "Ok({\"encrypt_ent\": {}, \"encrypt_ent2\": {}, \"hello\": {50e68bc1-0c3b-4ffc-93be-46e57f59b415: (DataRegister { file_name: \"2021_02_10.log\", offset: 447, bytes_length: 153 }, [0, 0, 0, 0, 0, 0, 0, 0])}, \"oh_yeah\": {27367bd0-1966-4005-a8b5-5e323e1c3524: (DataRegister { file_name: \"2021_02_10.log\", offset: 180, bytes_length: 247 }, [0, 0, 0, 0, 0, 0, 0, 0])}})"
        );
    }

    #[test]
    fn unique_data_test() {
        let unique_data = unique_data();
        assert!(unique_data.is_ok());
        let body = format!("{:?}", unique_data);
        assert!(body.contains("\"uniq2_ent2\": {"));
        assert!(body.contains("\"rg\": {\"Precise"));
        assert!(body.contains("\"uniq_ent\": {"));
        assert!(body.contains("\"cpf\": {\"Precise"));
        assert!(body.contains("\"snn\": {}}}"));
    }
}
| rust | MIT | bcef59acbf1bb1509a8b3d1b2f914e45921ba026 | 2026-01-04T20:17:21.692961Z | false |
naomijub/wooridb | https://github.com/naomijub/wooridb/blob/bcef59acbf1bb1509a8b3d1b2f914e45921ba026/woori-db/src/http/mod.rs | woori-db/src/http/mod.rs | #[cfg(not(debug_assertions))]
use crate::auth::{
controllers as auth,
io::read_admin_info,
middlewares::{history_validator, wql_validator},
};
#[cfg(not(debug_assertions))]
use actix_web_httpauth::middleware::HttpAuthentication;
use crate::{
actors::{scheduler::Scheduler, wql::Executor},
controllers::entity_history,
io::read::{encryption, local_data, offset, unique_data},
repository::local::{LocalContext, SessionContext, UniquenessContext},
};
use crate::{
controllers::{query, tx},
repository::local::EncryptContext,
};
use actix::Actor;
use actix_web::{get, guard, web, HttpResponse, Responder};
use std::sync::atomic::AtomicUsize;
use std::sync::{Arc, Mutex};
/// Liveness probe endpoint: always responds `200 OK` with the body `pong!`.
#[get("/ping")]
pub async fn ping() -> impl Responder {
    HttpResponse::Ok().body("pong!")
}
/// Readiness probe endpoint: checks that the server can still spawn a shell
/// subprocess. Responds `202 Accepted` on success, `500` otherwise.
#[get("/~/ready")]
pub async fn readiness() -> impl Responder {
    // The probe itself is trivial; only the ability to fork/exec matters.
    let probe = std::process::Command::new("sh")
        .arg("-c")
        .arg("echo hello")
        .output();

    if probe.is_ok() {
        HttpResponse::Accepted()
    } else {
        HttpResponse::InternalServerError()
    }
}
/// Wires up the actix-web application: restores persisted state from disk,
/// starts the executor and scheduler actors, and registers the WQL, query,
/// and entity-history routes.
///
/// Release builds (`not(debug_assertions)`) additionally mount the `/auth`
/// scope and wrap the WQL and history routes in bearer-token authentication;
/// debug builds expose the same routes without auth.
pub fn routes(config: &mut web::ServiceConfig) {
    // Restore persisted state; fall back to empty contexts when the data
    // files are missing or unparseable.
    let local_context = local_data().map_or(LocalContext::new(), |map| map);
    let encrypt_context = encryption().map_or(EncryptContext::new(), |e| e);
    let uniqueness = unique_data().map_or(UniquenessContext::new(), |u| u);
    // Shared, mutex-guarded state handed to the request handlers.
    let wql_context = Arc::new(Mutex::new(local_context));
    let unique_context = Arc::new(Mutex::new(uniqueness));
    let encrypt_context = Arc::new(Mutex::new(encrypt_context));
    // Current byte offset of the write-ahead log, restored from disk.
    let write_offset = AtomicUsize::new(offset().map_or(0_usize, |o| o));
    let actor = Executor::new().start();
    // bcrypt-style hashing cost; defaults to 14 when the env var is unset.
    let env_cost = std::env::var("HASHING_COST").unwrap_or_else(|_| "14".to_owned());
    let cost = env_cost.parse::<u32>().expect("HASHING_COST must be a u32");
    let session_context = Arc::new(Mutex::new(SessionContext::new()));
    // Auth-related configuration only exists in release builds.
    #[cfg(not(debug_assertions))]
    let exp_time_str =
        std::env::var("SESSION_EXPIRATION_TIME").unwrap_or_else(|_| "3600".to_owned());
    #[cfg(not(debug_assertions))]
    let exp_time = exp_time_str.parse::<i64>().unwrap_or(3600);
    #[cfg(not(debug_assertions))]
    let admin_info = read_admin_info().unwrap();
    Scheduler.start();
    #[cfg(not(debug_assertions))]
    let wql_auth = HttpAuthentication::bearer(wql_validator);
    #[cfg(not(debug_assertions))]
    let history_auth = HttpAuthentication::bearer(history_validator);
    // Release configuration: auth scope plus bearer-protected WQL/history.
    #[cfg(not(debug_assertions))]
    config
        .data(session_context)
        .data(wql_context)
        .data(actor)
        .service(
            web::scope("/auth")
                .data(admin_info)
                .data(exp_time)
                .route("/createUser", web::post().to(auth::create_user))
                .route("/deleteUsers", web::post().to(auth::delete_users))
                .route("/putUserSession", web::put().to(auth::put_user_session)),
        )
        .service(
            web::scope("/wql")
                // Only requests declaring the WQL content type reach this scope.
                .guard(guard::Header("Content-Type", "application/wql"))
                .data(cost)
                .data(unique_context)
                .data(encrypt_context)
                .data(write_offset)
                .wrap(wql_auth)
                .route("/tx", web::post().to(tx::wql_handler))
                .route("/query", web::post().to(query::wql_handler)),
        )
        .service(
            web::scope("/entity-history")
                .wrap(history_auth)
                .route("", web::post().to(entity_history::history_handler)),
        )
        .route("", web::get().to(HttpResponse::NotFound));
    // Debug configuration: same routes without any authentication layer.
    #[cfg(debug_assertions)]
    config
        .data(session_context)
        .data(wql_context)
        .data(actor)
        .service(
            web::scope("/wql")
                .guard(guard::Header("Content-Type", "application/wql"))
                .data(cost)
                .data(unique_context)
                .data(encrypt_context)
                .data(write_offset)
                .route("/tx", web::post().to(tx::wql_handler))
                .route("/query", web::post().to(query::wql_handler)),
        )
        .route(
            "/entity-history",
            web::post().to(entity_history::history_handler),
        )
        .route("", web::get().to(HttpResponse::NotFound));
}
// Endpoint tests for the liveness/readiness probes.
#[cfg(test)]
mod test {
    use super::*;
    use actix_web::{body::Body, test, App};
    // /ping must return 200 with the exact body "pong!".
    #[actix_rt::test]
    async fn test_ping_get() {
        let mut app = test::init_service(App::new().service(ping)).await;
        let req = test::TestRequest::get().uri("/ping").to_request();
        let mut resp = test::call_service(&mut app, req).await;
        assert!(resp.status().is_success());
        let body = resp.take_body();
        let body = body.as_ref().unwrap();
        assert_eq!(&Body::from("pong!"), body)
    }
    // /~/ready must return a success status (202 in practice).
    #[actix_rt::test]
    async fn test_ready_get() {
        let mut app = test::init_service(App::new().service(readiness)).await;
        let req = test::TestRequest::get().uri("/~/ready").to_request();
        let resp = test::call_service(&mut app, req).await;
        assert!(resp.status().is_success());
    }
}
| rust | MIT | bcef59acbf1bb1509a8b3d1b2f914e45921ba026 | 2026-01-04T20:17:21.692961Z | false |
naomijub/wooridb | https://github.com/naomijub/wooridb/blob/bcef59acbf1bb1509a8b3d1b2f914e45921ba026/woori-db/benches/wql.rs | woori-db/benches/wql.rs | use std::str::FromStr;
use criterion::{criterion_group, criterion_main, Criterion};
use wql::Wql;
/// Benchmarks pure WQL parsing (no server round-trips) for the four main
/// statement kinds: create, insert, select-all, and select-with-args.
fn criterion_benchmark(c: &mut Criterion) {
    // (benchmark name, statement to parse) pairs. Names are kept as-is,
    // including the historical "inser_entity" spelling, so saved baselines
    // still match.
    let cases = [
        ("create_entity", "create entity my_entity"),
        (
            "inser_entity",
            "insert {a: 1, b: 2.3, c: 'g', d: \"str\",} into my_entity",
        ),
        ("select_all", "select * from my_entity"),
        ("select_args", "select #{a, b, c,} from my_entity"),
    ];

    for (name, statement) in cases.iter() {
        c.bench_function(name, |b| b.iter(|| Wql::from_str(statement)));
    }
}
// Register the parsing benchmarks with criterion's generated main entry point.
criterion_group!(benches, criterion_benchmark);
criterion_main!(benches);
| rust | MIT | bcef59acbf1bb1509a8b3d1b2f914e45921ba026 | 2026-01-04T20:17:21.692961Z | false |
naomijub/wooridb | https://github.com/naomijub/wooridb/blob/bcef59acbf1bb1509a8b3d1b2f914e45921ba026/woori-db/benches/tx.rs | woori-db/benches/tx.rs | use criterion::{criterion_group, criterion_main, Criterion};
use rand::prelude::*;
use serde::{Deserialize, Serialize};
use std::process::Command;
use uuid::Uuid;
/// End-to-end transaction benchmarks: each iteration shells out to `curl`
/// against a WooriDB server expected to be running on localhost:1438, so the
/// numbers include process-spawn and HTTP overhead.
fn criterion_benchmark(c: &mut Criterion) {
    // Random entity name so repeated runs don't collide with state left over
    // from previous benchmark runs.
    let entity = get_rand_value();
    let ent_str = entity.as_str();
    c.bench_function("create_entity", |b| {
        b.iter(|| {
            curl_create(ent_str);
        })
    });
    c.bench_function("insert_entity", |b| {
        b.iter(|| {
            curl_insert(ent_str);
        })
    });
    // Insert once up front so the update benchmarks have a stable id to target.
    let id = curl_insert_with_id(ent_str);
    c.bench_function("update_set_entity", |b| {
        b.iter(|| {
            curl_update_set(ent_str, id);
        })
    });
    c.bench_function("update_content_entity", |b| {
        b.iter(|| {
            curl_update_content(ent_str, id);
        })
    });
    // NOTE(review): the benches below are disabled — delete/evict remove the
    // entity state that the benchmarks above depend on; confirm server-side
    // behavior before re-enabling them.
    // c.bench_function("delete_entity", |b| {
    //     b.iter(|| {
    //         curl_delete(ent_str, id);
    //     })
    // });
    // c.bench_function("evict_entity_id", |b| {
    //     b.iter(|| {
    //         curl_evict_id(ent_str, id);
    //     })
    // });
    // c.bench_function("evict_entity", |b| {
    //     b.iter(|| {
    //         curl_evict_entity(ent_str);
    //     })
    // });
}
// Register the transaction benchmarks with criterion's generated main entry point.
criterion_group!(benches, criterion_benchmark);
criterion_main!(benches);
/// Issues a `CREATE ENTITY` transaction for `entity` by shelling out to
/// `curl` against the local WooriDB instance. Panics when the response body
/// is not valid UTF-8.
fn curl_create(entity: &str) {
    let action = format!("CREATE ENTITY {}", entity);
    let output = Command::new("curl")
        .arg("-X")
        .arg("POST")
        .arg("-H")
        .arg("Content-Type: application/wql")
        .arg("localhost:1438/wql/tx")
        .arg("-d")
        .arg(&action)
        .output()
        .expect("failed to execute process");

    if let Err(e) = String::from_utf8(output.stdout) {
        panic!("{:?}", e);
    }
}
/// Inserts a fixed `{a: 123,}` record into `entity` by shelling out to
/// `curl`. Panics when the response body is not valid UTF-8.
fn curl_insert(entity: &str) {
    let action = format!("INSERT {{a: 123,}} INTO {}", entity);
    let output = Command::new("curl")
        .arg("-X")
        .arg("POST")
        .arg("-H")
        .arg("Content-Type: application/wql")
        .arg("localhost:1438/wql/tx")
        .arg("-d")
        .arg(&action)
        .output()
        .expect("failed to execute process");

    if let Err(e) = String::from_utf8(output.stdout) {
        panic!("{:?}", e);
    }
}
/// Runs an `UPDATE ... SET` transaction against entity instance `id`,
/// setting `a` to 3 and `g` to nil. Panics on a non-UTF-8 response body.
fn curl_update_set(entity: &str, id: uuid::Uuid) {
    let action = format!("UPDATE {} SET {{a: 3, g: NiL, }} into {}", entity, id);
    let output = Command::new("curl")
        .arg("-X")
        .arg("POST")
        .arg("-H")
        .arg("Content-Type: application/wql")
        .arg("localhost:1438/wql/tx")
        .arg("-d")
        .arg(&action)
        .output()
        .expect("failed to execute process");

    if let Err(e) = String::from_utf8(output.stdout) {
        panic!("{:?}", e);
    }
}
/// Runs an `UPDATE ... CONTENT` transaction against entity instance `id`.
/// Panics on a non-UTF-8 response body.
fn curl_update_content(entity: &str, id: uuid::Uuid) {
    let action = format!("UPDATE {} CONTENT {{a: 3, g: NiL, }} into {}", entity, id);
    let output = Command::new("curl")
        .arg("-X")
        .arg("POST")
        .arg("-H")
        .arg("Content-Type: application/wql")
        .arg("localhost:1438/wql/tx")
        .arg("-d")
        .arg(&action)
        .output()
        .expect("failed to execute process");

    if let Err(e) = String::from_utf8(output.stdout) {
        panic!("{:?}", e);
    }
}
/// Deletes the latest state of entity instance `id` via a `DELETE ... FROM`
/// transaction. Only referenced by the disabled benches above, hence the
/// dead_code allowance.
#[allow(dead_code)]
fn curl_delete(entity: &str, id: uuid::Uuid) {
    let action = format!("DELETE {} FROM {}", id, entity);
    let val = Command::new("curl")
        .args(&["-X", "POST"])
        .args(&["-H", "Content-Type: application/wql"])
        .arg("localhost:1438/wql/tx")
        .args(&["-d", &action])
        .output()
        .expect("failed to execute process")
        .stdout;
    // Panic loudly if the server returned a non-UTF-8 body.
    match String::from_utf8(val) {
        Ok(_) => (),
        Err(e) => panic!("{:?}", e),
    };
}
/// Evicts entity instance `id` via an `EVICT ... FROM` transaction. Only
/// referenced by the disabled benches above, hence the dead_code allowance.
#[allow(dead_code)]
fn curl_evict_id(entity: &str, id: uuid::Uuid) {
    let action = format!("EVICT {} FROM {}", id, entity);
    let val = Command::new("curl")
        .args(&["-X", "POST"])
        .args(&["-H", "Content-Type: application/wql"])
        .arg("localhost:1438/wql/tx")
        .args(&["-d", &action])
        .output()
        .expect("failed to execute process")
        .stdout;
    // Panic loudly if the server returned a non-UTF-8 body.
    match String::from_utf8(val) {
        Ok(_) => (),
        Err(e) => panic!("{:?}", e),
    };
}
/// Evicts the entire entity tree via an `EVICT` transaction. Only referenced
/// by the disabled benches above, hence the dead_code allowance.
#[allow(dead_code)]
fn curl_evict_entity(entity: &str) {
    let action = format!("EVICT {}", entity);
    let val = Command::new("curl")
        .args(&["-X", "POST"])
        .args(&["-H", "Content-Type: application/wql"])
        .arg("localhost:1438/wql/tx")
        .args(&["-d", &action])
        .output()
        .expect("failed to execute process")
        .stdout;
    // Panic loudly if the server returned a non-UTF-8 body.
    match String::from_utf8(val) {
        Ok(_) => (),
        Err(e) => panic!("{:?}", e),
    };
}
/// Inserts a fixed `{a: 123,}` record into `entity` and returns the uuid the
/// server assigned, parsed out of the RON transaction response. Panics if
/// the response is not UTF-8, not valid RON, or carries no uuid.
fn curl_insert_with_id(entity: &str) -> uuid::Uuid {
    let action = format!("INSERT {{a: 123,}} INTO {}", entity);
    let output = Command::new("curl")
        .arg("-X")
        .arg("POST")
        .arg("-H")
        .arg("Content-Type: application/wql")
        .arg("localhost:1438/wql/tx")
        .arg("-d")
        .arg(&action)
        .output()
        .expect("failed to execute process");

    let body = String::from_utf8(output.stdout).unwrap();
    let response: TxResponse = ron::de::from_str(&body).unwrap();
    response.uuid.unwrap()
}
/// Produces a pseudo-random entity-name suffix so benchmark runs don't
/// collide with state left by previous runs.
fn get_rand_value() -> String {
    let suffix: usize = rand::thread_rng().gen();
    format!("fuck_yeah{}", suffix)
}
/// Kind of transaction reported in a WooriDB transaction response. Mirrors
/// the server-side type so the RON response body deserializes cleanly.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum TxType {
    Create,
    Insert,
    UpdateSet,
    UpdateContent,
    Delete,
    EvictEntity,
    EvictEntityTree,
}
/// Deserialized body of a WooriDB transaction response. Only `uuid` is read
/// by these benches (to target follow-up update transactions).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TxResponse {
    tx_type: TxType,
    entity: String,
    // Present for inserts; identifies the created entity instance.
    pub uuid: Option<Uuid>,
    state: String,
    message: String,
}
| rust | MIT | bcef59acbf1bb1509a8b3d1b2f914e45921ba026 | 2026-01-04T20:17:21.692961Z | false |
naomijub/wooridb | https://github.com/naomijub/wooridb/blob/bcef59acbf1bb1509a8b3d1b2f914e45921ba026/woori-db/benches/history.rs | woori-db/benches/history.rs | use criterion::{criterion_group, criterion_main, Criterion};
use rand::prelude::*;
use serde::{Deserialize, Serialize};
use std::process::Command;
use uuid::Uuid;
/// Benchmarks the entity-history endpoint as the number of registries for a
/// single entity instance grows: first with 10 registries (1 insert + 9
/// updates), then with 20. Requires a WooriDB server on localhost:1438.
fn criterion_benchmark(c: &mut Criterion) {
    let ent_str = "test_history_bench";
    curl_create(ent_str);
    let id = curl_insert_with_id(ent_str);

    // 9 updates on top of the insert => 10 registries total.
    for _ in 0..9 {
        curl_update_set(ent_str, id);
    }
    c.bench_function("history_10_registries_for_entity", |b| {
        b.iter(|| curl_history(ent_str, id))
    });

    // 10 more updates => 20 registries total.
    for _ in 0..10 {
        curl_update_set(ent_str, id);
    }
    c.bench_function("history_20_registries_for_entity", |b| {
        b.iter(|| curl_history(ent_str, id))
    });
}
// Register the history benchmarks with criterion's generated main entry point.
criterion_group!(benches, criterion_benchmark);
criterion_main!(benches);
/// Issues a `CREATE ENTITY` transaction for `entity` by shelling out to
/// `curl` against the local WooriDB instance. Panics on a non-UTF-8 body.
fn curl_create(entity: &str) {
    let action = format!("CREATE ENTITY {}", entity);
    let output = Command::new("curl")
        .arg("-X")
        .arg("POST")
        .arg("-H")
        .arg("Content-Type: application/wql")
        .arg("localhost:1438/wql/tx")
        .arg("-d")
        .arg(&action)
        .output()
        .expect("failed to execute process");

    if let Err(e) = String::from_utf8(output.stdout) {
        panic!("{:?}", e);
    }
}
/// Inserts a fixed `{a: 123,}` record into `entity` and returns the uuid the
/// server assigned, parsed from the RON transaction response.
fn curl_insert_with_id(entity: &str) -> uuid::Uuid {
    let action = format!("INSERT {{a: 123,}} INTO {}", entity);
    let output = Command::new("curl")
        .arg("-X")
        .arg("POST")
        .arg("-H")
        .arg("Content-Type: application/wql")
        .arg("localhost:1438/wql/tx")
        .arg("-d")
        .arg(&action)
        .output()
        .expect("failed to execute process");

    let body = String::from_utf8(output.stdout).unwrap();
    let response: TxResponse = ron::de::from_str(&body).unwrap();
    response.uuid.unwrap()
}
/// Runs an `UPDATE ... SET` transaction against entity instance `id` with a
/// fresh random `b` value, so each call produces a new history registry.
fn curl_update_set(entity: &str, id: uuid::Uuid) {
    let rand_value = get_rand_value();
    let action = format!(
        "UPDATE {} SET {{a: 3, b: \"{}\", }} into {}",
        entity, rand_value, id
    );
    let output = Command::new("curl")
        .arg("-X")
        .arg("POST")
        .arg("-H")
        .arg("Content-Type: application/wql")
        .arg("localhost:1438/wql/tx")
        .arg("-d")
        .arg(&action)
        .output()
        .expect("failed to execute process");

    if let Err(e) = String::from_utf8(output.stdout) {
        panic!("{:?}", e);
    }
}
/// Produces a pseudo-random value suffix so each update writes distinct data.
fn get_rand_value() -> String {
    let suffix: usize = rand::thread_rng().gen();
    format!("fuck_yeah{}", suffix)
}
/// Kind of transaction reported in a WooriDB transaction response. Mirrors
/// the server-side type so the RON response body deserializes cleanly.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum TxType {
    Create,
    Insert,
    UpdateSet,
    UpdateContent,
    Delete,
    EvictEntity,
    EvictEntityTree,
}
/// Deserialized body of a WooriDB transaction response. Only `uuid` is read
/// by these benches (to target the history lookups).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TxResponse {
    tx_type: TxType,
    entity: String,
    // Present for inserts; identifies the created entity instance.
    pub uuid: Option<Uuid>,
    state: String,
    message: String,
}
/// Fetches the full history of entity instance `id` from the entity-history
/// endpoint, passing the RON request body expected by the handler. Prints a
/// progress marker on success; panics on a non-UTF-8 body.
fn curl_history(entity: &str, id: Uuid) {
    let body = format!("(entity_key: \"{}\", entity_id: \"{}\",)", entity, id);
    let output = Command::new("curl")
        .arg("-X")
        .arg("POST")
        .arg("localhost:1438/entity-history")
        .arg("-d")
        .arg(&body)
        .output()
        .expect("failed to execute process");

    match String::from_utf8(output.stdout) {
        Ok(_) => print!("OK,"),
        Err(e) => panic!("{:?}", e),
    }
}
| rust | MIT | bcef59acbf1bb1509a8b3d1b2f914e45921ba026 | 2026-01-04T20:17:21.692961Z | false |
naomijub/wooridb | https://github.com/naomijub/wooridb/blob/bcef59acbf1bb1509a8b3d1b2f914e45921ba026/woori-db/benches/query.rs | woori-db/benches/query.rs | use criterion::{criterion_group, criterion_main, Criterion};
use std::process::Command;
/// Benchmarks `SELECT *` latency as the number of stored entity instances
/// grows (1, 10, and 20 inserted records). Requires a WooriDB server on
/// localhost:1438.
fn criterion_benchmark(c: &mut Criterion) {
    let ent_str = "bench_entity_name";
    curl_create(ent_str);

    curl_insert(ent_str);
    c.bench_function("select_all_1_entity", |b| b.iter(|| curl_select(ent_str)));

    // 9 more inserts => 10 records total.
    for _ in 0..9 {
        curl_insert(ent_str);
    }
    c.bench_function("select_all_10_entity", |b| b.iter(|| curl_select(ent_str)));

    // 10 more inserts => 20 records total.
    for _ in 0..10 {
        curl_insert(ent_str);
    }
    c.bench_function("select_all_20_entity", |b| b.iter(|| curl_select(ent_str)));
}
// Register the query benchmarks with criterion's generated main entry point.
criterion_group!(benches, criterion_benchmark);
criterion_main!(benches);
/// Issues a `CREATE ENTITY` transaction for `entity` by shelling out to
/// `curl` against the local WooriDB instance. Panics on a non-UTF-8 body.
fn curl_create(entity: &str) {
    let action = format!("CREATE ENTITY {}", entity);
    let output = Command::new("curl")
        .arg("-X")
        .arg("POST")
        .arg("-H")
        .arg("Content-Type: application/wql")
        .arg("localhost:1438/wql/tx")
        .arg("-d")
        .arg(&action)
        .output()
        .expect("failed to execute process");

    if let Err(e) = String::from_utf8(output.stdout) {
        panic!("{:?}", e);
    }
}
/// Inserts a fixed `{a: 123,}` record into `entity` by shelling out to
/// `curl`. Panics on a non-UTF-8 response body.
fn curl_insert(entity: &str) {
    let action = format!("INSERT {{a: 123,}} INTO {}", entity);
    let output = Command::new("curl")
        .arg("-X")
        .arg("POST")
        .arg("-H")
        .arg("Content-Type: application/wql")
        .arg("localhost:1438/wql/tx")
        .arg("-d")
        .arg(&action)
        .output()
        .expect("failed to execute process");

    if let Err(e) = String::from_utf8(output.stdout) {
        panic!("{:?}", e);
    }
}
/// Runs `SELECT * FROM entity` against the query endpoint. Prints a progress
/// marker on success; panics on a non-UTF-8 response body.
fn curl_select(entity: &str) {
    let action = format!("SELECT * FROM {}", entity);
    let output = Command::new("curl")
        .arg("-X")
        .arg("POST")
        .arg("-H")
        .arg("Content-Type: application/wql")
        .arg("localhost:1438/wql/query")
        .arg("-d")
        .arg(&action)
        .output()
        .expect("failed to execute process");

    match String::from_utf8(output.stdout) {
        Ok(_) => print!("OK,"),
        Err(e) => panic!("{:?}", e),
    }
}
| rust | MIT | bcef59acbf1bb1509a8b3d1b2f914e45921ba026 | 2026-01-04T20:17:21.692961Z | false |
KallDrexx/mmids | https://github.com/KallDrexx/mmids/blob/3d732616da3bc8976dbcd5f859758ed3f06dc38f/mmids-rtmp/src/lib.rs | mmids-rtmp/src/lib.rs | //! RTMP components for mmids. Includes the ability for a mmids application to act as an RTMP and
//! RTMPS server, accepting connections by RTMP clients and having their media routed into
//! mmids workflows
pub mod rtmp_server;
pub mod utils;
pub mod workflow_steps;
| rust | MIT | 3d732616da3bc8976dbcd5f859758ed3f06dc38f | 2026-01-04T20:17:23.887411Z | false |
KallDrexx/mmids | https://github.com/KallDrexx/mmids/blob/3d732616da3bc8976dbcd5f859758ed3f06dc38f/mmids-rtmp/src/utils.rs | mmids-rtmp/src/utils.rs | use mmids_core::VideoTimestamp;
use rml_rtmp::sessions::StreamMetadata;
use rml_rtmp::time::RtmpTimestamp;
use std::collections::HashMap;
use std::time::Duration;
use tracing::error;
/// Creates a new video timestamp from RTMP data. RTMP packets contain a timestamp in the
/// RTMP header itself and a composition time offset in the `AVCVIDEOPACKET` header. The RTMP
/// timestamp is the decoding timestamp (dts), while the composition time offset is added to the
/// dts to get the presentation timestamp (pts).
pub fn video_timestamp_from_rtmp_data(
    rtmp_timestamp: RtmpTimestamp,
    mut composition_time_offset: i32,
) -> VideoTimestamp {
    // The composition time offset is a signed 24-bit integer on the wire, so
    // the valid range is -2^23 ..= 2^23 - 1. (The previous upper bound of
    // 838607 was missing a digit.)
    if !(-8_388_608..=8_388_607).contains(&composition_time_offset) {
        error!("Composition time offset of {composition_time_offset} is out of 24 bit range. Leaving at zero");
        composition_time_offset = 0;
    }

    // pts = dts + offset, computed in i64 so a negative offset cannot wrap
    // (casting a negative i32 straight to u64 produced a huge value before).
    // Clamp at zero since Duration cannot represent a negative time.
    let dts_ms = rtmp_timestamp.value as i64;
    let pts_ms = (dts_ms + composition_time_offset as i64).max(0);

    VideoTimestamp::from_durations(
        Duration::from_millis(dts_ms as u64),
        Duration::from_millis(pts_ms as u64),
    )
}
/// Takes items from an RTMP stream metadata message and maps them to standardized key/value
/// entries in a hash map. Fields that are `None` on the metadata produce no entry.
pub fn stream_metadata_to_hash_map(metadata: StreamMetadata) -> HashMap<String, String> {
    // Local helper so each optional metadata field maps to one line below.
    fn put(map: &mut HashMap<String, String>, key: &str, value: Option<String>) {
        if let Some(value) = value {
            map.insert(key.to_string(), value);
        }
    }

    let mut map = HashMap::new();
    put(&mut map, "videocodecid", metadata.video_codec);
    put(
        &mut map,
        "audiodatarate",
        metadata.audio_bitrate_kbps.map(|x| x.to_string()),
    );
    put(
        &mut map,
        "audiochannels",
        metadata.audio_channels.map(|x| x.to_string()),
    );
    put(&mut map, "audiocodecid", metadata.audio_codec);
    put(
        &mut map,
        "stereo",
        metadata.audio_is_stereo.map(|x| x.to_string()),
    );
    put(
        &mut map,
        "audiosamplerate",
        metadata.audio_sample_rate.map(|x| x.to_string()),
    );
    put(&mut map, "encoder", metadata.encoder);
    put(
        &mut map,
        "videodatarate",
        metadata.video_bitrate_kbps.map(|x| x.to_string()),
    );
    put(
        &mut map,
        "width",
        metadata.video_width.map(|x| x.to_string()),
    );
    put(
        &mut map,
        "height",
        metadata.video_height.map(|x| x.to_string()),
    );
    put(
        &mut map,
        "framerate",
        metadata.video_frame_rate.map(|x| x.to_string()),
    );

    map
}
/// Attempts to extract RTMP stream metadata values from a hash map. Keys that
/// are absent or fail to parse leave the corresponding metadata field untouched.
pub fn hash_map_to_stream_metadata(properties: &HashMap<String, String>) -> StreamMetadata {
    // Local helper: parse the named property and assign it only when the key
    // exists and parses, preserving the field's prior value otherwise.
    fn assign<T: std::str::FromStr>(
        target: &mut Option<T>,
        properties: &HashMap<String, String>,
        key: &str,
    ) {
        if let Some(Ok(value)) = properties.get(key).map(|v| v.parse()) {
            *target = Some(value);
        }
    }

    let mut metadata = StreamMetadata::new();
    assign(&mut metadata.video_codec, properties, "videocodecid");
    assign(&mut metadata.audio_bitrate_kbps, properties, "audiodatarate");
    assign(&mut metadata.audio_channels, properties, "audiochannels");
    assign(&mut metadata.audio_codec, properties, "audiocodecid");
    assign(&mut metadata.audio_is_stereo, properties, "stereo");
    assign(&mut metadata.audio_sample_rate, properties, "audiosamplerate");
    // String parsing is infallible, so this matches the original clone-if-present.
    assign(&mut metadata.encoder, properties, "encoder");
    assign(&mut metadata.video_bitrate_kbps, properties, "videodatarate");
    assign(&mut metadata.video_width, properties, "width");
    assign(&mut metadata.video_height, properties, "height");
    assign(&mut metadata.video_frame_rate, properties, "framerate");

    metadata
}
| rust | MIT | 3d732616da3bc8976dbcd5f859758ed3f06dc38f | 2026-01-04T20:17:23.887411Z | false |
KallDrexx/mmids | https://github.com/KallDrexx/mmids/blob/3d732616da3bc8976dbcd5f859758ed3f06dc38f/mmids-rtmp/src/rtmp_server/mod.rs | mmids-rtmp/src/rtmp_server/mod.rs | //! This endpoint acts as a server for RTMP clients that want to publish or watch RTMP live streams.
//! Workflow steps send a message requesting to allow RTMP publishers or watchers for specific
//! port, RTMP application and stream key combinations. The RTMP server endpoint will register the
//! specified port with the networking infrastructure for listening for connections, and any
//! networked traffic over that port will be forwarded to this endpoint.
//!
//! It will then perform handshaking and all other RTMP protocol actions, disconnecting clients if
//! they don't conform to the RTMP protocol correctly, or if they attempt to publish or watch an
//! application name and stream key combination that isn't actively registered.
//!
//! Incoming publish actions (such as new metadata, media packets, etc...) are passed to the workflow
//! steps that were registered for that application/stream key combination. Likewise, when the
//! endpoint receives media from workflow steps it will route that media to the correct RTMP watcher
//! clients
mod actor;
use crate::rtmp_server::actor::actor_types::FutureResult;
use crate::utils::hash_map_to_stream_metadata;
use actor::actor_types::RtmpServerEndpointActor;
use bytes::Bytes;
use mmids_core::actor_utils::notify_on_unbounded_recv;
use mmids_core::codecs::{AUDIO_CODEC_AAC_RAW, VIDEO_CODEC_H264_AVC};
use mmids_core::net::tcp::TcpSocketRequest;
use mmids_core::net::{ConnectionId, IpAddress};
use mmids_core::reactors::ReactorWorkflowUpdate;
use mmids_core::workflows::metadata::{MetadataKey, MetadataValue};
use mmids_core::workflows::MediaNotificationContent;
use mmids_core::StreamId;
use rml_rtmp::sessions::StreamMetadata;
use rml_rtmp::time::RtmpTimestamp;
use std::collections::HashMap;
use std::sync::Arc;
use tokio::sync::mpsc::{unbounded_channel, UnboundedReceiver, UnboundedSender};
use tokio::sync::oneshot::Sender;
/// Starts a new RTMP server endpoint, returning a channel that can be used to send notifications
/// and requests to it.
pub fn start_rtmp_server_endpoint(
    socket_request_sender: UnboundedSender<TcpSocketRequest>,
) -> UnboundedSender<RtmpEndpointRequest> {
    // Public channel handed to callers, plus the internal channel the actor reads.
    let (endpoint_sender, endpoint_receiver) = unbounded_channel();
    let (actor_sender, actor_receiver) = unbounded_channel();
    // Forward every request from the public channel onto the actor's channel,
    // and notify the actor once all request senders have been dropped.
    notify_on_unbounded_recv(
        endpoint_receiver,
        actor_sender.clone(),
        FutureResult::EndpointRequestReceived,
        || FutureResult::NoMoreEndpointRequesters,
    );
    let endpoint = RtmpServerEndpointActor {
        internal_actor: actor_sender,
        ports: HashMap::new(),
    };
    // The actor owns its receive loop on a dedicated tokio task.
    tokio::spawn(endpoint.run(actor_receiver, socket_request_sender));
    endpoint_sender
}
/// Specifies how a stream key should be registered for playback or publishing
#[derive(Clone, Hash, Eq, PartialEq, Debug)]
pub enum StreamKeyRegistration {
    /// All stream keys for the rtmp application should be registered
    Any,
    /// Only set up registration for the exact stream key
    Exact(Arc<String>),
}
/// Specifies if there are any IP address restrictions as part of an RTMP server registration
#[derive(Debug, PartialEq, Eq)]
pub enum IpRestriction {
    /// All IP addresses are allowed
    None,
    /// Only the specified IP addresses are allowed.
    Allow(Vec<IpAddress>),
    /// All IP addresses are allowed except for the ones specified.
    Deny(Vec<IpAddress>),
}
/// Type of registration the request is related to
#[derive(Debug)]
pub enum RegistrationType {
    /// Registration for clients that send media in
    Publisher,
    /// Registration for clients that receive media
    Watcher,
}
/// Operations the rtmp server endpoint is being requested to make. Requests
/// are sent over the channel returned by `start_rtmp_server_endpoint`.
#[derive(Debug)]
pub enum RtmpEndpointRequest {
    /// Requests the RTMP server to allow publishers on the given port, app, and stream key
    /// combinations.
    ListenForPublishers {
        /// Port to listen for RTMP publisher connections on
        port: u16,
        /// Name of the RTMP application publishers will connect to
        rtmp_app: Arc<String>,
        /// What stream key publishers should be using
        rtmp_stream_key: StreamKeyRegistration,
        /// Channel that the rtmp server endpoint should respond with
        message_channel: UnboundedSender<RtmpEndpointPublisherMessage>,
        /// If specified, new media streams being published from this registration will be given
        /// the stream id specified. If no id is given than one will be generated. This is useful
        /// to correlate media streams that may have been pulled, processed externally, then brought
        /// back in for later workflow steps (e.g. an external transcoding workflow).
        stream_id: Option<StreamId>,
        /// What IP restriction rules should be in place for this registration
        ip_restrictions: IpRestriction,
        /// If true, this port should be on a TLS socket (i.e. RTMPS)
        use_tls: bool,
        /// If true, then publishers will not be automatically accepted even if they connect to
        /// the correct app/stream key combination and pass ip restrictions. Instead the registrant
        /// should be asked for final verification if the publisher should be allowed or not.
        requires_registrant_approval: bool,
    },
    /// Requests the RTMP server to allow clients to receive video on the given port, app,
    /// and stream key combinations
    ListenForWatchers {
        /// Port to listen on
        port: u16,
        /// Name of the RTMP application playback clients will connect to
        rtmp_app: Arc<String>,
        /// Stream keys clients can receive video on
        rtmp_stream_key: StreamKeyRegistration,
        /// The channel that the rtmp server endpoint will send notifications to
        notification_channel: UnboundedSender<RtmpEndpointWatcherNotification>,
        /// The channel that the registrant will send updated media data to the rtmp endpoint on
        media_channel: UnboundedReceiver<RtmpEndpointMediaMessage>,
        /// What IP restriction rules should be in place for this registration
        ip_restrictions: IpRestriction,
        /// If true, this port should be on a TLS socket (i.e. RTMPS)
        use_tls: bool,
        /// If true, then watchers will not be automatically accepted even if they connect to
        /// the correct app/stream key combination and pass ip restrictions. Instead the registrant
        /// should be asked for final verification if the watcher should be allowed or not.
        requires_registrant_approval: bool,
    },
    /// Requests the specified registration should be removed
    RemoveRegistration {
        /// The type of registration that is being removed
        registration_type: RegistrationType,
        /// Port the removed registrant was listening on
        port: u16,
        /// The RTMP application name that the registrant was listening on
        rtmp_app: Arc<String>,
        /// The stream key the registrant had registered for
        rtmp_stream_key: StreamKeyRegistration,
    },
}
/// Response to approval/validation requests
#[derive(Debug)]
pub enum ValidationResponse {
    /// The connection is approved. The provided channel will carry workflow
    /// updates from the reactor tied to this connection.
    Approve {
        reactor_update_channel: UnboundedReceiver<ReactorWorkflowUpdate>,
    },
    /// The connection is rejected and should not proceed.
    Reject,
}
/// Messages the rtmp server endpoint will send to publisher registrants.
#[derive(Debug)]
pub enum RtmpEndpointPublisherMessage {
    /// Notification that the publisher registration failed. No further messages will be sent
    /// if this is sent.
    PublisherRegistrationFailed,
    /// Notification that the publisher registration succeeded.
    PublisherRegistrationSuccessful,
    /// Notification that a new RTMP connection has been made and they have requested to be a
    /// publisher on a stream key, but they require validation before being approved.
    PublisherRequiringApproval {
        /// Unique identifier for the TCP connection that's requesting to be a publisher
        connection_id: ConnectionId,
        /// The stream key that the connection is requesting to be a publisher to
        stream_key: Arc<String>,
        /// Channel to send the approval or rejection response to
        response_channel: Sender<ValidationResponse>,
    },
    /// Notification that a new RTMP connection has been made and is publishing media
    NewPublisherConnected {
        /// Unique identifier for the TCP connection that's publishing
        connection_id: ConnectionId,
        /// Unique identifier for the stream.
        stream_id: StreamId,
        /// Actual stream key that this stream is coming in from. Mostly used if the registrant
        /// specified that Any stream key would be allowed.
        stream_key: Arc<String>,
        /// If provided, this is a channel which will receive workflow updates from a reactor
        /// tied to this publisher
        reactor_update_channel: Option<UnboundedReceiver<ReactorWorkflowUpdate>>,
    },
    /// Notification that a publisher has stopped publishing. It may still be connected to the
    /// server, but it is no longer in a publisher state.
    PublishingStopped {
        /// Unique identifier for the TCP connection that stopped publishing
        connection_id: ConnectionId,
    },
    /// An RTMP publisher has sent in new stream metadata information
    StreamMetadataChanged {
        /// The connection that sent the metadata
        publisher: ConnectionId,
        /// The metadata values that were received
        metadata: StreamMetadata,
    },
    /// An RTMP publisher has sent in new video data
    NewVideoData {
        /// The connection that sent the video payload
        publisher: ConnectionId,
        /// True when this payload contains a keyframe
        is_keyframe: bool,
        /// True when this payload is a codec sequence header rather than picture data
        is_sequence_header: bool,
        /// Raw video payload bytes
        data: Bytes,
        /// Decoding timestamp from the RTMP chunk header
        timestamp: RtmpTimestamp,
        /// Signed offset added to the timestamp to derive the presentation time
        composition_time_offset: i32,
    },
    /// An RTMP publisher has sent in new audio data
    NewAudioData {
        /// The connection that sent the audio payload
        publisher: ConnectionId,
        /// True when this payload is a codec sequence header rather than audio samples
        is_sequence_header: bool,
        /// Raw audio payload bytes
        data: Bytes,
        /// Timestamp from the RTMP chunk header
        timestamp: RtmpTimestamp,
    },
}
/// Messages the rtmp server endpoint will send to watcher registrants
#[derive(Debug)]
pub enum RtmpEndpointWatcherNotification {
    /// The request to register for watchers has failed. No further messages will be sent
    /// afterwards.
    WatcherRegistrationFailed,
    /// The request to register for watchers was successful
    WatcherRegistrationSuccessful,
    /// Notification that a new RTMP connection has been made and they have requested to be a
    /// watcher on a stream key, but they require validation before being approved.
    WatcherRequiringApproval {
        /// Unique identifier for the TCP connection that's requesting to be a watcher
        connection_id: ConnectionId,
        /// The stream key that the connection is requesting to be a watcher of
        stream_key: Arc<String>,
        /// Channel to send the approval or rejection response to
        response_channel: Sender<ValidationResponse>,
    },
    /// Notifies the registrant that at least one watcher is now watching on a particular
    /// stream key,
    StreamKeyBecameActive {
        /// The stream key that now has at least one watcher
        stream_key: Arc<String>,
        /// If provided, a channel carrying workflow updates from a reactor for this stream key
        reactor_update_channel: Option<UnboundedReceiver<ReactorWorkflowUpdate>>,
    },
    /// Notifies the registrant that the last watcher has disconnected on the stream key, and
    /// there are no longer anyone watching
    StreamKeyBecameInactive { stream_key: Arc<String> },
}
/// Message watcher registrants send to announce new media data that should be sent to watchers
#[derive(Debug)]
pub struct RtmpEndpointMediaMessage {
    /// Stream key whose watchers should receive this media
    pub stream_key: Arc<String>,
    /// The actual media payload to fan out
    pub data: RtmpEndpointMediaData,
}
/// New media data that should be sent to watchers
#[derive(Debug, Clone, PartialEq)]
pub enum RtmpEndpointMediaData {
    /// Updated RTMP stream metadata to relay to watchers
    NewStreamMetaData {
        metadata: StreamMetadata,
    },
    /// A video packet to relay to watchers
    NewVideoData {
        /// True when this packet starts a keyframe
        is_keyframe: bool,
        /// True when this packet carries decoder configuration (e.g. an AVC
        /// sequence header) rather than a regular frame
        is_sequence_header: bool,
        data: Bytes,
        timestamp: RtmpTimestamp,
        /// Offset between composition (presentation) time and the decode timestamp
        composition_time_offset: i32,
    },
    /// An audio packet to relay to watchers
    NewAudioData {
        /// True when this packet carries decoder configuration rather than audio samples
        is_sequence_header: bool,
        data: Bytes,
        timestamp: RtmpTimestamp,
    },
}
/// Failures that can occur when converting a `MediaNotificationContent` value to
/// `RtmpEndpointMediaData`.
#[derive(thiserror::Error, Debug)]
pub enum MediaDataConversionFailure {
    /// The notification is a stream lifecycle event (no media payload), so there is
    /// nothing to convert
    #[error("MediaNotificationContent variant cannot be converted")]
    IncompatibleType,
    /// The media payload's codec/payload type is not one the RTMP endpoint can carry
    #[error("The media payload type of '{0}' is not supported")]
    UnsupportedPayloadType(Arc<String>),
}
impl RtmpEndpointMediaData {
    /// Converts a workflow `MediaNotificationContent` value into its RTMP-specific
    /// `RtmpEndpointMediaData` representation.
    ///
    /// * `Metadata` payloads become RTMP stream metadata.
    /// * Raw AAC audio payloads become RTMP audio packets.
    /// * H264/AVC video payloads become RTMP video packets; the keyframe flag and
    ///   pts offset are read from the payload's metadata entries, defaulting to
    ///   `false`/`0` when absent.
    ///
    /// # Errors
    ///
    /// Returns `IncompatibleType` for stream lifecycle notifications that carry no
    /// media, and `UnsupportedPayloadType` for any other media payload type.
    pub fn from_media_notification_content(
        content: MediaNotificationContent,
        is_keyframe_metadata_key: MetadataKey,
        pts_offset_metadata_key: MetadataKey,
    ) -> Result<Self, MediaDataConversionFailure> {
        match content {
            // Lifecycle notifications carry no media and cannot be converted
            MediaNotificationContent::StreamDisconnected
            | MediaNotificationContent::NewIncomingStream { .. } => {
                Err(MediaDataConversionFailure::IncompatibleType)
            }
            MediaNotificationContent::Metadata { data } => {
                Ok(RtmpEndpointMediaData::NewStreamMetaData {
                    metadata: hash_map_to_stream_metadata(&data),
                })
            }
            MediaNotificationContent::MediaPayload {
                payload_type,
                media_type: _,
                is_required_for_decoding,
                timestamp,
                data,
                metadata,
            } => match payload_type {
                x if x == *AUDIO_CODEC_AAC_RAW => Ok(RtmpEndpointMediaData::NewAudioData {
                    data,
                    is_sequence_header: is_required_for_decoding,
                    // RTMP timestamps are 32-bit milliseconds
                    timestamp: RtmpTimestamp::new(timestamp.as_millis() as u32),
                }),
                x if x == *VIDEO_CODEC_H264_AVC => {
                    // First metadata entry with the keyframe key and a boolean value
                    // wins; treat the frame as a non-keyframe when none is present.
                    let is_keyframe = metadata
                        .iter()
                        .find_map(|m| {
                            if m.key() == is_keyframe_metadata_key {
                                match m.value() {
                                    MetadataValue::Bool(val) => Some(val),
                                    _ => None,
                                }
                            } else {
                                None
                            }
                        })
                        .unwrap_or_default();

                    // Same lookup strategy for the pts offset, defaulting to 0
                    let pts_offset = metadata
                        .iter()
                        .find_map(|m| {
                            if m.key() == pts_offset_metadata_key {
                                match m.value() {
                                    MetadataValue::I32(val) => Some(val),
                                    _ => None,
                                }
                            } else {
                                None
                            }
                        })
                        .unwrap_or_default();

                    Ok(RtmpEndpointMediaData::NewVideoData {
                        data,
                        is_sequence_header: is_required_for_decoding,
                        is_keyframe,
                        composition_time_offset: pts_offset,
                        timestamp: RtmpTimestamp::new(timestamp.as_millis() as u32),
                    })
                }
                other => Err(MediaDataConversionFailure::UnsupportedPayloadType(other)),
            },
        }
    }
}
| rust | MIT | 3d732616da3bc8976dbcd5f859758ed3f06dc38f | 2026-01-04T20:17:23.887411Z | false |
KallDrexx/mmids | https://github.com/KallDrexx/mmids/blob/3d732616da3bc8976dbcd5f859758ed3f06dc38f/mmids-rtmp/src/rtmp_server/actor/actor_types.rs | mmids-rtmp/src/rtmp_server/actor/actor_types.rs | use super::connection_handler::{ConnectionRequest, ConnectionResponse};
use super::{RtmpEndpointPublisherMessage, RtmpEndpointRequest, StreamKeyRegistration};
use crate::rtmp_server::{
IpRestriction, RtmpEndpointMediaData, RtmpEndpointMediaMessage,
RtmpEndpointWatcherNotification, ValidationResponse,
};
use bytes::Bytes;
use mmids_core::net::tcp::TcpSocketResponse;
use mmids_core::net::ConnectionId;
use mmids_core::StreamId;
use std::collections::HashMap;
use std::net::SocketAddr;
use std::sync::Arc;
use tokio::sync::mpsc::{UnboundedReceiver, UnboundedSender};
/// Internal mailbox messages that drive the RTMP server endpoint actor's event loop.
pub enum FutureResult {
    /// A registrant sent a request to the endpoint
    EndpointRequestReceived(RtmpEndpointRequest),
    /// The socket manager responded for a specific port
    SocketResponseReceived {
        port: u16,
        response: TcpSocketResponse,
    },
    /// A publisher registrant's channel closed (or it was cancelled)
    PublishingRegistrantGone {
        port: u16,
        app: Arc<String>,
        stream_key: StreamKeyRegistration,
    },
    /// A per-connection handler sent a request up to the endpoint
    ConnectionHandlerRequestReceived {
        port: u16,
        connection_id: ConnectionId,
        request: ConnectionRequest,
    },
    /// A per-connection handler's channel closed
    ConnectionHandlerGone {
        port: u16,
        connection_id: ConnectionId,
    },
    /// A watcher registrant's channel closed (or it was cancelled)
    WatcherRegistrantGone {
        port: u16,
        app: Arc<String>,
        stream_key: StreamKeyRegistration,
    },
    /// A watcher registrant sent media to fan out to RTMP watchers
    WatcherMediaDataReceived {
        data: RtmpEndpointMediaData,
        port: u16,
        app: Arc<String>,
        stream_key: Arc<String>,
    },
    /// The response sender for a port closed unexpectedly
    PortGone {
        port: u16,
    },
    /// All endpoint request senders have been dropped; nothing more can arrive
    NoMoreEndpointRequesters,
    /// The socket manager itself shut down
    SocketManagerClosed,
    /// A registrant answered a pending publish/watch validation request
    /// (port, connection, response)
    ValidationApprovalResponseReceived(u16, ConnectionId, ValidationResponse),
}
/// A system registered to receive publisher events for a port/app/stream-key combo.
pub struct PublishingRegistrant {
    /// Channel the endpoint uses to notify the registrant of publisher events
    pub response_channel: UnboundedSender<RtmpEndpointPublisherMessage>,
    /// If set, the stream id to report for incoming publishes
    pub stream_id: Option<StreamId>,
    /// IP-based access restrictions applied to publishing connections
    pub ip_restrictions: IpRestriction,
    /// When true, each publish attempt must be approved by the registrant first
    pub requires_registrant_approval: bool,
    /// Dropping the sender side of this receiver cancels the registration
    pub cancellation_notifier: UnboundedReceiver<()>,
}
/// A system registered to receive watcher events for a port/app/stream-key combo.
pub struct WatcherRegistrant {
    /// Channel the endpoint uses to notify the registrant of watcher events
    pub response_channel: UnboundedSender<RtmpEndpointWatcherNotification>,
    /// IP-based access restrictions applied to watching connections
    pub ip_restrictions: IpRestriction,
    /// When true, each watch attempt must be approved by the registrant first
    pub requires_registrant_approval: bool,
    /// Dropping the sender side of this receiver cancels the registration
    pub cancellation_notifier: UnboundedReceiver<()>,
}
/// Cached video decoder-configuration packet (sequence header) for a stream key.
pub struct VideoSequenceHeader {
    pub data: Bytes,
}
/// Cached audio decoder-configuration packet (sequence header) for a stream key.
pub struct AudioSequenceHeader {
    pub data: Bytes,
}
/// Per-watcher state: the channel media is pushed through to that connection.
pub struct WatcherDetails {
    pub media_sender: UnboundedSender<RtmpEndpointMediaData>,
}
/// Live connection state for a single stream key on an app.
pub struct StreamKeyConnections {
    /// At most one publisher may be active per stream key
    pub publisher: Option<ConnectionId>,
    /// All connections currently watching this stream key
    pub watchers: HashMap<ConnectionId, WatcherDetails>,
    /// Most recent video sequence header seen, kept so it can be re-sent as needed
    pub latest_video_sequence_header: Option<VideoSequenceHeader>,
    /// Most recent audio sequence header seen, kept so it can be re-sent as needed
    pub latest_audio_sequence_header: Option<AudioSequenceHeader>,
}
/// All registration and connection state for a single RTMP application name.
pub struct RtmpAppMapping {
    pub publisher_registrants: HashMap<StreamKeyRegistration, PublishingRegistrant>,
    pub watcher_registrants: HashMap<StreamKeyRegistration, WatcherRegistrant>,
    pub active_stream_keys: HashMap<Arc<String>, StreamKeyConnections>,
}
/// Lifecycle of a TCP port managed by the endpoint: a port is `Requested` until the
/// socket manager confirms it, then `Open`.
#[derive(PartialEq, Eq)]
pub enum PortStatus {
    Requested,
    Open,
}
/// The RTMP server endpoint actor: owns all port/app/stream-key state and a sender
/// back into its own mailbox for internally-spawned notifications.
pub struct RtmpServerEndpointActor {
    pub internal_actor: UnboundedSender<FutureResult>,
    pub ports: HashMap<u16, PortMapping>,
}
/// The two kinds of registrations a system can make against a port/app/stream key.
pub enum ListenerRequest {
    /// Register to receive publisher events
    Publisher {
        channel: UnboundedSender<RtmpEndpointPublisherMessage>,
        stream_id: Option<StreamId>,
        requires_registrant_approval: bool,
    },
    /// Register to receive watcher events and supply media to fan out
    Watcher {
        notification_channel: UnboundedSender<RtmpEndpointWatcherNotification>,
        media_channel: UnboundedReceiver<RtmpEndpointMediaMessage>,
        requires_registrant_approval: bool,
    },
}
/// State machine for a single RTMP connection as tracked by the endpoint.
pub enum ConnectionState {
    /// Connected but has not yet requested to publish or watch
    None,
    /// Asked to publish; awaiting registrant validation
    WaitingForPublishValidation {
        rtmp_app: Arc<String>,
        stream_key: Arc<String>,
    },
    /// Asked to watch; awaiting registrant validation
    WaitingForWatchValidation {
        rtmp_app: Arc<String>,
        stream_key: Arc<String>,
    },
    /// Actively publishing to the given app/stream key
    Publishing {
        rtmp_app: Arc<String>,
        stream_key: Arc<String>,
    },
    /// Actively watching the given app/stream key
    Watching {
        rtmp_app: Arc<String>,
        stream_key: Arc<String>,
    },
}
/// Endpoint-side record of a single TCP connection.
pub struct Connection {
    /// Channel used to send responses down to the connection's handler task
    pub response_channel: UnboundedSender<ConnectionResponse>,
    pub state: ConnectionState,
    pub socket_address: SocketAddr,
    /// Set once a registrant has approved this connection's publish/watch request
    pub received_registrant_approval: bool,
}
/// All endpoint state associated with one listening TCP port.
pub struct PortMapping {
    pub rtmp_applications: HashMap<Arc<String>, RtmpAppMapping>,
    pub status: PortStatus,
    pub connections: HashMap<ConnectionId, Connection>,
    /// Whether the port was opened with TLS; later registrations must match this
    pub tls: bool,
}
| rust | MIT | 3d732616da3bc8976dbcd5f859758ed3f06dc38f | 2026-01-04T20:17:23.887411Z | false |
KallDrexx/mmids | https://github.com/KallDrexx/mmids/blob/3d732616da3bc8976dbcd5f859758ed3f06dc38f/mmids-rtmp/src/rtmp_server/actor/mod.rs | mmids-rtmp/src/rtmp_server/actor/mod.rs | pub mod actor_types;
mod connection_handler;
#[cfg(test)]
mod tests;
use super::{
RtmpEndpointMediaData, RtmpEndpointPublisherMessage, RtmpEndpointRequest, StreamKeyRegistration,
};
use crate::rtmp_server::actor::connection_handler::ConnectionResponse;
use crate::rtmp_server::actor::internal_futures::notify_on_validation;
use crate::rtmp_server::{
IpRestriction, RegistrationType, RtmpEndpointWatcherNotification, ValidationResponse,
};
use actor_types::*;
use connection_handler::{ConnectionRequest, RtmpServerConnectionHandler};
use mmids_core::actor_utils::{
notify_on_future_completion, notify_on_unbounded_closed, notify_on_unbounded_recv,
};
use mmids_core::net::tcp::{TcpSocketRequest, TcpSocketResponse};
use mmids_core::net::ConnectionId;
use mmids_core::reactors::ReactorWorkflowUpdate;
use mmids_core::StreamId;
use rml_rtmp::time::RtmpTimestamp;
use std::collections::HashMap;
use std::net::SocketAddr;
use std::sync::Arc;
use tokio::sync::mpsc::{unbounded_channel, UnboundedReceiver, UnboundedSender};
use tokio::sync::oneshot::channel;
use tracing::{error, info, instrument, warn};
use uuid::Uuid;
/// Bundled arguments for `register_listener`, covering both publisher and watcher
/// registrations on a port/app/stream-key combination.
struct RegisterListenerParams {
    port: u16,
    rtmp_app: Arc<String>,
    stream_key: StreamKeyRegistration,
    /// Channel to the socket manager, used to open the port on first registration
    socket_sender: UnboundedSender<TcpSocketRequest>,
    /// Whether this is a publisher or watcher registration, with its channels
    listener: ListenerRequest,
    ip_restrictions: IpRestriction,
    use_tls: bool,
}
impl RtmpServerEndpointActor {
    /// Runs the endpoint's event loop until shutdown.
    ///
    /// Consumes mailbox messages one at a time and dispatches each to the matching
    /// handler. The loop exits when no endpoint requesters remain, when the socket
    /// manager closes, or when the mailbox itself drains.
    #[instrument(name = "RtmpServer Endpoint Execution", skip_all)]
    pub async fn run(
        mut self,
        mut actor_receiver: UnboundedReceiver<FutureResult>,
        socket_request_sender: UnboundedSender<TcpSocketRequest>,
    ) {
        info!("Starting RTMP server endpoint");
        // Get notified if the socket manager goes away so we can shut down too
        notify_on_unbounded_closed(
            socket_request_sender.clone(),
            self.internal_actor.clone(),
            || FutureResult::SocketManagerClosed,
        );
        while let Some(result) = actor_receiver.recv().await {
            match result {
                FutureResult::NoMoreEndpointRequesters => {
                    info!("No endpoint requesters exist");
                    break;
                }
                FutureResult::SocketManagerClosed => {
                    info!("Socket manager closed");
                    break;
                }
                FutureResult::EndpointRequestReceived(request) => {
                    self.handle_endpoint_request(request, socket_request_sender.clone());
                }
                FutureResult::PublishingRegistrantGone {
                    port,
                    app,
                    stream_key,
                } => {
                    self.remove_publish_registration(port, app, stream_key);
                }
                FutureResult::WatcherRegistrantGone {
                    port,
                    app,
                    stream_key,
                } => {
                    self.remove_watcher_registration(port, app, stream_key);
                }
                FutureResult::SocketResponseReceived { port, response } => {
                    self.handle_socket_response(port, response);
                }
                FutureResult::ConnectionHandlerRequestReceived {
                    port,
                    connection_id,
                    request,
                } => {
                    self.handle_connection_handler_request(port, connection_id, request);
                }
                FutureResult::ConnectionHandlerGone {
                    port,
                    connection_id,
                } => {
                    // Port may already be gone; nothing to clean up in that case
                    let port_map = match self.ports.get_mut(&port) {
                        Some(x) => x,
                        None => continue,
                    };
                    clean_disconnected_connection(connection_id, port_map);
                }
                FutureResult::WatcherMediaDataReceived {
                    port,
                    app,
                    stream_key,
                    data,
                } => {
                    self.handle_watcher_media_received(port, app, stream_key, data);
                }
                FutureResult::ValidationApprovalResponseReceived(port, connection_id, response) => {
                    self.handle_validation_response(port, connection_id, response);
                }
                FutureResult::PortGone { port } => {
                    if self.ports.remove(&port).is_some() {
                        warn!("Port {port}'s response sender suddenly closed");
                    }
                }
            }
        }
        info!("Rtmp server endpoint closing");
    }
#[instrument(skip(self))]
fn handle_validation_response(
&mut self,
port: u16,
connection_id: ConnectionId,
response: ValidationResponse,
) {
let port_map = match self.ports.get_mut(&port) {
Some(ports) => ports,
None => {
return;
} // Port has been closed prior to this response
};
let connection = match port_map.connections.get_mut(&connection_id) {
Some(connection) => connection,
None => {
return;
} // Disconnected before this response came in
};
match response {
ValidationResponse::Approve {
reactor_update_channel,
} => {
match &connection.state {
ConnectionState::None => {
warn!("Unexpected approval for connection in None state");
}
ConnectionState::Watching { .. } => {
warn!("Unexpected approval for connection in the Watching state");
}
ConnectionState::Publishing { .. } => {
warn!("Unexpected approval for connection in the publishing state");
}
ConnectionState::WaitingForPublishValidation {
rtmp_app,
stream_key,
} => {
info!(
rtmp_app = %rtmp_app,
stream_key = %stream_key,
"Request to publish was approved"
);
// Redefine as clones due to borrow checker
let rtmp_app = rtmp_app.clone();
let stream_key = stream_key.clone();
connection.received_registrant_approval = true;
handle_connection_request_publish(
&connection_id,
port_map,
port,
rtmp_app,
stream_key,
Some(reactor_update_channel),
self.internal_actor.clone(),
);
}
ConnectionState::WaitingForWatchValidation {
rtmp_app,
stream_key,
} => {
info!(
rtmp_app = %rtmp_app,
stream_key = %stream_key,
"Request to watch was approved",
);
// Redefine with clones due to borrow checker
let rtmp_app = rtmp_app.clone();
let stream_key = stream_key.clone();
connection.received_registrant_approval = true;
handle_connection_request_watch(
connection_id,
port_map,
port,
rtmp_app,
stream_key,
Some(reactor_update_channel),
self.internal_actor.clone(),
);
}
}
}
ValidationResponse::Reject => {
match &connection.state {
ConnectionState::None => {
warn!("Unexpected approval for connection in None state");
}
ConnectionState::Watching { .. } => {
warn!("Unexpected approval for connection in the Watching state");
}
ConnectionState::Publishing { .. } => {
warn!("Unexpected approval for connection in the publishing state");
}
ConnectionState::WaitingForPublishValidation {
rtmp_app,
stream_key,
} => {
info!(
rtmp_app = %rtmp_app,
stream_key = %stream_key,
"Request to publish was rejected"
);
}
ConnectionState::WaitingForWatchValidation {
rtmp_app,
stream_key,
} => {
info!(
rtmp_app = %rtmp_app,
stream_key = %stream_key,
"Request to watch was rejected"
);
}
}
let _ = connection
.response_channel
.send(ConnectionResponse::RequestRejected);
}
}
}
    /// Fans out media from a watcher registrant to every RTMP connection watching
    /// the given port/app/stream key.
    ///
    /// Video and audio sequence headers are cached on the stream key as they pass
    /// through — presumably so newly-joining watchers can be primed with the latest
    /// decoder configuration (confirm in the watch-approval path). Unknown ports or
    /// apps are silently ignored.
    fn handle_watcher_media_received(
        &mut self,
        port: u16,
        app: Arc<String>,
        stream_key: Arc<String>,
        data: RtmpEndpointMediaData,
    ) {
        let port_map = match self.ports.get_mut(&port) {
            Some(x) => x,
            None => return,
        };
        let app_map = match port_map.rtmp_applications.get_mut(&app) {
            Some(x) => x,
            None => return,
        };
        // Create the stream key entry on demand so sequence headers are cached even
        // before any watcher connects
        let key_details =
            app_map
                .active_stream_keys
                .entry(stream_key)
                .or_insert(StreamKeyConnections {
                    watchers: HashMap::new(),
                    publisher: None,
                    latest_video_sequence_header: None,
                    latest_audio_sequence_header: None,
                });
        match &data {
            RtmpEndpointMediaData::NewVideoData {
                data,
                is_sequence_header,
                ..
            } => {
                if *is_sequence_header {
                    key_details.latest_video_sequence_header =
                        Some(VideoSequenceHeader { data: data.clone() });
                }
            }
            RtmpEndpointMediaData::NewAudioData {
                data,
                is_sequence_header,
                ..
            } => {
                if *is_sequence_header {
                    key_details.latest_audio_sequence_header =
                        Some(AudioSequenceHeader { data: data.clone() });
                }
            }
            _ => (),
        };
        // Send failures (disconnected watchers) are ignored; cleanup happens via
        // the connection-gone path
        for watcher_details in key_details.watchers.values() {
            let _ = watcher_details.media_sender.send(data.clone());
        }
    }
    /// Dispatches a request from an endpoint client: publisher/watcher listener
    /// registrations are normalized into `RegisterListenerParams` and forwarded to
    /// `register_listener`; removal requests tear down the matching registration.
    fn handle_endpoint_request(
        &mut self,
        request: RtmpEndpointRequest,
        socket_request_sender: UnboundedSender<TcpSocketRequest>,
    ) {
        match request {
            RtmpEndpointRequest::ListenForPublishers {
                port,
                rtmp_app,
                rtmp_stream_key,
                message_channel,
                stream_id,
                ip_restrictions: ip_restriction,
                use_tls,
                requires_registrant_approval,
            } => {
                self.register_listener(RegisterListenerParams {
                    port,
                    rtmp_app,
                    stream_key: rtmp_stream_key,
                    socket_sender: socket_request_sender,
                    listener: ListenerRequest::Publisher {
                        channel: message_channel,
                        stream_id,
                        requires_registrant_approval,
                    },
                    ip_restrictions: ip_restriction,
                    use_tls,
                });
            }
            RtmpEndpointRequest::ListenForWatchers {
                port,
                rtmp_app,
                rtmp_stream_key,
                media_channel,
                notification_channel,
                ip_restrictions,
                use_tls,
                requires_registrant_approval,
            } => {
                self.register_listener(RegisterListenerParams {
                    port,
                    rtmp_app,
                    stream_key: rtmp_stream_key,
                    socket_sender: socket_request_sender,
                    listener: ListenerRequest::Watcher {
                        notification_channel,
                        media_channel,
                        requires_registrant_approval,
                    },
                    ip_restrictions,
                    use_tls,
                });
            }
            RtmpEndpointRequest::RemoveRegistration {
                registration_type,
                port,
                rtmp_app,
                rtmp_stream_key,
            } => {
                info!(
                    port = %port,
                    rtmp_app = %rtmp_app,
                    stream_key = ?rtmp_stream_key,
                    registration_type = ?registration_type,
                    "{:?} Registration removal requested for port {}, app {}, and stream key {:?}",
                    registration_type, port, rtmp_app, rtmp_stream_key
                );
                match registration_type {
                    RegistrationType::Publisher => {
                        self.remove_publish_registration(port, rtmp_app, rtmp_stream_key)
                    }
                    RegistrationType::Watcher => {
                        self.remove_watcher_registration(port, rtmp_app, rtmp_stream_key)
                    }
                }
            }
        }
    }
    /// Registers a publisher or watcher listener for a port/app/stream-key combination.
    ///
    /// The TCP port is requested from the socket manager the first time anything
    /// registers on it. A registration fails immediately if its TLS setting conflicts
    /// with how the port was first mapped, or if it overlaps an existing registrant
    /// (an `Any` stream key conflicts with every exact key and vice versa). Successful
    /// registrants are only notified right away when the port is already open;
    /// otherwise the success notification is deferred until the socket manager
    /// confirms the port (see `handle_socket_response`).
    #[instrument(
        skip(self, params),
        fields(
            port = %params.port, rtmp_app = %params.rtmp_app, stream_key = ?params.stream_key,
            ip_restrictions = ?params.ip_restrictions, use_tls = %params.use_tls,
        )
    )]
    fn register_listener(&mut self, params: RegisterListenerParams) {
        let mut new_port_requested = false;
        let port_map = self.ports.entry(params.port).or_insert_with(|| {
            let port_map = PortMapping {
                rtmp_applications: HashMap::new(),
                status: PortStatus::Requested,
                connections: HashMap::new(),
                tls: params.use_tls,
            };
            // Flag set inside the closure so we only ask the socket manager to open
            // the port when this registration actually created the mapping
            new_port_requested = true;
            port_map
        });
        if port_map.tls != params.use_tls {
            error!(
                "Request to open port {} with tls set to {} failed, as the port is already mapped \
                with tls set to {}",
                params.port, params.use_tls, port_map.tls
            );
            match params.listener {
                ListenerRequest::Publisher { channel, .. } => {
                    let _ = channel.send(RtmpEndpointPublisherMessage::PublisherRegistrationFailed);
                }
                ListenerRequest::Watcher {
                    notification_channel,
                    ..
                } => {
                    let _ = notification_channel
                        .send(RtmpEndpointWatcherNotification::WatcherRegistrationFailed);
                }
            }
            return;
        }
        if new_port_requested {
            let (sender, receiver) = unbounded_channel();
            let request = TcpSocketRequest::OpenPort {
                port: params.port,
                response_channel: sender,
                use_tls: params.use_tls,
            };
            let _ = params.socket_sender.send(request);
            // Route all socket manager responses for this port back into our mailbox
            notify_on_unbounded_recv(
                receiver,
                self.internal_actor.clone(),
                move |response| FutureResult::SocketResponseReceived {
                    port: params.port,
                    response,
                },
                move || FutureResult::PortGone { port: params.port },
            );
        }
        let app_map = port_map
            .rtmp_applications
            .entry(params.rtmp_app.clone())
            .or_insert(RtmpAppMapping {
                publisher_registrants: HashMap::new(),
                watcher_registrants: HashMap::new(),
                active_stream_keys: HashMap::new(),
            });
        match params.listener {
            ListenerRequest::Publisher {
                channel,
                stream_id,
                requires_registrant_approval,
            } => {
                // Overlap check: `Any` conflicts with any existing registrant; an
                // exact key conflicts with `Any` or the same exact key
                let can_be_added = match &params.stream_key {
                    StreamKeyRegistration::Any => {
                        if !app_map.publisher_registrants.is_empty() {
                            warn!("Rtmp server publish request registration failed for port {}, app '{}', all stream keys': \
                                Another system is registered for at least one stream key on this port and app", params.port, params.rtmp_app);
                            false
                        } else {
                            true
                        }
                    }
                    StreamKeyRegistration::Exact(key) => {
                        if app_map
                            .publisher_registrants
                            .contains_key(&StreamKeyRegistration::Any)
                        {
                            warn!("Rtmp server publish request registration failed for port {}, app '{}', stream key '{}': \
                                Another system is registered for all stream keys on this port/app", params.port, params.rtmp_app, key);
                            false
                        } else if app_map
                            .publisher_registrants
                            .contains_key(&StreamKeyRegistration::Exact(key.clone()))
                        {
                            warn!("Rtmp server publish request registration failed for port {}, app '{}', stream key '{}': \
                                Another system is registered for this port/app/stream key combo", params.port, params.rtmp_app, key);
                            false
                        } else {
                            true
                        }
                    }
                };
                if !can_be_added {
                    let _ =
                        channel.send(RtmpEndpointPublisherMessage::PublisherRegistrationFailed {});
                    return;
                }
                let (cancel_sender, cancel_receiver) = unbounded_channel();
                app_map.publisher_registrants.insert(
                    params.stream_key.clone(),
                    PublishingRegistrant {
                        response_channel: channel.clone(),
                        stream_id,
                        ip_restrictions: params.ip_restrictions,
                        requires_registrant_approval,
                        cancellation_notifier: cancel_receiver,
                    },
                );
                {
                    // Tear the registration down when either the registrant's channel
                    // closes or the cancellation notifier is dropped
                    let channel = channel.clone();
                    notify_on_future_completion(
                        async move {
                            tokio::select! {
                                _ = channel.closed() => (),
                                _ = cancel_sender.closed() => (),
                            }
                        },
                        self.internal_actor.clone(),
                        move |_| FutureResult::PublishingRegistrantGone {
                            port: params.port,
                            app: params.rtmp_app,
                            stream_key: params.stream_key,
                        },
                    )
                }
                // If the port isn't in a listening mode, we don't want to claim that
                // registration was successful yet
                if port_map.status == PortStatus::Open {
                    let _ = channel
                        .send(RtmpEndpointPublisherMessage::PublisherRegistrationSuccessful {});
                }
            }
            ListenerRequest::Watcher {
                media_channel,
                notification_channel,
                requires_registrant_approval,
            } => {
                // Same overlap rules as for publishers, applied to watcher registrants
                let can_be_added = match &params.stream_key {
                    StreamKeyRegistration::Any => {
                        if !app_map.watcher_registrants.is_empty() {
                            warn!("Rtmp server watcher registration failed for port {}, app '{}', all stream keys': \
                                Another system is registered for at least one stream key on this port and app", params.port, params.rtmp_app);
                            false
                        } else {
                            true
                        }
                    }
                    StreamKeyRegistration::Exact(key) => {
                        if app_map
                            .watcher_registrants
                            .contains_key(&StreamKeyRegistration::Any)
                        {
                            warn!("Rtmp server watcher registration failed for port {}, app '{}', stream key '{}': \
                                Another system is registered for all stream keys on this port/app", params.port, params.rtmp_app, key);
                            false
                        } else if app_map
                            .watcher_registrants
                            .contains_key(&StreamKeyRegistration::Exact(key.clone()))
                        {
                            warn!("Rtmp server watcher registration failed for port {}, app '{}', stream key '{}': \
                                Another system is registered for this port/app/stream key combo", params.port, params.rtmp_app, key);
                            false
                        } else {
                            true
                        }
                    }
                };
                if !can_be_added {
                    let _ = notification_channel
                        .send(RtmpEndpointWatcherNotification::WatcherRegistrationFailed);
                    return;
                }
                let (cancel_sender, cancel_receiver) = unbounded_channel();
                app_map.watcher_registrants.insert(
                    params.stream_key.clone(),
                    WatcherRegistrant {
                        response_channel: notification_channel.clone(),
                        ip_restrictions: params.ip_restrictions,
                        requires_registrant_approval,
                        cancellation_notifier: cancel_receiver,
                    },
                );
                {
                    // Tear the registration down when either the registrant's channel
                    // closes or the cancellation notifier is dropped
                    let notification_channel = notification_channel.clone();
                    let app = params.rtmp_app.clone();
                    let stream_key = params.stream_key.clone();
                    notify_on_future_completion(
                        async move {
                            tokio::select! {
                                _ = notification_channel.closed() => (),
                                _ = cancel_sender.closed() => (),
                            }
                        },
                        self.internal_actor.clone(),
                        move |_| FutureResult::WatcherRegistrantGone {
                            port: params.port,
                            app,
                            stream_key,
                        },
                    );
                }
                // Forward media from the registrant into the endpoint's mailbox; the
                // media channel closing also counts as the registrant going away
                let success_app_name = params.rtmp_app.clone();
                let closed_app_name = params.rtmp_app;
                notify_on_unbounded_recv(
                    media_channel,
                    self.internal_actor.clone(),
                    move |msg| FutureResult::WatcherMediaDataReceived {
                        port: params.port,
                        app: success_app_name.clone(),
                        stream_key: msg.stream_key,
                        data: msg.data,
                    },
                    move || FutureResult::WatcherRegistrantGone {
                        port: params.port,
                        app: closed_app_name,
                        stream_key: params.stream_key,
                    },
                );
                // If the port isn't open yet, we don't want to claim registration was successful yet
                if port_map.status == PortStatus::Open {
                    let _ = notification_channel
                        .send(RtmpEndpointWatcherNotification::WatcherRegistrationSuccessful);
                }
            }
        }
    }
    /// Handles a socket manager response for a port this endpoint requested.
    ///
    /// * Denied/forcibly-closed ports notify every pending registrant of failure and
    ///   remove the port mapping.
    /// * An accepted port flips to `Open` and sends the deferred success
    ///   notifications to all registrants.
    /// * New connections get a dedicated `RtmpServerConnectionHandler` task wired
    ///   into this actor's mailbox; disconnections are cleaned up in place.
    #[instrument(skip(self))]
    fn handle_socket_response(&mut self, port: u16, response: TcpSocketResponse) {
        let mut remove_port = false;
        {
            let port_map = match self.ports.get_mut(&port) {
                Some(x) => x,
                None => {
                    error!("Received socket response for port {} but that port has not been registered", port);
                    return;
                }
            };
            match response {
                TcpSocketResponse::RequestDenied { reason } => {
                    warn!("Port {} could not be opened: {:?}", port, reason);
                    for app_map in port_map.rtmp_applications.values() {
                        for publisher in app_map.publisher_registrants.values() {
                            let _ = publisher
                                .response_channel
                                .send(RtmpEndpointPublisherMessage::PublisherRegistrationFailed {});
                        }
                        for watcher in app_map.watcher_registrants.values() {
                            let _ = watcher
                                .response_channel
                                .send(RtmpEndpointWatcherNotification::WatcherRegistrationFailed);
                        }
                    }
                    remove_port = true;
                }
                TcpSocketResponse::PortForciblyClosed { port: _ } => {
                    warn!("Port {} closed", port);
                    remove_port = true;
                }
                TcpSocketResponse::RequestAccepted {} => {
                    info!("Port {} successfully opened", port);
                    // Since the port was successfully opened, any pending registrants need to be
                    // informed that their registration has now been successful
                    for app_map in port_map.rtmp_applications.values() {
                        for publisher in app_map.publisher_registrants.values() {
                            let _ = publisher.response_channel.send(
                                RtmpEndpointPublisherMessage::PublisherRegistrationSuccessful {},
                            );
                        }
                        for watcher in app_map.watcher_registrants.values() {
                            let _ = watcher.response_channel.send(
                                RtmpEndpointWatcherNotification::WatcherRegistrationSuccessful,
                            );
                        }
                    }
                    port_map.status = PortStatus::Open;
                }
                TcpSocketResponse::NewConnection {
                    port: _,
                    connection_id,
                    outgoing_bytes,
                    incoming_bytes,
                    socket_address,
                } => {
                    // Three channels per connection: endpoint->handler responses,
                    // handler->endpoint requests, and the handler's own mailbox
                    let (request_sender, request_receiver) = unbounded_channel();
                    let (response_sender, response_receiver) = unbounded_channel();
                    let (actor_sender, actor_receiver) = unbounded_channel();
                    let handler = RtmpServerConnectionHandler::new(
                        connection_id.clone(),
                        outgoing_bytes,
                        request_sender,
                        actor_sender,
                    );
                    tokio::spawn(handler.run_async(
                        response_receiver,
                        incoming_bytes,
                        actor_receiver,
                    ));
                    port_map.connections.insert(
                        connection_id.clone(),
                        Connection {
                            response_channel: response_sender,
                            state: ConnectionState::None,
                            socket_address,
                            received_registrant_approval: false,
                        },
                    );
                    // Feed the handler's requests back into this actor's mailbox
                    let success_conn_id = connection_id.clone();
                    notify_on_unbounded_recv(
                        request_receiver,
                        self.internal_actor.clone(),
                        move |request| FutureResult::ConnectionHandlerRequestReceived {
                            request,
                            port,
                            connection_id: success_conn_id.clone(),
                        },
                        move || FutureResult::ConnectionHandlerGone {
                            port,
                            connection_id,
                        },
                    );
                }
                TcpSocketResponse::Disconnection { connection_id } => {
                    // Clean this connection up
                    clean_disconnected_connection(connection_id, port_map);
                }
            }
        }
        if remove_port {
            info!("Port {port} removed");
            self.ports.remove(&port);
        }
    }
    /// Dispatches a request sent up from a per-connection handler task: app
    /// connection, publish/watch requests, and publish/playback completion. Requests
    /// for an unknown port are logged and dropped.
    #[instrument(skip(self))]
    fn handle_connection_handler_request(
        &mut self,
        port: u16,
        connection_id: ConnectionId,
        request: ConnectionRequest,
    ) {
        let port_map = match self.ports.get_mut(&port) {
            Some(x) => x,
            None => {
                error!(
                    "Connection handler for connection {:?} sent {:?} on port {}, but that \
                port isn't managed yet!",
                    connection_id, request, port
                );
                return;
            }
        };
        match request {
            ConnectionRequest::RequestConnectToApp { rtmp_app } => {
                handle_connection_request_connect_to_app(&connection_id, port_map, port, rtmp_app);
            }
            ConnectionRequest::RequestPublish {
                rtmp_app,
                stream_key,
            } => {
                // No reactor channel yet — approval (if required) comes later via
                // handle_validation_response
                handle_connection_request_publish(
                    &connection_id,
                    port_map,
                    port,
                    rtmp_app,
                    stream_key,
                    None,
                    self.internal_actor.clone(),
                );
            }
            ConnectionRequest::RequestWatch {
                rtmp_app,
                stream_key,
            } => {
                handle_connection_request_watch(
                    connection_id,
                    port_map,
                    port,
                    rtmp_app,
                    stream_key,
                    None,
                    self.internal_actor.clone(),
                );
            }
            ConnectionRequest::PublishFinished => {
                handle_connection_stop_publish(connection_id, port_map);
            }
            ConnectionRequest::PlaybackFinished => {
                handle_connection_stop_watch(connection_id, port_map);
            }
        }
    }
fn remove_publish_registration(
&mut self,
port: u16,
app: Arc<String>,
stream_key: StreamKeyRegistration,
) {
let port_map = match self.ports.get_mut(&port) {
Some(x) => x,
None => return,
};
| rust | MIT | 3d732616da3bc8976dbcd5f859758ed3f06dc38f | 2026-01-04T20:17:23.887411Z | true |
KallDrexx/mmids | https://github.com/KallDrexx/mmids/blob/3d732616da3bc8976dbcd5f859758ed3f06dc38f/mmids-rtmp/src/rtmp_server/actor/connection_handler.rs | mmids-rtmp/src/rtmp_server/actor/connection_handler.rs | use super::RtmpEndpointPublisherMessage;
use crate::rtmp_server::RtmpEndpointMediaData;
use anyhow::{anyhow, Result};
use byteorder::{BigEndian, ReadBytesExt, WriteBytesExt};
use bytes::{BufMut, Bytes, BytesMut};
use mmids_core::actor_utils::{notify_on_unbounded_closed, notify_on_unbounded_recv};
use mmids_core::net::tcp::OutboundPacket;
use mmids_core::net::ConnectionId;
use rml_rtmp::handshake::{Handshake, HandshakeProcessResult, PeerType};
use rml_rtmp::sessions::{
PublishMode, ServerSession, ServerSessionConfig, ServerSessionEvent, ServerSessionResult,
StreamMetadata,
};
use rml_rtmp::time::RtmpTimestamp;
use std::io::Cursor;
use std::sync::Arc;
use tokio::sync::mpsc::{UnboundedReceiver, UnboundedSender};
use tracing::{error, info, instrument};
/// Per-connection actor that speaks the RTMP protocol with a single client,
/// translating between raw TCP bytes and requests/responses exchanged with the
/// RTMP server endpoint.
pub struct RtmpServerConnectionHandler {
    /// Sender into this handler's own mailbox, used to route channel events back in
    internal_sender: UnboundedSender<FutureResult>,
    id: ConnectionId,
    state: ConnectionState,
    /// RTMP handshake state machine; only relevant until the handshake completes
    handshake: Handshake,
    /// Active RTMP session; `None` until the handshake has completed
    rtmp_session: Option<ServerSession>,
    /// Packets pushed here are written to the TCP socket
    outgoing_byte_channel: UnboundedSender<OutboundPacket>,
    /// Channel for sending requests up to the RTMP server endpoint
    request_sender: UnboundedSender<ConnectionRequest>,
    /// When set, the event loop exits after the current message
    force_disconnect: bool,
    /// Channel for publisher events; set once a publish request is accepted
    published_event_channel: Option<UnboundedSender<RtmpEndpointPublisherMessage>>,
}
/// Requests a connection handler sends up to the RTMP server endpoint.
#[derive(Debug)]
pub enum ConnectionRequest {
    /// Client asked to connect to an RTMP application
    RequestConnectToApp {
        rtmp_app: Arc<String>,
    },
    /// Client asked to publish on an app/stream key
    RequestPublish {
        rtmp_app: Arc<String>,
        stream_key: Arc<String>,
    },
    /// Client asked to watch an app/stream key
    RequestWatch {
        rtmp_app: Arc<String>,
        stream_key: Arc<String>,
    },
    /// Client stopped publishing
    PublishFinished,
    /// Client stopped watching
    PlaybackFinished,
}
/// Responses the RTMP server endpoint sends down to a connection handler.
pub enum ConnectionResponse {
    /// The most recent connect/publish/watch request was rejected
    RequestRejected,
    /// The request to connect to an RTMP application was accepted
    AppConnectRequestAccepted,
    /// The publish request was accepted; publisher events go out on `channel`
    PublishRequestAccepted {
        channel: UnboundedSender<RtmpEndpointPublisherMessage>,
    },
    /// The watch request was accepted; media to relay arrives on `channel`
    WatchRequestAccepted {
        channel: UnboundedReceiver<RtmpEndpointMediaData>,
    },
    /// The endpoint wants this connection closed
    Disconnect,
}
/// Protocol state machine for a single RTMP connection, from handshake through
/// publishing or watching. `rtmp_request_id`s correlate deferred rml_rtmp session
/// requests with later accept/reject decisions.
#[derive(Debug, PartialEq)]
enum ConnectionState {
    /// RTMP handshake in progress; no session exists yet
    Handshaking,
    /// Handshake done, session created, no app connection requested yet
    RtmpSessionActive,
    /// Client asked to connect to an app; awaiting endpoint approval
    RequestedAppConnection {
        rtmp_app: Arc<String>,
        rtmp_request_id: u32,
    },
    /// Connected to an app but not yet publishing or watching
    ConnectedToApp {
        rtmp_app: Arc<String>,
    },
    /// Publish requested; awaiting endpoint approval
    RequestedPublishing {
        rtmp_app: Arc<String>,
        stream_key: Arc<String>,
        rtmp_request_id: u32,
    },
    /// Actively publishing media
    Publishing {
        rtmp_app: Arc<String>,
        stream_key: Arc<String>,
    },
    /// Watch requested; awaiting endpoint approval. `stream_id` is the RTMP
    /// stream id playback will be delivered on.
    RequestedWatch {
        rtmp_app: Arc<String>,
        stream_key: Arc<String>,
        rtmp_request_id: u32,
        stream_id: u32,
    },
    /// Actively receiving relayed media for playback
    Watching {
        rtmp_app: Arc<String>,
        stream_key: Arc<String>,
        stream_id: u32,
    },
}
/// Mailbox messages that drive a connection handler's event loop.
pub enum FutureResult {
    /// The endpoint responded to one of our requests
    ResponseReceived(ConnectionResponse),
    /// Raw bytes arrived from the TCP socket
    BytesReceived(Bytes),
    /// Media arrived to relay to a watching client
    WatchedMediaReceived(RtmpEndpointMediaData),
    /// The TCP connection (or its byte channels) closed
    Disconnected,
    /// The RTMP server endpoint's response channel closed
    RtmpServerEndpointGone,
}
/// Video fields extracted from an RTMP video packet's FLV-style wrapper.
struct UnwrappedVideo {
    is_keyframe: bool,
    is_sequence_header: bool,
    data: Bytes,
    /// Composition time offset in milliseconds (presentation vs decode time)
    composition_time_in_ms: i32,
}
/// Audio fields extracted from an RTMP audio packet's FLV-style wrapper.
struct UnwrappedAudio {
    is_sequence_header: bool,
    data: Bytes,
}
impl RtmpServerConnectionHandler {
pub fn new(
id: ConnectionId,
outgoing_bytes: UnboundedSender<OutboundPacket>,
request_sender: UnboundedSender<ConnectionRequest>,
actor_sender: UnboundedSender<FutureResult>,
) -> Self {
RtmpServerConnectionHandler {
internal_sender: actor_sender,
id,
state: ConnectionState::Handshaking,
handshake: Handshake::new(PeerType::Server),
rtmp_session: None,
outgoing_byte_channel: outgoing_bytes,
request_sender,
force_disconnect: false,
published_event_channel: None,
}
}
    /// Drives a single RTMP connection to completion.
    ///
    /// Wires the endpoint-response and incoming-byte channels into this handler's
    /// mailbox, kicks off the RTMP handshake by sending p0/p1, then processes
    /// mailbox events until the peer disconnects, the endpoint goes away, a
    /// protocol error occurs, or a handler sets `force_disconnect`.
    #[instrument(name = "Connection Handler Execution",
        skip_all,
        fields(connection_id = ?self.id))]
    pub async fn run_async(
        mut self,
        response_receiver: UnboundedReceiver<ConnectionResponse>,
        incoming_bytes: UnboundedReceiver<Bytes>,
        mut actor_receiver: UnboundedReceiver<FutureResult>,
    ) {
        info!("Starting new rtmp connection handler");
        notify_on_unbounded_recv(
            response_receiver,
            self.internal_sender.clone(),
            FutureResult::ResponseReceived,
            || FutureResult::RtmpServerEndpointGone,
        );
        notify_on_unbounded_recv(
            incoming_bytes,
            self.internal_sender.clone(),
            FutureResult::BytesReceived,
            || FutureResult::Disconnected,
        );
        notify_on_unbounded_closed(
            self.outgoing_byte_channel.clone(),
            self.internal_sender.clone(),
            || FutureResult::Disconnected,
        );
        // Start the handshake process
        let p0_and_p1 = match self.handshake.generate_outbound_p0_and_p1() {
            Ok(x) => x,
            Err(error) => {
                error!(
                    "failed to generate p0 and p1 handshake packets: {:?}",
                    error
                );
                return;
            }
        };
        // Handshake packets must not be dropped under backpressure
        let _ = self.outgoing_byte_channel.send(OutboundPacket {
            bytes: Bytes::from(p0_and_p1),
            can_be_dropped: false,
        });
        while let Some(result) = actor_receiver.recv().await {
            match result {
                FutureResult::Disconnected => {
                    info!("Connection disconnected");
                    break;
                }
                FutureResult::RtmpServerEndpointGone => {
                    error!("Connection's rtmp server endpoint is gone");
                    break;
                }
                FutureResult::BytesReceived(bytes) => {
                    // A protocol error tears the connection down
                    if self.handle_bytes(bytes).is_err() {
                        break;
                    }
                }
                FutureResult::ResponseReceived(response) => {
                    self.handle_endpoint_response(response);
                }
                FutureResult::WatchedMediaReceived(data) => {
                    self.handle_media_from_endpoint(data);
                }
            }
            if self.force_disconnect {
                break;
            }
        }
        info!("Rtmp server handler closing");
    }
    /// Feeds raw bytes from the TCP connection into the protocol machinery.
    ///
    /// While handshaking, bytes go to the handshake state machine; on completion an
    /// RTMP `ServerSession` is created and any leftover bytes are replayed into it.
    /// In every other state, bytes go straight to the active session. Returns
    /// `Err(())` when the connection should be torn down due to a protocol error.
    fn handle_bytes(&mut self, bytes: Bytes) -> Result<(), ()> {
        match &self.state {
            ConnectionState::Handshaking => {
                let result = match self.handshake.process_bytes(bytes.as_ref()) {
                    Ok(x) => x,
                    Err(error) => {
                        error!("Error handshaking: {:?}", error);
                        return Err(());
                    }
                };
                match result {
                    HandshakeProcessResult::InProgress { response_bytes } => {
                        let _ = self.outgoing_byte_channel.send(OutboundPacket {
                            bytes: Bytes::from(response_bytes),
                            can_be_dropped: false,
                        });
                    }
                    HandshakeProcessResult::Completed {
                        response_bytes,
                        remaining_bytes,
                    } => {
                        let _ = self.outgoing_byte_channel.send(OutboundPacket {
                            bytes: Bytes::from(response_bytes),
                            can_be_dropped: false,
                        });
                        let config = ServerSessionConfig::new();
                        let (session, results) = match ServerSession::new(config) {
                            Ok(x) => x,
                            Err(e) => {
                                error!("Failed to create an rtmp server session: {:?}", e);
                                return Err(());
                            }
                        };
                        self.rtmp_session = Some(session);
                        self.handle_rtmp_results(results);
                        self.state = ConnectionState::RtmpSessionActive;
                        // Bytes that arrived bundled with the handshake tail belong
                        // to the RTMP session and must be replayed into it
                        let results = match self
                            .rtmp_session
                            .as_mut()
                            .unwrap()
                            .handle_input(&remaining_bytes)
                        {
                            Ok(x) => x,
                            Err(e) => {
                                error!("Failed to handle initial post-handshake input: {:?}", e);
                                return Err(());
                            }
                        };
                        self.handle_rtmp_results(results);
                    }
                }
            }
            _ => {
                // Any other state means that we have a server session active
                let session_results = match self
                    .rtmp_session
                    .as_mut()
                    .unwrap()
                    .handle_input(bytes.as_ref())
                {
                    Ok(x) => x,
                    Err(e) => {
                        error!("Connection Sent invalid bytes: {:?}", e);
                        return Err(());
                    }
                };
                self.handle_rtmp_results(session_results);
            }
        };
        Ok(())
    }
/// Processes the outcomes produced by the rtmp session after it consumed
/// input bytes.
///
/// Outbound responses are written back to the client; raised protocol events
/// are dispatched to the matching `handle_rtmp_event_*` method (with the app
/// name / stream key wrapped in `Arc` so they can be shared cheaply);
/// unhandleable messages are logged and ignored.
fn handle_rtmp_results(&mut self, results: Vec<ServerSessionResult>) {
for result in results {
match result {
// Bytes the session wants sent back to the client. The session's
// can_be_dropped flag (set for non-critical media) is preserved.
ServerSessionResult::OutboundResponse(packet) => {
let packet = OutboundPacket {
can_be_dropped: packet.can_be_dropped,
bytes: Bytes::from(packet.bytes),
};
let _ = self.outgoing_byte_channel.send(packet);
}
ServerSessionResult::RaisedEvent(event) => match event {
ServerSessionEvent::ConnectionRequested {
request_id,
app_name,
} => {
self.handle_rtmp_event_connection_requested(request_id, Arc::new(app_name));
}
ServerSessionEvent::PublishStreamRequested {
request_id,
app_name,
stream_key,
mode,
} => {
self.handle_rtmp_event_publish_stream_requested(
request_id,
Arc::new(app_name),
Arc::new(stream_key),
mode,
);
}
ServerSessionEvent::StreamMetadataChanged {
app_name,
stream_key,
metadata,
} => self.handle_rtmp_event_stream_metadata_changed(
Arc::new(app_name),
Arc::new(stream_key),
metadata,
),
ServerSessionEvent::VideoDataReceived {
app_name,
stream_key,
data,
timestamp,
} => self.handle_rtmp_event_video_data_received(
Arc::new(app_name),
Arc::new(stream_key),
data,
timestamp,
),
ServerSessionEvent::AudioDataReceived {
app_name,
stream_key,
data,
timestamp,
} => self.handle_rtmp_event_audio_data_received(
Arc::new(app_name),
Arc::new(stream_key),
data,
timestamp,
),
// reset/duration/start_at are rtmp play options this server
// does not act on.
ServerSessionEvent::PlayStreamRequested {
app_name,
stream_key,
stream_id,
request_id,
reset: _,
duration: _,
start_at: _,
} => self.handle_rtmp_event_play_stream_requested(
Arc::new(app_name),
Arc::new(stream_key),
stream_id,
request_id,
),
ServerSessionEvent::PublishStreamFinished {
app_name,
stream_key,
} => self.handle_rtmp_event_publish_finished(
Arc::new(app_name),
Arc::new(stream_key),
),
ServerSessionEvent::PlayStreamFinished {
app_name,
stream_key,
} => self
.handle_rtmp_event_play_finished(Arc::new(app_name), Arc::new(stream_key)),
// Remaining events are informational only.
event => {
info!("Connection raised RTMP event: {:?}", event);
}
},
ServerSessionResult::UnhandleableMessageReceived(payload) => {
info!(
"Connection sent an unhandleable RTMP message: {:?}",
payload
);
}
}
}
}
/// Handles an rtmp client announcing it has stopped playing a stream.
///
/// The connection must currently be watching the exact app/stream key pair
/// named by the event; any mismatch or invalid state is treated as a protocol
/// violation and the connection is flagged for disconnection. On success the
/// connection falls back to `ConnectedToApp` and the endpoint is notified.
fn handle_rtmp_event_play_finished(&mut self, app_name: Arc<String>, stream_key: Arc<String>) {
    // Extract the app/key currently being watched; bail on any other state.
    let (active_app, active_key) = match &self.state {
        ConnectionState::Watching {
            rtmp_app,
            stream_key,
            stream_id: _,
        } => (rtmp_app.clone(), stream_key.clone()),
        _ => {
            error!(
                "Connection {} requested to stop playback but was in an invalid state: {:?}",
                self.id, self.state
            );
            self.force_disconnect = true;
            return;
        }
    };

    // The event must name exactly what this connection is watching.
    if active_app != app_name {
        error!(
            requested_app = %app_name,
            active_app = %active_app,
            "Connection requested to stop playback on an app it's not connected to"
        );
        self.force_disconnect = true;
        return;
    }

    if active_key != stream_key {
        error!(
            requested_key = %stream_key,
            active_key = %active_key,
            "Connection requested to stop playback on a stream key it's not watching"
        );
        self.force_disconnect = true;
        return;
    }

    // Valid stop request: drop back to the app-connected state and tell the
    // endpoint that playback is over.
    self.state = ConnectionState::ConnectedToApp { rtmp_app: app_name };
    let _ = self
        .request_sender
        .send(ConnectionRequest::PlaybackFinished);
}
/// Handles an rtmp client announcing it has stopped publishing.
///
/// The connection must currently be publishing on the exact app/stream key
/// pair named by the event; any mismatch or invalid state flags the
/// connection for disconnection. On success the connection falls back to
/// `ConnectedToApp` and the endpoint is notified.
fn handle_rtmp_event_publish_finished(
    &mut self,
    app_name: Arc<String>,
    stream_key: Arc<String>,
) {
    // Extract the app/key currently being published on; bail on other states.
    let (current_app, current_key) = match &self.state {
        ConnectionState::Publishing {
            rtmp_app,
            stream_key,
        } => (rtmp_app.clone(), stream_key.clone()),
        _ => {
            error!(
                "Connection {} requested to stop publishing but was in an invalid state: {:?}",
                self.id, self.state
            );
            self.force_disconnect = true;
            return;
        }
    };

    // The event must name exactly what this connection is publishing on.
    if current_app != app_name {
        error!(
            requested_app = %app_name,
            active_app = %current_app,
            "Connection requested to stop publishing on an app it's not connected to"
        );
        self.force_disconnect = true;
        return;
    }

    if current_key != stream_key {
        error!(
            requested_key = %stream_key,
            active_key = %current_key,
            "Connection requested to stop publishing on a stream key it's not publishing on"
        );
        self.force_disconnect = true;
        return;
    }

    // Valid stop request: drop back to the app-connected state and tell the
    // endpoint that publishing is over.
    self.state = ConnectionState::ConnectedToApp { rtmp_app: app_name };
    let _ = self.request_sender.send(ConnectionRequest::PublishFinished);
}
/// Handles an rtmp client requesting playback of a stream.
///
/// Playback may only be requested from the `ConnectedToApp` state, and only
/// for the app the connection is attached to. Valid requests move the
/// connection to `RequestedWatch` (remembering the rtmp request id and stream
/// id so the client can be answered later) and are forwarded to the endpoint.
fn handle_rtmp_event_play_stream_requested(
    &mut self,
    app_name: Arc<String>,
    stream_key: Arc<String>,
    stream_id: u32,
    request_id: u32,
) {
    // Must already be attached to an rtmp app before requesting playback.
    let connected_app = match &self.state {
        ConnectionState::ConnectedToApp { rtmp_app } => rtmp_app.clone(),
        _ => {
            error!(
                "Connection {} requested playback but was in an invalid state: {:?}",
                self.id, self.state
            );
            self.force_disconnect = true;
            return;
        }
    };

    if connected_app != app_name {
        error!(
            "Connection requested playback on rtmp app {}, but it's currently connected to rtmp app '{}'",
            app_name, connected_app
        );
        self.force_disconnect = true;
        return;
    }

    // Remember everything needed to answer the rtmp client once the endpoint
    // accepts or rejects, then forward the request to the endpoint.
    self.state = ConnectionState::RequestedWatch {
        rtmp_app: app_name.clone(),
        stream_key: stream_key.clone(),
        stream_id,
        rtmp_request_id: request_id,
    };

    let _ = self.request_sender.send(ConnectionRequest::RequestWatch {
        rtmp_app: app_name,
        stream_key,
    });
}
/// Handles audio data arriving from a publishing rtmp client.
///
/// Audio is only accepted while the connection is publishing on the exact
/// app/stream key the event names; a mismatch is a protocol violation and
/// flags the connection for disconnection. Accepted audio is unwrapped from
/// its FLV envelope and forwarded on the endpoint's publisher channel.
fn handle_rtmp_event_audio_data_received(
    &mut self,
    app_name: Arc<String>,
    stream_key: Arc<String>,
    data: Bytes,
    timestamp: RtmpTimestamp,
) {
    match &self.state {
        ConnectionState::Publishing {
            stream_key: current_stream_key,
            rtmp_app: current_rtmp_app,
        } => {
            if *current_rtmp_app != app_name || *current_stream_key != stream_key {
                error!(
                    "Connection sent audio data for '{}/{}', but this connection is \
                    currently publishing on '{}/{}'",
                    app_name, stream_key, current_rtmp_app, current_stream_key
                );
                self.force_disconnect = true;
                return;
            }

            // A single malformed FLV tag is logged and dropped; it doesn't
            // warrant killing the whole connection.
            let unwrapped_audio = match unwrap_audio_from_flv(data) {
                Ok(audio) => audio,
                Err(error) => {
                    error!("Failed to unwrap audio from FLV: {:?}", error);
                    return;
                }
            };

            // Publishing state is only entered after the endpoint hands us a
            // publisher channel, so the unwrap here cannot fail.
            let _ = self.published_event_channel.as_ref().unwrap().send(
                RtmpEndpointPublisherMessage::NewAudioData {
                    publisher: self.id.clone(),
                    data: unwrapped_audio.data,
                    timestamp,
                    is_sequence_header: unwrapped_audio.is_sequence_header,
                },
            );
        }

        _ => {
            // Message reworded (was "sent audio data is not in a publishing
            // state") to match the sibling metadata handler's phrasing.
            error!(
                "Connection sent audio data but is not in a publishing state: {:?}",
                self.state
            );
            self.force_disconnect = true;
        }
    }
}
/// Handles video data arriving from a publishing rtmp client.
///
/// Video is only accepted while the connection is publishing on the exact
/// app/stream key the event names; a mismatch is a protocol violation and
/// flags the connection for disconnection. Accepted video is unwrapped from
/// its FLV envelope and forwarded on the endpoint's publisher channel,
/// keeping keyframe / sequence-header / composition-time information intact.
fn handle_rtmp_event_video_data_received(
    &mut self,
    app_name: Arc<String>,
    stream_key: Arc<String>,
    data: Bytes,
    timestamp: RtmpTimestamp,
) {
    match &self.state {
        ConnectionState::Publishing {
            stream_key: current_stream_key,
            rtmp_app: current_rtmp_app,
        } => {
            if *current_rtmp_app != app_name || *current_stream_key != stream_key {
                error!(
                    "Connection sent video data for '{}/{}', but this connection is currently publishing on '{}/{}'",
                    app_name, stream_key, current_rtmp_app, current_stream_key
                );
                self.force_disconnect = true;
                return;
            }

            // Unsupported FLV video tags are logged and dropped without
            // killing the connection.
            let unwrapped_video = match unwrap_video_from_flv(data) {
                Ok(video) => video,
                Err(error) => {
                    error!("Video is using an unsupported set of flv video tags: {error}");
                    return;
                }
            };

            // Publishing state is only entered after the endpoint hands us a
            // publisher channel, so the unwrap here cannot fail.
            let _ = self.published_event_channel.as_ref().unwrap().send(
                RtmpEndpointPublisherMessage::NewVideoData {
                    publisher: self.id.clone(),
                    is_keyframe: unwrapped_video.is_keyframe,
                    is_sequence_header: unwrapped_video.is_sequence_header,
                    data: unwrapped_video.data,
                    timestamp,
                    composition_time_offset: unwrapped_video.composition_time_in_ms,
                },
            );
        }

        _ => {
            // Message reworded (was "sent video data is not in a publishing
            // state") to match the sibling metadata handler's phrasing.
            error!(
                "Connection sent video data but is not in a publishing state: {:?}",
                self.state
            );
            self.force_disconnect = true;
        }
    }
}
/// Handles a publishing rtmp client updating its stream metadata.
///
/// Metadata is only accepted while the connection is publishing on the exact
/// app/stream key the event names; a mismatch or invalid state flags the
/// connection for disconnection. Accepted metadata is forwarded on the
/// endpoint's publisher channel.
fn handle_rtmp_event_stream_metadata_changed(
    &mut self,
    app_name: Arc<String>,
    stream_key: Arc<String>,
    metadata: StreamMetadata,
) {
    // Only an actively publishing connection may update stream metadata.
    let (current_rtmp_app, current_stream_key) = match &self.state {
        ConnectionState::Publishing {
            stream_key,
            rtmp_app,
        } => (rtmp_app.clone(), stream_key.clone()),
        _ => {
            error!(
                "Connection sent stream metadata but is not in a publishing state: {:?}",
                self.state
            );
            self.force_disconnect = true;
            return;
        }
    };

    if current_rtmp_app != app_name || current_stream_key != stream_key {
        error!(
            "Connection sent a stream metadata changed for '{}/{}', but \
            this connection is currently publishing on '{}/{}'",
            app_name, stream_key, current_rtmp_app, current_stream_key
        );
        self.force_disconnect = true;
        return;
    }

    info!("Connection sent new stream metadata: {:?}", metadata);

    // Publishing state guarantees the publisher channel has been installed.
    let _ = self.published_event_channel.as_ref().unwrap().send(
        RtmpEndpointPublisherMessage::StreamMetadataChanged {
            publisher: self.id.clone(),
            metadata,
        },
    );
}
/// Handles an rtmp client asking to publish on an app/stream key.
///
/// Only `PublishMode::Live` is supported, and the connection must already be
/// connected to the rtmp app it wants to publish on. Valid requests are
/// forwarded to the endpoint and the connection moves to
/// `RequestedPublishing` until the endpoint accepts or rejects the request.
fn handle_rtmp_event_publish_stream_requested(
    &mut self,
    request_id: u32,
    app_name: Arc<String>,
    stream_key: Arc<String>,
    mode: PublishMode,
) {
    info!(
        "Connection requesting publishing to '{}/{}'",
        app_name, stream_key
    );

    // Record/append publish modes are not supported by this server.
    if mode != PublishMode::Live {
        error!("Connection requested publishing with publish mode {:?}, but only publish mode Live is supported", mode);
        self.force_disconnect = true;
        return;
    }

    match &self.state {
        ConnectionState::ConnectedToApp {
            rtmp_app: connected_app,
        } => {
            if *connected_app != app_name {
                // Fix: the format arguments were previously swapped, which
                // reported the requested and connected app names reversed.
                error!(
                    "Connection's publish request was for rtmp app '{}' but it's already connected to rtmp app '{}'",
                    app_name, connected_app
                );
                self.force_disconnect = true;
                return;
            }
        }

        _ => {
            error!(
                "Connection was in state {:?}, which isn't meant for publishing",
                self.state
            );
            self.force_disconnect = true;
            return;
        }
    };

    // Forward to the endpoint for approval; request_id is remembered so the
    // rtmp client can be answered once the endpoint responds.
    let _ = self.request_sender.send(ConnectionRequest::RequestPublish {
        rtmp_app: app_name.clone(),
        stream_key: stream_key.clone(),
    });

    self.state = ConnectionState::RequestedPublishing {
        rtmp_app: app_name,
        stream_key,
        rtmp_request_id: request_id,
    };
}
/// Handles an rtmp client's initial request to connect to an rtmp app.
///
/// The request is always forwarded to the endpoint; the connection then
/// waits in `RequestedAppConnection` until the endpoint accepts or rejects.
fn handle_rtmp_event_connection_requested(&mut self, request_id: u32, app_name: Arc<String>) {
    info!(
        "Connection requesting connection to rtmp app '{}'",
        app_name
    );

    let request = ConnectionRequest::RequestConnectToApp {
        rtmp_app: app_name.clone(),
    };
    let _ = self.request_sender.send(request);

    // request_id lets us accept/reject this exact rtmp request later.
    self.state = ConnectionState::RequestedAppConnection {
        rtmp_app: app_name,
        rtmp_request_id: request_id,
    };
}
/// Dispatches a response received from the rtmp server endpoint.
///
/// Rejections and explicit disconnects flag the connection for teardown;
/// acceptances are routed to the matching acceptance handler.
fn handle_endpoint_response(&mut self, response: ConnectionResponse) {
    match response {
        ConnectionResponse::RequestRejected => {
            info!("Disconnecting connection due to rejected request");
            self.force_disconnect = true;
        }

        ConnectionResponse::Disconnect => {
            info!("Disconnect requested");
            self.force_disconnect = true;
        }

        ConnectionResponse::AppConnectRequestAccepted => {
            self.handle_endpoint_app_connect_request_accepted()
        }

        ConnectionResponse::PublishRequestAccepted { channel } => {
            self.handle_endpoint_publish_request_accepted(channel)
        }

        ConnectionResponse::WatchRequestAccepted { channel } => {
            self.handle_endpoint_watch_request_accepted(channel)
        }
    }
}
/// Handles the endpoint accepting this connection's earlier watch request.
///
/// Wires the endpoint's media channel into the actor loop, accepts the
/// pending rtmp request on the session, and moves the connection into the
/// `Watching` state.
fn handle_endpoint_watch_request_accepted(
&mut self,
media_channel: UnboundedReceiver<RtmpEndpointMediaData>,
) {
// Media from the endpoint arrives as WatchedMediaReceived results; the
// channel closing means the endpoint itself is gone.
// NOTE(review): this wiring happens before the state check below, so an
// invalid-state acceptance still hooks up the channel — confirm intended.
notify_on_unbounded_recv(
media_channel,
self.internal_sender.clone(),
FutureResult::WatchedMediaReceived,
|| FutureResult::RtmpServerEndpointGone,
);
match &self.state {
ConnectionState::RequestedWatch {
rtmp_app,
stream_key,
rtmp_request_id,
stream_id,
} => {
info!(
"Connections request to watch '{}/{}' was accepted",
rtmp_app, stream_key
);
// Session is always present once past the handshake, which is a
// prerequisite for having made a watch request.
let results = match self
.rtmp_session
.as_mut()
.unwrap()
.accept_request(*rtmp_request_id)
{
Ok(x) => x,
Err(e) => {
error!("Error when accepting watch request: {:?}", e);
self.force_disconnect = true;
return;
}
};
self.state = ConnectionState::Watching {
rtmp_app: (*rtmp_app).clone(),
stream_key: (*stream_key).clone(),
stream_id: *stream_id,
};
// Flush any session output generated by accepting the request.
self.handle_rtmp_results(results);
}
state => {
error!(
"Connection had a watch request accepted, but it isn't in a valid requesting \
state (current state: {:?})",
state
);
self.force_disconnect = true;
}
}
}
/// Handles the endpoint accepting this connection's earlier publish request.
///
/// Accepts the pending rtmp request on the session, stores the channel over
/// which publisher messages (media, metadata) will be sent to the endpoint,
/// and moves the connection into the `Publishing` state.
fn handle_endpoint_publish_request_accepted(
&mut self,
channel: UnboundedSender<RtmpEndpointPublisherMessage>,
) {
match &self.state {
ConnectionState::RequestedPublishing {
rtmp_app,
stream_key,
rtmp_request_id,
} => {
info!(
"Connections request to publish on '{}/{}' was accepted",
rtmp_app, stream_key
);
// Session is always present once past the handshake, which is a
// prerequisite for having made a publish request.
let results = match self
.rtmp_session
.as_mut()
.unwrap()
.accept_request(*rtmp_request_id)
{
Ok(x) => x,
Err(e) => {
error!("Error when accepting publish request: {:?}", e);
self.force_disconnect = true;
return;
}
};
// The media handlers rely on this channel being Some whenever the
// state is Publishing.
self.published_event_channel = Some(channel);
self.state = ConnectionState::Publishing {
rtmp_app: (*rtmp_app).clone(),
stream_key: (*stream_key).clone(),
};
// Flush any session output generated by accepting the request.
self.handle_rtmp_results(results);
}
state => {
error!("Connection had a request accepted, but isn't in a requesting state (current state: {:?})", state);
self.force_disconnect = true;
}
}
}
/// Handles the endpoint accepting this connection's earlier request to
/// connect to an rtmp app.
///
/// Accepts the pending rtmp request on the session, moves the connection
/// into `ConnectedToApp`, and flushes any resulting session output.
#[instrument(skip(self), fields(connection_id = ?self.id))]
fn handle_endpoint_app_connect_request_accepted(&mut self) {
    match &self.state {
        ConnectionState::RequestedAppConnection {
            rtmp_request_id,
            rtmp_app,
        } => {
            info!(
                "Connection's request to connect to the rtmp app {} was accepted",
                rtmp_app
            );

            // Session is always present once past the handshake, which is a
            // prerequisite for having made an app connection request.
            let results = match self
                .rtmp_session
                .as_mut()
                .unwrap()
                .accept_request(*rtmp_request_id)
            {
                Ok(x) => x,
                Err(e) => {
                    error!("Error when accepting app connection request: {:?}", e);
                    self.force_disconnect = true;
                    return;
                }
            };

            self.state = ConnectionState::ConnectedToApp {
                rtmp_app: (*rtmp_app).clone(),
            };

            self.handle_rtmp_results(results);
        }

        state => {
            error!(
                "Connection had an rtmp app request accepted, but isn't in a requesting state \
                (current state: {:?})",
                state
            );

            // Previously this branch only logged; disconnect here as well so
            // behavior matches the publish/watch acceptance handlers.
            self.force_disconnect = true;
        }
    }
}
#[instrument(skip(self), fields(connection_id = ?self.id))]
fn handle_media_from_endpoint(&mut self, media_data: RtmpEndpointMediaData) {
let stream_id = match &self.state {
ConnectionState::Watching { stream_id, .. } => *stream_id,
_ => return, // Not in a state that can receive media
};
let session = self.rtmp_session.as_mut().unwrap();
let session_results = match media_data {
RtmpEndpointMediaData::NewStreamMetaData { metadata } => {
session.send_metadata(stream_id, &metadata)
}
RtmpEndpointMediaData::NewVideoData {
data,
timestamp,
is_keyframe,
is_sequence_header,
composition_time_offset,
} => {
let flv_video = wrap_video_into_flv(
data,
is_keyframe,
is_sequence_header,
composition_time_offset,
);
session.send_video_data(stream_id, flv_video, timestamp, !is_keyframe)
}
RtmpEndpointMediaData::NewAudioData {
data,
timestamp,
is_sequence_header,
} => {
let flv_audio = wrap_audio_into_flv(data, is_sequence_header);
session.send_audio_data(stream_id, flv_audio, timestamp, is_sequence_header)
}
};
let packet = match session_results {
Ok(x) => x,
Err(e) => {
| rust | MIT | 3d732616da3bc8976dbcd5f859758ed3f06dc38f | 2026-01-04T20:17:23.887411Z | true |
KallDrexx/mmids | https://github.com/KallDrexx/mmids/blob/3d732616da3bc8976dbcd5f859758ed3f06dc38f/mmids-rtmp/src/rtmp_server/actor/tests/test_context.rs | mmids-rtmp/src/rtmp_server/actor/tests/test_context.rs | use crate::rtmp_server::actor::tests::rtmp_client::RtmpTestClient;
use crate::rtmp_server::{
start_rtmp_server_endpoint, IpRestriction, RtmpEndpointMediaMessage,
RtmpEndpointPublisherMessage, RtmpEndpointRequest, RtmpEndpointWatcherNotification,
StreamKeyRegistration,
};
use mmids_core::{test_utils, StreamId};
use std::sync::Arc;
use tokio::sync::mpsc::{unbounded_channel, UnboundedReceiver, UnboundedSender};
const RTMP_APP: &str = "app";
/// Builder for [`TestContext`] instances used by the rtmp server actor tests.
///
/// Every field is optional; unset fields fall back to defaults when the
/// builder is consumed by `into_publisher` / `into_watcher`.
pub struct TestContextBuilder {
/// Port the endpoint listens on (defaults to 9999 when unset)
port: Option<u16>,
/// Whether the listener uses TLS (defaults to false)
use_tls: Option<bool>,
/// Whether registrations need registrant approval (defaults to false)
requires_registrant_approval: Option<bool>,
/// Stream id passed with publisher registrations (defaults to None)
stream_id: Option<Option<StreamId>>,
/// IP restriction applied to the listener (defaults to IpRestriction::None)
ip_restriction: Option<IpRestriction>,
/// Rtmp application name to register against (defaults to RTMP_APP)
rtmp_app: Option<Arc<String>>,
/// Stream key registration (defaults to StreamKeyRegistration::Any)
rtmp_stream_key: Option<StreamKeyRegistration>,
}
/// A fully wired test harness: a running rtmp endpoint, a fake client, and
/// the channels a registrant would hold for that endpoint.
pub struct TestContext {
/// Sender for issuing requests to the running endpoint under test
pub endpoint: UnboundedSender<RtmpEndpointRequest>,
/// Fake rtmp client used to drive the endpoint from the network side
pub client: RtmpTestClient,
/// Present when built via into_publisher: publisher-side event stream
pub publish_receiver: Option<UnboundedReceiver<RtmpEndpointPublisherMessage>>,
/// Present when built via into_watcher: watcher-side notification stream
pub watch_receiver: Option<UnboundedReceiver<RtmpEndpointWatcherNotification>>,
/// Present when built via into_watcher: channel for feeding media to watchers
pub media_sender: Option<UnboundedSender<RtmpEndpointMediaMessage>>,
/// The rtmp application name the context was registered against
pub rtmp_app: String,
}
impl TestContextBuilder {
pub fn new() -> Self {
Self {
port: None,
use_tls: None,
requires_registrant_approval: None,
stream_id: None,
ip_restriction: None,
rtmp_app: None,
rtmp_stream_key: None,
}
}
pub fn set_stream_key(mut self, stream_key: StreamKeyRegistration) -> Self {
self.rtmp_stream_key = Some(stream_key);
self
}
pub fn set_requires_registrant_approval(mut self, requires_approval: bool) -> Self {
self.requires_registrant_approval = Some(requires_approval);
self
}
pub async fn into_publisher(self) -> TestContext {
let (sender, receiver) = unbounded_channel();
let request = RtmpEndpointRequest::ListenForPublishers {
port: self.port.unwrap_or(9999),
use_tls: self.use_tls.unwrap_or(false),
requires_registrant_approval: self.requires_registrant_approval.unwrap_or(false),
stream_id: self.stream_id.unwrap_or(None),
ip_restrictions: self.ip_restriction.unwrap_or(IpRestriction::None),
rtmp_app: self
.rtmp_app
.unwrap_or_else(|| Arc::new(RTMP_APP.to_string())),
rtmp_stream_key: self.rtmp_stream_key.unwrap_or(StreamKeyRegistration::Any),
message_channel: sender,
};
TestContext::new_publisher(request, receiver).await
}
pub async fn into_watcher(self) -> TestContext {
let (notification_sender, notification_receiver) = unbounded_channel();
let (media_sender, media_receiver) = unbounded_channel();
let request = RtmpEndpointRequest::ListenForWatchers {
port: self.port.unwrap_or(9999),
use_tls: self.use_tls.unwrap_or(false),
requires_registrant_approval: self.requires_registrant_approval.unwrap_or(false),
ip_restrictions: self.ip_restriction.unwrap_or(IpRestriction::None),
rtmp_app: self
.rtmp_app
.unwrap_or_else(|| Arc::new(RTMP_APP.to_string())),
rtmp_stream_key: self.rtmp_stream_key.unwrap_or(StreamKeyRegistration::Any),
notification_channel: notification_sender,
media_channel: media_receiver,
};
TestContext::new_watcher(request, notification_receiver, media_sender).await
}
}
impl TestContext {
    /// Drives the fake client through handshake, app connect, and publish so
    /// the context ends up as an actively publishing connection.
    ///
    /// Panics if the endpoint doesn't report a `NewPublisherConnected` event.
    pub async fn set_as_active_publisher(&mut self) {
        self.client.perform_handshake().await;
        self.client
            .connect_to_app(self.rtmp_app.clone(), true)
            .await;
        self.client
            .publish_to_stream_key("key".to_string(), true)
            .await;

        // Built via into_publisher, so the publish receiver is always Some.
        let receiver = self.publish_receiver.as_mut().unwrap();
        let response = test_utils::expect_mpsc_response(receiver).await;
        match response {
            RtmpEndpointPublisherMessage::NewPublisherConnected { .. } => (),
            message => panic!("Unexpected publisher message received: {:?}", message),
        };
    }

    /// Drives the fake client through handshake, app connect, and play so the
    /// context ends up as an actively watching connection.
    ///
    /// Panics if the endpoint doesn't report `StreamKeyBecameActive`.
    pub async fn set_as_active_watcher(&mut self) {
        self.client.perform_handshake().await;
        self.client
            .connect_to_app(self.rtmp_app.clone(), true)
            .await;
        self.client.watch_stream_key("key".to_string(), true).await;

        // Built via into_watcher, so the watch receiver is always Some.
        let receiver = self.watch_receiver.as_mut().unwrap();
        let response = test_utils::expect_mpsc_response(receiver).await;
        match response {
            RtmpEndpointWatcherNotification::StreamKeyBecameActive { .. } => (),
            // Fix: panic message previously said "publisher message" here,
            // copied from set_as_active_publisher.
            message => panic!("Unexpected watcher message received: {:?}", message),
        };
    }

    /// Starts an endpoint, submits the publisher registration request, and
    /// waits for the registration to succeed.
    async fn new_publisher(
        request: RtmpEndpointRequest,
        mut receiver: UnboundedReceiver<RtmpEndpointPublisherMessage>,
    ) -> TestContext {
        let (mut client, sender) = RtmpTestClient::new();
        let endpoint = start_rtmp_server_endpoint(sender);
        endpoint
            .send(request)
            .expect("Endpoint request failed to send");

        // 9999 matches the builder's default port.
        client.accept_port_request(9999, false).await;

        let response = test_utils::expect_mpsc_response(&mut receiver).await;
        match response {
            RtmpEndpointPublisherMessage::PublisherRegistrationSuccessful => (),
            x => panic!("Unexpected endpoint response: {:?}", x),
        }

        TestContext {
            client,
            endpoint,
            publish_receiver: Some(receiver),
            watch_receiver: None,
            media_sender: None,
            rtmp_app: RTMP_APP.to_string(),
        }
    }

    /// Starts an endpoint, submits the watcher registration request, and
    /// waits for the registration to succeed.
    async fn new_watcher(
        request: RtmpEndpointRequest,
        mut notification_receiver: UnboundedReceiver<RtmpEndpointWatcherNotification>,
        media_sender: UnboundedSender<RtmpEndpointMediaMessage>,
    ) -> TestContext {
        let (mut client, sender) = RtmpTestClient::new();
        let endpoint = start_rtmp_server_endpoint(sender);
        endpoint
            .send(request)
            .expect("Endpoint request failed to send");

        // 9999 matches the builder's default port.
        client.accept_port_request(9999, false).await;

        let response = test_utils::expect_mpsc_response(&mut notification_receiver).await;
        match response {
            RtmpEndpointWatcherNotification::WatcherRegistrationSuccessful => (),
            x => panic!("Unexpected endpoint response: {:?}", x),
        }

        TestContext {
            client,
            endpoint,
            publish_receiver: None,
            watch_receiver: Some(notification_receiver),
            media_sender: Some(media_sender),
            rtmp_app: RTMP_APP.to_string(),
        }
    }
}
| rust | MIT | 3d732616da3bc8976dbcd5f859758ed3f06dc38f | 2026-01-04T20:17:23.887411Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.