repo stringlengths 6 65 | file_url stringlengths 81 311 | file_path stringlengths 6 227 | content stringlengths 0 32.8k | language stringclasses 1
value | license stringclasses 7
values | commit_sha stringlengths 40 40 | retrieved_at stringdate 2026-01-04 15:31:58 2026-01-04 20:25:31 | truncated bool 2
classes |
|---|---|---|---|---|---|---|---|---|
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_feed/src/feed/cache/mod.rs | crates/synd_feed/src/feed/cache/mod.rs | use std::{sync::Arc, time::Duration};
use async_trait::async_trait;
use crate::{
feed::service::{FetchFeed, FetchFeedResult},
types::{self, FeedUrl},
};
mod periodic_refresher;
pub use periodic_refresher::PeriodicRefresher;
type Cache = moka::future::Cache<FeedUrl, Arc<types::Feed>>;
/// Configuration for the feed cache: capacity bound and entry lifetime.
#[derive(Clone, Copy)]
pub struct CacheConfig {
    max_cache_size: u64,
    time_to_live: Duration,
}

impl Default for CacheConfig {
    /// 10 MiB capacity with a one hour time to live.
    fn default() -> Self {
        const TEN_MIB: u64 = 10 * 1024 * 1024;
        const ONE_HOUR: Duration = Duration::from_secs(60 * 60);
        Self {
            max_cache_size: TEN_MIB,
            time_to_live: ONE_HOUR,
        }
    }
}

impl CacheConfig {
    /// Override the maximum cache size (in bytes).
    #[must_use]
    pub fn with_max_cache_size(mut self, max_cache_size: u64) -> Self {
        self.max_cache_size = max_cache_size;
        self
    }

    /// Override the time to live of cached entries.
    #[must_use]
    pub fn with_time_to_live(mut self, time_to_live: Duration) -> Self {
        self.time_to_live = time_to_live;
        self
    }
}
/// Cache-aware variant of `FetchFeed`: implementations consult a shared
/// cache before delegating to the underlying fetch service.
#[async_trait]
pub trait FetchCachedFeed: Send + Sync {
    /// Fetch a single feed, returning a cached entry when available.
    async fn fetch_feed(&self, url: FeedUrl) -> FetchFeedResult<Arc<types::Feed>>;
    /// Fetch feeds by spawning tasks
    async fn fetch_feeds_parallel(
        &self,
        urls: &[FeedUrl],
    ) -> Vec<FetchFeedResult<Arc<types::Feed>>>;
}
/// Caching decorator around a feed fetch service `S`.
#[derive(Clone)]
pub struct CacheLayer<S> {
    // Underlying fetch service consulted on cache miss.
    service: S,
    // Use Arc to avoid expensive clone
    // https://github.com/moka-rs/moka?tab=readme-ov-file#avoiding-to-clone-the-value-at-get
    cache: Cache,
}
impl<S> CacheLayer<S> {
/// Construct `CacheLayer` with default config
pub fn new(service: S) -> Self {
Self::with(service, CacheConfig::default())
}
/// Construct `CacheLayer` with given config
pub fn with(service: S, config: CacheConfig) -> Self {
let CacheConfig {
max_cache_size,
time_to_live,
} = config;
let cache = moka::future::Cache::builder()
.weigher(|_key, value: &Arc<types::Feed>| -> u32 {
value.approximate_size().try_into().unwrap_or(u32::MAX)
})
.max_capacity(max_cache_size)
.time_to_live(time_to_live)
.build();
Self { service, cache }
}
}
impl<S> CacheLayer<S>
where
    S: Clone,
{
    /// Create a `PeriodicRefresher` sharing this layer's service and cache.
    pub fn periodic_refresher(&self) -> PeriodicRefresher<S> {
        let Self { service, cache } = self;
        PeriodicRefresher::new(service.clone(), cache.clone())
    }
}
#[async_trait]
impl<S> FetchCachedFeed for CacheLayer<S>
where
    S: FetchFeed + Clone + 'static,
{
    /// Fetch a feed, consulting the cache first and populating it on miss.
    #[tracing::instrument(skip_all, fields(%url))]
    async fn fetch_feed(&self, url: FeedUrl) -> FetchFeedResult<Arc<types::Feed>> {
        // lookup cache
        match self.cache.get(&url).await {
            Some(feed) => {
                tracing::debug!(url = url.as_str(), "Feed cache hit");
                Ok(feed)
            }
            None => {
                let feed = Arc::new(self.service.fetch_feed(url.clone()).await?);
                self.cache.insert(url, Arc::clone(&feed)).await;
                Ok(feed)
            }
        }
    }

    /// Fetch feeds by spawning tasks
    async fn fetch_feeds_parallel(
        &self,
        urls: &[FeedUrl],
    ) -> Vec<FetchFeedResult<Arc<types::Feed>>> {
        // Spawn one task per url so fetches run concurrently.
        let handles: Vec<_> = urls
            .iter()
            .cloned()
            .map(|url| {
                let this = self.clone();
                tokio::spawn(async move { this.fetch_feed(url).await })
            })
            .collect();

        let mut results = Vec::with_capacity(handles.len());
        for handle in handles {
            // panic on join error
            results.push(handle.await.unwrap());
        }
        results
    }
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_api/src/config.rs | crates/synd_api/src/config.rs | pub const USER_AGENT: &str = concat!(env!("CARGO_PKG_NAME"), "/", env!("CARGO_PKG_VERSION"));
// Application identity, sourced from Cargo package metadata at compile time.
pub mod app {
    pub const VERSION: &str = env!("CARGO_PKG_VERSION");
    pub const NAME: &str = env!("CARGO_PKG_NAME");
}
// Default server port.
// NOTE(review): duplicates `serve::DEFAULT_PORT` below — confirm which
// callers rely on this before consolidating.
pub const PORT: u16 = 5959;
// Environment variable key helpers.
pub mod env {
    /// Prefix a key with the application namespace,
    /// e.g. `env_key!("SQLITE_DB")` expands to `"SYND_SQLITE_DB"`.
    macro_rules! env_key {
        ($key:expr) => {
            concat!("SYND", "_", $key)
        };
    }
    pub(crate) use env_key;
}
// HTTP server defaults. Duration defaults are humantime-formatted strings
// parsed by the CLI layer.
pub mod serve {
    pub const DEFAULT_ADDR: &str = "127.0.0.1";
    pub const DEFAULT_PORT: u16 = 5959;
    pub const DEFAULT_REQUEST_TIMEOUT: &str = "30s";
    // 2 KiB request body cap.
    pub const DEFAULT_REQUEST_BODY_LIMIT_BYTES: usize = 1024 * 2;
    pub const DEFAULT_REQUEST_CONCURRENCY_LIMIT: usize = 100;
    pub const HEALTH_CHECK_PATH: &str = "/health";
}
// Cadence for the metrics monitor loop.
pub mod metrics {
    use std::time::Duration;

    pub const MONITOR_INTERVAL: Duration = Duration::from_secs(60);
}
// Feed cache tuning defaults (consumed by the synd_feed cache layer).
pub mod cache {
    // Maximum cache capacity in MiB.
    pub const DEFAULT_FEED_CACHE_SIZE_MB: u64 = 100;
    // Durations are humantime-formatted strings parsed by the CLI layer.
    pub const DEFAULT_FEED_CACHE_TTL: &str = "180min";
    pub const DEFAULT_FEED_CACHE_REFRESH_INTERVAL: &str = "120min";
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_api/src/lib.rs | crates/synd_api/src/lib.rs | //! syndicationd graphql api server crate
#![allow(clippy::new_without_default)]
#![warn(rustdoc::broken_intra_doc_links)]
pub mod cli;
pub mod client;
pub mod config;
pub mod dependency;
pub(crate) mod gql;
pub mod monitor;
pub(crate) mod principal;
pub mod repository;
pub mod serve;
pub mod shutdown;
pub mod usecase;
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_api/src/shutdown.rs | crates/synd_api/src/shutdown.rs | use std::{future::Future, io, net::SocketAddr, time::Duration};
use axum_server::Handle;
use tokio_util::sync::CancellationToken;
/// `CancellationToken` wrapper
///
/// Couples the root cancellation token with the axum server handle that is
/// asked to shut down gracefully once the token is cancelled.
pub struct Shutdown {
    // Cancelled when a signal is received or `shutdown` is called.
    root: CancellationToken,
    // axum server handle notified for graceful shutdown.
    handle: Handle<SocketAddr>,
}
impl Shutdown {
/// When the given signal Future is resolved, call the `cancel` method of the held `CancellationToken`.
pub fn watch_signal<Fut, F>(signal: Fut, on_graceful_shutdown: F) -> Self
where
F: FnOnce() + Send + 'static,
Fut: Future<Output = io::Result<()>> + Send + 'static,
{
// Root cancellation token which is cancelled when signal received
let root = CancellationToken::new();
let notify = root.clone();
tokio::spawn(async move {
match signal.await {
Ok(()) => tracing::info!("Received signal"),
Err(err) => tracing::error!("Failed to handle signal {err}"),
}
notify.cancel();
});
// Notify graceful shutdown to axum server
let ct = root.clone();
let handle = axum_server::Handle::new();
let notify = handle.clone();
tokio::spawn(async move {
ct.cancelled().await;
on_graceful_shutdown();
tracing::info!("Notify axum handler to shutdown");
notify.graceful_shutdown(Some(Duration::from_secs(3)));
});
Self { root, handle }
}
/// Request shutdown
pub fn shutdown(&self) {
self.root.cancel();
}
pub fn into_handle(self) -> Handle<SocketAddr> {
self.handle
}
/// Return `CancellationToken which is cancelled at shutdown`
pub fn cancellation_token(&self) -> CancellationToken {
self.root.clone()
}
}
#[cfg(test)]
mod tests {
    use std::{
        io::ErrorKind,
        sync::{
            Arc,
            atomic::{AtomicBool, Ordering},
        },
    };

    use futures_util::future;

    use super::*;

    // Resolving the signal future (with Ok or Err) must cancel the token and
    // invoke the graceful-shutdown callback.
    #[tokio::test(flavor = "multi_thread")]
    async fn signal_trigger_graceful_shutdown() {
        for signal_result in [Ok(()), Err(io::Error::from(ErrorKind::Other))] {
            let called = Arc::new(AtomicBool::new(false));
            let called_cloned = Arc::clone(&called);
            let on_graceful_shutdown = move || {
                called_cloned.store(true, Ordering::Relaxed);
            };
            let (tx, rx) = tokio::sync::oneshot::channel::<io::Result<()>>();
            let s = Shutdown::watch_signal(
                async move {
                    rx.await.unwrap().ok();
                    signal_result
                },
                on_graceful_shutdown,
            );
            let ct = s.cancellation_token();
            // Mock signal triggered
            tx.send(Ok(())).unwrap();
            // Check cancellation token is cancelled and axum handler called.
            // Poll up to ~1s: cancellation happens on spawned tasks.
            let mut ok = false;
            let mut count = 0;
            loop {
                count += 1;
                if count >= 10 {
                    break;
                }
                if s.root.is_cancelled() && ct.is_cancelled() && called.load(Ordering::Relaxed) {
                    ok = true;
                    break;
                }
                tokio::time::sleep(Duration::from_millis(100)).await;
            }
            assert!(ok, "cancelation does not work");
        }
    }

    // An explicit `shutdown()` call must behave the same as a signal.
    #[tokio::test(flavor = "multi_thread")]
    async fn shutdown_trigger_graceful_shutdown() {
        let called = Arc::new(AtomicBool::new(false));
        let called_cloned = Arc::clone(&called);
        let on_graceful_shutdown = move || {
            called_cloned.store(true, Ordering::Relaxed);
        };
        // Signal future never resolves; only `shutdown()` can cancel.
        let s = Shutdown::watch_signal(future::pending(), on_graceful_shutdown);
        let ct = s.cancellation_token();
        s.shutdown();
        // Check cancellation token is cancelled and axum handler called
        let mut ok = false;
        let mut count = 0;
        loop {
            count += 1;
            if count >= 10 {
                break;
            }
            if s.root.is_cancelled() && ct.is_cancelled() && called.load(Ordering::Relaxed) {
                ok = true;
                break;
            }
            tokio::time::sleep(Duration::from_millis(100)).await;
        }
        assert!(ok, "cancelation does not work");
    }
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_api/src/dependency.rs | crates/synd_api/src/dependency.rs | use std::sync::Arc;
use anyhow::Context;
use axum_server::tls_rustls::RustlsConfig;
use synd_feed::feed::{
cache::{CacheConfig, CacheLayer},
service::FeedService,
};
use tokio_util::sync::CancellationToken;
use crate::{
cli::{self, CacheOptions, TlsOptions},
config,
monitor::Monitors,
repository::sqlite::DbPool,
serve::{ServeOptions, auth::Authenticator},
usecase::{MakeUsecase, Runtime, authorize::Authorizer},
};
/// Aggregates everything the serve layer needs to run the API server.
pub struct Dependency {
    pub authenticator: Authenticator,
    pub runtime: Runtime,
    pub tls_config: RustlsConfig,
    pub serve_options: ServeOptions,
    pub monitors: Monitors,
}
impl Dependency {
    /// Wire up server dependencies: a feed service wrapped in a cache layer
    /// (with a background cache refresher task), the usecase runtime, the
    /// authenticator, TLS configuration, and task monitors.
    ///
    /// # Errors
    /// Fails when the authenticator cannot be constructed or the TLS
    /// certificate / private key files cannot be loaded.
    pub async fn new(
        db: DbPool,
        tls: TlsOptions,
        serve_options: cli::ServeOptions,
        cache: CacheOptions,
        ct: CancellationToken,
    ) -> anyhow::Result<Self> {
        let cache_feed_service = {
            let CacheOptions {
                feed_cache_size_mb,
                feed_cache_ttl,
                feed_cache_refresh_interval,
            } = cache;
            // 10 MiB — presumably the max feed payload size accepted by
            // FeedService; confirm against FeedService::new.
            let feed_service = FeedService::new(config::USER_AGENT, 10 * 1024 * 1024);
            let cache_feed_service = CacheLayer::with(
                feed_service,
                CacheConfig::default()
                    // CLI option is in MiB; cache size is in bytes.
                    .with_max_cache_size(feed_cache_size_mb * 1024 * 1024)
                    .with_time_to_live(feed_cache_ttl),
            );
            // Refresh cached feeds in the background until `ct` is cancelled.
            let periodic_refresher = cache_feed_service
                .periodic_refresher()
                .with_emit_metrics(true);
            tokio::spawn(periodic_refresher.run(feed_cache_refresh_interval, ct));

            cache_feed_service
        };

        let make_usecase = MakeUsecase {
            subscription_repo: Arc::new(db),
            fetch_feed: Arc::new(cache_feed_service),
        };
        let authenticator = Authenticator::new()?;
        let authorizer = Authorizer::new();
        let runtime = Runtime::new(make_usecase, authorizer);
        let tls_config = RustlsConfig::from_pem_file(&tls.certificate, &tls.private_key)
            .await
            .with_context(|| format!("tls options: {tls:?}"))?;
        let monitors = Monitors::new();

        Ok(Dependency {
            authenticator,
            runtime,
            tls_config,
            serve_options: serve_options.into(),
            monitors,
        })
    }
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_api/src/cli.rs | crates/synd_api/src/cli.rs | use std::{ffi::OsString, net::IpAddr, path::PathBuf, str::FromStr, time::Duration};
use clap::{ArgAction, Parser};
use synd_stdx::time::humantime;
use crate::{
config::{self, env::env_key},
serve,
};
// Top-level CLI arguments, composed of option groups via #[command(flatten)].
// Plain `//` comments are used below: `///` doc comments on clap items become
// user-visible help/about text.
#[derive(Parser, Debug)]
#[command(version, propagate_version = true, disable_help_subcommand = true)]
pub struct Args {
    #[command(flatten)]
    pub sqlite: SqliteOptions,
    #[command(flatten)]
    pub bind: BindOptions,
    #[command(flatten)]
    pub serve: ServeOptions,
    #[command(flatten)]
    pub tls: TlsOptions,
    #[command(flatten)]
    pub o11y: ObservabilityOptions,
    #[command(flatten)]
    pub cache: CacheOptions,
    // Hidden flag: requests shutdown right after startup (see `run` in main.rs).
    #[arg(hide = true, long = "dry-run", hide_long_help = true)]
    pub dry_run: bool,
}
// Sqlite connection options.
#[derive(clap::Args, Debug, Clone)]
#[command(next_help_heading = "sqlite options")]
pub struct SqliteOptions {
    // Path to the sqlite database file (env: SYND_SQLITE_DB).
    #[arg(long, env = env_key!("SQLITE_DB"))]
    pub sqlite_db: PathBuf,
}
// Network bind options (address and port).
#[derive(clap::Args, Debug, Clone)]
#[command(next_help_heading = "Bind options")]
pub struct BindOptions {
    #[arg(long, value_parser = IpAddr::from_str, default_value = config::serve::DEFAULT_ADDR, env = env_key!("BIND_ADDR"))]
    pub addr: IpAddr,
    #[arg(long, default_value_t = config::serve::DEFAULT_PORT, env = env_key!("BIND_PORT"))]
    pub port: u16,
}
// Request handling limits for the HTTP server.
#[derive(clap::Args, Debug, Clone)]
#[command(next_help_heading = "Serve options")]
pub struct ServeOptions {
    /// Request timeout duration
    #[arg(long, value_parser = humantime::parse_duration, default_value = config::serve::DEFAULT_REQUEST_TIMEOUT)]
    pub timeout: Duration,
    /// Request body limit
    #[arg(long, default_value_t = config::serve::DEFAULT_REQUEST_BODY_LIMIT_BYTES)]
    pub body_limit_bytes: usize,
    // Maximum number of concurrently processed requests.
    #[arg(long, default_value_t = config::serve::DEFAULT_REQUEST_CONCURRENCY_LIMIT)]
    pub concurrency_limit: usize,
}
// TLS certificate / private key paths, loaded via RustlsConfig at startup.
#[derive(clap::Args, Debug)]
#[command(next_help_heading = "Tls options")]
pub struct TlsOptions {
    /// Tls certificate file path
    #[arg(long = "tls-cert", env = env_key!("TLS_CERT"), value_name = "CERT_PATH")]
    pub certificate: PathBuf,
    /// Tls private key file path
    #[arg(long = "tls-key", env = env_key!("TLS_KEY"), value_name = "KEY_PATH")]
    pub private_key: PathBuf,
}
// Logging and OpenTelemetry export options.
#[derive(clap::Args, Debug)]
#[command(next_help_heading = "Observability options")]
pub struct ObservabilityOptions {
    /// Show code location(file, line number) in logs
    #[arg(long, env = env_key!("LOG_SHOW_LOCATION"), default_value_t = false, action = ArgAction::Set )]
    pub show_code_location: bool,
    /// Show event target(module in default) in logs
    #[arg(long, env = env_key!("LOG_SHOW_TARGET"), default_value_t = true, action = ArgAction::Set)]
    pub show_target: bool,
    /// Opentelemetry otlp exporter endpoint
    // Uses the standard OTEL_* env vars rather than the SYND_ prefix.
    #[arg(long, env = "OTEL_EXPORTER_OTLP_ENDPOINT")]
    pub otlp_endpoint: Option<String>,
    /// Opentelemetry trace sampler ratio
    #[arg(long, env = "OTEL_TRACES_SAMPLER_ARG", default_value_t = 1.0)]
    pub trace_sampler_ratio: f64,
}
// Feed cache tuning (consumed by the synd_feed cache layer).
#[derive(clap::Args, Debug, Clone)]
#[command(next_help_heading = "Cache options")]
pub struct CacheOptions {
    /// Max feed cache size in MiB
    // env: SYND_FEED_CACHE_SIZE — value is in MiB despite the unsuffixed name.
    #[arg(long, default_value_t = config::cache::DEFAULT_FEED_CACHE_SIZE_MB, env = env_key!("FEED_CACHE_SIZE") )]
    pub feed_cache_size_mb: u64,
    // Time to live of cached feeds (humantime format, e.g. "180min").
    #[arg(long, value_parser = humantime::parse_duration, default_value = config::cache::DEFAULT_FEED_CACHE_TTL, env = env_key!("FEED_CACHE_TTL"))]
    pub feed_cache_ttl: Duration,
    // Interval for the background cache refresher.
    #[arg(long, value_parser = humantime::parse_duration, default_value = config::cache::DEFAULT_FEED_CACHE_REFRESH_INTERVAL, env = env_key!("FEED_CACHE_REFRESH_INTERVAL"))]
    pub feed_cache_refresh_interval: Duration,
}
/// Parse CLI arguments from the given iterator.
///
/// # Errors
/// Returns `clap::Error` on invalid input; `--help` and `--version` also
/// surface as errors with their dedicated `ErrorKind`s (see tests below).
pub fn try_parse<I, T>(iter: I) -> Result<Args, clap::Error>
where
    I: IntoIterator<Item = T>,
    T: Into<OsString> + Clone,
{
    Args::try_parse_from(iter)
}
impl From<BindOptions> for serve::BindOptions {
    /// Copy CLI bind options into the server's bind options.
    fn from(options: BindOptions) -> Self {
        Self {
            port: options.port,
            addr: options.addr,
        }
    }
}
impl From<ServeOptions> for serve::ServeOptions {
    /// Copy CLI serve limits into the server's option struct field by field.
    fn from(options: ServeOptions) -> Self {
        Self {
            timeout: options.timeout,
            body_limit_bytes: options.body_limit_bytes,
            concurrency_limit: options.concurrency_limit,
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // clap reports `--version` / `--help` as special error kinds rather than
    // parsed Args; assert both are surfaced correctly.
    #[test]
    fn should_parse() {
        assert_eq!(
            try_parse(["synd-api", "--version"]).unwrap_err().kind(),
            clap::error::ErrorKind::DisplayVersion
        );
        assert_eq!(
            try_parse(["synd-api", "--help"]).unwrap_err().kind(),
            clap::error::ErrorKind::DisplayHelp,
        );
    }
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_api/src/monitor.rs | crates/synd_api/src/monitor.rs | use std::time::Duration;
use synd_o11y::metric;
use tokio_metrics::{RuntimeMetrics, RuntimeMonitor, TaskMetrics, TaskMonitor};
use tokio_util::sync::CancellationToken;
// Snapshot of the values emitted on each monitoring tick.
struct Metrics {
    // Tokio runtime counters.
    runtime_total_polls_count: u64,
    runtime_busy_duration_secs: f64,
    // GraphQL task means for the sampled interval.
    gql_mean_poll_duration_secs: f64,
    gql_mean_slow_poll_duration_secs: f64,
    gql_mean_first_poll_delay_secs: f64,
    gql_mean_scheduled_duration_secs: f64,
    gql_mean_idle_duration_secs: f64,
}
/// Task monitors for instrumented subsystems (currently GraphQL only).
pub struct Monitors {
    gql: TaskMonitor,
}
impl Monitors {
    /// Create monitors with a fresh GraphQL task monitor.
    pub fn new() -> Self {
        Self {
            gql: TaskMonitor::new(),
        }
    }

    /// Clone of the monitor used to instrument GraphQL request tasks.
    pub(crate) fn graphql_task_monitor(&self) -> TaskMonitor {
        self.gql.clone()
    }

    /// Emit runtime and GraphQL task metrics until `ct` is cancelled.
    ///
    /// Metrics are emitted once immediately, then the loop waits for either
    /// `interval` to elapse or cancellation (cancellation wins via `biased`).
    pub async fn emit_metrics(self, interval: Duration, ct: CancellationToken) {
        let handle = tokio::runtime::Handle::current();
        let runtime_monitor = RuntimeMonitor::new(&handle);
        // Each step of these iterators yields the metrics window since the
        // previous step (tokio-metrics interval sampling).
        let intervals = runtime_monitor.intervals().zip(self.gql.intervals());

        for (runtime_metrics, gql_metrics) in intervals {
            let Metrics {
                runtime_total_polls_count,
                runtime_busy_duration_secs,
                gql_mean_poll_duration_secs,
                gql_mean_slow_poll_duration_secs,
                gql_mean_first_poll_delay_secs,
                gql_mean_scheduled_duration_secs,
                gql_mean_idle_duration_secs,
            } = Self::collect_metrics(&runtime_metrics, &gql_metrics);

            // Runtime metrics
            metric!(monotonic_counter.runtime.poll = runtime_total_polls_count);
            metric!(monotonic_counter.runtime.busy_duration = runtime_busy_duration_secs);

            // Tasks poll metrics
            metric!(
                monotonic_counter.task.graphql.mean_poll_duration = gql_mean_poll_duration_secs
            );
            metric!(
                monotonic_counter.task.graphql.mean_slow_poll_duration =
                    gql_mean_slow_poll_duration_secs
            );
            // Tasks schedule metrics
            metric!(
                monotonic_counter.task.graphql.mean_first_poll_delay =
                    gql_mean_first_poll_delay_secs,
            );
            metric!(
                monotonic_counter.task.graphql.mean_scheduled_duration =
                    gql_mean_scheduled_duration_secs,
            );
            // Tasks idle metrics
            metric!(
                monotonic_counter.task.graphql.mean_idle_duration = gql_mean_idle_duration_secs,
            );

            tokio::select! {
                biased;
                // Make sure to respect cancellation
                () = ct.cancelled() => break,
                () = tokio::time::sleep(interval) => ()
            }
        }
    }

    // Gather current runtime totals and GraphQL task interval means into one
    // struct so the emit loop stays flat.
    fn collect_metrics(runtime_metrics: &RuntimeMetrics, gql_metrics: &TaskMetrics) -> Metrics {
        Metrics {
            runtime_total_polls_count: runtime_metrics.total_polls_count,
            runtime_busy_duration_secs: runtime_metrics.total_busy_duration.as_secs_f64(),
            gql_mean_poll_duration_secs: gql_metrics.mean_poll_duration().as_secs_f64(),
            gql_mean_slow_poll_duration_secs: gql_metrics.mean_slow_poll_duration().as_secs_f64(),
            gql_mean_first_poll_delay_secs: gql_metrics.mean_first_poll_delay().as_secs_f64(),
            gql_mean_scheduled_duration_secs: gql_metrics.mean_scheduled_duration().as_secs_f64(),
            gql_mean_idle_duration_secs: gql_metrics.mean_idle_duration().as_secs_f64(),
        }
    }
}
#[cfg(test)]
mod tests {
    use std::sync::{Arc, Mutex};

    use tracing::{Event, Subscriber, instrument::WithSubscriber};
    use tracing_subscriber::{
        Layer, Registry,
        layer::{Context, SubscriberExt as _},
        registry::LookupSpan,
    };

    use super::*;

    // Test layer forwarding every tracing event to a callback.
    struct TestLayer<F> {
        on_event: F,
    }

    impl<S, F> Layer<S> for TestLayer<F>
    where
        S: Subscriber + for<'span> LookupSpan<'span>,
        F: Fn(&Event<'_>) + 'static,
    {
        fn on_event(&self, event: &Event<'_>, _ctx: Context<'_, S>) {
            (self.on_event)(event);
        }
    }

    // Snapshot the metric field names emitted in one monitoring iteration.
    // The token is cancelled up-front so `emit_metrics` emits once and exits.
    #[tokio::test]
    async fn emit_metrics() {
        let events = Arc::new(Mutex::new(Vec::new()));
        let events_cloned = events.clone();
        let on_event = move |event: &Event<'_>| {
            // The first field of a metric event carries the metric name.
            let field = event.fields().next().unwrap().name();
            events_cloned.lock().unwrap().push(field);
        };
        let layer = TestLayer { on_event };
        let registry = Registry::default().with(layer);

        let m = Monitors::new();
        let ct = CancellationToken::new();
        ct.cancel();
        m.emit_metrics(Duration::from_millis(0), ct)
            .with_subscriber(registry)
            .await;

        let events = events.lock().unwrap().clone();
        insta::with_settings!({
            description => "metrics which monitor emits",
            omit_expression => true ,
        }, {
            insta::assert_yaml_snapshot!(events);
        });
    }
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_api/src/main.rs | crates/synd_api/src/main.rs | use std::env;
use fdlimit::Outcome;
use synd_o11y::{
opentelemetry::OpenTelemetryGuard, tracing_subscriber::initializer::TracingInitializer,
};
use synd_stdx::io::color::{ColorSupport, is_color_supported};
use tracing::{error, info};
use synd_api::{
cli::{self, Args, ObservabilityOptions},
config,
dependency::Dependency,
repository::{kvsd::ConnectKvsdFailed, sqlite::DbPool},
serve::listen_and_serve,
shutdown::Shutdown,
};
/// Initialize the tracing subscriber / OpenTelemetry pipeline from CLI options.
///
/// Returns an optional guard; main keeps it alive for the whole process
/// (presumably so exporters flush on drop — see synd_o11y).
fn init_tracing(options: &ObservabilityOptions) -> Option<OpenTelemetryGuard> {
    let ObservabilityOptions {
        show_code_location,
        show_target,
        otlp_endpoint,
        trace_sampler_ratio,
    } = options;

    TracingInitializer::default()
        .app_name(config::app::NAME)
        .app_version(config::app::VERSION)
        .otlp_endpoint(otlp_endpoint.clone())
        .trace_sampler_ratio(*trace_sampler_ratio)
        // ANSI colors only when the terminal supports them.
        .enable_ansi(is_color_supported() == ColorSupport::Supported)
        .show_code_location(*show_code_location)
        .show_target(*show_target)
        .init()
}
/// Connect the database, wire up dependencies and serve the API until shutdown.
///
/// When `--dry-run` is given, shutdown is requested immediately after startup
/// so the process exits once initialization succeeds.
///
/// # Errors
/// Propagates database connect/migrate, dependency wiring and serve errors.
async fn run(
    Args {
        sqlite,
        bind,
        serve,
        tls,
        o11y,
        cache,
        dry_run,
    }: Args,
    shutdown: Shutdown,
) -> anyhow::Result<()> {
    let db = DbPool::connect(sqlite.sqlite_db).await?;
    db.migrate().await?;

    let dep = Dependency::new(db, tls, serve, cache.clone(), shutdown.cancellation_token()).await?;

    // Fixed log message typo: "Runinng..." -> "Running...".
    info!(
        version = config::app::VERSION,
        otlp_endpoint=?o11y.otlp_endpoint,
        request_timeout=?dep.serve_options.timeout,
        request_body_limit_bytes=dep.serve_options.body_limit_bytes,
        concurrency_limit=?dep.serve_options.concurrency_limit,
        feed_cache_ttl_minutes=?cache.feed_cache_ttl.as_secs() / 60,
        feed_cache_refresh_interval_minutes=?cache.feed_cache_refresh_interval.as_secs() / 60,
        "Running...",
    );

    // `bool::then` for a side effect obscured intent; a plain `if` is clearer.
    if dry_run {
        shutdown.shutdown();
    }

    listen_and_serve(dep, bind.into(), shutdown).await
}
/// Best-effort attempt to raise the process file descriptor limit,
/// logging the outcome; failures are ignored.
fn init_file_descriptor_limit() {
    let _ = fdlimit::raise_fd_limit().inspect(|outcome| match outcome {
        Outcome::LimitRaised { from, to } => {
            tracing::info!("Raise fd limit {from} to {to}");
        }
        Outcome::Unsupported => tracing::info!("Raise fd limit unsupported"),
    });
}
#[tokio::main]
async fn main() {
    // Parse args first; clap errors (including --help/--version) exit here.
    let args = match cli::try_parse(env::args_os()) {
        Ok(args) => args,
        Err(err) => err.exit(),
    };
    // Keep the guard alive for the whole process lifetime.
    let _guard = init_tracing(&args.o11y);
    // Ctrl-C triggers graceful shutdown.
    let shutdown = Shutdown::watch_signal(tokio::signal::ctrl_c(), || {});

    init_file_descriptor_limit();

    if let Err(err) = run(args, shutdown).await {
        // Special-case kvsd connection failure with an actionable hint.
        if let Some(err) = err.downcast_ref::<ConnectKvsdFailed>() {
            error!("{err}: make sure kvsd is running");
        } else {
            error!("{err:?}");
        }
        std::process::exit(1);
    }
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_api/src/principal.rs | crates/synd_api/src/principal.rs | use std::{
collections::hash_map::DefaultHasher,
hash::{Hash, Hasher},
};
/// An authenticated actor on whose behalf a request is executed.
#[derive(Clone, Debug)]
pub enum Principal {
    User(User),
}

impl Principal {
    /// The stable identifier of the underlying user.
    #[allow(clippy::unnecessary_wraps)]
    pub fn user_id(&self) -> Option<&str> {
        // Single-variant enum: an irrefutable let keeps this exhaustive.
        let Principal::User(user) = self;
        Some(user.id())
    }
}

/// A user identified by a hash of their email address.
#[derive(Clone, Debug)]
pub struct User {
    id: String,
    #[allow(dead_code)]
    email: String,
}

impl User {
    /// Derive a `User` from an email address.
    ///
    /// The id is the 16 hex digit `DefaultHasher` hash of the email, so the
    /// same email always yields the same id within a given toolchain.
    pub fn from_email(email: impl Into<String>) -> Self {
        let email = email.into();
        let id = {
            let mut hasher = DefaultHasher::new();
            email.hash(&mut hasher);
            format!("{:016x}", hasher.finish())
        };
        User { id, email }
    }

    /// Borrow the user's identifier.
    pub fn id(&self) -> &str {
        self.id.as_str()
    }
}
#[cfg(test)]
mod tests {
    use super::User;

    #[test]
    fn user_from_email() {
        let u = User::from_email("foo@ymgyt.io");
        assert_eq!(u.id().len(), 16);
        // NOTE(review): asserts a concrete DefaultHasher value; the hasher
        // algorithm is unspecified, so this may break on a toolchain bump.
        assert_eq!(u.id(), "585779d8c9b2e06d");
    }
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_api/src/gql/scalar.rs | crates/synd_api/src/gql/scalar.rs | use async_graphql::{InputValueError, Scalar, ScalarType, Value};
use chrono::Utc;
/// RFC3339 Time
///
/// Newtype over `synd_feed::types::Time` exposed as a GraphQL scalar.
pub struct Rfc3339Time(synd_feed::types::Time);
#[Scalar]
impl ScalarType for Rfc3339Time {
    // Accept only GraphQL strings; parse as RFC 3339 and normalize to UTC.
    fn parse(value: async_graphql::Value) -> async_graphql::InputValueResult<Self> {
        let Value::String(value) = value else {
            return Err(InputValueError::expected_type(value));
        };

        chrono::DateTime::parse_from_rfc3339(&value)
            .map(|t| t.with_timezone(&Utc))
            .map(Rfc3339Time)
            .map_err(InputValueError::custom)
    }

    // Serialize back out as an RFC 3339 string.
    fn to_value(&self) -> async_graphql::Value {
        async_graphql::Value::String(self.0.to_rfc3339())
    }
}
impl From<synd_feed::types::Time> for Rfc3339Time {
fn from(value: synd_feed::types::Time) -> Self {
Self(value)
}
}
#[cfg(test)]
mod tests {
    use super::*;

    // Null values are rejected; a valid RFC 3339 timestamp string parses.
    #[test]
    fn parse_rfc3339_time() {
        assert!(Rfc3339Time::parse(async_graphql::Value::Null).is_err());
        assert!(
            Rfc3339Time::parse(async_graphql::Value::String("2024-06-09T01:02:03Z".into())).is_ok()
        );
    }
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_api/src/gql/mod.rs | crates/synd_api/src/gql/mod.rs | mod query;
pub(crate) use query::Query;
mod mutation;
use async_graphql::{EmptySubscription, Schema, SchemaBuilder};
pub(crate) use mutation::Mutation;
use crate::{gql::mutation::ResponseCode, principal::Principal, usecase};
pub(crate) mod object;
pub(crate) mod scalar;
pub(crate) type SyndSchema = Schema<Query, Mutation, EmptySubscription>;
pub(crate) mod handler {
    use async_graphql::http::GraphiQLSource;
    use async_graphql_axum::{GraphQLRequest, GraphQLResponse};
    use axum::{Extension, response::IntoResponse};
    use synd_o11y::audit_span;
    use tokio_metrics::TaskMonitor;
    use tracing::Instrument;

    use crate::{principal::Principal, serve::Context};

    /// Serve the GraphiQL IDE pointed at the `/graphql` endpoint.
    pub(crate) async fn graphiql() -> impl IntoResponse {
        axum::response::Html(GraphiQLSource::build().endpoint("/graphql").finish())
    }

    /// Execute a GraphQL request.
    ///
    /// The authenticated `Principal` is injected into the request data so
    /// resolvers can read it, and execution is instrumented with an audit
    /// span plus the GraphQL task monitor.
    pub(crate) async fn graphql(
        Extension(Context {
            schema,
            gql_monitor,
        }): Extension<Context>,
        Extension(principal): Extension<Principal>,
        req: GraphQLRequest,
    ) -> GraphQLResponse {
        // Inject authentication
        let req = req.into_inner().data(principal);
        TaskMonitor::instrument(&gql_monitor, schema.execute(req).instrument(audit_span!()))
            .await
            .into()
    }
}
/// Build the GraphQL schema.
///
/// Without the `introspection` feature, introspection is disabled and the
/// query depth/complexity limits are tightened.
#[must_use]
pub(crate) fn schema_builder() -> SchemaBuilder<Query, Mutation, EmptySubscription> {
    let schema = Schema::build(Query, Mutation, EmptySubscription);
    if cfg!(feature = "introspection") {
        schema.limit_depth(20).limit_complexity(300)
    } else {
        schema
            .disable_introspection()
            .limit_depth(10)
            .limit_complexity(60)
    }
    // disabled
    // schema.extension(Tracing)
}
// Expose the authenticated principal from the GraphQL context to usecases.
impl usecase::Context for &async_graphql::Context<'_> {
    fn principal(&self) -> Principal {
        // `data_unchecked` panics if no Principal was injected; the graphql
        // handler always injects it (see handler::graphql).
        self.data_unchecked::<Principal>().clone()
    }
}
// Map usecase-layer errors to GraphQL errors carrying a `code` extension.
// `Usecase` variants are handled by per-usecase handlers via run_usecase!,
// so reaching that arm here is a bug.
impl<E> async_graphql::ErrorExtensions for usecase::Error<E>
where
    E: std::fmt::Display + Send + Sync + 'static,
{
    fn extend(&self) -> async_graphql::Error {
        async_graphql::Error::new(format!("{self}")).extend_with(|_, ext| match self {
            usecase::Error::Usecase(_) => unreachable!(),
            usecase::Error::Unauthorized(_) => ext.set("code", ResponseCode::Unauthorized),
            usecase::Error::Repository(_) => ext.set("code", ResponseCode::InternalError),
        })
    }
}

// Entry fetch failures surface as internal errors.
impl async_graphql::ErrorExtensions for usecase::FetchEntriesError {
    fn extend(&self) -> async_graphql::Error {
        async_graphql::Error::new(format!("{self}"))
            .extend_with(|_, ext| ext.set("code", ResponseCode::InternalError))
    }
}

// Subscribed-feed fetch failures surface as internal errors.
impl async_graphql::ErrorExtensions for usecase::FetchSubscribedFeedsError {
    fn extend(&self) -> async_graphql::Error {
        async_graphql::Error::new(format!("{self}"))
            .extend_with(|_, ext| ext.set("code", ResponseCode::InternalError))
    }
}
/// Run the given usecase via the `Runtime` stored in the GraphQL context.
///
/// Usecase-specific errors are delegated to `$err_handle`; all other error
/// variants are converted into GraphQL errors with extensions.
macro_rules! run_usecase {
    ($usecase:ty, $cx:expr, $input:expr,$err_handle:expr) => {{
        let runtime = $cx.data_unchecked::<crate::usecase::Runtime>();
        let err_handle = $err_handle;

        match runtime.run::<$usecase, _, _>($cx, $input).await {
            Ok(output) => Ok(output.into()),
            Err($crate::usecase::Error::Usecase(uc_err)) => err_handle(uc_err),
            Err(err) => Err(async_graphql::ErrorExtensions::extend(&err)),
        }
    }};
}

pub(super) use run_usecase;
#[cfg(test)]
mod tests {
    use async_graphql::ErrorExtensions;

    use crate::usecase::authorize::Unauthorized;

    use super::*;

    // Snapshot the GraphQL error (message plus `code` extension) produced for
    // each usecase error variant.
    #[test]
    fn usecase_error_impl_gql_error() {
        insta::assert_yaml_snapshot!(
            "unauthorized",
            usecase::Error::<String>::Unauthorized(Unauthorized).extend()
        );
        insta::assert_yaml_snapshot!(
            "repository",
            usecase::Error::<String>::Repository(crate::repository::RepositoryError::Internal(
                anyhow::anyhow!("error")
            ))
            .extend()
        );
        insta::assert_yaml_snapshot!("fetch_entries", usecase::FetchEntriesError {}.extend());
        insta::assert_yaml_snapshot!(
            "fetch_subscribed_feeds",
            usecase::FetchSubscribedFeedsError {}.extend()
        );
    }
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_api/src/gql/query.rs | crates/synd_api/src/gql/query.rs | use std::borrow::Cow;
use async_graphql::{
Context, Object, Result, SimpleObject,
connection::{Connection, Edge},
};
use synd_feed::types::FeedUrl;
use crate::{
gql::{
object::{self, Entry, id},
run_usecase,
},
usecase::{
FetchEntries, FetchEntriesError, FetchEntriesInput, FetchEntriesOutput,
FetchSubscribedFeeds, FetchSubscribedFeedsError, FetchSubscribedFeedsInput,
FetchSubscribedFeedsOutput, Output,
},
};
// Extra connection fields carrying per-feed fetch failures alongside the
// successfully fetched feeds. (`//` comments: `///` would become GraphQL
// descriptions on these SimpleObjects.)
#[derive(SimpleObject)]
struct FeedsConnectionFields {
    errors: Vec<FetchFeedError>,
}

// A single feed fetch failure: which url failed and why.
#[derive(SimpleObject)]
struct FetchFeedError {
    url: FeedUrl,
    error_message: String,
}
// Namespace object for queries scoped to the user's subscription.
struct Subscription;

#[Object]
impl Subscription {
    /// Return Subscribed feeds
    async fn feeds(
        &self,
        cx: &Context<'_>,
        after: Option<String>,
        #[graphql(default = 20)] first: Option<i32>,
    ) -> Result<Connection<String, object::Feed, FeedsConnectionFields>> {
        // Clamp page size, then request one extra item to detect a next page.
        #[allow(clippy::cast_sign_loss)]
        let first = first.unwrap_or(10).min(100) as usize;
        let has_prev = after.is_some();
        let input = FetchSubscribedFeedsInput {
            after,
            first: first + 1,
        };

        let Output {
            output: FetchSubscribedFeedsOutput { feeds },
        } = run_usecase!(
            FetchSubscribedFeeds,
            cx,
            input,
            |err: FetchSubscribedFeedsError| Err(async_graphql::ErrorExtensions::extend(&err))
        )?;
        let has_next = feeds.len() > first;

        // Split per-feed results: failures become connection-level fields
        // rather than failing the whole query.
        let (feeds, errors): (Vec<_>, Vec<_>) = feeds.into_iter().partition(Result::is_ok);
        let fields = FeedsConnectionFields {
            errors: errors
                .into_iter()
                .map(|err| {
                    let (url, fetch_err) = err.unwrap_err();
                    FetchFeedError {
                        url,
                        error_message: fetch_err.to_string(),
                    }
                })
                .collect::<Vec<_>>(),
        };

        let mut connection = Connection::with_additional_fields(has_prev, has_next, fields);
        // Cursor is the feed url.
        let edges = feeds
            .into_iter()
            .take(first)
            .map(Result::unwrap)
            .map(|feed| (feed.feed.meta().url().as_str().to_owned(), feed))
            .map(|(cursor, feed)| (cursor, object::Feed::from(feed)))
            .map(|(cursor, feed)| Edge::new(cursor, feed));

        connection.edges.extend(edges);

        Ok(connection)
    }

    /// Return subscribed latest entries order by published time.
    async fn entries<'cx>(
        &'_ self,
        cx: &Context<'_>,
        after: Option<String>,
        #[graphql(default = 20)] first: Option<i32>,
    ) -> Result<Connection<id::EntryId<'_>, Entry<'cx>>> {
        // Clamp page size, then request one extra item to detect a next page.
        #[allow(clippy::cast_sign_loss)]
        let first = first.unwrap_or(20).min(200) as usize;
        let has_prev = after.is_some();
        let input = FetchEntriesInput {
            after: after.map(Into::into),
            first: first + 1,
        };

        let Output {
            output: FetchEntriesOutput { entries, feeds },
        } = run_usecase!(FetchEntries, cx, input, |err: FetchEntriesError| Err(
            async_graphql::ErrorExtensions::extend(&err)
        ))?;
        let has_next = entries.len() > first;

        let mut connection = Connection::new(has_prev, has_next);
        // Cursor is the entry id; each edge carries its feed's metadata,
        // looked up from the accompanying feeds map.
        let edges = entries
            .into_iter()
            .take(first)
            .map(move |(entry, feed_url)| {
                let meta = feeds
                    .get(&feed_url)
                    .expect("FeedMeta not found. this is a bug")
                    .clone();
                let cursor = entry.id().into();
                let node = Entry::new(Cow::Owned(meta), entry);

                Edge::new(cursor, node)
            });
        connection.edges.extend(edges);

        Ok(connection)
    }
}
// Root query object.
pub(crate) struct Query;

#[Object]
impl Query {
    // Entry point for subscription-scoped queries (feeds / entries).
    async fn subscription(&self) -> Subscription {
        Subscription {}
    }
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_api/src/gql/mutation/unsubscribe_feed.rs | crates/synd_api/src/gql/mutation/unsubscribe_feed.rs | use async_graphql::{InputObject, Object, Union};
use synd_feed::types::FeedUrl;
use crate::{gql::mutation::ResponseStatus, usecase};
// GraphQL input for the unsubscribe feed mutation.
#[derive(InputObject)]
pub struct UnsubscribeFeedInput {
    /// Feed url to unsubscribe
    pub url: FeedUrl,
}

// Bridge from the GraphQL input type to the usecase layer input.
impl From<UnsubscribeFeedInput> for usecase::UnsubscribeFeedInput {
    fn from(value: UnsubscribeFeedInput) -> Self {
        usecase::UnsubscribeFeedInput { url: value.url }
    }
}

// Mutation result: either a success payload or a typed error payload.
#[allow(clippy::large_enum_variant)]
#[derive(Union)]
pub(crate) enum UnsubscribeFeedResponse {
    Success(UnsubscribeFeedSuccess),
    Error(UnsubscribeFeedError),
}

pub(crate) struct UnsubscribeFeedSuccess {
    pub status: ResponseStatus,
}

#[Object]
impl UnsubscribeFeedSuccess {
    pub async fn status(&self) -> ResponseStatus {
        self.status.clone()
    }
}

pub(crate) struct UnsubscribeFeedError {
    pub status: ResponseStatus,
    pub message: String,
}

#[Object]
impl UnsubscribeFeedError {
    pub async fn status(&self) -> ResponseStatus {
        self.status.clone()
    }

    /// Error message
    pub async fn message(&self) -> String {
        self.message.clone()
    }
}

// Any unexpected usecase failure surfaces as an internal-error response.
impl From<anyhow::Error> for UnsubscribeFeedResponse {
    fn from(err: anyhow::Error) -> Self {
        UnsubscribeFeedResponse::Error(UnsubscribeFeedError {
            status: ResponseStatus::internal(),
            message: format!("{err}"),
        })
    }
}

// A successful usecase output maps to an ok status; the output carries no data.
impl From<usecase::Output<usecase::UnsubscribeFeedOutput>> for UnsubscribeFeedResponse {
    fn from(_output: usecase::Output<usecase::UnsubscribeFeedOutput>) -> Self {
        UnsubscribeFeedResponse::Success(UnsubscribeFeedSuccess {
            status: ResponseStatus::ok(),
        })
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    // anyhow::Error conversion should always report an internal status.
    #[test]
    fn internal_error() {
        let r = UnsubscribeFeedResponse::from(anyhow::anyhow!("error"));
        assert!(
            matches!(r, UnsubscribeFeedResponse::Error(UnsubscribeFeedError { status, ..}) if status == ResponseStatus::internal())
        );
    }
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_api/src/gql/mutation/subscribe_feed.rs | crates/synd_api/src/gql/mutation/subscribe_feed.rs | use async_graphql::{InputObject, Object, Union};
use synd_feed::{
feed::service::FetchFeedError,
types::{Category, FeedUrl, Requirement},
};
use crate::{
gql::{
mutation::ResponseStatus,
object::{self, Feed},
},
usecase::{self, SubscribeFeedError as UsecaseSubscribeFeedError},
};
// GraphQL input for the subscribe feed mutation.
#[derive(InputObject, Debug)]
pub(crate) struct SubscribeFeedInput {
    /// Feed url to subscribe
    pub url: FeedUrl,
    /// Requirement level for feed
    pub requirement: Option<Requirement>,
    /// Feed category
    pub category: Option<Category<'static>>,
}

// Bridge from the GraphQL input type to the usecase layer input.
impl From<SubscribeFeedInput> for usecase::SubscribeFeedInput {
    fn from(value: SubscribeFeedInput) -> Self {
        usecase::SubscribeFeedInput {
            url: value.url,
            requirement: value.requirement,
            category: value.category,
        }
    }
}

// Mutation result: either success (with the subscribed feed) or a typed error.
#[allow(clippy::large_enum_variant)]
#[derive(Union)]
pub(crate) enum SubscribeFeedResponse {
    Success(SubscribeFeedSuccess),
    Error(SubscribeFeedError),
}

pub(crate) struct SubscribeFeedSuccess {
    pub status: ResponseStatus,
    /// Subscribed feed
    pub feed: object::Feed,
}

#[Object]
impl SubscribeFeedSuccess {
    pub async fn status(&self) -> ResponseStatus {
        self.status.clone()
    }

    pub async fn feed(&self) -> &object::Feed {
        &self.feed
    }
}

pub(crate) struct SubscribeFeedError {
    pub status: ResponseStatus,
    pub message: String,
}

#[Object]
impl SubscribeFeedError {
    pub async fn status(&self) -> ResponseStatus {
        self.status.clone()
    }

    /// Error message
    pub async fn message(&self) -> String {
        self.message.clone()
    }
}

// Successful usecase output: wrap the annotated feed in a success payload.
impl From<usecase::Output<usecase::SubscribeFeedOutput>> for SubscribeFeedResponse {
    fn from(output: usecase::Output<usecase::SubscribeFeedOutput>) -> Self {
        SubscribeFeedResponse::Success(SubscribeFeedSuccess {
            status: ResponseStatus::ok(),
            feed: Feed::from(output.output.feed),
        })
    }
}

impl From<UsecaseSubscribeFeedError> for SubscribeFeedResponse {
    fn from(err: UsecaseSubscribeFeedError) -> Self {
        SubscribeFeedResponse::Error(err.into())
    }
}

// Map usecase errors to GraphQL status codes:
// invalid feed -> InvalidFeedUrl, fetch failure -> FeedUnavailable,
// anything else -> InternalError.
impl From<UsecaseSubscribeFeedError> for SubscribeFeedError {
    fn from(err: UsecaseSubscribeFeedError) -> Self {
        match err {
            UsecaseSubscribeFeedError::FetchFeed(fetch_err) => match fetch_err {
                FetchFeedError::InvalidFeed(kind) => Self {
                    status: ResponseStatus::invalid_feed_url(),
                    message: format!("{kind}"),
                },
                FetchFeedError::Fetch(request_err) => Self {
                    status: ResponseStatus::feed_unavailable(),
                    message: format!("feed unavailable: {request_err}"),
                },
                fetch_err => Self {
                    status: ResponseStatus::internal(),
                    message: format!("{fetch_err}"),
                },
            },
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    // Unclassified fetch errors should map to an internal status.
    #[test]
    fn internal_error() {
        let r = SubscribeFeedError::from(UsecaseSubscribeFeedError::FetchFeed(
            FetchFeedError::Other(anyhow::anyhow!("error")),
        ));
        assert_eq!(r.status, ResponseStatus::internal());
    }
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_api/src/gql/mutation/mod.rs | crates/synd_api/src/gql/mutation/mod.rs | use async_graphql::{Context, Enum, Interface, Object, SimpleObject};
use crate::{
gql::run_usecase,
usecase::{SubscribeFeed, SubscribeFeedError, UnsubscribeFeed},
};
pub mod subscribe_feed;
pub mod unsubscribe_feed;
// Machine-readable result code attached to every mutation response.
#[derive(Enum, PartialEq, Eq, Clone, Copy, Debug)]
pub(crate) enum ResponseCode {
    /// Operation success
    Ok,
    /// Principal does not have enough permissions
    Unauthorized,
    /// Given url is not valid feed url
    InvalidFeedUrl,
    /// The feed server returned a status other than 200
    FeedUnavailable,
    /// Something went wrong
    InternalError,
}

// GraphQL object carrying the response code.
#[derive(SimpleObject, Clone, PartialEq, Eq, Debug)]
pub(crate) struct ResponseStatus {
    code: ResponseCode,
}

impl ResponseStatus {
    // Named constructors give each code a readable call site.
    fn ok() -> Self {
        ResponseStatus {
            code: ResponseCode::Ok,
        }
    }

    fn invalid_feed_url() -> Self {
        Self {
            code: ResponseCode::InvalidFeedUrl,
        }
    }

    fn feed_unavailable() -> Self {
        Self {
            code: ResponseCode::FeedUnavailable,
        }
    }

    fn internal() -> Self {
        Self {
            code: ResponseCode::InternalError,
        }
    }
}
// GraphQL interface implemented by all success responses.
// NOTE(review): never referenced from Rust code (hence expect(dead_code));
// presumably defined only so the interface appears in the published schema.
#[expect(dead_code)]
#[allow(clippy::large_enum_variant)]
#[derive(Interface)]
#[graphql(field(name = "status", method = "status", ty = "ResponseStatus"))]
enum MutationResponse {
    SubscribeFeed(subscribe_feed::SubscribeFeedSuccess),
    UnsubscribeFeed(unsubscribe_feed::UnsubscribeFeedSuccess),
}

// GraphQL interface implemented by all error responses (status + message).
#[expect(dead_code)]
#[derive(Interface)]
#[graphql(
    field(name = "status", ty = "ResponseStatus"),
    field(name = "message", ty = "String")
)]
enum ErrorResponse {
    SubscribeFeed(subscribe_feed::SubscribeFeedError),
    UnsubscribeFeed(unsubscribe_feed::UnsubscribeFeedError),
}
// GraphQL root mutation object.
pub(crate) struct Mutation;

#[Object]
impl Mutation {
    /// Subscribe feed
    async fn subscribe_feed(
        &self,
        cx: &Context<'_>,
        input: subscribe_feed::SubscribeFeedInput,
    ) -> async_graphql::Result<subscribe_feed::SubscribeFeedResponse> {
        // Usecase errors become a typed error payload (Ok(..)), not a GraphQL error.
        run_usecase!(SubscribeFeed, cx, input, |err: SubscribeFeedError| Ok(
            err.into()
        ))
    }

    /// Unsubscribe feed
    /// If given feed is not subscribed, this mutation will succeed
    async fn unsubscribe_feed(
        &self,
        cx: &Context<'_>,
        input: unsubscribe_feed::UnsubscribeFeedInput,
    ) -> async_graphql::Result<unsubscribe_feed::UnsubscribeFeedResponse> {
        run_usecase!(UnsubscribeFeed, cx, input, |err: anyhow::Error| Ok(
            err.into()
        ))
    }
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_api/src/gql/object/id.rs | crates/synd_api/src/gql/object/id.rs | use std::convert::Infallible;
use async_graphql::connection::CursorType;
use synd_feed::types;
/// Version 1 feed identifier, rendered as `v1:feed:<url>`.
pub(crate) struct FeedIdV1(String);

impl FeedIdV1 {
    /// Build the v1 identifier for the given feed url.
    pub fn new(url: impl AsRef<str>) -> Self {
        Self(format!("v1:feed:{}", url.as_ref()))
    }
}
impl From<FeedIdV1> for async_graphql::ID {
    fn from(v: FeedIdV1) -> Self {
        Self(v.0)
    }
}

// Cursor type wrapping the domain entry id for relay-style pagination.
#[derive(Debug, PartialEq, Eq)]
pub(in crate::gql) struct EntryId<'a>(types::EntryId<'a>);

impl CursorType for EntryId<'_> {
    // Decoding cannot fail: any string is accepted as an entry id.
    type Error = Infallible;

    fn decode_cursor(s: &str) -> Result<Self, Self::Error> {
        let s = s.to_string();
        Ok(EntryId(s.into()))
    }

    fn encode_cursor(&self) -> String {
        self.0.to_string()
    }
}

impl<'a> From<types::EntryId<'a>> for EntryId<'a> {
    fn from(value: types::EntryId<'a>) -> Self {
        Self(value)
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    // encode_cursor/decode_cursor must round-trip.
    #[test]
    fn entry_id_decode() {
        let id: EntryId = types::EntryId::from("123").into();
        assert_eq!(EntryId::decode_cursor(&id.encode_cursor()), Ok(id));
    }
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_api/src/gql/object/mod.rs | crates/synd_api/src/gql/object/mod.rs | use std::{borrow::Cow, sync::Arc};
use async_graphql::{
ID, Object, SimpleObject,
connection::{Connection, ConnectionNameType, Edge, EdgeNameType, EmptyFields},
};
use feed_rs::model as feedrs;
use synd_feed::types::{self, Annotated, Category, FeedType, FeedUrl, Requirement};
use crate::gql::scalar;
use self::id::FeedIdV1;
pub mod id;
// GraphQL representation of a feed/entry link.
#[derive(SimpleObject)]
pub(crate) struct Link {
    pub href: String,
    pub rel: Option<String>,
    pub media_type: Option<String>,
    pub href_lang: Option<String>,
    pub title: Option<String>,
}

// Field-for-field conversion from the feed-rs model.
impl From<feedrs::Link> for Link {
    fn from(value: feedrs::Link) -> Self {
        Self {
            href: value.href,
            rel: value.rel,
            media_type: value.media_type,
            href_lang: value.href_lang,
            title: value.title,
        }
    }
}
// GraphQL entry object: a single entry plus its feed's metadata.
pub(crate) struct Entry<'a> {
    // Cow allows borrowing the meta where possible and owning it when required.
    meta: Cow<'a, Annotated<types::FeedMeta>>,
    entry: types::Entry,
}

#[Object]
impl Entry<'_> {
    /// Feed of this entry
    async fn feed(&self) -> FeedMeta<'_> {
        self.meta.clone().into()
    }

    /// Entry title
    async fn title(&self) -> Option<&str> {
        self.entry.title()
    }

    /// Time at which the entry was last modified
    async fn updated(&self) -> Option<scalar::Rfc3339Time> {
        self.entry.updated().map(Into::into)
    }

    /// The time at which the entry published
    async fn published(&self) -> Option<scalar::Rfc3339Time> {
        self.entry.published().map(Into::into)
    }

    /// Entry summary. If there is no summary of the entry, return the content(is this bad api?)
    async fn summary(&self) -> Option<&str> {
        self.entry.summary().or(self.entry.content())
    }

    /// Link to websiteurl at which this entry is published
    async fn website_url(&self) -> Option<&str> {
        // Resolution takes the feed type into account.
        self.entry.website_url(self.meta.feed.r#type())
    }
}

impl<'a> Entry<'a> {
    pub fn new(meta: Cow<'a, Annotated<types::FeedMeta>>, entry: types::Entry) -> Self {
        Self { meta, entry }
    }
}
// GraphQL feed object wrapping the fetched feed with its user annotations.
pub struct Feed(Annotated<Arc<types::Feed>>);

#[Object]
impl Feed {
    /// Feed Id
    async fn id(&self) -> ID {
        FeedIdV1::new(self.0.feed.meta().url()).into()
    }

    /// Underlying feed specification
    async fn r#type(&self) -> FeedType {
        self.0.feed.meta().r#type()
    }

    /// Feed title
    async fn title(&self) -> Option<&str> {
        self.0.feed.meta().title()
    }

    /// Feed URL
    async fn url(&self) -> &FeedUrl {
        self.0.feed.meta().url()
    }

    /// The time at which the feed was last modified
    async fn updated(&self) -> Option<scalar::Rfc3339Time> {
        self.0.feed.meta().updated().map(Into::into)
    }

    /// Feed entries
    async fn entries(
        &'_ self,
        #[graphql(default = 5)] first: Option<i32>,
    ) -> Connection<
        usize,
        Entry<'_>,
        EmptyFields,
        EmptyFields,
        FeedEntryConnectionName,
        FeedEntryEdgeName,
    > {
        // Negative page sizes are clamped to zero before the sign-losing cast.
        #[allow(clippy::cast_sign_loss)]
        let first = first.unwrap_or(5).max(0) as usize;
        let meta = self.0.project(|feed| feed.meta().clone());
        // Take one extra entry so has_next can be detected. Previously the
        // iterator was capped at `first`, which made `len() > first` (has_next)
        // unconditionally false.
        let mut entries = self
            .0
            .feed
            .entries()
            .map(move |entry| Entry::new(Cow::Owned(meta.clone()), entry.clone()))
            .take(first.saturating_add(1))
            .collect::<Vec<_>>();
        let has_next = entries.len() > first;
        entries.truncate(first);
        let mut c = Connection::new(false, has_next);
        c.edges.extend(
            entries
                .into_iter()
                .enumerate()
                .map(|(idx, entry)| Edge::new(idx, entry)),
        );
        c
    }

    /// Feed authors
    async fn authors(&self) -> Connection<usize, String> {
        let mut c = Connection::new(false, false);
        c.edges.extend(
            self.0
                .feed
                .meta()
                .authors()
                .enumerate()
                .map(|(idx, author)| Edge::new(idx, author.to_owned())),
        );
        c
    }

    /// Description of feed
    async fn description(&self) -> Option<&str> {
        self.0.feed.meta().description()
    }

    async fn links(&self) -> Connection<usize, Link> {
        let mut c = Connection::new(false, false);
        c.edges.extend(
            self.0
                .feed
                .meta()
                .links()
                .map(|link| Link::from(link.clone()))
                .enumerate()
                .map(|(idx, link)| Edge::new(idx, link)),
        );
        c
    }

    async fn website_url(&self) -> Option<&str> {
        self.0.feed.meta().website_url()
    }

    async fn generator(&self) -> Option<&str> {
        self.0.feed.meta().generator()
    }

    /// Requirement level for feed
    async fn requirement(&self) -> Option<Requirement> {
        self.0.requirement
    }

    /// Feed category
    async fn category(&self) -> Option<&Category<'static>> {
        self.0.category.as_ref()
    }
}
// Overrides the generated GraphQL connection type name for Feed.entries.
pub struct FeedEntryConnectionName;

impl ConnectionNameType for FeedEntryConnectionName {
    fn type_name<T: async_graphql::OutputType>() -> String {
        "FeedEntryConnection".into()
    }
}

// Overrides the generated GraphQL edge type name for Feed.entries.
pub struct FeedEntryEdgeName;

impl EdgeNameType for FeedEntryEdgeName {
    fn type_name<T: async_graphql::OutputType>() -> String {
        "FeedEntryEdge".into()
    }
}

impl From<Annotated<Arc<types::Feed>>> for Feed {
    fn from(value: Annotated<Arc<types::Feed>>) -> Self {
        Self(value)
    }
}
// GraphQL object exposing feed metadata (without entries).
pub(super) struct FeedMeta<'a>(Cow<'a, Annotated<types::FeedMeta>>);

#[Object]
impl FeedMeta<'_> {
    /// Title of the feed
    async fn title(&self) -> Option<&str> {
        self.0.feed.title()
    }

    /// Url of the feed
    async fn url(&self) -> &FeedUrl {
        self.0.feed.url()
    }

    /// Requirement Level for the feed
    async fn requirement(&self) -> Option<Requirement> {
        self.0.requirement
    }

    /// Category of the feed
    async fn category(&self) -> Option<&Category<'static>> {
        self.0.category.as_ref()
    }
}

impl<'a> From<Cow<'a, Annotated<types::FeedMeta>>> for FeedMeta<'a> {
    fn from(value: Cow<'a, Annotated<types::FeedMeta>>) -> Self {
        Self(value)
    }
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_api/src/usecase/fetch_subscribed_feeds.rs | crates/synd_api/src/usecase/fetch_subscribed_feeds.rs | use std::sync::Arc;
use synd_feed::{
feed::{cache::FetchCachedFeed, service::FetchFeedError},
types::{self, Annotated, FeedUrl},
};
use thiserror::Error;
use crate::{
principal::Principal,
repository::{SubscriptionRepository, types::SubscribedFeeds},
usecase::{Error, Input, MakeUsecase, Output, Usecase, authorize::Unauthorized},
};
/// Usecase: fetch (a page of) the feeds the user subscribes to.
pub struct FetchSubscribedFeeds {
    pub repository: Arc<dyn SubscriptionRepository>,
    pub fetch_feed: Arc<dyn FetchCachedFeed>,
}

pub struct FetchSubscribedFeedsInput {
    /// Cursor: the feed url after which to start (exclusive).
    pub after: Option<String>,
    /// Maximum number of feeds to return.
    pub first: usize,
}

#[derive(Default)]
pub struct FetchSubscribedFeedsOutput {
    /// Per-feed result: the annotated feed, or the url paired with its fetch error.
    #[allow(clippy::type_complexity)]
    pub feeds: Vec<Result<Annotated<Arc<types::Feed>>, (FeedUrl, FetchFeedError)>>,
}

#[derive(Error, Debug)]
#[error("fetch subscribed feeds error")]
pub struct FetchSubscribedFeedsError {}
impl Usecase for FetchSubscribedFeeds {
    type Input = FetchSubscribedFeedsInput;
    type Output = FetchSubscribedFeedsOutput;
    type Error = FetchSubscribedFeedsError;

    fn new(make: &MakeUsecase) -> Self {
        Self {
            repository: make.subscription_repo.clone(),
            fetch_feed: make.fetch_feed.clone(),
        }
    }

    // Any authenticated principal may fetch its own subscriptions.
    async fn authorize(
        &self,
        principal: Principal,
        _: &Self::Input,
    ) -> Result<Principal, Unauthorized> {
        Ok(principal)
    }

    async fn usecase(
        &self,
        Input {
            principal,
            input: FetchSubscribedFeedsInput { after, first },
        }: Input<Self::Input>,
    ) -> Result<Output<Self::Output>, Error<Self::Error>> {
        let user_id = principal.user_id().unwrap();
        let SubscribedFeeds {
            mut urls,
            mut annotations,
        } = self.repository.fetch_subscribed_feeds(user_id).await?;

        // paginate
        let urls = {
            // Start right after the `after` cursor, or from the beginning.
            let start = after
                .and_then(|after| {
                    urls.iter()
                        .position(|url| url.as_str() == after)
                        .map(|p| p + 1)
                })
                .unwrap_or(0);
            if start >= urls.len() {
                // Cursor is at (or past) the end: nothing to return.
                return Ok(Output {
                    output: FetchSubscribedFeedsOutput::default(),
                });
            }
            let mut urls = urls.split_off(start);
            urls.truncate(first);
            urls
        };

        // fetch feeds
        let fetched_feeds = self.fetch_feed.fetch_feeds_parallel(&urls).await;

        // annotate fetched feeds
        let feeds = fetched_feeds
            .into_iter()
            .zip(urls)
            .map(|(result, url)| {
                result
                    .map(|feed| {
                        // Attach the user's annotations (requirement/category)
                        // when present; `remove` moves them out of the map.
                        match annotations
                            .as_mut()
                            .and_then(|annotations| annotations.remove(feed.meta().url()))
                        {
                            Some(annotations) => Annotated {
                                feed,
                                requirement: annotations.requirement,
                                category: annotations.category,
                            },
                            None => Annotated {
                                feed,
                                requirement: None,
                                category: None,
                            },
                        }
                    })
                    // Pair the error with its url so the caller can report it.
                    .map_err(|err| (url.clone(), err))
            })
            .collect::<Vec<_>>();

        Ok(Output {
            output: FetchSubscribedFeedsOutput { feeds },
        })
    }
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_api/src/usecase/authorize.rs | crates/synd_api/src/usecase/authorize.rs | use std::ops::Deref;
use crate::principal::Principal;
use super::Usecase;
/// Witness that a principal passed a usecase's `authorize` check.
pub struct Authorized<T> {
    principal: T,
}

impl Authorized<Principal> {
    // Private: the only way to obtain an `Authorized` is via `Authorizer::authorize`.
    fn new(principal: Principal) -> Self {
        Self { principal }
    }
}

impl<T> Deref for Authorized<T> {
    type Target = T;

    fn deref(&self) -> &Self::Target {
        &self.principal
    }
}

/// Error returned when a principal fails authorization.
#[derive(Debug)]
pub struct Unauthorized;
/// Runs a usecase's authorization check and wraps the principal as [`Authorized`].
pub struct Authorizer {}

impl Authorizer {
    pub fn new() -> Self {
        Self {}
    }

    /// Delegate authorization to the usecase; on success, return the principal
    /// wrapped in the [`Authorized`] witness type.
    pub async fn authorize<U: Usecase>(
        &self,
        principal: Principal,
        usecase: &U,
        input: &U::Input,
    ) -> Result<Authorized<Principal>, Unauthorized> {
        usecase
            .authorize(principal, input)
            .await
            .map(Authorized::new)
    }
}

// `new` takes no arguments, so also provide `Default` (clippy::new_without_default).
impl Default for Authorizer {
    fn default() -> Self {
        Self::new()
    }
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_api/src/usecase/unsubscribe_feed.rs | crates/synd_api/src/usecase/unsubscribe_feed.rs | use std::sync::Arc;
use synd_feed::types::FeedUrl;
use synd_o11y::metric;
use crate::{
principal::Principal,
repository::{self, SubscriptionRepository},
usecase::{Input, Output},
};
use super::{Usecase, authorize::Unauthorized};
/// Usecase: remove a feed from the user's subscriptions.
pub struct UnsubscribeFeed {
    pub repository: Arc<dyn SubscriptionRepository>,
}

pub struct UnsubscribeFeedInput {
    /// Url of the feed to unsubscribe.
    pub url: FeedUrl,
}

// Unsubscribing yields no data beyond success.
pub struct UnsubscribeFeedOutput {}
impl Usecase for UnsubscribeFeed {
    type Input = UnsubscribeFeedInput;
    type Output = UnsubscribeFeedOutput;
    type Error = anyhow::Error;

    fn new(make: &super::MakeUsecase) -> Self {
        Self {
            repository: make.subscription_repo.clone(),
        }
    }

    // Any authenticated principal may unsubscribe its own feeds.
    async fn authorize(
        &self,
        principal: Principal,
        _: &UnsubscribeFeedInput,
    ) -> Result<Principal, Unauthorized> {
        Ok(principal)
    }

    async fn usecase(
        &self,
        Input {
            principal,
            input: UnsubscribeFeedInput { url },
            ..
        }: Input<Self::Input>,
    ) -> Result<Output<Self::Output>, super::Error<Self::Error>> {
        tracing::debug!("Unsubscribe feed: {url}");

        // Requirement/category are irrelevant for deletion; only user and url matter.
        self.repository
            .delete_feed_subscription(repository::types::FeedSubscription {
                user_id: principal.user_id().unwrap().to_owned(),
                url,
                requirement: None,
                category: None,
            })
            .await?;

        metric!(monotonic_counter.feed.unsubscription = 1);

        Ok(Output {
            output: UnsubscribeFeedOutput {},
        })
    }
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_api/src/usecase/fetch_entries.rs | crates/synd_api/src/usecase/fetch_entries.rs | use std::{cmp::Ordering, collections::HashMap, sync::Arc};
use futures_util::{StreamExt, stream::FuturesUnordered};
use synd_feed::{
feed::{cache::FetchCachedFeed, service::FetchFeedError},
types::{self, Annotated, Entry, EntryId, FeedMeta, FeedUrl},
};
use thiserror::Error;
use crate::{
principal::Principal,
repository::{
SubscriptionRepository,
types::{FeedAnnotations, SubscribedFeeds},
},
usecase::{Error, Input, MakeUsecase, Output, Usecase, authorize::Unauthorized},
};
/// Usecase: fetch the latest entries across the user's subscribed feeds.
pub struct FetchEntries {
    pub repository: Arc<dyn SubscriptionRepository>,
    pub fetch_feed: Arc<dyn FetchCachedFeed>,
}

pub struct FetchEntriesInput {
    /// Cursor: the entry id after which to start (exclusive).
    pub after: Option<EntryId<'static>>,
    /// Maximum number of entries to return.
    pub first: usize,
}

#[derive(Default)]
pub struct FetchEntriesOutput {
    /// Entries paired with the url of the feed they came from.
    pub entries: Vec<(types::Entry, types::FeedUrl)>,
    /// Metadata (with user annotations) for every feed referenced in `entries`.
    pub feeds: HashMap<types::FeedUrl, Annotated<types::FeedMeta>>,
}

#[derive(Error, Debug)]
#[error("fetch entries error")]
pub struct FetchEntriesError {}
impl Usecase for FetchEntries {
    type Input = FetchEntriesInput;
    type Output = FetchEntriesOutput;
    type Error = FetchEntriesError;

    fn new(make: &MakeUsecase) -> Self {
        Self {
            repository: make.subscription_repo.clone(),
            fetch_feed: make.fetch_feed.clone(),
        }
    }

    // Any authenticated principal may fetch its own entries.
    async fn authorize(
        &self,
        principal: Principal,
        _: &Self::Input,
    ) -> Result<Principal, Unauthorized> {
        Ok(principal)
    }

    #[tracing::instrument(name = "fetch_entries", skip(self, principal))]
    async fn usecase(
        &self,
        Input {
            principal,
            input: FetchEntriesInput { after, first },
        }: Input<Self::Input>,
    ) -> Result<Output<Self::Output>, Error<Self::Error>> {
        let user_id = principal
            .user_id()
            .expect("user id not found. this is a bug");

        let SubscribedFeeds { urls, annotations } =
            self.repository.fetch_subscribed_feeds(user_id).await?;

        // Pipeline: fetch all feeds -> sort entries by time -> apply pagination.
        let output = self
            .operation(urls, annotations)
            .fetch()
            .await
            .sort()
            .paginate(first, after);

        Ok(output)
    }
}
impl FetchEntries {
    // Build the fetch pipeline state for the given subscriptions.
    fn operation(
        &self,
        urls: Vec<FeedUrl>,
        annotations: Option<HashMap<FeedUrl, FeedAnnotations>>,
    ) -> FetchOperation {
        let len = urls.len();
        FetchOperation {
            urls: Some(urls),
            metas: HashMap::with_capacity(len),
            // Rough preallocation: assume about 5 entries per feed.
            entries: Vec::with_capacity(len * 5),
            annotations,
            fetch_feed: self.fetch_feed.clone(),
        }
    }
}

struct FetchOperation {
    // urls to fetch. wrap `Option` for take ownership
    urls: Option<Vec<FeedUrl>>,
    // feed annotations got from repository
    annotations: Option<HashMap<FeedUrl, FeedAnnotations>>,
    // fetch service
    fetch_feed: Arc<dyn FetchCachedFeed>,
    // output
    metas: HashMap<FeedUrl, Annotated<FeedMeta>>,
    entries: Vec<(Entry, FeedUrl)>,
}
impl FetchOperation {
    // fetch given urls respecting concurrency limit
    async fn fetch(mut self) -> Self {
        let mut tasks = FuturesUnordered::new();
        let in_flight_limit = 10;

        for url in self.urls.take().unwrap() {
            // When the limit is reached, wait for one in-flight fetch to
            // complete before spawning the next.
            if tasks.len() >= in_flight_limit
                && let Some(result) = tasks.next().await
            {
                self.handle(result);
            }

            let fetch_feed = Arc::clone(&self.fetch_feed);
            tasks.push(async move { fetch_feed.fetch_feed(url).await });
        }

        // Drain the remaining in-flight fetches.
        while let Some(result) = tasks.next().await {
            self.handle(result);
        }

        self
    }

    // handle fetch feed result
    fn handle(&mut self, feed: Result<Arc<types::Feed>, FetchFeedError>) {
        let feed = match feed {
            Ok(feed) => feed,
            Err(err) => {
                // A failing feed is skipped; it is not fatal for the request.
                tracing::warn!("Failed to fetch feed {err:?}");
                return;
            }
        };

        let meta = feed.meta().clone();
        let feed_url = meta.url().to_owned();
        // Attach the user's annotations when present; `remove` moves them out.
        let meta = match self
            .annotations
            .as_mut()
            .and_then(|annotations| annotations.remove(&feed_url))
        {
            Some(feed_annotations) => Annotated {
                feed: meta,
                requirement: feed_annotations.requirement,
                category: feed_annotations.category,
            },
            None => Annotated::new(meta),
        };

        self.metas.insert(feed_url.clone(), meta);
        self.entries.extend(
            feed.entries()
                .cloned()
                .map(|entry| (entry, feed_url.clone())),
        );
    }

    // sort entries
    fn sort(mut self) -> Self {
        // Order by published (falling back to updated) time, newest first;
        // entries without any timestamp sort last.
        self.entries.sort_unstable_by(|(a, _), (b, _)| {
            match (a.published().or(a.updated()), b.published().or(b.updated())) {
                (Some(a), Some(b)) => b.cmp(&a),
                (None, Some(_)) => Ordering::Greater,
                (Some(_), None) => Ordering::Less,
                (None, None) => std::cmp::Ordering::Equal,
            }
        });
        self
    }

    // paginate entries and return output
    fn paginate(
        mut self,
        first: usize,
        after: Option<EntryId<'static>>,
    ) -> Output<FetchEntriesOutput> {
        // Start right after the `after` cursor, or from the beginning.
        let start = after
            .and_then(|after| {
                self.entries
                    .iter()
                    .position(|(entry, _)| entry.id_ref() == after)
                    .map(|position| position + 1)
            })
            .unwrap_or(0);

        if start >= self.entries.len() {
            // Cursor is at (or past) the end: nothing to return.
            return Output {
                output: FetchEntriesOutput::default(),
            };
        }

        let mut entries = self.entries.split_off(start);
        entries.truncate(first);

        Output {
            output: FetchEntriesOutput {
                entries,
                feeds: self.metas,
            },
        }
    }
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_api/src/usecase/subscribe_feed.rs | crates/synd_api/src/usecase/subscribe_feed.rs | use std::sync::Arc;
use synd_feed::{
feed::{cache::FetchCachedFeed, service::FetchFeedError},
types::{Annotated, Category, Feed, FeedUrl, Requirement},
};
use synd_o11y::metric;
use thiserror::Error;
use crate::{
principal::Principal,
repository::{self, SubscriptionRepository},
usecase::{Input, Output},
};
use super::{Usecase, authorize::Unauthorized};
/// Usecase: subscribe the user to a feed.
pub struct SubscribeFeed {
    pub repository: Arc<dyn SubscriptionRepository>,
    pub fetch_feed: Arc<dyn FetchCachedFeed>,
}

pub struct SubscribeFeedInput {
    /// Url of the feed to subscribe.
    pub url: FeedUrl,
    /// Optional requirement level to annotate the subscription with.
    pub requirement: Option<Requirement>,
    /// Optional category to annotate the subscription with.
    pub category: Option<Category<'static>>,
}

pub struct SubscribeFeedOutput {
    /// The fetched feed together with the requested annotations.
    pub feed: Annotated<Arc<Feed>>,
}

#[derive(Error, Debug)]
pub enum SubscribeFeedError {
    #[error("fetch feed error: {0}")]
    FetchFeed(FetchFeedError),
}
impl Usecase for SubscribeFeed {
    type Input = SubscribeFeedInput;
    type Output = SubscribeFeedOutput;
    type Error = SubscribeFeedError;

    fn new(make: &super::MakeUsecase) -> Self {
        Self {
            repository: make.subscription_repo.clone(),
            fetch_feed: make.fetch_feed.clone(),
        }
    }

    // Any authenticated principal may subscribe feeds for itself.
    async fn authorize(
        &self,
        principal: Principal,
        _: &SubscribeFeedInput,
    ) -> Result<Principal, Unauthorized> {
        Ok(principal)
    }

    async fn usecase(
        &self,
        Input {
            principal,
            input:
                SubscribeFeedInput {
                    url,
                    requirement,
                    category,
                },
            ..
        }: Input<Self::Input>,
    ) -> Result<Output<Self::Output>, super::Error<Self::Error>> {
        tracing::debug!("Subscribe feed: {url}");

        // Fetch (and thereby validate) the feed before persisting anything.
        let feed = self
            .fetch_feed
            .fetch_feed(url.clone())
            .await
            .map_err(|err| super::Error::Usecase(SubscribeFeedError::FetchFeed(err)))?;

        tracing::debug!("{:?}", feed.meta());

        // Persist using the url reported by the fetched feed's metadata,
        // not the raw input url.
        self.repository
            .put_feed_subscription(repository::types::FeedSubscription {
                user_id: principal.user_id().unwrap().to_owned(),
                url: feed.meta().url().to_owned(),
                requirement,
                category: category.clone(),
            })
            .await?;

        metric!(monotonic_counter.feed.subscription = 1);

        let feed = Annotated {
            feed,
            requirement,
            category,
        };

        Ok(Output {
            output: SubscribeFeedOutput { feed },
        })
    }
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_api/src/usecase/mod.rs | crates/synd_api/src/usecase/mod.rs | mod subscribe_feed;
pub use subscribe_feed::{
SubscribeFeed, SubscribeFeedError, SubscribeFeedInput, SubscribeFeedOutput,
};
mod unsubscribe_feed;
pub use unsubscribe_feed::{UnsubscribeFeed, UnsubscribeFeedInput, UnsubscribeFeedOutput};
mod fetch_subscribed_feeds;
pub use fetch_subscribed_feeds::{
FetchSubscribedFeeds, FetchSubscribedFeedsError, FetchSubscribedFeedsInput,
FetchSubscribedFeedsOutput,
};
mod fetch_entries;
pub use fetch_entries::{FetchEntries, FetchEntriesError, FetchEntriesInput, FetchEntriesOutput};
use tracing::error;
pub mod authorize;
use std::{future::Future, sync::Arc};
use synd_feed::feed::cache::FetchCachedFeed;
use synd_o11y::{audit, metric, tracing_subscriber::audit::Audit};
use crate::{
principal::Principal,
repository::{RepositoryError, SubscriptionRepository},
};
use self::authorize::{Authorized, Authorizer, Unauthorized};
/// Factory that wires shared repositories/services into usecase instances.
pub struct MakeUsecase {
    pub subscription_repo: Arc<dyn SubscriptionRepository>,
    pub fetch_feed: Arc<dyn FetchCachedFeed>,
}

impl MakeUsecase {
    pub fn make<T: Usecase + Send>(&self) -> T {
        T::new(self)
    }
}

/// Usecase input together with the authorized principal performing it.
pub struct Input<T> {
    pub principal: Authorized<Principal>,
    pub input: T,
}

/// Wrapper for a usecase's successful output.
pub struct Output<T> {
    pub output: T,
}

/// Error cases shared by all usecases.
#[derive(Debug, thiserror::Error)]
pub enum Error<T> {
    #[error(transparent)]
    Usecase(T),
    #[error("unauthorized error")]
    Unauthorized(Unauthorized),
    #[error("repository error")]
    Repository(#[from] RepositoryError),
}
/// A unit of application logic with its own authorization policy.
pub trait Usecase {
    type Input;
    type Output;
    type Error: std::fmt::Debug;

    /// Construct the usecase from the shared factory.
    fn new(make: &MakeUsecase) -> Self;

    /// Operation name recorded in audit logs (defaults to the type name).
    fn audit_operation(&self) -> &'static str {
        let name = std::any::type_name::<Self>();
        // extract last element
        name.split("::").last().unwrap_or("?")
    }

    /// Authorize given principal
    fn authorize(
        &self,
        principal: Principal,
        input: &Self::Input,
    ) -> impl Future<Output = Result<Principal, Unauthorized>>;

    /// Usecase entrypoint
    fn usecase(
        &self,
        input: Input<Self::Input>,
    ) -> impl Future<Output = Result<Output<Self::Output>, Error<Self::Error>>>;
}
/// Drives usecases: construction, auditing, authorization, then execution.
pub struct Runtime {
    make_usecase: MakeUsecase,
    authorizer: Authorizer,
}

impl Runtime {
    pub fn new(make: MakeUsecase, authorizer: Authorizer) -> Self {
        Self {
            make_usecase: make,
            authorizer,
        }
    }

    /// Run usecase `Uc`: emit audit/metric events, authorize the principal,
    /// execute, and record the result in the audit log.
    pub async fn run<Uc, Cx, In>(
        &self,
        cx: Cx,
        input: In,
    ) -> Result<Output<Uc::Output>, Error<Uc::Error>>
    where
        Uc: Usecase + Sync + Send,
        Cx: Context,
        In: Into<Uc::Input>,
    {
        let principal = cx.principal();
        let uc = self.make_usecase.make::<Uc>();
        let input = input.into();

        // Record who performs which operation before authorization/execution.
        {
            let user_id = principal.user_id().unwrap_or("?");
            let operation = uc.audit_operation();
            audit!(
                { Audit::USER_ID } = user_id,
                { Audit::OPERATION } = operation,
            );
            metric!(monotonic_counter.usecase = 1, operation);
        }

        let principal = match self.authorizer.authorize(principal, &uc, &input).await {
            Ok(authorized_principal) => authorized_principal,
            Err(unauthorized) => {
                audit!({ Audit::RESULT } = "unauthorized");
                return Err(Error::Unauthorized(unauthorized));
            }
        };

        let input = Input { principal, input };

        match uc.usecase(input).await {
            Ok(output) => {
                audit!({ Audit::RESULT } = "success");
                Ok(output)
            }
            Err(err) => {
                audit!({ Audit::RESULT } = "error");
                error!("{err:?}");
                Err(err)
            }
        }
    }
}

/// Provides the principal performing the current request.
pub trait Context {
    fn principal(&self) -> Principal;
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_api/src/repository/kvsd.rs | crates/synd_api/src/repository/kvsd.rs | use std::{collections::HashMap, io::ErrorKind, time::Duration};
use anyhow::Context;
use async_trait::async_trait;
use futures_util::TryFutureExt;
use kvsd::{
Key, Value,
client::{Api, tcp::Client},
};
use thiserror::Error;
use tokio::sync::Mutex;
use tokio::{net::TcpStream, sync::MutexGuard};
use crate::repository::{
self, RepositoryError, SubscriptionRepository,
subscription::RepositoryResult,
types::{FeedAnnotations, SubscribedFeeds},
};
/// Error indicating the initial connection to kvsd could not be established.
#[derive(Error, Debug)]
#[error("connect kvsd failed")]
pub struct ConnectKvsdFailed;

/// Subscription repository backed by a kvsd key value store over TCP.
pub struct KvsdClient {
    #[allow(dead_code)]
    // NOTE(review): requests are serialized through this Mutex.
    client: Mutex<Client<TcpStream>>,
}

impl KvsdClient {
    pub fn new(client: Client<TcpStream>) -> Self {
        Self {
            client: Mutex::new(client),
        }
    }

    /// Connect and authenticate to kvsd, retrying while the connection is
    /// refused, until `timeout` elapses.
    pub async fn connect(
        host: impl AsRef<str>,
        port: u16,
        username: String,
        password: String,
        timeout: Duration,
    ) -> anyhow::Result<Self> {
        let handshake = async {
            let mut retry = 0;
            loop {
                match kvsd::client::tcp::UnauthenticatedClient::insecure_from_addr(&host, port)
                    .and_then(|client| client.authenticate(&username, &password))
                    .await
                    .map(Self::new)
                {
                    Ok(client) => break Ok(client),
                    // Connection refused is retryable (the server may still be starting).
                    Err(kvsd::KvsdError::Io(io)) if io.kind() == ErrorKind::ConnectionRefused => {
                        tracing::info!(retry, "Kvsd connection refused");
                    }
                    err => break err,
                }
                retry += 1;
                tokio::time::sleep(Duration::from_millis(1000)).await;
            }
        };
        tokio::time::timeout(timeout, handshake)
            .await
            .map_err(anyhow::Error::from)
            .context(ConnectKvsdFailed)?
            .map_err(anyhow::Error::from)
            .inspect(|_| tracing::info!("Kvsd handshake successfully completed"))
    }

    // Typed get: fetch `key` and convert the raw value into `T`.
    async fn get<T>(
        client: &mut MutexGuard<'_, Client<TcpStream>>,
        key: Key,
    ) -> RepositoryResult<Option<T>>
    where
        T: TryFrom<Value>,
        T::Error: Into<RepositoryError>,
    {
        let Some(value) = client.get(key).await.map_err(RepositoryError::internal)? else {
            return Ok(None);
        };
        Ok(Some(value.try_into().map_err(Into::into)?))
    }

    // Typed set: convert `value` into the raw representation and store it.
    async fn set<T>(
        client: &mut MutexGuard<'_, Client<TcpStream>>,
        key: Key,
        value: T,
    ) -> RepositoryResult<()>
    where
        T: TryInto<Value>,
        T::Error: Into<RepositoryError>,
    {
        let value = value.try_into().map_err(Into::into)?;
        client.set(key, value).await?;
        Ok(())
    }

    // Key layout: /synd_api/v1/subscription/<user_id>
    fn feed_subscription_key(user_id: &str) -> Key {
        let key = format!(
            "{prefix}/subscription/{user_id}",
            prefix = Self::key_prefix()
        );
        Key::new(key).expect("Invalid key")
    }

    fn key_prefix() -> &'static str {
        "/synd_api/v1"
    }
}
#[async_trait]
impl SubscriptionRepository for KvsdClient {
#[tracing::instrument(name = "repo::put_feed_subscription", skip_all)]
async fn put_feed_subscription(
&self,
feed: repository::types::FeedSubscription,
) -> RepositoryResult<()> {
let key = Self::feed_subscription_key(&feed.user_id);
let mut client = self.client.lock().await;
let annotations = FeedAnnotations {
requirement: feed.requirement,
category: feed.category,
};
let feeds = if let Some(mut feeds) =
Self::get::<SubscribedFeeds>(&mut client, key.clone()).await?
{
// Create case
if !feeds.urls.contains(&feed.url) {
feeds.urls.insert(0, feed.url.clone());
}
if feeds.annotations.is_none() {
feeds.annotations = Some(HashMap::new());
}
feeds
.annotations
.as_mut()
.map(|m| m.insert(feed.url, annotations));
feeds
} else {
// for investigating data loss
tracing::warn!(
enduser.id = feed.user_id,
feed_url = %feed.url,
"SubscribedFeeds not found"
);
let mut metadata = HashMap::new();
metadata.insert(feed.url.clone(), annotations);
SubscribedFeeds {
urls: vec![feed.url.clone()],
annotations: Some(metadata),
}
};
Self::set(&mut client, key, feeds).await
}
#[tracing::instrument(name = "repo::delete_feed_subscription", skip_all)]
async fn delete_feed_subscription(
&self,
feed: repository::types::FeedSubscription,
) -> RepositoryResult<()> {
let key = Self::feed_subscription_key(&feed.user_id);
let mut client = self.client.lock().await;
let Some(mut feeds) = Self::get::<SubscribedFeeds>(&mut client, key.clone()).await? else {
return Ok(());
};
feeds.urls.retain(|url| url != &feed.url);
feeds.annotations.as_mut().map(|m| m.remove(&feed.url));
Self::set(&mut client, key, feeds).await
}
#[tracing::instrument(name = "repo::fetch_subscribed_feed_urls", skip_all)]
async fn fetch_subscribed_feeds(&self, user_id: &str) -> RepositoryResult<SubscribedFeeds> {
let key = Self::feed_subscription_key(user_id);
let mut client = self.client.lock().await;
let Some(feeds) = Self::get::<SubscribedFeeds>(&mut client, key).await? else {
return Ok(SubscribedFeeds::default());
};
Ok(feeds)
}
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_api/src/repository/subscription.rs | crates/synd_api/src/repository/subscription.rs | use async_trait::async_trait;
use crate::repository::{self, types::SubscribedFeeds};
use super::RepositoryError;
pub type RepositoryResult<T> = std::result::Result<T, RepositoryError>;
#[async_trait]
pub trait SubscriptionRepository: Send + Sync {
async fn put_feed_subscription(
&self,
feed: repository::types::FeedSubscription,
) -> RepositoryResult<()>;
async fn delete_feed_subscription(
&self,
feed: repository::types::FeedSubscription,
) -> RepositoryResult<()>;
async fn fetch_subscribed_feeds(&self, _user_id: &str) -> RepositoryResult<SubscribedFeeds>;
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_api/src/repository/types.rs | crates/synd_api/src/repository/types.rs | use std::collections::HashMap;
use kvsd::Value;
use serde::{Deserialize, Serialize};
use sqlx::prelude::FromRow;
use synd_feed::types::{Category, FeedUrl, Requirement};
use crate::repository::RepositoryError;
#[derive(Debug, Clone)]
pub struct Feed {
pub url: String,
pub title: Option<String>,
}
#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
pub struct FeedSubscription {
pub user_id: String,
pub url: FeedUrl,
pub requirement: Option<Requirement>,
pub category: Option<Category<'static>>,
}
#[derive(Serialize, Deserialize, Debug, Default)]
pub struct SubscribedFeeds {
pub urls: Vec<FeedUrl>,
pub annotations: Option<HashMap<FeedUrl, FeedAnnotations>>,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct FeedAnnotations {
pub requirement: Option<Requirement>,
pub category: Option<Category<'static>>,
}
impl TryFrom<Value> for SubscribedFeeds {
type Error = RepositoryError;
fn try_from(value: Value) -> Result<Self, Self::Error> {
serde_json::from_slice(&value).map_err(RepositoryError::internal)
}
}
impl TryFrom<SubscribedFeeds> for Value {
type Error = RepositoryError;
fn try_from(value: SubscribedFeeds) -> Result<Self, Self::Error> {
let value = serde_json::to_vec(&value).map_err(RepositoryError::internal)?;
Ok(Value::new(value).unwrap())
}
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_api/src/repository/mod.rs | crates/synd_api/src/repository/mod.rs | mod subscription;
use ::kvsd::KvsdError;
pub use subscription::SubscriptionRepository;
pub mod kvsd;
pub mod sqlite;
pub mod types;
#[derive(thiserror::Error, Debug)]
pub enum RepositoryError {
#[error("internal error: {0}")]
Internal(#[from] anyhow::Error),
#[error(transparent)]
Migrate(#[from] sqlx::migrate::MigrateError),
}
impl RepositoryError {
pub fn internal(err: impl Into<anyhow::Error>) -> Self {
RepositoryError::Internal(err.into())
}
}
impl From<KvsdError> for RepositoryError {
fn from(value: KvsdError) -> Self {
RepositoryError::Internal(value.into())
}
}
impl From<sqlx::Error> for RepositoryError {
fn from(value: sqlx::Error) -> Self {
RepositoryError::internal(anyhow::Error::from(value))
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn error() {
assert!(
!RepositoryError::internal(anyhow::anyhow!("error"))
.to_string()
.is_empty()
);
assert!(
!RepositoryError::from(KvsdError::Unauthenticated)
.to_string()
.is_empty()
);
}
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_api/src/repository/sqlite/mod.rs | crates/synd_api/src/repository/sqlite/mod.rs | use std::{collections::HashMap, path::Path};
use async_trait::async_trait;
use sqlx::{SqlitePool, sqlite::SqliteConnectOptions};
use tracing::info;
use crate::repository::{
self, RepositoryError, SubscriptionRepository,
subscription::RepositoryResult,
types::{FeedAnnotations, SubscribedFeeds},
};
pub struct DbPool {
pool: SqlitePool,
}
impl DbPool {
pub async fn connect(db_path: impl AsRef<Path>) -> Result<Self, RepositoryError> {
let opts = SqliteConnectOptions::new()
.filename(db_path)
.create_if_missing(true)
.foreign_keys(true);
Self::do_connect(opts).await
}
pub async fn migrate(&self) -> Result<(), RepositoryError> {
info!("Run migrations...");
sqlx::migrate!().run(&self.pool).await?;
Ok(())
}
async fn do_connect(opts: SqliteConnectOptions) -> Result<Self, RepositoryError> {
info!(?opts, "Connecting to sqlite...");
let pool = SqlitePool::connect_with(opts).await?;
// TODO: configure pool options
Ok(DbPool { pool })
}
#[cfg(test)]
pub async fn in_memory() -> Result<Self, RepositoryError> {
use sqlx::sqlite::SqlitePoolOptions;
let opts = SqliteConnectOptions::new()
.in_memory(true)
.foreign_keys(true);
let pool = SqlitePoolOptions::new()
.max_connections(1)
.connect_with(opts)
.await?;
Ok(DbPool { pool })
}
}
#[async_trait]
impl SubscriptionRepository for DbPool {
#[tracing::instrument(name = "repo::put_feed_subscription", skip_all)]
async fn put_feed_subscription(
&self,
feed: repository::types::FeedSubscription,
) -> RepositoryResult<()> {
let feed_url = feed.url.to_string();
let requirement = feed.requirement.map(|r| r.to_string());
let category = feed.category.map(|c| c.to_string());
sqlx::query!(
r#"
INSERT INTO subscribed_feed (user_id, url, requirement, category)
VALUES (?, ?, ?, ?)
ON CONFLICT(user_id, url) DO UPDATE SET
requirement = excluded.requirement,
category = excluded.category
"#,
feed.user_id,
feed_url,
requirement,
category,
)
.execute(&self.pool)
.await
.map_err(RepositoryError::internal)?;
Ok(())
}
async fn delete_feed_subscription(
&self,
feed: repository::types::FeedSubscription,
) -> RepositoryResult<()> {
let affected = sqlx::query!(
r#"
DELETE FROM subscribed_feed
WHERE user_id = ? AND url = ?
"#,
feed.user_id,
feed.url,
)
.execute(&self.pool)
.await
.map_err(RepositoryError::internal)?
.rows_affected();
if affected > 0 {
info!(url = %feed.url, "Delete subscribed feed");
}
Ok(())
}
async fn fetch_subscribed_feeds(&self, user_id: &str) -> RepositoryResult<SubscribedFeeds> {
use synd_feed::types::{Category, FeedUrl, Requirement};
let feeds = sqlx::query_as!(
repository::types::FeedSubscription,
r#"
SELECT
user_id,
url AS "url: FeedUrl",
requirement AS "requirement: Requirement",
category AS "category: Category"
FROM subscribed_feed
WHERE user_id = ?
ORDER BY created_at DESC
"#,
user_id
)
.fetch_all(&self.pool)
.await?
.into_iter()
.fold(
SubscribedFeeds {
urls: Vec::new(),
annotations: Some(HashMap::new()),
},
|mut feeds, feed| {
let annot = FeedAnnotations {
requirement: feed.requirement,
category: feed.category,
};
feeds
.annotations
.as_mut()
.unwrap()
.insert(feed.url.clone(), annot);
feeds.urls.push(feed.url);
feeds
},
);
tracing::info!("{feeds:?}");
Ok(feeds)
}
}
#[cfg(test)]
mod tests {
use synd_feed::types::{Category, Requirement};
use super::*;
async fn test_db() -> DbPool {
let db = DbPool::in_memory().await.unwrap();
db.migrate().await.unwrap();
db
}
#[tokio::test]
async fn feed_subscription() -> anyhow::Result<()> {
let db = test_db().await;
let user_id = String::from("test1");
let mut test_feeds = vec![
repository::types::FeedSubscription {
user_id: user_id.clone(),
url: "https://ymgyt.io/feed_1".try_into().unwrap(),
requirement: Some(Requirement::Must),
category: Some(Category::new("rust").unwrap()),
},
repository::types::FeedSubscription {
user_id: user_id.clone(),
url: "https://ymgyt.io/feed_2".try_into().unwrap(),
requirement: Some(Requirement::Should),
category: Some(Category::new("linux").unwrap()),
},
];
let feeds = db.fetch_subscribed_feeds(&user_id).await?;
assert!(feeds.urls.is_empty());
// create
{
for feed in &test_feeds {
db.put_feed_subscription(feed.clone()).await?;
}
let feeds = db.fetch_subscribed_feeds(&user_id).await?;
insta::assert_yaml_snapshot!("create", feeds, {
".annotations" => insta::sorted_redaction(),
});
}
// update
{
test_feeds[0].requirement = Some(Requirement::May);
test_feeds[0].category = Some(Category::new("foo").unwrap());
db.put_feed_subscription(test_feeds[0].clone()).await?;
let feeds = db.fetch_subscribed_feeds(&user_id).await?;
insta::assert_yaml_snapshot!("update", feeds, {
".annotations" => insta::sorted_redaction(),
});
}
// delete
{
db.delete_feed_subscription(test_feeds[0].clone()).await?;
let feeds = db.fetch_subscribed_feeds(&user_id).await?;
insta::assert_yaml_snapshot!("delete", feeds, {
".annotations" => insta::sorted_redaction(),
});
}
Ok(())
}
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_api/src/client/mod.rs | crates/synd_api/src/client/mod.rs | pub mod github;
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_api/src/client/github/client.rs | crates/synd_api/src/client/github/client.rs | use std::{fmt::Debug, time::Duration};
use graphql_client::{GraphQLQuery, Response};
use reqwest::header::{self, HeaderValue};
use serde::{Serialize, de::DeserializeOwned};
use crate::{client::github::query, config};
#[derive(Clone)]
pub struct GithubClient {
client: reqwest::Client,
endpoint: Option<&'static str>,
}
impl GithubClient {
const ENDPOINT: &'static str = "https://api.github.com/graphql";
/// Construct `GithubClient`.
pub fn new() -> anyhow::Result<Self> {
let client = reqwest::ClientBuilder::new()
.user_agent(config::USER_AGENT)
.timeout(Duration::from_secs(10))
.connect_timeout(Duration::from_secs(10))
.build()?;
Ok(Self {
client,
endpoint: None,
})
}
#[must_use]
pub fn with_endpoint(self, endpoint: &'static str) -> Self {
Self {
endpoint: Some(endpoint),
..self
}
}
#[tracing::instrument(name = "github::authenticate", skip_all)]
pub async fn authenticate(&self, access_token: &str) -> anyhow::Result<String> {
let variables = query::authenticate::Variables {};
let request = query::Authenticate::build_query(variables);
let response: query::authenticate::ResponseData =
self.request(access_token, &request).await?;
Ok(response.viewer.email)
}
async fn request<Body, ResponseData>(
&self,
access_token: &str,
body: &Body,
) -> anyhow::Result<ResponseData>
where
Body: Serialize + ?Sized,
ResponseData: DeserializeOwned + Debug,
{
let mut auth_header = HeaderValue::try_from(format!("bearer {access_token}"))?;
auth_header.set_sensitive(true);
let res: Response<ResponseData> = self
.client
.post(self.endpoint.unwrap_or(Self::ENDPOINT))
.header(header::AUTHORIZATION, auth_header)
.json(&body)
.send()
.await?
.error_for_status()?
.json()
.await?;
match (res.data, res.errors) {
(_, Some(errs)) if !errs.is_empty() => {
Err(anyhow::anyhow!("failed to request github api: {errs:?}"))
}
(Some(data), _) => Ok(data),
_ => Err(anyhow::anyhow!("unexpected response",)),
}
}
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_api/src/client/github/mod.rs | crates/synd_api/src/client/github/mod.rs | mod client;
pub use client::GithubClient;
#[path = "generated/query.rs"]
pub mod query;
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_api/src/client/github/generated/query.rs | crates/synd_api/src/client/github/generated/query.rs | #![allow(clippy::all, warnings)]
pub struct Authenticate;
pub mod authenticate {
#![allow(dead_code)]
use std::result::Result;
pub const OPERATION_NAME: &str = "Authenticate";
pub const QUERY: &str = "query Authenticate {\n viewer {\n email,\n }\n}\n";
use super::*;
use serde::{Deserialize, Serialize};
#[allow(dead_code)]
type Boolean = bool;
#[allow(dead_code)]
type Float = f64;
#[allow(dead_code)]
type Int = i64;
#[allow(dead_code)]
type ID = String;
#[derive(Serialize, Debug)]
pub struct Variables;
#[derive(Deserialize, Debug)]
pub struct ResponseData {
pub viewer: AuthenticateViewer,
}
#[derive(Deserialize, Debug)]
pub struct AuthenticateViewer {
pub email: String,
}
}
impl graphql_client::GraphQLQuery for Authenticate {
type Variables = authenticate::Variables;
type ResponseData = authenticate::ResponseData;
fn build_query(variables: Self::Variables) -> ::graphql_client::QueryBody<Self::Variables> {
graphql_client::QueryBody {
variables,
query: authenticate::QUERY,
operation_name: authenticate::OPERATION_NAME,
}
}
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_api/src/serve/auth.rs | crates/synd_api/src/serve/auth.rs | use std::time::Duration;
use futures_util::future::BoxFuture;
use moka::future::Cache;
use synd_auth::jwt::google::JwtService as GoogleJwtService;
use tracing::warn;
use crate::{
client::github::GithubClient,
principal::{Principal, User},
serve::layer::authenticate::Authenticate,
};
#[derive(Clone)]
pub struct Authenticator {
github: GithubClient,
google: GoogleJwtService,
cache: Cache<String, Principal>,
}
impl Authenticator {
pub fn new() -> anyhow::Result<Self> {
let cache = Cache::builder()
.max_capacity(1024 * 1024)
.time_to_live(Duration::from_secs(60 * 60))
.build();
Ok(Self {
github: GithubClient::new()?,
google: GoogleJwtService::default(),
cache,
})
}
#[must_use]
pub fn with_github_client(self, github: GithubClient) -> Self {
Self { github, ..self }
}
#[must_use]
pub fn with_google_jwt(self, google: GoogleJwtService) -> Self {
Self { google, ..self }
}
/// Authenticate from given token
#[tracing::instrument(skip_all)]
pub async fn authenticate<S>(&self, token: S) -> Result<Principal, ()>
where
S: AsRef<str>,
{
let token = token.as_ref();
let mut split = token.splitn(2, ' ');
match (split.next(), split.next()) {
(Some("github"), Some(access_token)) => {
if let Some(principal) = self.cache.get(token).await {
tracing::debug!("Principal cache hit");
return Ok(principal);
}
match self.github.authenticate(access_token).await {
Ok(email) => {
let principal = Principal::User(User::from_email(email));
self.cache.insert(token.to_owned(), principal.clone()).await;
Ok(principal)
}
Err(err) => {
warn!("Failed to authenticate github: {err}");
Err(())
}
}
}
(Some("google"), Some(id_token)) => {
if let Some(principal) = self.cache.get(id_token).await {
tracing::debug!("Principal cache hit");
return Ok(principal);
}
match self.google.decode_id_token(id_token).await {
Ok(claims) => {
if !claims.email_verified {
warn!("Google jwt claims email is not verified");
return Err(());
}
let principal = Principal::User(User::from_email(claims.email));
self.cache
.insert(id_token.to_owned(), principal.clone())
.await;
Ok(principal)
}
Err(err) => {
// If a lot of intentional invalid id tokens are sent
// google's api limit will be exceeded.
// To prevent this, it is necessary to cache the currently valid kids
// and discard jwt headers with other kids.
warn!("Failed to authenticate google: {err}");
Err(())
}
}
}
_ => Err(()),
}
}
}
impl Authenticate for Authenticator {
type Output = BoxFuture<'static, Result<Principal, ()>>;
fn authenticate(&self, token: Option<String>) -> Self::Output {
let this = self.clone();
Box::pin(async move {
match token {
Some(token) => Authenticator::authenticate(&this, token).await,
None => Err(()),
}
})
}
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_api/src/serve/mod.rs | crates/synd_api/src/serve/mod.rs | use std::{net::IpAddr, time::Duration};
use axum::{
BoxError, Extension, Router,
error_handling::HandleErrorLayer,
http::{StatusCode, header::AUTHORIZATION},
response::IntoResponse,
routing::{get, post},
};
use tokio::net::TcpListener;
use tokio_metrics::TaskMonitor;
use tower::{ServiceBuilder, limit::ConcurrencyLimitLayer, timeout::TimeoutLayer};
use tower_http::{
cors::CorsLayer, limit::RequestBodyLimitLayer, sensitive_headers::SetSensitiveHeadersLayer,
};
use tracing::info;
use crate::{
config,
dependency::Dependency,
gql::{self, SyndSchema},
serve::layer::{authenticate, request_metrics::RequestMetricsLayer, trace},
shutdown::Shutdown,
};
pub mod auth;
mod probe;
pub mod layer;
pub struct BindOptions {
pub port: u16,
pub addr: IpAddr,
}
pub struct ServeOptions {
pub timeout: Duration,
pub body_limit_bytes: usize,
pub concurrency_limit: usize,
}
#[derive(Clone)]
pub(crate) struct Context {
pub gql_monitor: TaskMonitor,
pub schema: SyndSchema,
}
/// Bind tcp listener and serve.
pub async fn listen_and_serve(
dep: Dependency,
bind: BindOptions,
shutdown: Shutdown,
) -> anyhow::Result<()> {
info!(addr = %bind.addr, port = bind.port, "Listening...");
let listener = TcpListener::bind((bind.addr, bind.port)).await?;
serve(listener, dep, shutdown).await
}
/// Start api server
pub async fn serve(
listener: TcpListener,
dep: Dependency,
shutdown: Shutdown,
) -> anyhow::Result<()> {
let Dependency {
authenticator,
runtime,
tls_config,
serve_options:
ServeOptions {
timeout: request_timeout,
body_limit_bytes: request_body_limit_bytes,
concurrency_limit,
},
monitors,
} = dep;
let cx = Context {
gql_monitor: monitors.graphql_task_monitor(),
schema: gql::schema_builder().data(runtime).finish(),
};
tokio::spawn(monitors.emit_metrics(
config::metrics::MONITOR_INTERVAL,
shutdown.cancellation_token(),
));
let service = Router::new()
.route("/graphql", post(gql::handler::graphql))
.layer(Extension(cx))
.layer(authenticate::AuthenticateLayer::new(authenticator))
.route("/graphql", get(gql::handler::graphiql))
.layer(
ServiceBuilder::new()
.layer(SetSensitiveHeadersLayer::new(std::iter::once(
AUTHORIZATION,
)))
.layer(trace::layer())
.layer(HandleErrorLayer::new(handle_middleware_error))
.layer(TimeoutLayer::new(request_timeout))
.layer(ConcurrencyLimitLayer::new(concurrency_limit))
.layer(RequestBodyLimitLayer::new(request_body_limit_bytes))
.layer(CorsLayer::new()),
)
.route(config::serve::HEALTH_CHECK_PATH, get(probe::healthcheck))
.layer(RequestMetricsLayer::new())
.fallback(not_found);
tracing::info!("Serving...");
axum_server::from_tcp_rustls(listener.into_std()?, tls_config)?
.handle(shutdown.into_handle())
.serve(service.into_make_service())
.await?;
tracing::info!("Shutdown complete");
Ok(())
}
async fn handle_middleware_error(err: BoxError) -> (StatusCode, String) {
if err.is::<tower::timeout::error::Elapsed>() {
(
StatusCode::REQUEST_TIMEOUT,
"Request took too long".to_string(),
)
} else {
(
StatusCode::INTERNAL_SERVER_ERROR,
format!("Unhandled internal error: {err}"),
)
}
}
async fn not_found() -> impl IntoResponse {
StatusCode::NOT_FOUND
}
#[cfg(test)]
mod tests {
use super::*;
#[tokio::test]
async fn error_mapping() {
assert_eq!(
handle_middleware_error(Box::new(tower::timeout::error::Elapsed::new()))
.await
.0,
StatusCode::REQUEST_TIMEOUT
);
assert_eq!(
handle_middleware_error(Box::new(std::io::Error::from(
std::io::ErrorKind::OutOfMemory
)))
.await
.0,
StatusCode::INTERNAL_SERVER_ERROR,
);
assert_eq!(
not_found().await.into_response().status(),
StatusCode::NOT_FOUND
);
}
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_api/src/serve/probe.rs | crates/synd_api/src/serve/probe.rs | use axum::{
Json,
http::{StatusCode, header},
response::IntoResponse,
};
use synd_o11y::health_check::Health;
use crate::config;
pub async fn healthcheck() -> impl IntoResponse {
(
StatusCode::OK,
[(header::CONTENT_TYPE, Health::CONTENT_TYPE)],
Json(
Health::pass()
.with_version(config::app::VERSION)
.with_description("health of synd-api"),
),
)
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_api/src/serve/layer/trace.rs | crates/synd_api/src/serve/layer/trace.rs | use tower_http::trace::HttpMakeClassifier;
use tracing::Level;
#[derive(Clone)]
pub struct MakeSpan;
impl<B> tower_http::trace::MakeSpan<B> for MakeSpan {
fn make_span(&mut self, request: &axum::http::Request<B>) -> tracing::Span {
use synd_o11y::opentelemetry::extension::*;
let cx = synd_o11y::opentelemetry::http::extract(request.headers());
let request_id = cx
.baggage()
.get(synd_o11y::REQUEST_ID_KEY)
.map_or("?", |v| v.as_str());
let span = tracing::span!(
Level::INFO,
"http",
method = %request.method(),
uri = %request.uri(),
%request_id,
);
span.set_parent(cx);
span
}
}
#[derive(Clone)]
pub struct OnRequest;
impl<B> tower_http::trace::OnRequest<B> for OnRequest {
fn on_request(&mut self, _request: &axum::http::Request<B>, _span: &tracing::Span) {
// do nothing
}
}
pub fn layer() -> tower_http::trace::TraceLayer<
HttpMakeClassifier,
MakeSpan,
OnRequest,
tower_http::trace::DefaultOnResponse,
> {
tower_http::trace::TraceLayer::new_for_http()
.make_span_with(MakeSpan)
.on_request(OnRequest)
.on_response(tower_http::trace::DefaultOnResponse::default().level(Level::INFO))
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_api/src/serve/layer/mod.rs | crates/synd_api/src/serve/layer/mod.rs | //! Module for tower layer
pub(crate) mod authenticate;
pub(crate) mod request_metrics;
pub(crate) mod trace;
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_api/src/serve/layer/request_metrics.rs | crates/synd_api/src/serve/layer/request_metrics.rs | use std::{
convert::Infallible,
pin::Pin,
task::{Context, Poll},
time::Instant,
};
use axum::{extract::Request, http::Method, response::Response};
use futures_util::Future;
use synd_o11y::metric;
use tower::{Layer, Service};
use crate::config;
#[derive(Clone)]
pub struct RequestMetricsLayer {}
impl RequestMetricsLayer {
pub fn new() -> Self {
Self {}
}
}
impl<S> Layer<S> for RequestMetricsLayer {
type Service = RequestMetricsService<S>;
fn layer(&self, inner: S) -> Self::Service {
RequestMetricsService { inner }
}
}
#[derive(Clone)]
pub struct RequestMetricsService<S> {
inner: S,
}
impl<S> Service<Request> for RequestMetricsService<S>
where
S: Service<Request, Response = Response, Error = Infallible> + Clone + Send + 'static,
S::Future: Send,
{
type Response = Response;
type Error = Infallible;
type Future =
Pin<Box<dyn Future<Output = Result<Self::Response, Self::Error>> + Send + 'static>>;
fn poll_ready(&mut self, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
self.inner.poll_ready(cx)
}
fn call(&mut self, req: Request) -> Self::Future {
let start = Instant::now();
let path = req.uri().path().to_owned();
let method = req.method().clone();
let mut this = self.clone();
Box::pin(async move {
let response = this.inner.call(req).await.unwrap();
let status = response.status().as_u16();
// Ignore health check
// Metrics related to health checks is ignored as the are collected
// by the service performing the health check
if path != config::serve::HEALTH_CHECK_PATH {
// https://opentelemetry.io/docs/specs/semconv/http/http-metrics/
// Considiering the case of not found(404), recording the path as
// an attribute leads to an inability to control cardinality.
// Therefore, the path is not recorded.
metric!(
monotonic_counter.http.server.request = 1,
http.response.status.code = status
);
}
// instrument graphql latency
if path == "/graphql" && method == Method::POST {
// f64 is matter
// The type of the field that MetricsVisitor visits when on_event() is called is pre defined.
// If u128 which is returned from elapsed() is used, it will not be visited, resulting in no metrics recorded.
// Spec say "When instruments are measuring durations, seconds SHOULD be used"
// https://opentelemetry.io/docs/specs/semconv/general/metrics/#instrument-units
let elapsed: f64 = start.elapsed().as_secs_f64();
// is there any semantic conventions?
metric!(histogram.graphql.duration = elapsed);
}
Ok(response)
})
}
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_api/src/serve/layer/authenticate/mod.rs | crates/synd_api/src/serve/layer/authenticate/mod.rs | use std::{
convert::Infallible,
future::Future,
pin::Pin,
task::{Context, Poll},
};
use axum::{
extract::Request,
http::StatusCode,
response::{IntoResponse, Response},
};
use pin_project::pin_project;
use tower::{Layer, Service};
use crate::principal::Principal;
pub trait Authenticate {
// how to implementor fill this associate type
// need impl trait in associate type ?
// https://github.com/rust-lang/rust/issues/63063
type Output: Future<Output = Result<Principal, ()>>;
fn authenticate(&self, token: Option<String>) -> Self::Output;
}
#[expect(clippy::large_enum_variant)]
#[pin_project(project = AuthFutureProj)]
pub enum AuthenticateFuture<AuthFut, S, F> {
Authenticate {
req: Option<Request>,
#[pin]
auth_fut: AuthFut,
inner: S,
},
ServiceCall {
#[pin]
service_fut: F,
},
}
impl<AuthFut, S, F> AuthenticateFuture<AuthFut, S, F> {
fn new(req: Request, auth_fut: AuthFut, inner: S) -> Self {
AuthenticateFuture::Authenticate {
req: Some(req),
auth_fut,
inner,
}
}
}
impl<AuthFut, S> Future for AuthenticateFuture<AuthFut, S, S::Future>
where
AuthFut: Future<Output = Result<Principal, ()>>,
S: Service<Request, Response = Response, Error = Infallible>,
{
type Output = Result<Response, Infallible>;
fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
match self.as_mut().project() {
AuthFutureProj::Authenticate {
req,
auth_fut,
inner,
} => match auth_fut.poll(cx) {
Poll::Ready(Ok(principal)) => {
let mut req = req.take().unwrap();
req.extensions_mut().insert(principal);
let service_fut = inner.call(req);
self.set(AuthenticateFuture::ServiceCall { service_fut });
self.poll(cx)
}
Poll::Ready(Err(())) => Poll::Ready(Ok(StatusCode::UNAUTHORIZED.into_response())),
Poll::Pending => Poll::Pending,
},
AuthFutureProj::ServiceCall { service_fut } => service_fut.poll(cx),
}
}
}
#[derive(Clone)]
pub struct AuthenticateLayer<A> {
authenticator: A,
}
impl<A> AuthenticateLayer<A> {
pub fn new(authenticator: A) -> Self {
Self { authenticator }
}
}
impl<S, A> Layer<S> for AuthenticateLayer<A>
where
A: Authenticate + Clone,
{
type Service = AuthenticateService<S, A>;
fn layer(&self, inner: S) -> Self::Service {
AuthenticateService {
inner,
authenticator: self.authenticator.clone(),
}
}
}
#[derive(Clone)]
pub struct AuthenticateService<S, A> {
inner: S,
authenticator: A,
}
impl<S, A> Service<Request> for AuthenticateService<S, A>
where
S: Service<Request, Response = Response, Error = Infallible> + Clone,
A: Authenticate,
{
type Response = Response;
type Error = Infallible;
type Future = AuthenticateFuture<A::Output, S, S::Future>;
fn poll_ready(
&mut self,
cx: &mut std::task::Context<'_>,
) -> std::task::Poll<Result<(), Self::Error>> {
self.inner.poll_ready(cx)
}
fn call(&mut self, req: Request) -> Self::Future {
let token = req
.headers()
.get(axum::http::header::AUTHORIZATION)
.and_then(|header| header.to_str().ok())
.map(ToOwned::to_owned);
let auth_fut = self.authenticator.authenticate(token);
let inner = self.inner.clone();
AuthenticateFuture::new(req, auth_fut, inner)
}
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_api/tests/integration.rs | crates/synd_api/tests/integration.rs | #[cfg(feature = "integration")]
mod test {
    /// End-to-end smoke test: boots a kvsd backing store, then runs the
    /// `synd-api` binary in `--dry-run` mode against it and asserts the
    /// process exits successfully.
    #[tokio::test(flavor = "multi_thread")]
    async fn api_command_test() -> anyhow::Result<()> {
        // Keep the client handle alive for the duration of the test so
        // the kvsd instance is not torn down early.
        let _kvsd_client = synd_test::kvsd::run_kvsd(
            "localhost".into(),
            45000,
            "test".into(),
            "test".into(),
            synd_test::temp_dir().keep(),
        )
        .await?;
        #[expect(deprecated)]
        let mut cmd = assert_cmd::Command::cargo_bin("synd-api").unwrap();
        // `--dry-run` makes the server validate its configuration and
        // exit instead of serving forever.
        cmd.args([
            "--addr",
            "127.0.0.1",
            "--port",
            &format!("{}", 45001),
            "--sqlite-db",
            &format!("{}", synd_test::temp_dir().keep().join("synd.db").display(),),
            "--tls-cert",
            synd_test::certificate().to_str().unwrap(),
            "--tls-key",
            synd_test::private_key().to_str().unwrap(),
            "--otlp-endpoint",
            "http://localhost:43177",
            "--dry-run",
        ])
        .assert()
        .success();
        Ok(())
    }
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_term/src/lib.rs | crates/synd_term/src/lib.rs | #![allow(clippy::new_without_default)]
#![warn(rustdoc::broken_intra_doc_links)]
pub mod application;
pub mod auth;
pub mod cli;
pub mod client;
pub(crate) mod command;
pub mod config;
pub mod filesystem;
pub mod interact;
pub mod job;
pub mod keymap;
pub mod matcher;
pub mod terminal;
pub mod types;
pub mod ui;
#[cfg(feature = "integration")]
pub mod integration;
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_term/src/command.rs | crates/synd_term/src/command.rs | use std::{fmt::Display, sync::Arc};
use synd_auth::device_flow::DeviceAuthorizationResponse;
use synd_feed::types::{Category, FeedUrl};
use crate::{
application::{Direction, Populate, RequestSequence},
auth::{AuthenticationProvider, Credential, Verified},
client::{
github::{FetchNotificationsParams, GithubError},
synd_api::{
SyndApiError, mutation::subscribe_feed::SubscribeFeedInput, payload,
query::subscription::SubscriptionOutput,
},
},
types::{
Feed,
github::{
IssueContext, IssueOrPullRequest, Notification, NotificationId, PullRequestContext,
PullRequestState, Reason,
},
},
ui::components::{filter::FilterLane, gh_notifications::GhNotificationFilterUpdater},
};
/// Successful results of asynchronous api calls, delivered back to the
/// application via [`Command::HandleApiResponse`].
#[derive(Debug, Clone)]
pub(crate) enum ApiResponse {
    // Device flow started; the user must complete authorization.
    DeviceFlowAuthorization {
        provider: AuthenticationProvider,
        device_authorization: DeviceAuthorizationResponse,
    },
    // Device flow completed and produced a verified credential.
    DeviceFlowCredential {
        credential: Verified<Credential>,
    },
    SubscribeFeed {
        // Boxed, presumably to keep the variant small — confirm with
        // `Feed`'s size before relying on this.
        feed: Box<Feed>,
    },
    UnsubscribeFeed {
        url: FeedUrl,
    },
    FetchSubscription {
        populate: Populate,
        subscription: SubscriptionOutput,
    },
    FetchEntries {
        populate: Populate,
        payload: payload::FetchEntriesPayload,
    },
    FetchGithubNotifications {
        populate: Populate,
        notifications: Vec<Notification>,
    },
    FetchGithubIssue {
        notification_id: NotificationId,
        issue: IssueContext,
    },
    FetchGithubPullRequest {
        notification_id: NotificationId,
        pull_request: PullRequestContext,
    },
    MarkGithubNotificationAsDone {
        notification_id: NotificationId,
    },
    UnsubscribeGithubThread {},
}
impl Display for ApiResponse {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Render payload-heavy responses by name only; every other
        // variant falls back to its Debug representation.
        let name = match self {
            ApiResponse::DeviceFlowCredential { .. } => "DeviceFlowCredential",
            ApiResponse::FetchSubscription { .. } => "FetchSubscription",
            ApiResponse::FetchEntries { .. } => "FetchEntries",
            ApiResponse::FetchGithubNotifications { .. } => "FetchGithubNotifications",
            ApiResponse::FetchGithubIssue { .. } => "FetchGithubIssue",
            ApiResponse::FetchGithubPullRequest { .. } => "FetchGithubPullRequest",
            other => return write!(f, "{other:?}"),
        };
        f.write_str(name)
    }
}
/// All actions the application event loop can process — user-driven
/// navigation, api requests/responses, and error handling.
#[expect(clippy::large_enum_variant)]
#[derive(Debug, Clone)]
pub(crate) enum Command {
    Nop,
    Quit,
    ResizeTerminal {
        _columns: u16,
        _rows: u16,
    },
    RenderThrobber,
    Idle,
    Authenticate,
    MoveAuthenticationProvider(Direction),
    // An api call identified by `request_seq` completed successfully.
    HandleApiResponse {
        request_seq: RequestSequence,
        response: ApiResponse,
    },
    RefreshCredential {
        credential: Verified<Credential>,
    },
    MoveTabSelection(Direction),
    // Subscription
    MoveSubscribedFeed(Direction),
    MoveSubscribedFeedFirst,
    MoveSubscribedFeedLast,
    PromptFeedSubscription,
    PromptFeedEdition,
    PromptFeedUnsubscription,
    MoveFeedUnsubscriptionPopupSelection(Direction),
    SelectFeedUnsubscriptionPopup,
    CancelFeedUnsubscriptionPopup,
    SubscribeFeed {
        input: SubscribeFeedInput,
    },
    FetchSubscription {
        after: Option<String>,
        first: i64,
    },
    ReloadSubscription,
    OpenFeed,
    // Entries
    FetchEntries {
        after: Option<String>,
        first: i64,
    },
    ReloadEntries,
    MoveEntry(Direction),
    MoveEntryFirst,
    MoveEntryLast,
    OpenEntry,
    BrowseEntry,
    // Filter
    MoveFilterRequirement(Direction),
    // NOTE(review): "Filterling" is a typo for "Filtering", but the
    // variant name is part of the crate-internal interface; renaming
    // would touch every call site.
    ActivateCategoryFilterling,
    ActivateSearchFiltering,
    PromptChanged,
    DeactivateFiltering,
    ToggleFilterCategory {
        lane: FilterLane,
        category: Category<'static>,
    },
    ActivateAllFilterCategories {
        lane: FilterLane,
    },
    DeactivateAllFilterCategories {
        lane: FilterLane,
    },
    // Theme
    RotateTheme,
    // Latest release check
    InformLatestRelease(update_informer::Version),
    // Github notifications
    FetchGhNotifications {
        populate: Populate,
        params: FetchNotificationsParams,
    },
    FetchGhNotificationDetails {
        contexts: Vec<IssueOrPullRequest>,
    },
    MoveGhNotification(Direction),
    MoveGhNotificationFirst,
    MoveGhNotificationLast,
    OpenGhNotification {
        with_mark_as_done: bool,
    },
    ReloadGhNotifications,
    MarkGhNotificationAsDone {
        all: bool,
    },
    UnsubscribeGhThread,
    OpenGhNotificationFilterPopup,
    CloseGhNotificationFilterPopup,
    // NOTE(review): "Ghnotification" should be "GhNotification" for
    // consistency with sibling variants; kept as-is (interface name).
    UpdateGhnotificationFilterPopupOptions(GhNotificationFilterUpdater),
    // Error
    HandleError {
        message: String,
    },
    HandleApiError {
        // use Arc for impl Clone
        error: Arc<SyndApiError>,
        request_seq: RequestSequence,
    },
    HandleOauthApiError {
        // use Arc for impl Clone
        error: Arc<anyhow::Error>,
        request_seq: RequestSequence,
    },
    HandleGithubApiError {
        // use Arc for impl Clone
        error: Arc<GithubError>,
        request_seq: RequestSequence,
    },
}
impl Display for Command {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Delegate api responses to their own Display, elide the bulky
        // detail contexts, and fall back to Debug for everything else.
        if let Command::HandleApiResponse { response, .. } = self {
            return response.fmt(f);
        }
        if let Command::FetchGhNotificationDetails { .. } = self {
            return f
                .debug_struct("FetchGhNotificationDetails")
                .finish_non_exhaustive();
        }
        write!(f, "{self:?}")
    }
}
impl Command {
pub(crate) fn api_error(error: SyndApiError, request_seq: RequestSequence) -> Self {
Command::HandleApiError {
error: Arc::new(error),
request_seq,
}
}
pub(crate) fn oauth_api_error(error: anyhow::Error, request_seq: RequestSequence) -> Self {
Command::HandleOauthApiError {
error: Arc::new(error),
request_seq,
}
}
}
/// Convenience constructors, one per user-facing action; they map 1:1
/// onto [`Command`] variants with fixed arguments.
impl Command {
    pub fn quit() -> Self {
        Command::Quit
    }
    pub fn authenticate() -> Self {
        Command::Authenticate
    }
    pub fn move_right_tab_selection() -> Self {
        Command::MoveTabSelection(Direction::Right)
    }
    pub fn move_left_tab_selection() -> Self {
        Command::MoveTabSelection(Direction::Left)
    }
    pub fn move_up_authentication_provider() -> Self {
        Command::MoveAuthenticationProvider(Direction::Up)
    }
    pub fn move_down_authentication_provider() -> Self {
        Command::MoveAuthenticationProvider(Direction::Down)
    }
    // Entries
    pub fn move_up_entry() -> Self {
        Command::MoveEntry(Direction::Up)
    }
    pub fn move_down_entry() -> Self {
        Command::MoveEntry(Direction::Down)
    }
    pub fn reload_entries() -> Self {
        Command::ReloadEntries
    }
    pub fn open_entry() -> Self {
        Command::OpenEntry
    }
    pub fn browse_entry() -> Self {
        Command::BrowseEntry
    }
    pub fn move_entry_first() -> Self {
        Command::MoveEntryFirst
    }
    pub fn move_entry_last() -> Self {
        Command::MoveEntryLast
    }
    // Subscription
    pub fn prompt_feed_subscription() -> Self {
        Command::PromptFeedSubscription
    }
    pub fn prompt_feed_edition() -> Self {
        Command::PromptFeedEdition
    }
    pub fn prompt_feed_unsubscription() -> Self {
        Command::PromptFeedUnsubscription
    }
    pub fn move_feed_unsubscription_popup_selection_left() -> Self {
        Command::MoveFeedUnsubscriptionPopupSelection(Direction::Left)
    }
    pub fn move_feed_unsubscription_popup_selection_right() -> Self {
        Command::MoveFeedUnsubscriptionPopupSelection(Direction::Right)
    }
    pub fn select_feed_unsubscription_popup() -> Self {
        Command::SelectFeedUnsubscriptionPopup
    }
    pub fn cancel_feed_unsubscription_popup() -> Self {
        Command::CancelFeedUnsubscriptionPopup
    }
    pub fn move_up_subscribed_feed() -> Self {
        Command::MoveSubscribedFeed(Direction::Up)
    }
    pub fn move_down_subscribed_feed() -> Self {
        Command::MoveSubscribedFeed(Direction::Down)
    }
    pub fn reload_subscription() -> Self {
        Command::ReloadSubscription
    }
    pub fn open_feed() -> Self {
        Command::OpenFeed
    }
    pub fn move_subscribed_feed_first() -> Self {
        Command::MoveSubscribedFeedFirst
    }
    pub fn move_subscribed_feed_last() -> Self {
        Command::MoveSubscribedFeedLast
    }
    // Filtering
    pub fn move_filter_requirement_left() -> Self {
        Command::MoveFilterRequirement(Direction::Left)
    }
    pub fn move_filter_requirement_right() -> Self {
        Command::MoveFilterRequirement(Direction::Right)
    }
    pub fn activate_category_filtering() -> Self {
        Command::ActivateCategoryFilterling
    }
    pub fn activate_search_filtering() -> Self {
        Command::ActivateSearchFiltering
    }
    pub fn deactivate_filtering() -> Self {
        Command::DeactivateFiltering
    }
    pub fn rotate_theme() -> Self {
        Command::RotateTheme
    }
    // Github notifications
    pub fn move_up_gh_notification() -> Self {
        Command::MoveGhNotification(Direction::Up)
    }
    pub fn move_down_gh_notification() -> Self {
        Command::MoveGhNotification(Direction::Down)
    }
    pub fn move_gh_notification_first() -> Self {
        Command::MoveGhNotificationFirst
    }
    pub fn move_gh_notification_last() -> Self {
        Command::MoveGhNotificationLast
    }
    pub fn open_gh_notification() -> Self {
        Command::OpenGhNotification {
            with_mark_as_done: false,
        }
    }
    pub fn open_gh_notification_with_done() -> Self {
        Command::OpenGhNotification {
            with_mark_as_done: true,
        }
    }
    pub fn reload_gh_notifications() -> Self {
        Command::ReloadGhNotifications
    }
    pub fn mark_gh_notification_as_done() -> Self {
        Command::MarkGhNotificationAsDone { all: false }
    }
    pub fn mark_gh_notification_as_done_all() -> Self {
        Command::MarkGhNotificationAsDone { all: true }
    }
    pub fn unsubscribe_gh_thread() -> Self {
        Command::UnsubscribeGhThread
    }
    // Github notification filter popup: each toggle_* constructor sets
    // exactly one field of the updater, leaving the rest defaulted.
    pub fn open_gh_notification_filter_popup() -> Self {
        Command::OpenGhNotificationFilterPopup
    }
    pub fn close_gh_notification_filter_popup() -> Self {
        Command::CloseGhNotificationFilterPopup
    }
    pub fn toggle_gh_notification_filter_popup_include_unread() -> Self {
        Command::UpdateGhnotificationFilterPopupOptions(GhNotificationFilterUpdater {
            toggle_include: true,
            ..Default::default()
        })
    }
    pub fn toggle_gh_notification_filter_popup_participating() -> Self {
        Command::UpdateGhnotificationFilterPopupOptions(GhNotificationFilterUpdater {
            toggle_participating: true,
            ..Default::default()
        })
    }
    pub fn toggle_gh_notification_filter_popup_visibility_public() -> Self {
        Command::UpdateGhnotificationFilterPopupOptions(GhNotificationFilterUpdater {
            toggle_visilibty_public: true,
            ..Default::default()
        })
    }
    pub fn toggle_gh_notification_filter_popup_visibility_private() -> Self {
        Command::UpdateGhnotificationFilterPopupOptions(GhNotificationFilterUpdater {
            toggle_visilibty_private: true,
            ..Default::default()
        })
    }
    pub fn toggle_gh_notification_filter_popup_pr_open() -> Self {
        Command::UpdateGhnotificationFilterPopupOptions(GhNotificationFilterUpdater {
            toggle_pull_request_condition: Some(PullRequestState::Open),
            ..Default::default()
        })
    }
    pub fn toggle_gh_notification_filter_popup_pr_closed() -> Self {
        Command::UpdateGhnotificationFilterPopupOptions(GhNotificationFilterUpdater {
            toggle_pull_request_condition: Some(PullRequestState::Closed),
            ..Default::default()
        })
    }
    pub fn toggle_gh_notification_filter_popup_pr_merged() -> Self {
        Command::UpdateGhnotificationFilterPopupOptions(GhNotificationFilterUpdater {
            toggle_pull_request_condition: Some(PullRequestState::Merged),
            ..Default::default()
        })
    }
    pub fn toggle_gh_notification_filter_popup_reason_mentioned() -> Self {
        Command::UpdateGhnotificationFilterPopupOptions(GhNotificationFilterUpdater {
            toggle_reason: Some(Reason::Mention),
            ..Default::default()
        })
    }
    pub fn toggle_gh_notification_filter_popup_reason_review() -> Self {
        Command::UpdateGhnotificationFilterPopupOptions(GhNotificationFilterUpdater {
            toggle_reason: Some(Reason::ReviewRequested),
            ..Default::default()
        })
    }
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_term/src/filesystem.rs | crates/synd_term/src/filesystem.rs | #[cfg(test)]
pub(crate) mod mock {
use std::{
collections::HashMap,
fs::File,
io,
path::{Path, PathBuf},
};
use synd_stdx::fs::FileSystem;
#[derive(Default, Clone)]
pub(crate) struct MockFileSystem {
remove_errors: HashMap<PathBuf, io::ErrorKind>,
}
impl MockFileSystem {
pub(crate) fn with_remove_errors(
mut self,
path: impl Into<PathBuf>,
err: io::ErrorKind,
) -> Self {
self.remove_errors.insert(path.into(), err);
self
}
}
impl FileSystem for MockFileSystem {
fn create_dir_all<P: AsRef<Path>>(&self, _path: P) -> io::Result<()> {
unimplemented!()
}
fn create_file<P: AsRef<Path>>(&self, _path: P) -> io::Result<File> {
unimplemented!()
}
fn open_file<P: AsRef<Path>>(&self, _path: P) -> io::Result<File> {
unimplemented!()
}
fn remove_file<P: AsRef<Path>>(&self, path: P) -> io::Result<()> {
let path = path.as_ref();
match self.remove_errors.get(path) {
Some(err) => Err(io::Error::from(*err)),
None => Ok(()),
}
}
}
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_term/src/integration.rs | crates/synd_term/src/integration.rs | use std::io;
use tokio::sync::mpsc::UnboundedSender;
use tokio_stream::wrappers::UnboundedReceiverStream;
/// Integration-test handle that injects terminal events into the
/// application's event stream. Panics if the receiving side is gone.
pub struct UnboundedSenderWrapper {
    inner: UnboundedSender<io::Result<crossterm::event::Event>>,
}
impl UnboundedSenderWrapper {
    /// Push a single event into the stream.
    pub fn send(&self, event: crossterm::event::Event) {
        self.inner.send(Ok(event)).unwrap();
    }
    /// Push a sequence of events into the stream, preserving order.
    pub fn send_multi<T>(&self, events: T)
    where
        T: IntoIterator<Item = crossterm::event::Event>,
    {
        for event in events {
            self.send(event);
        }
    }
}
/// Build a connected (sender, event stream) pair used to drive the
/// application with synthetic terminal events in tests.
pub fn event_stream() -> (
    UnboundedSenderWrapper,
    UnboundedReceiverStream<io::Result<crossterm::event::Event>>,
) {
    let (tx, rx) = tokio::sync::mpsc::unbounded_channel();
    (
        UnboundedSenderWrapper { inner: tx },
        UnboundedReceiverStream::new(rx),
    )
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_term/src/main.rs | crates/synd_term/src/main.rs | use std::{future, path::PathBuf, process::ExitCode};
use anyhow::Context as _;
use futures_util::TryFutureExt as _;
use synd_stdx::fs::fsimpl::FileSystem;
use synd_term::{
application::{Application, Cache, Config, Features},
cli::{self, Args},
client::{github::GithubClient, synd_api::Client},
config::{self, ConfigResolver},
interact::{ProcessInteractor, TextBrowserInteractor},
terminal::{self, Terminal},
ui::theme::Theme,
};
use tracing::error;
use tracing_appender::non_blocking::WorkerGuard;
/// Initialize the global tracing subscriber.
///
/// When `log_path` is given, logs are appended to that file through a
/// non-blocking writer and the returned [`WorkerGuard`] must be kept
/// alive so buffered logs are flushed on drop. Otherwise logs go to
/// stdout and `None` is returned.
fn init_tracing(log_path: Option<PathBuf>) -> anyhow::Result<Option<WorkerGuard>> {
    use synd_o11y::opentelemetry::init_propagation;
    use tracing_subscriber::{
        Registry,
        filter::EnvFilter,
        fmt::{self, writer::BoxMakeWriter},
        layer::SubscriberExt,
        util::SubscriberInitExt as _,
    };
    let (writer, guard) = if let Some(log_path) = log_path {
        // Make sure the parent directory exists before opening the file.
        if let Some(parent) = log_path.parent() {
            std::fs::create_dir_all(parent)?;
        }
        let log = std::fs::OpenOptions::new()
            .append(true)
            .create(true)
            .open(log_path)?;
        let (non_blocking, guard) = tracing_appender::non_blocking(log);
        (BoxMakeWriter::new(non_blocking), Some(guard))
    } else {
        (BoxMakeWriter::new(std::io::stdout), None)
    };
    Registry::default()
        .with(
            fmt::Layer::new()
                .with_ansi(true)
                .with_timer(fmt::time::UtcTime::rfc_3339())
                .with_file(false)
                .with_line_number(false)
                .with_target(true)
                .with_writer(writer),
        )
        .with(
            // Filter directive comes from the env var; default to "info".
            EnvFilter::try_from_env(config::env::LOG_DIRECTIVE)
                .or_else(|_| EnvFilter::try_new("info"))
                .unwrap(),
        )
        .try_init()?;
    // Set text map propagator globally
    init_propagation();
    Ok(guard)
}
/// Assemble the [`Application`] from the resolved configuration.
///
/// The github client is attached only when github integration is
/// enabled in the configuration.
#[allow(clippy::needless_pass_by_value)]
fn build_app(config: ConfigResolver, dry_run: bool) -> anyhow::Result<Application> {
    let mut builder = Application::builder()
        .terminal(Terminal::new().context("Failed to construct terminal")?)
        .client(
            Client::new(config.api_endpoint(), config.api_timeout())
                .context("Failed to construct client")?,
        )
        .categories(config.categories())
        .config(Config {
            entries_limit: config.feed_entries_limit(),
            features: Features {
                enable_github_notification: config.is_github_enable(),
            },
            ..Default::default()
        })
        .cache(Cache::new(config.cache_dir()))
        .theme(Theme::with_palette(config.palette()))
        .interactor(Box::new(ProcessInteractor::new(
            TextBrowserInteractor::new(config.feed_browser_command(), config.feed_browser_args()),
        )))
        .dry_run(dry_run);
    if config.is_github_enable() {
        builder = builder.github_client(
            GithubClient::new(config.github_pat()).context("Failed to construct github client")?,
        );
    }
    Ok(builder.build())
}
/// Entry point: resolves configuration, initializes tracing, then either
/// runs a one-shot subcommand or starts the interactive TUI.
#[tokio::main]
async fn main() -> ExitCode {
    // parse args and resolve configuration
    let (config, command, dry_run) = {
        let Args {
            config,
            log,
            cache_dir,
            api,
            feed,
            github,
            command,
            palette,
            dry_run,
        } = cli::parse();
        let config = match ConfigResolver::builder()
            .config_file(config)
            .log_file(log)
            .cache_dir(cache_dir)
            .api_options(api)
            .feed_options(feed)
            .github_options(github)
            .palette(palette)
            .try_build()
        {
            Ok(config) => config,
            Err(err) => {
                // tracing subscriber is not yet configured
                eprintln!("{err}");
                return ExitCode::FAILURE;
            }
        };
        (config, command, dry_run)
    };
    // init tracing
    let _guard = {
        // Subcommand logs to the terminal, while tui writes logs to a file.
        let log = if command.is_some() {
            None
        } else {
            Some(config.log_file())
        };
        init_tracing(log).unwrap()
    };
    // if subcommand is specified, execute it
    if let Some(command) = command {
        return match command {
            cli::Command::Clean(clean) => clean.run(&FileSystem::new()),
            cli::Command::Check(check) => check.run(config).await,
            cli::Command::Export(export) => export.run(config.api_endpoint()).await,
            cli::Command::Import(import) => import.run(config.api_endpoint()).await,
            cli::Command::Config(config) => config.run(),
        };
    }
    // No subcommand: build the application and run the TUI event loop
    // until it exits or fails.
    let mut event_stream = terminal::event_stream();
    if let Err(err) = future::ready(build_app(config, dry_run))
        .and_then(|app| {
            tracing::info!("Running...");
            app.run(&mut event_stream)
        })
        .await
    {
        error!("{err:?}");
        ExitCode::FAILURE
    } else {
        ExitCode::SUCCESS
    }
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_term/src/matcher.rs | crates/synd_term/src/matcher.rs | use std::{cell::RefCell, fmt, rc::Rc};
use nucleo::{
Utf32Str,
pattern::{AtomKind, CaseMatching, Normalization, Pattern},
};
/// Substring matcher wrapping `nucleo`, cheaply cloneable because its
/// mutable state lives behind `Rc<RefCell<..>>` (single-threaded use).
#[derive(Clone)]
pub struct Matcher {
    #[expect(clippy::struct_field_names)]
    matcher: Rc<RefCell<nucleo::Matcher>>,
    // Current search pattern; `None` means "match everything".
    needle: Option<Pattern>,
    // For Utf32 conversion
    buf: Rc<RefCell<Vec<char>>>,
}
impl Default for Matcher {
fn default() -> Self {
Self::new()
}
}
impl Matcher {
pub fn new() -> Self {
Self {
matcher: Rc::new(RefCell::new(nucleo::Matcher::default())),
needle: None,
buf: Rc::new(RefCell::new(Vec::new())),
}
}
pub fn update_needle(&mut self, needle: &str) {
if needle.is_empty() {
self.needle = None;
} else {
let needle = Pattern::new(
needle,
CaseMatching::Smart,
Normalization::Smart,
AtomKind::Substring,
);
self.needle = Some(needle);
}
}
pub fn r#match(&self, haystack: &str) -> bool {
match self.needle.as_ref() {
Some(needle) => {
let mut buf = self.buf.borrow_mut();
let haystack = Utf32Str::new(haystack, &mut buf);
needle
.score(haystack, &mut self.matcher.borrow_mut())
.unwrap_or(0)
> 0
}
None => true,
}
}
}
impl fmt::Debug for Matcher {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Only the needle is interesting; the inner nucleo matcher and
        // scratch buffer are elided.
        let mut dbg = f.debug_struct("Matcher");
        dbg.field("needle", &self.needle);
        dbg.finish_non_exhaustive()
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    /// Covers positive matches (case-insensitive substring), a negative
    /// case, and the empty-needle "match everything" behavior — the
    /// original test only exercised positive cases.
    #[test]
    fn should_match() {
        let mut m = Matcher::new();
        // (needle, haystack) pairs expected to match.
        let cases = vec![
            ("rustsec", "rustsec"),
            ("rustsec", "RUSTSEC"),
            ("rustsec", "RustSec"),
            ("this week in rust", "This Week in Rust"),
        ];
        for case in cases {
            m.update_needle(case.0);
            assert!(m.r#match(case.1), "{:?} should match {:?}", case.0, case.1);
        }
        // A needle that is not a substring must not match.
        m.update_needle("golang");
        assert!(!m.r#match("This Week in Rust"));
        // An empty needle clears filtering: everything matches.
        m.update_needle("");
        assert!(m.r#match("anything"));
    }
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_term/src/job.rs | crates/synd_term/src/job.rs | use std::{collections::VecDeque, num::NonZero};
use futures_util::{StreamExt as _, future::BoxFuture, stream::FuturesUnordered};
use crate::command::Command;
pub(crate) type JobFuture = BoxFuture<'static, anyhow::Result<Command>>;
/// Bounded pool of in-flight [`JobFuture`]s: at most `concurrent_limit`
/// futures are polled concurrently, and jobs pushed beyond that limit
/// wait in `delay_queue` (FIFO) until a slot frees up.
pub(crate) struct Jobs {
    futures: FuturesUnordered<JobFuture>,
    delay_queue: VecDeque<JobFuture>,
    concurrent_limit: NonZero<usize>,
}
impl Jobs {
    pub fn new(concurrent_limit: NonZero<usize>) -> Self {
        Self {
            futures: FuturesUnordered::new(),
            delay_queue: VecDeque::new(),
            concurrent_limit,
        }
    }
    /// Enqueue a job. It starts running immediately if the in-flight set
    /// is below the limit; otherwise it waits in the delay queue.
    pub(crate) fn push(&mut self, job: JobFuture) {
        self.delay_queue.push_back(job);
        // Backfill every free slot from the front of the queue (FIFO).
        while self.concurrent_limit.get() > self.futures.len() {
            let Some(job) = self.delay_queue.pop_front() else {
                break;
            };
            self.futures.push(job);
        }
        tracing::trace!(
            "Job delay_queue: {} futures: {}",
            self.delay_queue.len(),
            self.futures.len()
        );
    }
    /// Await the next completed job, or `None` when no jobs remain in
    /// flight. Each completion frees a slot, which is immediately
    /// refilled from the delay queue.
    pub(crate) async fn next(&mut self) -> Option<anyhow::Result<Command>> {
        debug_assert!(self.concurrent_limit.get() >= self.futures.len());
        match self.futures.next().await {
            Some(result) => {
                if let Some(job) = self.delay_queue.pop_front() {
                    self.futures.push(job);
                }
                Some(result)
            }
            None => None,
        }
    }
    #[cfg(feature = "integration")]
    pub(crate) fn is_empty(&self) -> bool {
        self.futures.is_empty() && self.delay_queue.is_empty()
    }
}
#[cfg(test)]
mod tests {
    use futures_util::FutureExt as _;
    use super::*;
    use std::future;
    /// Pushing more jobs than the limit keeps the surplus queued, and
    /// every pushed job eventually completes.
    #[tokio::test]
    async fn respect_concurrent_limit() {
        let mut job = Jobs::new(NonZero::new(2).unwrap());
        for _ in 0..3 {
            job.push(future::ready(Ok(Command::Nop)).boxed());
        }
        // With a limit of 2, exactly 2 jobs are in flight and 1 waits.
        assert_eq!(job.futures.len(), 2);
        assert_eq!(job.delay_queue.len(), 1);
        // Drain all jobs with `while let`: the loop terminates when
        // `next()` yields `None`, so a shortfall fails the count
        // assertion below instead of hanging the test forever (the
        // previous `loop` spun indefinitely when fewer than 3 results
        // arrived).
        let mut count = 0;
        while let Some(result) = job.next().await {
            assert!(matches!(result, Ok(Command::Nop)));
            count += 1;
        }
        assert_eq!(count, 3);
        assert!(job.futures.is_empty());
        assert!(job.delay_queue.is_empty());
    }
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_term/src/auth/mod.rs | crates/synd_term/src/auth/mod.rs | use std::{
borrow::Borrow,
cmp::Ordering,
fmt,
ops::{Deref, Sub},
};
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use synd_auth::jwt::google::JwtError;
use thiserror::Error;
use tracing::debug;
use crate::{
application::{Cache, JwtService, LoadCacheError, PersistCacheError},
config,
types::Time,
};
pub mod authenticator;
/// Identity providers supported for sign-in.
#[derive(Debug, Clone, Copy)]
pub enum AuthenticationProvider {
    Github,
    Google,
}
/// Errors raised while verifying, refreshing, or persisting credentials.
#[derive(Debug, Error)]
pub enum CredentialError {
    // Carries the refresh token so the caller can attempt a refresh.
    #[error("google jwt expired")]
    GoogleJwtExpired { refresh_token: String },
    #[error("google jwt email not verified")]
    GoogleJwtEmailNotVerified,
    #[error("decode jwt: {0}")]
    DecodeJwt(JwtError),
    #[error("refresh jwt id token: {0}")]
    RefreshJwt(JwtError),
    #[error("persist credential: {0}")]
    PersistCredential(#[from] PersistCacheError),
    #[error("load credential: {0}")]
    LoadCredential(#[from] LoadCacheError),
}
/// A stored credential, one variant per provider.
#[derive(Serialize, Deserialize, Clone, PartialEq, Eq)]
pub enum Credential {
    Github {
        access_token: String,
    },
    Google {
        id_token: String,
        refresh_token: String,
        expired_at: DateTime<Utc>,
    },
}
impl fmt::Debug for Credential {
    // Manual Debug that elides all fields so tokens never leak into
    // logs or error output.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("Credential").finish_non_exhaustive()
    }
}
// Typestate wrappers: a credential enters as `Unverified`, and `verify`
// moves it to either `Verified` or `Expired`.
/// Represents expired state
#[derive(PartialEq, Eq, Debug)]
pub(super) struct Expired<C = Credential>(pub(super) C);
/// Represents verified state
#[derive(Debug, Clone)]
pub(super) struct Verified<C = Credential>(C);
impl Deref for Verified<Credential> {
    type Target = Credential;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl Borrow<Credential> for &Verified<Credential> {
    fn borrow(&self) -> &Credential {
        &self.0
    }
}
impl<C> Verified<C> {
    /// Unwrap the verified credential.
    pub(super) fn into_inner(self) -> C {
        self.0
    }
}
/// Represents unverified state
#[derive(PartialEq, Eq, Debug)]
pub struct Unverified<C = Credential>(C);
impl From<Credential> for Unverified<Credential> {
    fn from(cred: Credential) -> Self {
        Unverified(cred)
    }
}
/// Outcome of [`Unverified::verify`].
pub(super) enum VerifyResult {
    Verified(Verified<Credential>),
    Expired(Expired<Credential>),
}
impl Unverified<Credential> {
    /// Verify the credential against `now`.
    ///
    /// Github tokens are always treated as verified (no inspectable
    /// expiry here). Google credentials are checked by decoding the id
    /// token — signature is NOT validated by this path — and comparing
    /// its expiry, minus a safety margin, against `now`.
    ///
    /// # Errors
    /// Fails when the jwt cannot be decoded or the google account email
    /// is not verified.
    pub(super) fn verify(
        self,
        jwt_service: &JwtService,
        now: DateTime<Utc>,
    ) -> Result<VerifyResult, CredentialError> {
        let credential = self.0;
        match &credential {
            Credential::Github { .. } => Ok(VerifyResult::Verified(Verified(credential))),
            Credential::Google { id_token, .. } => {
                let claims = jwt_service
                    .google
                    .decode_id_token_insecure(id_token, false)
                    .map_err(CredentialError::DecodeJwt)?;
                if !claims.email_verified {
                    return Err(CredentialError::GoogleJwtEmailNotVerified);
                }
                // Subtract a margin so the token is refreshed slightly
                // before it actually expires.
                match claims
                    .expired_at()
                    .sub(config::credential::EXPIRE_MARGIN)
                    .cmp(&now)
                {
                    // expired
                    Ordering::Less | Ordering::Equal => {
                        debug!("Google jwt expired, trying to refresh");
                        Ok(VerifyResult::Expired(Expired(credential)))
                    }
                    // not expired
                    Ordering::Greater => Ok(VerifyResult::Verified(Verified(credential))),
                }
            }
        }
    }
}
/// Process for restoring credential from cache
pub(crate) struct Restore<'a> {
    pub(crate) jwt_service: &'a JwtService,
    pub(crate) cache: &'a Cache,
    pub(crate) now: Time,
    // Whether a refreshed credential should be written back to the cache.
    pub(crate) persist_when_refreshed: bool,
}
impl Restore<'_> {
    /// Load the cached credential, verify it, and transparently refresh
    /// an expired google credential (optionally persisting the result).
    ///
    /// # Errors
    /// Fails when the cache cannot be read, verification fails, the
    /// refresh request fails, or persisting the refreshed credential
    /// fails.
    pub(crate) async fn restore(self) -> Result<Verified<Credential>, CredentialError> {
        let Restore {
            jwt_service,
            cache,
            now,
            persist_when_refreshed,
        } = self;
        let cred = cache.load_credential()?;
        match cred.verify(jwt_service, now)? {
            VerifyResult::Verified(cred) => Ok(cred),
            VerifyResult::Expired(Expired(Credential::Google { refresh_token, .. })) => {
                let cred = jwt_service.refresh_google_id_token(&refresh_token).await?;
                if persist_when_refreshed {
                    cache.persist_credential(&cred)?;
                }
                Ok(cred)
            }
            // Only google credentials can come back expired (github is
            // always "verified" above), so this arm is unreachable.
            VerifyResult::Expired(_) => panic!("Unexpected verify result. this is bug"),
        }
    }
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_term/src/auth/authenticator.rs | crates/synd_term/src/auth/authenticator.rs | use std::ops::Add;
use synd_auth::{
device_flow::{DeviceAuthorizationResponse, DeviceFlow, provider},
jwt,
};
use crate::{
auth::{AuthenticationProvider, Credential, CredentialError, Verified},
config,
types::Time,
};
/// Device-flow clients, one per supported provider.
#[derive(Clone)]
pub struct DeviceFlows {
    pub github: DeviceFlow<provider::Github>,
    pub google: DeviceFlow<provider::Google>,
}
/// Jwt helpers (currently google only).
#[derive(Clone)]
pub struct JwtService {
    pub google: jwt::google::JwtService,
}
impl JwtService {
    pub fn new() -> Self {
        Self {
            google: jwt::google::JwtService::default(),
        }
    }
    /// Builder-style override of the google jwt service.
    #[must_use]
    pub fn with_google_jwt_service(self, google: jwt::google::JwtService) -> Self {
        Self { google }
    }
    /// Exchange a refresh token for a fresh google id token and wrap it
    /// in a verified credential carrying the decoded expiry.
    pub(crate) async fn refresh_google_id_token(
        &self,
        refresh_token: &str,
    ) -> Result<Verified<Credential>, CredentialError> {
        let id_token = self
            .google
            .refresh_id_token(refresh_token)
            .await
            .map_err(CredentialError::RefreshJwt)?;
        // Decode (signature not validated here) just to read the expiry.
        let expired_at = self
            .google
            .decode_id_token_insecure(&id_token, false)
            .map_err(CredentialError::DecodeJwt)?
            .expired_at();
        let credential = Credential::Google {
            id_token,
            refresh_token: refresh_token.to_owned(),
            expired_at,
        };
        Ok(Verified(credential))
    }
}
/// Bundles the device-flow clients and jwt helpers used during sign-in.
#[derive(Clone)]
pub struct Authenticator {
    pub device_flows: DeviceFlows,
    pub jwt_service: JwtService,
}
impl Authenticator {
    pub fn new() -> Self {
        Self {
            device_flows: DeviceFlows {
                github: DeviceFlow::new(provider::Github::default()),
                google: DeviceFlow::new(provider::Google::default()),
            },
            jwt_service: JwtService::new(),
        }
    }
    /// Builder-style override of the device flows.
    #[must_use]
    pub fn with_device_flows(self, device_flows: DeviceFlows) -> Self {
        Self {
            device_flows,
            ..self
        }
    }
    /// Builder-style override of the jwt service.
    #[must_use]
    pub fn with_jwt_service(self, jwt_service: JwtService) -> Self {
        Self {
            jwt_service,
            ..self
        }
    }
    /// Start the oauth device flow for `provider`, returning the
    /// provider's device authorization response.
    pub(crate) async fn init_device_flow(
        &self,
        provider: AuthenticationProvider,
    ) -> anyhow::Result<DeviceAuthorizationResponse> {
        match provider {
            AuthenticationProvider::Github => {
                self.device_flows.github.device_authorize_request().await
            }
            AuthenticationProvider::Google => {
                self.device_flows.google.device_authorize_request().await
            }
        }
    }
    /// Poll the provider's token endpoint until the user completes the
    /// device flow, then convert the token response into a credential.
    pub(crate) async fn poll_device_flow_access_token(
        &self,
        now: Time,
        provider: AuthenticationProvider,
        response: DeviceAuthorizationResponse,
    ) -> anyhow::Result<Verified<Credential>> {
        match provider {
            AuthenticationProvider::Github => {
                let token_response = self
                    .device_flows
                    .github
                    .poll_device_access_token(response.device_code, response.interval)
                    .await?;
                Ok(Verified(Credential::Github {
                    access_token: token_response.access_token,
                }))
            }
            AuthenticationProvider::Google => {
                let token_response = self
                    .device_flows
                    .google
                    .poll_device_access_token(response.device_code, response.interval)
                    .await?;
                let id_token = token_response.id_token.expect("id token not found");
                // If the id token cannot be decoded, fall back to a fixed
                // expiry relative to `now` rather than failing sign-in.
                let expired_at = self
                    .jwt_service
                    .google
                    .decode_id_token_insecure(&id_token, false)
                    .ok()
                    .map_or(now.add(config::credential::FALLBACK_EXPIRE), |claims| {
                        claims.expired_at()
                    });
                Ok(Verified(Credential::Google {
                    id_token,
                    refresh_token: token_response
                        .refresh_token
                        .expect("refresh token not found"),
                    expired_at,
                }))
            }
        }
    }
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_term/src/application/builder.rs | crates/synd_term/src/application/builder.rs | use crate::{
application::{Application, Authenticator, Cache, Clock, Config},
client::{github::GithubClient, synd_api::Client},
config::Categories,
interact::Interact,
terminal::Terminal,
ui::theme::Theme,
};
/// Type-state builder for the application.
///
/// Each required component occupies one type parameter that starts as
/// `()` and becomes the concrete type once set, so a component can only
/// be provided once and missing components are compile errors.
pub struct ApplicationBuilder<
    Terminal = (),
    Client = (),
    Categories = (),
    Cache = (),
    Config = (),
    Theme = (),
    Interactor = (),
> {
    pub(super) terminal: Terminal,
    pub(super) client: Client,
    pub(super) categories: Categories,
    pub(super) cache: Cache,
    pub(super) config: Config,
    pub(super) theme: Theme,
    pub(super) interactor: Interactor,
    // Optional components: set via dedicated setters, defaulted to None.
    pub(super) authenticator: Option<Authenticator>,
    pub(super) github_client: Option<GithubClient>,
    pub(super) clock: Option<Box<dyn Clock>>,
    pub(super) dry_run: bool,
}
impl Default for ApplicationBuilder {
    fn default() -> Self {
        Self {
            terminal: (),
            client: (),
            categories: (),
            cache: (),
            config: (),
            theme: (),
            interactor: (),
            authenticator: None,
            github_client: None,
            clock: None,
            dry_run: false,
        }
    }
}
impl<T1, T2, T3, T4, T5, T6> ApplicationBuilder<(), T1, T2, T3, T4, T5, T6> {
    /// Set the terminal, filling the first typestate slot (`()` -> `Terminal`).
    #[must_use]
    pub fn terminal(
        self,
        terminal: Terminal,
    ) -> ApplicationBuilder<Terminal, T1, T2, T3, T4, T5, T6> {
        ApplicationBuilder {
            terminal,
            client: self.client,
            categories: self.categories,
            cache: self.cache,
            config: self.config,
            theme: self.theme,
            interactor: self.interactor,
            authenticator: self.authenticator,
            github_client: self.github_client,
            clock: self.clock,
            dry_run: self.dry_run,
        }
    }
}
impl<T1, T2, T3, T4, T5, T6> ApplicationBuilder<T1, (), T2, T3, T4, T5, T6> {
    /// Set the synd API client, filling the second typestate slot.
    #[must_use]
    pub fn client(self, client: Client) -> ApplicationBuilder<T1, Client, T2, T3, T4, T5, T6> {
        ApplicationBuilder {
            terminal: self.terminal,
            client,
            categories: self.categories,
            cache: self.cache,
            config: self.config,
            theme: self.theme,
            interactor: self.interactor,
            authenticator: self.authenticator,
            github_client: self.github_client,
            clock: self.clock,
            dry_run: self.dry_run,
        }
    }
}
impl<T1, T2, T3, T4, T5, T6> ApplicationBuilder<T1, T2, (), T3, T4, T5, T6> {
    /// Set the category configuration, filling the third typestate slot.
    #[must_use]
    pub fn categories(
        self,
        categories: Categories,
    ) -> ApplicationBuilder<T1, T2, Categories, T3, T4, T5, T6> {
        ApplicationBuilder {
            terminal: self.terminal,
            client: self.client,
            categories,
            cache: self.cache,
            config: self.config,
            theme: self.theme,
            interactor: self.interactor,
            authenticator: self.authenticator,
            github_client: self.github_client,
            clock: self.clock,
            dry_run: self.dry_run,
        }
    }
}
impl<T1, T2, T3, T4, T5, T6> ApplicationBuilder<T1, T2, T3, (), T4, T5, T6> {
    /// Set the on-disk cache, filling the fourth typestate slot.
    #[must_use]
    pub fn cache(self, cache: Cache) -> ApplicationBuilder<T1, T2, T3, Cache, T4, T5, T6> {
        ApplicationBuilder {
            terminal: self.terminal,
            client: self.client,
            categories: self.categories,
            cache,
            config: self.config,
            theme: self.theme,
            interactor: self.interactor,
            authenticator: self.authenticator,
            github_client: self.github_client,
            clock: self.clock,
            dry_run: self.dry_run,
        }
    }
}
impl<T1, T2, T3, T4, T5, T6> ApplicationBuilder<T1, T2, T3, T4, (), T5, T6> {
    /// Set the application config, filling the fifth typestate slot.
    #[must_use]
    pub fn config(self, config: Config) -> ApplicationBuilder<T1, T2, T3, T4, Config, T5, T6> {
        ApplicationBuilder {
            terminal: self.terminal,
            client: self.client,
            categories: self.categories,
            cache: self.cache,
            config,
            theme: self.theme,
            interactor: self.interactor,
            authenticator: self.authenticator,
            github_client: self.github_client,
            clock: self.clock,
            dry_run: self.dry_run,
        }
    }
}
impl<T1, T2, T3, T4, T5, T6> ApplicationBuilder<T1, T2, T3, T4, T5, (), T6> {
    /// Set the UI theme, filling the sixth typestate slot.
    #[must_use]
    pub fn theme(self, theme: Theme) -> ApplicationBuilder<T1, T2, T3, T4, T5, Theme, T6> {
        ApplicationBuilder {
            terminal: self.terminal,
            client: self.client,
            categories: self.categories,
            cache: self.cache,
            config: self.config,
            theme,
            interactor: self.interactor,
            authenticator: self.authenticator,
            github_client: self.github_client,
            clock: self.clock,
            dry_run: self.dry_run,
        }
    }
}
impl<T1, T2, T3, T4, T5, T6> ApplicationBuilder<T1, T2, T3, T4, T5, T6, ()> {
    /// Set the user interactor (editor integration), filling the last
    /// typestate slot.
    #[must_use]
    pub fn interactor(
        self,
        interactor: Box<dyn Interact>,
    ) -> ApplicationBuilder<T1, T2, T3, T4, T5, T6, Box<dyn Interact>> {
        ApplicationBuilder {
            terminal: self.terminal,
            client: self.client,
            categories: self.categories,
            cache: self.cache,
            config: self.config,
            theme: self.theme,
            interactor,
            authenticator: self.authenticator,
            github_client: self.github_client,
            clock: self.clock,
            dry_run: self.dry_run,
        }
    }
}
impl<T1, T2, T3, T4, T5, T6, T7> ApplicationBuilder<T1, T2, T3, T4, T5, T6, T7> {
    /// Set the authenticator (optional; a default is used when unset).
    #[must_use]
    pub fn authenticator(self, authenticator: Authenticator) -> Self {
        Self {
            authenticator: Some(authenticator),
            ..self
        }
    }
    /// Set the GitHub client used for the notifications feature (optional).
    #[must_use]
    pub fn github_client(self, github_client: GithubClient) -> Self {
        Self {
            github_client: Some(github_client),
            ..self
        }
    }
    /// Override the clock (optional; the system clock is used when unset).
    #[must_use]
    pub fn clock(self, clock: Box<dyn Clock>) -> Self {
        Self {
            clock: Some(clock),
            ..self
        }
    }
    /// When `true`, the application is marked to quit immediately
    /// (see `Application::new`).
    #[must_use]
    pub fn dry_run(self, dry_run: bool) -> Self {
        Self { dry_run, ..self }
    }
}
// `build` only exists once every required typestate slot is filled.
impl ApplicationBuilder<Terminal, Client, Categories, Cache, Config, Theme, Box<dyn Interact>> {
    /// Consume the builder and construct the `Application`.
    #[must_use]
    pub fn build(self) -> Application {
        Application::new(self)
    }
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_term/src/application/state.rs | crates/synd_term/src/application/state.rs | use crate::command::Command;
use bitflags::bitflags;
bitflags! {
    /// One-shot flags consumed by the event loop: request a re-render
    /// and/or application shutdown.
    pub(super) struct Should: u64 {
        const Render = 1 << 0;
        const Quit = 1 << 1;
    }
}
/// Whether the terminal currently has input focus.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub(crate) enum TerminalFocus {
    Gained,
    Lost,
}
/// Event-loop bookkeeping: pending `Should` flags plus terminal focus.
pub(super) struct State {
    pub(super) flags: Should,
    // Assumed focused at startup; updated via focus_gained/focus_lost.
    focus: TerminalFocus,
}
impl State {
    /// Initial state: no pending flags, terminal assumed focused.
    pub(super) fn new() -> Self {
        let flags = Should::empty();
        Self {
            focus: TerminalFocus::Gained,
            flags,
        }
    }
    /// Current terminal focus.
    pub(super) fn focus(&self) -> TerminalFocus {
        self.focus
    }
    /// Record that the terminal regained focus; yields no follow-up command.
    pub(super) fn focus_gained(&mut self) -> Option<Command> {
        self.transition(TerminalFocus::Gained)
    }
    /// Record that the terminal lost focus; yields no follow-up command.
    pub(super) fn focus_lost(&mut self) -> Option<Command> {
        self.transition(TerminalFocus::Lost)
    }
    // Shared helper for the two focus transitions above.
    fn transition(&mut self, focus: TerminalFocus) -> Option<Command> {
        self.focus = focus;
        None
    }
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_term/src/application/clock.rs | crates/synd_term/src/application/clock.rs | use chrono::{DateTime, Utc};
/// Source of the current UTC time; an alternative implementation can be
/// injected via `ApplicationBuilder::clock`.
pub trait Clock {
    /// The current moment in UTC.
    fn now(&self) -> DateTime<Utc>;
}
/// `Clock` backed by the system time (`chrono::Utc::now`).
pub struct SystemClock;
impl Clock for SystemClock {
    fn now(&self) -> DateTime<Utc> {
        Utc::now()
    }
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_term/src/application/input_parser.rs | crates/synd_term/src/application/input_parser.rs | use nom_language::error::{VerboseError, VerboseErrorKind};
use thiserror::Error;
use crate::{
client::synd_api::mutation::subscribe_feed::SubscribeFeedInput,
config::Categories,
types::{self},
};
// Verbose nom error carrying context labels used to build error messages.
type NomError<'s> = VerboseError<&'s str>;
// Context labels attached to sub-parsers; matched when rendering errors.
const CTX_REQUIREMENT: &str = "requirement";
const CTX_CATEGORY: &str = "category";
const CTX_CATEGORY_POST: &str = "category_post";
const CTX_URL: &str = "url";
/// Error returned when the subscription input buffer cannot be parsed.
#[derive(Error, Debug, PartialEq, Eq)]
pub(super) enum ParseFeedError {
    #[error("parse feed error: {0}")]
    Parse(String),
}
/// Parser over the raw text the user entered in the editor prompt.
pub(super) struct InputParser<'a> {
    input: &'a str,
}
impl<'a> InputParser<'a> {
    /// Template shown in the user's editor when subscribing to a feed.
    ///
    /// Lines beginning with `#` are stripped as comments by the parser.
    ///
    /// NOTE(review): the constant name carries a long-standing typo
    /// (`SUSBSCRIBE`); it is kept because the name is part of this
    /// module's interface.
    pub(super) const SUSBSCRIBE_FEED_PROMPT: &'static str =
        "# Please enter the requirement, category, and URL for subscription in the following format
#
# <requirement> <category> <url>
#
# * The requirement must be one of
# * \"MUST\"
# * \"SHOULD\"
# * \"MAY\"
# * For the category, please choose one category of the feed(for example, \"rust\")
#
# Lines beginning with '#' will be ignored, and an empty URL aborts the subscription.
#
# Example:
# MUST rust https://this-week-in-rust.org/atom.xml
";
    /// Wrap raw editor input for parsing.
    pub(super) fn new(input: &'a str) -> Self {
        Self { input }
    }
    /// Parse the buffer into a `SubscribeFeedInput`, normalizing the parsed
    /// category against the configured `categories`.
    ///
    /// # Errors
    ///
    /// Returns [`ParseFeedError::Parse`] with a message derived from the
    /// innermost failing parser context.
    pub(super) fn parse_feed_subscription(
        &self,
        categories: &Categories,
    ) -> Result<SubscribeFeedInput, ParseFeedError> {
        feed::parse(self.input)
            .map(|mut input| {
                if let Some(category) = input.category {
                    input.category = Some(categories.normalize(category));
                }
                input
            })
            .map_err(|mut verbose_err: NomError| {
                // The last pushed context is the most specific failure site.
                let msg = match verbose_err.errors.pop() {
                    Some((input, VerboseErrorKind::Context(CTX_REQUIREMENT))) => {
                        format!(
                            "Invalid requirement: must be one of 'MUST' 'SHOULD' 'MAY'. {input}"
                        )
                    }
                    Some((input, VerboseErrorKind::Context(CTX_CATEGORY_POST))) => {
                        format!("Invalid category: {input}",)
                    }
                    Some((input, VerboseErrorKind::Context(CTX_URL))) => {
                        format!("Invalid url: {input}")
                    }
                    Some((input, _)) => format!("Failed to parse input: {input}"),
                    None => "Failed to parse input".to_owned(),
                };
                ParseFeedError::Parse(msg)
            })
    }
    /// Prompt pre-filled with `feed`'s current requirement, category and
    /// URL, used when editing an existing subscription.
    pub(super) fn edit_feed_prompt(feed: &types::Feed) -> String {
        format!(
            "{}\n{requirement} {category} {feed_url}",
            Self::SUSBSCRIBE_FEED_PROMPT,
            requirement = feed.requirement(),
            category = feed.category(),
            feed_url = feed.url,
        )
    }
}
/// Parsers for the `<requirement> <category> <url>` subscription line.
mod feed {
    use nom::{
        AsChar, Finish, IResult, Parser,
        branch::alt,
        bytes::complete::{tag_no_case, take_while, take_while_m_n},
        character::complete::{multispace0, multispace1},
        combinator::{map, value},
        error::context,
        sequence::delimited,
    };
    use nom_language::error::{VerboseError, VerboseErrorKind};
    use synd_feed::types::{Category, FeedUrl};
    use url::Url;
    use super::NomError;
    use crate::{
        application::input_parser::{
            CTX_CATEGORY, CTX_CATEGORY_POST, CTX_REQUIREMENT, CTX_URL, comment,
        },
        client::synd_api::mutation::subscribe_feed::{Requirement, SubscribeFeedInput},
    };
    // Entry point: strip surrounding comment lines, then parse the feed line.
    pub(super) fn parse(s: &'_ str) -> Result<SubscribeFeedInput, NomError<'_>> {
        delimited(comment::comments, feed_input, comment::comments)
            .parse(s)
            .finish()
            .map(|(_, input)| input)
    }
    // `<requirement> <category> <url>` separated by whitespace. The space
    // after the category carries CTX_CATEGORY_POST so a missing category
    // is reported as a category error.
    fn feed_input(s: &'_ str) -> IResult<&'_ str, SubscribeFeedInput, NomError<'_>> {
        let (remain, (_, requirement, _, category, _, feed_url, _)) = (
            multispace0,
            requirement,
            multispace1,
            category,
            context(CTX_CATEGORY_POST, multispace1),
            url,
            multispace0,
        )
            .parse(s)?;
        Ok((
            remain,
            SubscribeFeedInput {
                url: feed_url,
                requirement: Some(requirement),
                category: Some(category),
            },
        ))
    }
    // Case-insensitive MUST / SHOULD / MAY.
    pub fn requirement(s: &'_ str) -> IResult<&'_ str, Requirement, NomError<'_>> {
        context(
            CTX_REQUIREMENT,
            alt((
                value(Requirement::MUST, tag_no_case("MUST")),
                value(Requirement::SHOULD, tag_no_case("SHOULD")),
                value(Requirement::MAY, tag_no_case("MAY")),
            )),
        )
        .parse(s)
    }
    // 1..=20 alphanumeric characters (normalization happens in
    // `Category::new`; see the `parse_category` test below).
    fn category(s: &'_ str) -> IResult<&'_ str, Category<'static>, NomError<'_>> {
        let (remain, category) = context(
            CTX_CATEGORY,
            take_while_m_n(1, 20, |c: char| c.is_alphanum()),
        )
        .parse(s)?;
        Ok((
            remain,
            Category::new(category.to_owned()).expect("this is a bug"),
        ))
    }
    // Take a whitespace-free token and validate it as a URL. An invalid
    // URL becomes a non-recoverable `Failure` carrying the "url" context.
    fn url(s: &'_ str) -> IResult<&'_ str, FeedUrl, NomError<'_>> {
        let (remain, url) = context(
            CTX_URL,
            map(take_while(|c: char| !c.is_whitespace()), |s: &str| {
                s.to_owned()
            }),
        )
        .parse(s)?;
        match Url::parse(&url) {
            Ok(url) => Ok((remain, FeedUrl::from(url))),
            Err(err) => {
                tracing::warn!("Invalid url: {err}");
                let nom_err = VerboseError {
                    errors: vec![(s, VerboseErrorKind::Context("url"))],
                };
                Err(nom::Err::Failure(nom_err))
            }
        }
    }
    #[cfg(test)]
    mod tests {
        use super::*;
        #[test]
        fn parse_requirement() {
            assert_eq!(requirement("must"), Ok(("", Requirement::MUST)));
            assert_eq!(requirement("Must"), Ok(("", Requirement::MUST)));
            assert_eq!(requirement("MUST"), Ok(("", Requirement::MUST)));
            assert_eq!(requirement("should"), Ok(("", Requirement::SHOULD)));
            assert_eq!(requirement("Should"), Ok(("", Requirement::SHOULD)));
            assert_eq!(requirement("SHOULD"), Ok(("", Requirement::SHOULD)));
            assert_eq!(requirement("may"), Ok(("", Requirement::MAY)));
            assert_eq!(requirement("May"), Ok(("", Requirement::MAY)));
            assert_eq!(requirement("MAY"), Ok(("", Requirement::MAY)));
        }
        #[test]
        fn parse_category() {
            assert_eq!(category("rust"), Ok(("", Category::new("rust").unwrap())));
            assert_eq!(category("Rust"), Ok(("", Category::new("rust").unwrap())));
        }
        #[test]
        fn parse_feed_input() {
            assert_eq!(
                feed_input("MUST rust https://example.ymgyt.io/atom.xml"),
                Ok((
                    "",
                    SubscribeFeedInput {
                        url: "https://example.ymgyt.io/atom.xml".try_into().unwrap(),
                        requirement: Some(Requirement::MUST),
                        category: Some(Category::new("rust").unwrap())
                    }
                ))
            );
        }
        #[test]
        fn parse_feed_input_error() {
            let tests = vec![
                (
                    "foo rust https://example.ymgyt.io/atom.xml",
                    CTX_REQUIREMENT,
                ),
                (
                    "should https://example.ymgyt.io/atom.xml",
                    CTX_CATEGORY_POST,
                ),
            ];
            for test in tests {
                let (_, kind) = feed_input(test.0)
                    .finish()
                    .unwrap_err()
                    .errors
                    .pop()
                    .unwrap();
                assert_eq!(kind, VerboseErrorKind::Context(test.1));
            }
            let err = feed_input("should https://example.ymgyt.io/atom.xml")
                .finish()
                .unwrap_err()
                .errors;
            println!("{err:?}");
        }
    }
}
/// Parsers that skip `#`-prefixed comment lines in the prompt buffer.
mod comment {
    use nom::{
        IResult, Parser,
        bytes::complete::{tag, take_until},
        character::complete::line_ending,
        combinator::value,
        multi::fold_many0,
        sequence::delimited,
    };
    use crate::application::input_parser::NomError;
    // Zero or more consecutive comment lines.
    pub(super) fn comments(s: &'_ str) -> IResult<&'_ str, (), NomError<'_>> {
        fold_many0(comment, || (), |acc, ()| acc).parse(s)
    }
    // A single `#...` line terminated by `\n` or `\r\n`.
    pub(super) fn comment(s: &'_ str) -> IResult<&'_ str, (), NomError<'_>> {
        value((), delimited(tag("#"), take_until("\n"), line_ending)).parse(s)
    }
    #[cfg(test)]
    mod tests {
        use super::*;
        #[test]
        fn parse_comment() {
            assert_eq!(comment("# foo\n"), Ok(("", ())),);
            assert_eq!(comment("# foo\r\n"), Ok(("", ())),);
        }
        #[test]
        fn parse_comments() {
            let s = "# comment1\n# comment2\n";
            assert_eq!(comments(s), Ok(("", ())));
        }
    }
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_term/src/application/mod.rs | crates/synd_term/src/application/mod.rs | use std::{
collections::VecDeque,
future,
num::NonZero,
ops::{ControlFlow, Sub},
pin::Pin,
sync::Arc,
time::Duration,
};
use chrono::{DateTime, Utc};
use crossterm::event::{Event as CrosstermEvent, KeyEvent, KeyEventKind};
use either::Either;
use futures_util::{FutureExt, Stream, StreamExt};
use itertools::Itertools;
use ratatui::widgets::Widget;
use synd_auth::device_flow::DeviceAuthorizationResponse;
use synd_feed::types::FeedUrl;
use tokio::time::{Instant, Sleep};
use update_informer::Version;
use url::Url;
use crate::{
application::event::KeyEventResult,
auth::{self, AuthenticationProvider, Credential, CredentialError, Verified},
client::{
github::{FetchNotificationsParams, GithubClient},
synd_api::{Client, SyndApiError, mutation::subscribe_feed::SubscribeFeedInput},
},
command::{ApiResponse, Command},
config::{self, Categories},
interact::Interact,
job::Jobs,
keymap::{KeymapId, Keymaps},
terminal::Terminal,
types::github::{IssueOrPullRequest, Notification},
ui::{
self,
components::{
Components, authentication::AuthenticateState, filter::Filterer,
gh_notifications::GhNotifications, root::Root, subscription::UnsubscribeSelection,
tabs::Tab,
},
theme::{Palette, Theme},
},
};
mod direction;
pub(crate) use direction::{Direction, IndexOutOfRange};
mod in_flight;
pub(crate) use in_flight::{InFlight, RequestId, RequestSequence};
mod input_parser;
use input_parser::InputParser;
pub use auth::authenticator::{Authenticator, DeviceFlows, JwtService};
mod clock;
pub use clock::{Clock, SystemClock};
mod cache;
pub use cache::{Cache, LoadCacheError, PersistCacheError};
mod builder;
pub use builder::ApplicationBuilder;
mod app_config;
pub use app_config::{Config, Features};
pub(crate) mod event;
mod state;
pub(crate) use state::TerminalFocus;
use state::{Should, State};
/// How fetched data merges into existing component state:
/// appended to it, or replacing it wholesale.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum Populate {
    Append,
    Replace,
}
/// Top-level TUI application: owns the terminal, API clients, UI
/// components, background jobs and event-loop state.
pub struct Application {
    clock: Box<dyn Clock>,
    terminal: Terminal,
    client: Client,
    github_client: Option<GithubClient>,
    // Job futures resolve to `Command`s consumed by the event loop.
    jobs: Jobs,
    background_jobs: Jobs,
    components: Components,
    interactor: Box<dyn Interact>,
    authenticator: Authenticator,
    in_flight: InFlight,
    cache: Cache,
    theme: Theme,
    idle_timer: Pin<Box<Sleep>>,
    config: Config,
    key_handlers: event::KeyHandlers,
    categories: Categories,
    latest_release: Option<Version>,
    state: State,
}
impl Application {
    /// Construct an empty `ApplicationBuilder`.
    pub fn builder() -> ApplicationBuilder {
        ApplicationBuilder::default()
    }
    /// Construct `Application` from builder.
    /// Configure keymaps for terminal use
    fn new(
        builder: ApplicationBuilder<
            Terminal,
            Client,
            Categories,
            Cache,
            Config,
            Theme,
            Box<dyn Interact>,
        >,
    ) -> Self {
        let ApplicationBuilder {
            terminal,
            client,
            github_client,
            categories,
            cache,
            config,
            theme,
            authenticator,
            interactor,
            clock,
            dry_run,
        } = builder;
        // Only the global and login keymaps are active before
        // authentication; the rest are enabled later.
        let key_handlers = {
            let mut keymaps = Keymaps::default();
            keymaps.enable(KeymapId::Global);
            keymaps.enable(KeymapId::Login);
            let mut key_handlers = event::KeyHandlers::new();
            key_handlers.push(event::KeyHandler::Keymaps(keymaps));
            key_handlers
        };
        let mut state = State::new();
        // Dry-run: request quit so the event loop exits on its first pass.
        if dry_run {
            state.flags = Should::Quit;
        }
        Self {
            clock: clock.unwrap_or_else(|| Box::new(SystemClock)),
            terminal,
            client,
            github_client,
            // The secondary rate limit of the GitHub API is 100 concurrent requests, so we have set it to 90.
            jobs: Jobs::new(NonZero::new(90).unwrap()),
            background_jobs: Jobs::new(NonZero::new(10).unwrap()),
            components: Components::new(&config.features),
            interactor,
            authenticator: authenticator.unwrap_or_else(Authenticator::new),
            in_flight: InFlight::new().with_throbber_timer_interval(config.throbber_timer_interval),
            cache,
            theme,
            idle_timer: Box::pin(tokio::time::sleep(config.idle_timer_interval)),
            config,
            key_handlers,
            categories,
            latest_release: None,
            state,
        }
    }
    /// Current time as reported by the injected clock.
    fn now(&self) -> DateTime<Utc> {
        self.clock.now()
    }
    /// JWT service owned by the authenticator.
    fn jwt_service(&self) -> &JwtService {
        &self.authenticator.jwt_service
    }
    /// Mutable access to the keymaps.
    /// The `unwrap` holds because the keymaps handler is installed in `new`.
    fn keymaps(&mut self) -> &mut Keymaps {
        self.key_handlers.keymaps_mut().unwrap()
    }
    /// Run the application: initialize, drive the event loop on `input`
    /// until quit is requested, then restore the terminal.
    pub async fn run<S>(mut self, input: &mut S) -> anyhow::Result<()>
    where
        S: Stream<Item = std::io::Result<CrosstermEvent>> + Unpin,
    {
        self.init().await?;
        self.event_loop(input).await;
        // Cleanup failure is non-fatal at shutdown.
        self.cleanup().ok();
        Ok(())
    }
    /// Initialize application.
    /// Setup terminal and handle cache.
    async fn init(&mut self) -> anyhow::Result<()> {
        match self.terminal.init() {
            Ok(()) => Ok(()),
            Err(err) => {
                // When quit is already requested (dry-run), a terminal
                // init failure is tolerated so the app can exit cleanly.
                if self.state.flags.contains(Should::Quit) {
                    tracing::warn!("Failed to init terminal: {err}")
                    Ok(())
                } else {
                    Err(err)
                }
            }
        }?;
        if self.config.features.enable_github_notification {
            // Restore previous filter options
            match self.cache.load_gh_notification_filter_options() {
                Ok(options) => {
                    self.components.gh_notifications =
                        GhNotifications::with_filter_options(options);
                }
                Err(err) => {
                    tracing::warn!("Load github notification filter options: {err}");
                }
            }
        }
        // A failed credential restore is not fatal; the user can log in.
        match self.restore_credential().await {
            Ok(cred) => self.handle_initial_credential(cred),
            Err(err) => tracing::warn!("Restore credential: {err}"),
        }
        Ok(())
    }
    /// Restore a credential from the cache; `persist_when_refreshed`
    /// writes a refreshed token back to the cache.
    async fn restore_credential(&self) -> Result<Verified<Credential>, CredentialError> {
        let restore = auth::Restore {
            jwt_service: self.jwt_service(),
            cache: &self.cache,
            now: self.now(),
            persist_when_refreshed: true,
        };
        restore.restore().await
    }
fn handle_initial_credential(&mut self, cred: Verified<Credential>) {
self.set_credential(cred);
self.initial_fetch();
self.check_latest_release();
self.components.auth.authenticated();
self.reset_idle_timer();
self.should_render();
self.keymaps()
.disable(KeymapId::Login)
.enable(KeymapId::Tabs)
.enable(KeymapId::Entries)
.enable(KeymapId::Filter);
self.config
.features
.enable_github_notification
.then(|| self.keymaps().enable(KeymapId::GhNotification));
}
    /// Install the credential on the API client and schedule its refresh.
    fn set_credential(&mut self, cred: Verified<Credential>) {
        self.schedule_credential_refreshing(&cred);
        self.client.set_credential(cred);
    }
    /// Queue the first data fetches after authentication: the first page
    /// of entries, and (when enabled) GitHub notifications.
    fn initial_fetch(&mut self) {
        tracing::info!("Initial fetch");
        self.jobs.push(
            future::ready(Ok(Command::FetchEntries {
                after: None,
                first: self.config.entries_per_pagination,
            }))
            .boxed(),
        );
        if self.config.features.enable_github_notification
            && let Some(fetch) = self.components.gh_notifications.fetch_next_if_needed()
        {
            self.jobs.push(future::ready(Ok(fetch)).boxed());
        }
    }
    /// Restore terminal state and print something to console if necessary
    fn cleanup(&mut self) -> anyhow::Result<()> {
        if self.config.features.enable_github_notification {
            // Persist filter options so `init` can restore them next run.
            let options = self.components.gh_notifications.filter_options();
            match self.cache.persist_gh_notification_filter_options(options) {
                Ok(()) => {}
                Err(err) => {
                    tracing::warn!("Failed to persist github notification filter options: {err}");
                }
            }
        }
        self.terminal.restore()?;
        // Make sure inform after terminal restored
        self.inform_latest_release();
        Ok(())
    }
    /// Drive the main event loop until quit is requested.
    async fn event_loop<S>(&mut self, input: &mut S)
    where
        S: Stream<Item = std::io::Result<CrosstermEvent>> + Unpin,
    {
        self.render();
        loop {
            if self.event_loop_until_idle(input).await.is_break() {
                break;
            }
        }
    }
    /// One pass of the event loop: multiplex terminal events, job results,
    /// throbber ticks and the idle timer into `Command`s, apply them, and
    /// render when requested. Returns `Break` once quit is requested.
    pub async fn event_loop_until_idle<S>(&mut self, input: &mut S) -> ControlFlow<()>
    where
        S: Stream<Item = std::io::Result<CrosstermEvent>> + Unpin,
    {
        // Scratch queue shared with `apply`, which may push follow-up commands.
        let mut queue = VecDeque::with_capacity(2);
        loop {
            let command = tokio::select! {
                // `biased` gives user input priority over background work.
                biased;
                Some(event) = input.next() => {
                    self.handle_terminal_event(event)
                }
                Some(command) = self.jobs.next() => {
                    Some(command.unwrap())
                }
                Some(command) = self.background_jobs.next() => {
                    Some(command.unwrap())
                }
                () = self.in_flight.throbber_timer() => {
                    Some(Command::RenderThrobber)
                }
                () = &mut self.idle_timer => {
                    Some(Command::Idle)
                }
            };
            if let Some(command) = command {
                queue.push_back(command);
                self.apply(&mut queue);
            }
            if self.state.flags.contains(Should::Render) {
                self.render();
                self.state.flags.remove(Should::Render);
                self.components.prompt.clear_error_message();
            }
            if self.state.flags.contains(Should::Quit) {
                self.state.flags.remove(Should::Quit); // for testing
                break ControlFlow::Break(());
            }
        }
    }
#[expect(clippy::too_many_lines)]
#[tracing::instrument(skip_all)]
fn apply(&mut self, queue: &mut VecDeque<Command>) {
while let Some(command) = queue.pop_front() {
let _guard = tracing::info_span!("apply", %command).entered();
match command {
Command::Nop => {}
Command::Quit => self.state.flags.insert(Should::Quit),
Command::ResizeTerminal { .. } => {
self.should_render();
}
Command::RenderThrobber => {
self.in_flight.reset_throbber_timer();
self.in_flight.inc_throbber_step();
self.should_render();
}
Command::Idle => {
self.handle_idle();
}
Command::Authenticate => {
if self.components.auth.state() != &AuthenticateState::NotAuthenticated {
continue;
}
let provider = self.components.auth.selected_provider();
self.init_device_flow(provider);
}
Command::MoveAuthenticationProvider(direction) => {
self.components.auth.move_selection(direction);
self.should_render();
}
Command::HandleApiResponse {
request_seq,
response,
} => {
self.in_flight.remove(request_seq);
match response {
ApiResponse::DeviceFlowAuthorization {
provider,
device_authorization,
} => {
self.handle_device_flow_authorization_response(
provider,
device_authorization,
);
}
ApiResponse::DeviceFlowCredential { credential } => {
self.complete_device_authroize_flow(credential);
}
ApiResponse::SubscribeFeed { feed } => {
self.components.subscription.upsert_subscribed_feed(*feed);
self.fetch_entries(
Populate::Replace,
None,
self.config.entries_per_pagination,
);
self.should_render();
}
ApiResponse::UnsubscribeFeed { url } => {
self.components.subscription.remove_unsubscribed_feed(&url);
self.components.entries.remove_unsubscribed_entries(&url);
self.components.filter.update_categories(
&self.categories,
Populate::Replace,
self.components.entries.entries(),
);
self.should_render();
}
ApiResponse::FetchSubscription {
populate,
subscription,
} => {
// paginate
subscription.feeds.page_info.has_next_page.then(|| {
queue.push_back(Command::FetchSubscription {
after: subscription.feeds.page_info.end_cursor.clone(),
first: subscription.feeds.nodes.len().try_into().unwrap_or(0),
});
});
// how we show fetched errors in ui?
if !subscription.feeds.errors.is_empty() {
tracing::warn!(
"Failed fetched feeds: {:?}",
subscription.feeds.errors
);
}
self.components
.subscription
.update_subscription(populate, subscription);
self.should_render();
}
ApiResponse::FetchEntries { populate, payload } => {
self.components.filter.update_categories(
&self.categories,
populate,
payload.entries.as_slice(),
);
// paginate
payload.page_info.has_next_page.then(|| {
queue.push_back(Command::FetchEntries {
after: payload.page_info.end_cursor.clone(),
first: self
.config
.entries_limit
.saturating_sub(
self.components.entries.count() + payload.entries.len(),
)
.min(payload.entries.len())
.try_into()
.unwrap_or(0),
});
});
self.components.entries.update_entries(populate, payload);
self.should_render();
}
ApiResponse::FetchGithubNotifications {
notifications,
populate,
} => {
self.components
.gh_notifications
.update_notifications(populate, notifications)
.into_iter()
.for_each(|command| queue.push_back(command));
self.components
.gh_notifications
.fetch_next_if_needed()
.into_iter()
.for_each(|command| queue.push_back(command));
if populate == Populate::Replace {
self.components.filter.clear_gh_notifications_categories();
}
self.should_render();
}
ApiResponse::FetchGithubIssue {
notification_id,
issue,
} => {
if let Some(notification) = self
.components
.gh_notifications
.update_issue(notification_id, issue, &self.categories)
{
let categories = notification.categories().cloned();
self.components.filter.update_gh_notification_categories(
&self.categories,
Populate::Append,
categories,
);
}
self.should_render();
}
ApiResponse::FetchGithubPullRequest {
notification_id,
pull_request,
} => {
if let Some(notification) =
self.components.gh_notifications.update_pull_request(
notification_id,
pull_request,
&self.categories,
)
{
let categories = notification.categories().cloned();
self.components.filter.update_gh_notification_categories(
&self.categories,
Populate::Append,
categories,
);
}
self.should_render();
}
ApiResponse::MarkGithubNotificationAsDone { notification_id } => {
self.components
.gh_notifications
.marked_as_done(notification_id);
self.should_render();
}
ApiResponse::UnsubscribeGithubThread { .. } => {
// do nothing
}
}
}
Command::RefreshCredential { credential } => {
self.set_credential(credential);
}
Command::MoveTabSelection(direction) => {
self.keymaps()
.disable(KeymapId::Subscription)
.disable(KeymapId::Entries)
.disable(KeymapId::GhNotification);
match self.components.tabs.move_selection(direction) {
Tab::Feeds => {
self.keymaps().enable(KeymapId::Subscription);
if !self.components.subscription.has_subscription() {
queue.push_back(Command::FetchSubscription {
after: None,
first: self.config.feeds_per_pagination,
});
}
}
Tab::Entries => {
self.keymaps().enable(KeymapId::Entries);
}
Tab::GitHub => {
self.keymaps().enable(KeymapId::GhNotification);
}
}
self.should_render();
}
Command::MoveSubscribedFeed(direction) => {
self.components.subscription.move_selection(direction);
self.should_render();
}
Command::MoveSubscribedFeedFirst => {
self.components.subscription.move_first();
self.should_render();
}
Command::MoveSubscribedFeedLast => {
self.components.subscription.move_last();
self.should_render();
}
Command::PromptFeedSubscription => {
self.prompt_feed_subscription();
self.should_render();
}
Command::PromptFeedEdition => {
self.prompt_feed_edition();
self.should_render();
}
Command::PromptFeedUnsubscription => {
if self.components.subscription.selected_feed().is_some() {
self.components.subscription.toggle_unsubscribe_popup(true);
self.keymaps().enable(KeymapId::UnsubscribePopupSelection);
self.should_render();
}
}
Command::MoveFeedUnsubscriptionPopupSelection(direction) => {
self.components
.subscription
.move_unsubscribe_popup_selection(direction);
self.should_render();
}
Command::SelectFeedUnsubscriptionPopup => {
if let (UnsubscribeSelection::Yes, Some(feed)) =
self.components.subscription.unsubscribe_popup_selection()
{
self.unsubscribe_feed(feed.url.clone());
}
queue.push_back(Command::CancelFeedUnsubscriptionPopup);
self.should_render();
}
Command::CancelFeedUnsubscriptionPopup => {
self.components.subscription.toggle_unsubscribe_popup(false);
self.keymaps().disable(KeymapId::UnsubscribePopupSelection);
self.should_render();
}
Command::SubscribeFeed { input } => {
self.subscribe_feed(input);
self.should_render();
}
Command::FetchSubscription { after, first } => {
self.fetch_subscription(Populate::Append, after, first);
}
Command::ReloadSubscription => {
self.fetch_subscription(
Populate::Replace,
None,
self.config.feeds_per_pagination,
);
self.should_render();
}
Command::OpenFeed => {
self.open_feed();
}
Command::FetchEntries { after, first } => {
self.fetch_entries(Populate::Append, after, first);
}
Command::ReloadEntries => {
self.fetch_entries(Populate::Replace, None, self.config.entries_per_pagination);
self.should_render();
}
Command::MoveEntry(direction) => {
self.components.entries.move_selection(direction);
self.should_render();
}
Command::MoveEntryFirst => {
self.components.entries.move_first();
self.should_render();
}
Command::MoveEntryLast => {
self.components.entries.move_last();
self.should_render();
}
Command::OpenEntry => {
self.open_entry();
}
Command::BrowseEntry => {
self.browse_entry();
}
Command::MoveFilterRequirement(direction) => {
let filterer = self.components.filter.move_requirement(direction);
self.apply_filterer(filterer)
.into_iter()
.for_each(|command| queue.push_back(command));
self.should_render();
}
Command::ActivateCategoryFilterling => {
let keymap = self
.components
.filter
.activate_category_filtering(self.components.tabs.current().into());
self.keymaps().update(KeymapId::CategoryFiltering, keymap);
self.should_render();
}
Command::ActivateSearchFiltering => {
let prompt = self.components.filter.activate_search_filtering();
self.key_handlers.push(event::KeyHandler::Prompt(prompt));
self.should_render();
}
Command::PromptChanged => {
if self.components.filter.is_search_active() {
let filterer = self
.components
.filter
.filterer(self.components.tabs.current().into());
self.apply_filterer(filterer)
.into_iter()
.for_each(|command| queue.push_back(command));
self.should_render();
}
}
Command::DeactivateFiltering => {
self.components.filter.deactivate_filtering();
self.keymaps().disable(KeymapId::CategoryFiltering);
self.key_handlers.remove_prompt();
self.should_render();
}
Command::ToggleFilterCategory { category, lane } => {
let filter = self
.components
.filter
.toggle_category_state(&category, lane);
self.apply_filterer(filter)
.into_iter()
.for_each(|command| queue.push_back(command));
self.should_render();
}
Command::ActivateAllFilterCategories { lane } => {
let filterer = self.components.filter.activate_all_categories_state(lane);
self.apply_filterer(filterer)
.into_iter()
.for_each(|command| queue.push_back(command));
self.should_render();
}
Command::DeactivateAllFilterCategories { lane } => {
let filterer = self.components.filter.deactivate_all_categories_state(lane);
self.apply_filterer(filterer)
.into_iter()
.for_each(|command| queue.push_back(command));
self.should_render();
}
Command::FetchGhNotifications { populate, params } => {
self.fetch_gh_notifications(populate, params);
}
Command::MoveGhNotification(direction) => {
self.components.gh_notifications.move_selection(direction);
self.should_render();
}
Command::MoveGhNotificationFirst => {
self.components.gh_notifications.move_first();
self.should_render();
}
Command::MoveGhNotificationLast => {
self.components.gh_notifications.move_last();
self.should_render();
}
Command::OpenGhNotification { with_mark_as_done } => {
self.open_notification();
with_mark_as_done.then(|| self.mark_gh_notification_as_done(false));
}
Command::ReloadGhNotifications => {
let params = self.components.gh_notifications.reload();
self.fetch_gh_notifications(Populate::Replace, params);
}
Command::FetchGhNotificationDetails { contexts } => {
self.fetch_gh_notification_details(contexts);
}
Command::MarkGhNotificationAsDone { all } => {
self.mark_gh_notification_as_done(all);
}
Command::UnsubscribeGhThread => {
// Unlike the web UI, simply unsubscribing does not mark it as done
// and it remains as unread.
// Therefore, when reloading, the unsubscribed notification is displayed again.
// To address this, we will implicitly mark it as done when unsubscribing.
self.unsubscribe_gh_thread();
self.mark_gh_notification_as_done(false);
}
Command::OpenGhNotificationFilterPopup => {
self.components.gh_notifications.open_filter_popup();
self.keymaps().enable(KeymapId::GhNotificationFilterPopup);
self.keymaps().disable(KeymapId::GhNotification);
self.keymaps().disable(KeymapId::Filter);
self.keymaps().disable(KeymapId::Entries);
self.keymaps().disable(KeymapId::Subscription);
self.should_render();
}
Command::CloseGhNotificationFilterPopup => {
self.components
.gh_notifications
.close_filter_popup()
.into_iter()
.for_each(|command| queue.push_back(command));
self.keymaps().disable(KeymapId::GhNotificationFilterPopup);
self.keymaps().enable(KeymapId::GhNotification);
self.keymaps().enable(KeymapId::Filter);
self.keymaps().enable(KeymapId::Entries);
self.keymaps().enable(KeymapId::Subscription);
self.should_render();
}
Command::UpdateGhnotificationFilterPopupOptions(updater) => {
self.components
.gh_notifications
.update_filter_options(&updater);
self.should_render();
}
Command::RotateTheme => {
self.rotate_theme();
self.should_render();
}
Command::InformLatestRelease(version) => {
self.latest_release = Some(version);
}
Command::HandleError { message } => {
self.handle_error_message(message, None);
}
Command::HandleApiError { error, request_seq } => {
let message = match Arc::into_inner(error).expect("error never cloned") {
SyndApiError::Unauthorized { url } => {
tracing::warn!(
"api return unauthorized status code. the cached credential are likely invalid, so try to clean cache"
);
self.cache.clean().ok();
format!(
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | true |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_term/src/application/direction.rs | crates/synd_term/src/application/direction.rs | #[derive(Debug, PartialEq, Eq, Clone, Copy)]
/// Cursor movement direction within a list or grid.
pub(crate) enum Direction {
    Up,
    Down,
    Left,
    Right,
}
/// How `Direction::apply` resolves an index that would move out of range.
#[derive(PartialEq, Eq, Clone, Copy, Debug)]
pub(crate) enum IndexOutOfRange {
    /// Wrap around to the opposite end of the collection.
    Wrapping,
    /// Clamp to the nearest valid index.
    #[allow(dead_code)]
    Saturating,
}
impl Direction {
    /// Move `index` one step in this direction within a collection of
    /// `len` elements, resolving an out-of-range result according to
    /// `out`. An empty collection (`len == 0`) always yields `0`.
    #[allow(
        clippy::cast_sign_loss,
        clippy::cast_possible_truncation,
        clippy::cast_possible_wrap
    )]
    pub(crate) fn apply(self, index: usize, len: usize, out: IndexOutOfRange) -> usize {
        if len == 0 {
            return 0;
        }
        // Signed arithmetic so that stepping below zero is representable.
        let step: i64 = match self {
            Direction::Up | Direction::Left => -1,
            Direction::Down | Direction::Right => 1,
        };
        let moved = index as i64 + step;
        if moved < 0 {
            // Fell off the front of the collection.
            match out {
                IndexOutOfRange::Wrapping => len - 1,
                IndexOutOfRange::Saturating => 0,
            }
        } else if moved >= len as i64 {
            // Fell off the back of the collection.
            match out {
                IndexOutOfRange::Wrapping => 0,
                IndexOutOfRange::Saturating => len - 1,
            }
        } else {
            moved as usize
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use proptest::prelude::{Just, ProptestConfig, Strategy, prop_oneof, proptest};
    proptest! {
        #![proptest_config(ProptestConfig::default())]
        // Property: applying a direction either moves the index by exactly
        // one step, or lands on a boundary (0 or len - 1) when wrapping or
        // saturating kicks in.
        #[test]
        #[allow(clippy::cast_possible_wrap)]
        fn apply(
            dir in direction_strategy(),
            index in 0..10_usize,
            len in 0..10_usize,
            out in index_out_of_range_strategy())
        {
            let apply = dir.apply(index, len,out) as i64;
            let index = index as i64;
            let len = len as i64;
            assert!(
                (apply - index).abs() == 1 ||
                apply == 0 ||
                apply == len-1
            );
        }
    }
    // Uniformly pick one of the four directions.
    fn direction_strategy() -> impl Strategy<Value = Direction> {
        prop_oneof![
            Just(Direction::Up),
            Just(Direction::Down),
            Just(Direction::Left),
            Just(Direction::Right),
        ]
    }
    // Uniformly pick an out-of-range policy.
    fn index_out_of_range_strategy() -> impl Strategy<Value = IndexOutOfRange> {
        prop_oneof![
            Just(IndexOutOfRange::Wrapping),
            Just(IndexOutOfRange::Saturating)
        ]
    }
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_term/src/application/app_config.rs | crates/synd_term/src/application/app_config.rs | use std::time::Duration;
use crate::config;
/// Feature flags toggled from configuration.
#[derive(Debug, Clone, Default)]
pub struct Features {
    // When true, the github notification feature is enabled.
    pub enable_github_notification: bool,
}
/// Runtime configuration for the terminal application.
#[derive(Debug, Clone)]
pub struct Config {
    // How long without activity before the idle timer fires.
    pub idle_timer_interval: Duration,
    // Tick interval driving the in-flight throbber animation.
    pub throbber_timer_interval: Duration,
    // Maximum number of entries kept.
    pub entries_limit: usize,
    // Page size for entry pagination requests.
    pub entries_per_pagination: i64,
    // Page size for feed pagination requests.
    pub feeds_per_pagination: i64,
    pub features: Features,
}
impl Default for Config {
    fn default() -> Self {
        Self {
            // NOTE(review): this interval is in *seconds* while the
            // throbber interval below uses the same number in
            // *milliseconds* — confirm the unit here is intentional.
            idle_timer_interval: Duration::from_secs(250),
            throbber_timer_interval: Duration::from_millis(250),
            entries_limit: config::feed::DEFAULT_ENTRIES_LIMIT,
            entries_per_pagination: config::client::DEFAULT_ENTRIES_PER_PAGINATION,
            feeds_per_pagination: config::client::DEFAULT_FEEDS_PER_PAGINATION,
            features: Features::default(),
        }
    }
}
impl Config {
    /// Return a copy of this config with `idle_timer_interval` replaced.
    #[must_use]
    pub fn with_idle_timer_interval(self, idle_timer_interval: Duration) -> Self {
        let mut config = self;
        config.idle_timer_interval = idle_timer_interval;
        config
    }
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_term/src/application/in_flight.rs | crates/synd_term/src/application/in_flight.rs | use std::{
collections::HashMap,
pin::Pin,
sync::atomic::{AtomicU64, Ordering},
time::Duration,
};
use tokio::time::{Instant, Sleep};
use crate::types::github::{IssueId, NotificationId, PullRequestId};
pub type RequestSequence = u64;
/// Kind of request currently in flight, used to drive ui feedback.
#[derive(Clone, Copy, PartialEq, Eq)]
pub(crate) enum RequestId {
    DeviceFlowDeviceAuthorize,
    DeviceFlowPollAccessToken,
    FetchEntries,
    FetchSubscription,
    FetchGithubNotifications { page: u8 },
    FetchGithubIssue { id: IssueId },
    FetchGithubPullRequest { id: PullRequestId },
    SubscribeFeed,
    UnsubscribeFeed,
    MarkGithubNotificationAsDone { id: NotificationId },
    UnsubscribeGithubThread,
}
/// Manage in flight requests state
pub struct InFlight {
    // Source of request sequence numbers.
    next_request_sequence: AtomicU64,
    // Requests currently awaiting a response, keyed by sequence.
    in_flights: HashMap<RequestSequence, InFlightEntry>,
    // Timer driving the throbber animation while requests are in flight.
    throbber_timer: Pin<Box<Sleep>>,
    // Current animation frame; wraps on overflow.
    throbber_step: i8,
    throbber_timer_interval: Duration,
}
impl InFlight {
    /// Far-future deadline used to effectively "park" the throbber timer
    /// while no request is in flight (same value as the original
    /// `3600 * 24` seconds).
    const PARK_DURATION: Duration = Duration::from_secs(60 * 60 * 24);

    pub fn new() -> Self {
        Self {
            next_request_sequence: AtomicU64::new(0),
            in_flights: HashMap::new(),
            throbber_timer: Box::pin(tokio::time::sleep(Self::PARK_DURATION)),
            throbber_step: 0,
            throbber_timer_interval: Duration::from_millis(250),
        }
    }

    /// Replace the throbber tick interval.
    #[must_use]
    pub fn with_throbber_timer_interval(self, interval: Duration) -> Self {
        let mut in_flight = self;
        in_flight.throbber_timer_interval = interval;
        in_flight
    }

    /// Return the id of the most recently started request, if any.
    pub(crate) fn recent_in_flight(&self) -> Option<RequestId> {
        self.in_flights
            .values()
            .max_by_key(|entry| entry.start)
            .map(|entry| entry.request_id)
    }

    /// Wait until the throbber timer fires.
    pub async fn throbber_timer(&mut self) {
        self.throbber_timer.as_mut().await;
    }

    /// Arm the throbber timer to fire after one tick interval.
    pub fn reset_throbber_timer(&mut self) {
        let deadline = Instant::now() + self.throbber_timer_interval;
        self.throbber_timer.as_mut().reset(deadline);
    }

    /// Advance the throbber animation by one frame.
    pub fn inc_throbber_step(&mut self) {
        self.throbber_step = self.throbber_step.wrapping_add(1);
    }

    pub fn throbber_step(&self) -> i8 {
        self.throbber_step
    }

    /// Register a new in flight request and return its sequence number.
    pub(crate) fn add(&mut self, request_id: RequestId) -> RequestSequence {
        let seq = self.next_request_sequence();
        let entry = InFlightEntry {
            start: Instant::now(),
            request_id,
        };
        self.in_flights.insert(seq, entry);
        self.reset_throbber_timer();
        seq
    }

    /// Remove a completed request. When it was the last one in flight,
    /// park the throbber timer again.
    pub(crate) fn remove(&mut self, seq: RequestSequence) -> Option<RequestId> {
        let removed = self.in_flights.remove(&seq).map(|entry| entry.request_id);
        if self.in_flights.is_empty() {
            let deadline = Instant::now() + Self::PARK_DURATION;
            self.throbber_timer.as_mut().reset(deadline);
        }
        removed
    }

    fn next_request_sequence(&self) -> RequestSequence {
        self.next_request_sequence.fetch_add(1, Ordering::Relaxed)
    }
}
/// Bookkeeping for a single in flight request.
struct InFlightEntry {
    // request started at(approximate)
    start: Instant,
    request_id: RequestId,
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_term/src/application/cache/mod.rs | crates/synd_term/src/application/cache/mod.rs | use std::{
borrow::Borrow,
io,
path::{Path, PathBuf},
};
use serde::{Serialize, de::DeserializeOwned};
use synd_stdx::fs::{FileSystem, fsimpl};
use thiserror::Error;
use crate::{
auth::{Credential, Unverified},
config,
ui::components::gh_notifications::GhNotificationFilterOptions,
};
#[derive(Debug, Error)]
pub enum PersistCacheError {
#[error("io error: {path} {io} ")]
Io { path: PathBuf, io: io::Error },
#[error("serialize error: {0}")]
Serialize(#[from] serde_json::Error),
}
/// Error raised when loading a cache entry from the filesystem fails.
#[derive(Debug, Error)]
pub enum LoadCacheError {
    // NOTE(review): this variant is not constructed anywhere in this
    // module (`load` maps a missing file to `Io`) — confirm where it is
    // produced.
    #[error("cache entry not found")]
    NotFound,
    #[error("io error: {path} {io}")]
    Io { path: PathBuf, io: io::Error },
    #[error("deserialize error: {0}")]
    Deserialize(#[from] serde_json::Error),
}
/// Filesystem backed cache for credentials and ui state.
///
/// `FS` abstracts filesystem access so tests can substitute an
/// implementation.
pub struct Cache<FS = fsimpl::FileSystem> {
    // Directory under which all cache files are stored.
    dir: PathBuf,
    fs: FS,
}
impl Cache<fsimpl::FileSystem> {
    /// Construct a cache rooted at `dir` backed by the real filesystem.
    pub fn new(dir: impl Into<PathBuf>) -> Self {
        Self::with(dir, fsimpl::FileSystem::new())
    }
}
impl<FS> Cache<FS>
where
    FS: FileSystem,
{
    /// Construct a cache rooted at `dir` using the given filesystem.
    pub fn with(dir: impl Into<PathBuf>, fs: FS) -> Self {
        Self {
            dir: dir.into(),
            fs,
        }
    }
    /// Persist credential in filesystem.
    /// This is blocking operation.
    pub fn persist_credential(
        &self,
        cred: impl Borrow<Credential>,
    ) -> Result<(), PersistCacheError> {
        self.persist(&self.credential_file(), cred.borrow())
    }
    /// Persist the gh notification filter options. Blocking.
    pub(crate) fn persist_gh_notification_filter_options(
        &self,
        options: impl Borrow<GhNotificationFilterOptions>,
    ) -> Result<(), PersistCacheError> {
        self.persist(&self.gh_notification_filter_option_file(), options.borrow())
    }
    // Serialize `entry` as json to `path`, creating parent directories as
    // needed.
    fn persist<T>(&self, path: &Path, entry: &T) -> Result<(), PersistCacheError>
    where
        T: ?Sized + Serialize,
    {
        if let Some(parent) = path.parent() {
            self.fs
                .create_dir_all(parent)
                .map_err(|err| PersistCacheError::Io {
                    path: parent.to_path_buf(),
                    io: err,
                })?;
        }
        self.fs
            .create_file(path)
            .map_err(|err| PersistCacheError::Io {
                path: path.to_path_buf(),
                io: err,
            })
            .and_then(|mut file| {
                serde_json::to_writer(&mut file, entry).map_err(PersistCacheError::Serialize)
            })
    }
    /// Load credential from filesystem.
    /// This is blocking operation.
    pub fn load_credential(&self) -> Result<Unverified<Credential>, LoadCacheError> {
        self.load::<Credential>(&self.credential_file())
            .map(Unverified::from)
    }
    /// Load the gh notification filter options. Blocking.
    pub(crate) fn load_gh_notification_filter_options(
        &self,
    ) -> Result<GhNotificationFilterOptions, LoadCacheError> {
        self.load(&self.gh_notification_filter_option_file())
    }
    // Deserialize a json value of type `T` from `path`.
    // NOTE(review): a missing file surfaces as `LoadCacheError::Io`, not
    // `NotFound` — confirm whether that is intentional.
    fn load<T>(&self, path: &Path) -> Result<T, LoadCacheError>
    where
        T: DeserializeOwned,
    {
        self.fs
            .open_file(path)
            .map_err(|err| LoadCacheError::Io {
                io: err,
                path: path.to_path_buf(),
            })
            .and_then(|mut file| {
                serde_json::from_reader::<_, T>(&mut file).map_err(LoadCacheError::Deserialize)
            })
    }
    fn credential_file(&self) -> PathBuf {
        self.dir.join(config::cache::CREDENTIAL_FILE)
    }
    fn gh_notification_filter_option_file(&self) -> PathBuf {
        self.dir
            .join(config::cache::GH_NOTIFICATION_FILTER_OPTION_FILE)
    }
    /// Remove the credential cache file.
    ///
    /// NOTE(review): despite the cache also holding the gh notification
    /// filter options file, only the credential file is removed here —
    /// confirm whether that is intentional.
    pub(crate) fn clean(&self) -> io::Result<()> {
        // User can specify any directory as the cache
        // so instead of deleting the entire directory with `remove_dir_all`, delete files individually.
        match self.fs.remove_file(self.credential_file()) {
            Ok(()) => Ok(()),
            Err(err) => match err.kind() {
                // A missing file means there is nothing to clean.
                io::ErrorKind::NotFound => Ok(()),
                _ => Err(err),
            },
        }
    }
}
#[cfg(test)]
mod tests {
    use crate::auth::Credential;
    use super::*;
    // Round-trip: a persisted credential loads back as the same value.
    #[test]
    fn persist_then_load_credential() {
        let tmp = temp_dir();
        let cache = Cache::new(tmp);
        let cred = Credential::Github {
            access_token: "rust is fun".into(),
        };
        assert!(cache.persist_credential(&cred).is_ok());
        let loaded = cache.load_credential().unwrap();
        assert_eq!(loaded, Unverified::from(cred),);
    }
    // `/dev/null` is not a directory, so creating the cache file fails.
    #[test]
    fn filesystem_error() {
        let cache = Cache::new("/dev/null");
        assert!(
            cache
                .persist_credential(Credential::Github {
                    access_token: "dummy".into(),
                })
                .is_err()
        );
    }
    // `keep()` detaches the directory from the `TempDir` guard so it
    // survives for the duration of the test.
    fn temp_dir() -> PathBuf {
        tempfile::TempDir::new().unwrap().keep()
    }
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_term/src/application/event/mod.rs | crates/synd_term/src/application/event/mod.rs | use crate::command::Command;
mod key_handlers;
pub use key_handlers::{KeyHandler, KeyHandlers};
/// Outcome of offering a key event to a handler.
#[expect(clippy::large_enum_variant)]
pub(crate) enum KeyEventResult {
    // The handler consumed the event.
    Consumed {
        // Follow-up command to execute, if any.
        command: Option<Command>,
        // Whether the ui should be re-rendered.
        should_render: bool,
    },
    // The handler did not recognize the event.
    Ignored,
}
impl KeyEventResult {
    /// Whether the event was consumed by a handler.
    pub(super) fn is_consumed(&self) -> bool {
        matches!(self, KeyEventResult::Consumed { .. })
    }

    /// Build a consumed result carrying `command`, without requesting a
    /// render.
    pub(crate) fn consumed(command: Command) -> Self {
        KeyEventResult::Consumed {
            command: Some(command),
            should_render: false,
        }
    }

    /// Override the render flag, leaving `Ignored` untouched.
    pub(crate) fn should_render(self, should_render: bool) -> Self {
        if let KeyEventResult::Consumed { command, .. } = self {
            KeyEventResult::Consumed {
                command,
                should_render,
            }
        } else {
            KeyEventResult::Ignored
        }
    }
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_term/src/application/event/key_handlers.rs | crates/synd_term/src/application/event/key_handlers.rs | use std::{cell::RefCell, ops::ControlFlow, rc::Rc};
use crossterm::event::KeyEvent;
use crate::{application::event::KeyEventResult, keymap::Keymaps, ui::widgets::prompt::Prompt};
/// A single key event handler in the dispatch stack.
pub enum KeyHandler {
    // An active input prompt.
    Prompt(Rc<RefCell<Prompt>>),
    // The registered keymaps.
    Keymaps(Keymaps),
}
impl KeyHandler {
    /// Offer `event` to this handler.
    fn handle(&mut self, event: &KeyEvent) -> KeyEventResult {
        match self {
            KeyHandler::Prompt(prompt) => prompt.borrow_mut().handle_key_event(event),
            KeyHandler::Keymaps(keymaps) => keymaps.search(event),
        }
    }
}
/// Stack of key handlers; handlers pushed later are consulted first.
pub struct KeyHandlers {
    handlers: Vec<KeyHandler>,
}
impl KeyHandlers {
    /// Create an empty handler stack.
    pub fn new() -> Self {
        Self {
            handlers: Vec::new(),
        }
    }

    /// Push `handler` on top of the stack; later handlers take
    /// precedence in `handle`.
    pub fn push(&mut self, handler: KeyHandler) {
        self.handlers.push(handler);
    }

    /// Drop every prompt handler from the stack.
    pub fn remove_prompt(&mut self) {
        self.handlers
            .retain(|handler| !matches!(handler, KeyHandler::Prompt(_)));
    }

    /// Find the keymaps handler, if one is registered.
    pub fn keymaps_mut(&mut self) -> Option<&mut Keymaps> {
        self.handlers.iter_mut().find_map(|handler| match handler {
            KeyHandler::Keymaps(keymaps) => Some(keymaps),
            KeyHandler::Prompt(_) => None,
        })
    }

    /// Dispatch `event` from the top of the stack downwards, stopping at
    /// the first handler that consumes it.
    pub fn handle(&mut self, event: KeyEvent) -> KeyEventResult {
        for handler in self.handlers.iter_mut().rev() {
            let result = handler.handle(&event);
            if result.is_consumed() {
                return result;
            }
        }
        KeyEventResult::Ignored
    }
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_term/src/types/time.rs | crates/synd_term/src/types/time.rs | pub type Time = synd_feed::types::Time;
/// Formatting helpers for timestamps displayed in the ui.
pub trait TimeExt {
    /// Format as `YYYY-MM-DD`.
    fn local_ymd(&self) -> String;
    /// Format as `YYYY-MM-DD HH:MM (+HH:MM)`.
    fn local_ymd_hm(&self) -> String;
}
// In integration test builds the time is formatted as-is (no timezone
// conversion) so expected output does not depend on the machine's locale.
#[cfg(feature = "integration")]
impl TimeExt for Time {
    fn local_ymd(&self) -> String {
        self.format("%Y-%m-%d").to_string()
    }
    fn local_ymd_hm(&self) -> String {
        self.format("%Y-%m-%d %H:%M (%:z)").to_string()
    }
}
// In normal builds timestamps are converted to the local timezone before
// formatting.
#[cfg(not(feature = "integration"))]
impl TimeExt for Time {
    fn local_ymd(&self) -> String {
        self.with_timezone(&chrono::Local)
            .format("%Y-%m-%d")
            .to_string()
    }
    fn local_ymd_hm(&self) -> String {
        self.with_timezone(&chrono::Local)
            .format("%Y-%m-%d %H:%M (%:z)")
            .to_string()
    }
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_term/src/types/requirement_ext.rs | crates/synd_term/src/types/requirement_ext.rs | use ratatui::{style::Style, text::Span};
use synd_feed::types::Requirement;
use crate::ui::theme::RequirementLabelTheme;
/// Render a feed requirement as a styled ui label.
pub trait RequirementExt {
    fn label(&self, theme: &RequirementLabelTheme) -> Span<'static>;
}
impl RequirementExt for Requirement {
    /// Render this requirement as a three letter badge colored by `theme`.
    fn label(&self, theme: &RequirementLabelTheme) -> Span<'static> {
        let (text, background) = match self {
            Requirement::Must => ("MST", theme.must),
            Requirement::Should => ("SHD", theme.should),
            Requirement::May => ("MAY", theme.may),
        };
        let style = Style::default().bg(background).fg(theme.fg);
        Span::styled(text, style)
    }
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_term/src/types/mod.rs | crates/synd_term/src/types/mod.rs | use chrono::DateTime;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use synd_feed::types::{Category, FeedType, FeedUrl, Requirement};
use tracing::warn;
use crate::{
client::synd_api::{
mutation,
query::{self},
},
ui,
};
mod time;
pub use time::{Time, TimeExt};
mod page_info;
pub use page_info::PageInfo;
mod requirement_ext;
pub use requirement_ext::RequirementExt;
pub(crate) mod github;
/// A link associated with a feed, mirroring the graphql `Link` type.
#[derive(Debug, Clone)]
#[cfg_attr(test, derive(fake::Dummy))]
pub struct Link {
    pub href: String,
    pub rel: Option<String>,
    pub media_type: Option<String>,
    pub title: Option<String>,
}
impl From<query::subscription::Link> for Link {
    // Field-by-field copy from the subscription query response type.
    fn from(v: query::subscription::Link) -> Self {
        Self {
            href: v.href,
            rel: v.rel,
            media_type: v.media_type,
            title: v.title,
        }
    }
}
impl From<mutation::subscribe_feed::Link> for Link {
    // Field-by-field copy from the subscribe mutation response type.
    fn from(v: mutation::subscribe_feed::Link) -> Self {
        Self {
            href: v.href,
            rel: v.rel,
            media_type: v.media_type,
            title: v.title,
        }
    }
}
/// Summary of a feed entry as carried inside a `Feed`.
#[derive(Debug, Clone)]
#[cfg_attr(test, derive(fake::Dummy))]
pub struct EntryMeta {
    pub title: Option<String>,
    pub published: Option<Time>,
    pub updated: Option<Time>,
    pub summary: Option<String>,
}
impl From<query::subscription::EntryMeta> for EntryMeta {
    fn from(e: query::subscription::EntryMeta) -> Self {
        Self {
            title: e.title,
            // parse_time panics on malformed timestamps; the api is
            // trusted to emit valid rfc3339.
            published: e.published.map(parse_time),
            updated: e.updated.map(parse_time),
            summary: e.summary,
        }
    }
}
impl From<mutation::subscribe_feed::EntryMeta> for EntryMeta {
    fn from(e: mutation::subscribe_feed::EntryMeta) -> Self {
        Self {
            title: e.title,
            // parse_time panics on malformed timestamps; the api is
            // trusted to emit valid rfc3339.
            published: e.published.map(parse_time),
            updated: e.updated.map(parse_time),
            summary: e.summary,
        }
    }
}
impl EntryMeta {
    /// Convert the html summary into plain text wrapped at `width`
    /// columns.
    ///
    /// Returns `None` when there is no summary or when the html
    /// conversion fails (the failure is logged as a warning).
    pub fn summary_text(&self, width: usize) -> Option<String> {
        let summary = self.summary.as_deref()?;
        html2text::from_read(summary.as_bytes(), width)
            .map_err(|err| warn!("convert summary html to text: {err}"))
            .ok()
    }
}
/// A subscribed feed as displayed in the ui.
#[derive(Debug, Clone)]
#[cfg_attr(test, derive(fake::Dummy))]
pub struct Feed {
    pub feed_type: Option<FeedType>,
    pub title: Option<String>,
    pub url: FeedUrl,
    pub updated: Option<Time>,
    pub links: Vec<Link>,
    pub website_url: Option<String>,
    pub description: Option<String>,
    pub generator: Option<String>,
    pub entries: Vec<EntryMeta>,
    pub authors: Vec<String>,
    // Kept private: access via `requirement()` which applies the default.
    requirement: Option<Requirement>,
    // Kept private: access via `category()` which applies the default.
    category: Option<Category<'static>>,
}
impl Feed {
    /// Requirement level of this feed, falling back to the ui default
    /// when the server did not provide one.
    pub fn requirement(&self) -> Requirement {
        self.requirement.unwrap_or(ui::DEFAULT_REQUIREMNET)
    }

    /// Category of this feed, falling back to the ui default when the
    /// server did not provide one.
    pub fn category(&self) -> &Category<'static> {
        // `unwrap_or_else` keeps the fallback lazy (clippy `or_fun_call`)
        // and matches how `Entry::category` is written.
        self.category
            .as_ref()
            .unwrap_or_else(|| ui::default_category())
    }

    /// Return a copy with `url` replaced.
    #[must_use]
    pub fn with_url(self, url: FeedUrl) -> Self {
        Self { url, ..self }
    }

    /// Return a copy with the requirement set.
    #[must_use]
    pub fn with_requirement(self, requirement: Requirement) -> Self {
        Self {
            requirement: Some(requirement),
            ..self
        }
    }

    /// Return a copy with the category set.
    #[must_use]
    pub fn with_category(self, category: Category<'static>) -> Self {
        Self {
            category: Some(category),
            ..self
        }
    }
}
impl From<query::subscription::Feed> for Feed {
    fn from(f: query::subscription::Feed) -> Self {
        Self {
            // Unknown feed types reported by the api map to `None`
            // instead of failing the conversion.
            feed_type: match f.type_ {
                query::subscription::FeedType::ATOM => Some(FeedType::Atom),
                query::subscription::FeedType::RSS1 => Some(FeedType::RSS1),
                query::subscription::FeedType::RSS2 => Some(FeedType::RSS2),
                query::subscription::FeedType::RSS0 => Some(FeedType::RSS0),
                query::subscription::FeedType::JSON => Some(FeedType::JSON),
                query::subscription::FeedType::Other(_) => None,
            },
            title: f.title,
            url: f.url,
            // parse_time panics on malformed timestamps.
            updated: f.updated.map(parse_time),
            links: f.links.nodes.into_iter().map(From::from).collect(),
            website_url: f.website_url,
            description: f.description,
            generator: f.generator,
            entries: f.entries.nodes.into_iter().map(From::from).collect(),
            authors: f.authors.nodes,
            // Unknown requirement variants also degrade to `None`.
            requirement: f.requirement.and_then(|r| match r {
                query::subscription::Requirement::MUST => Some(Requirement::Must),
                query::subscription::Requirement::SHOULD => Some(Requirement::Should),
                query::subscription::Requirement::MAY => Some(Requirement::May),
                query::subscription::Requirement::Other(_) => None,
            }),
            category: f.category,
        }
    }
}
impl From<mutation::subscribe_feed::Feed> for Feed {
    fn from(f: mutation::subscribe_feed::Feed) -> Self {
        Self {
            // Unknown feed types reported by the api map to `None`
            // instead of failing the conversion.
            feed_type: match f.type_ {
                mutation::subscribe_feed::FeedType::ATOM => Some(FeedType::Atom),
                mutation::subscribe_feed::FeedType::RSS1 => Some(FeedType::RSS1),
                mutation::subscribe_feed::FeedType::RSS2 => Some(FeedType::RSS2),
                mutation::subscribe_feed::FeedType::RSS0 => Some(FeedType::RSS0),
                mutation::subscribe_feed::FeedType::JSON => Some(FeedType::JSON),
                mutation::subscribe_feed::FeedType::Other(_) => None,
            },
            title: f.title,
            url: f.url,
            // parse_time panics on malformed timestamps.
            updated: f.updated.map(parse_time),
            links: f.links.nodes.into_iter().map(From::from).collect(),
            website_url: f.website_url,
            description: f.description,
            generator: f.generator,
            entries: f.entries.nodes.into_iter().map(From::from).collect(),
            authors: f.authors.nodes,
            // Unknown requirement variants also degrade to `None`.
            requirement: f.requirement.and_then(|r| match r {
                mutation::subscribe_feed::Requirement::MUST => Some(Requirement::Must),
                mutation::subscribe_feed::Requirement::SHOULD => Some(Requirement::Should),
                mutation::subscribe_feed::Requirement::MAY => Some(Requirement::May),
                mutation::subscribe_feed::Requirement::Other(_) => None,
            }),
            category: f.category,
        }
    }
}
/// A single feed entry as displayed in the entries tab.
#[derive(Debug, Clone)]
pub struct Entry {
    pub title: Option<String>,
    pub published: Option<Time>,
    pub updated: Option<Time>,
    pub website_url: Option<String>,
    pub summary: Option<String>,
    // Denormalized from the owning feed for display.
    pub feed_title: Option<String>,
    pub feed_url: FeedUrl,
    // Kept private: access via `requirement()` which applies the default.
    requirement: Option<Requirement>,
    // Kept private: access via `category()` which applies the default.
    category: Option<Category<'static>>,
}
impl Entry {
    /// Convert the html summary into plain text wrapped at `width`
    /// columns. Returns `None` only when the entry has no summary; a
    /// failed conversion yields an empty string.
    pub fn summary_text(&self, width: usize) -> Option<String> {
        let summary = self.summary.as_deref()?;
        let text = html2text::config::plain()
            .string_from_read(summary.as_bytes(), width)
            .unwrap_or_default();
        Some(text)
    }

    /// Requirement level, falling back to the ui default.
    pub fn requirement(&self) -> Requirement {
        self.requirement.unwrap_or(ui::DEFAULT_REQUIREMNET)
    }

    /// Category, falling back to the ui default.
    pub fn category(&self) -> &Category<'static> {
        match self.category.as_ref() {
            Some(category) => category,
            None => ui::default_category(),
        }
    }
}
impl From<query::entries::Entry> for Entry {
    fn from(v: query::entries::Entry) -> Self {
        Self {
            title: v.title,
            // parse_time panics on malformed timestamps.
            published: v.published.map(parse_time),
            updated: v.updated.map(parse_time),
            website_url: v.website_url,
            feed_title: v.feed.title,
            feed_url: v.feed.url,
            summary: v.summary,
            // Unknown requirement variants degrade to `None` so the ui
            // default applies.
            requirement: match v.feed.requirement {
                Some(query::entries::Requirement::MUST) => Some(Requirement::Must),
                Some(query::entries::Requirement::SHOULD) => Some(Requirement::Should),
                Some(query::entries::Requirement::MAY) => Some(Requirement::May),
                _ => None,
            },
            category: v.feed.category,
        }
    }
}
/// Feed representation used by the subscription export/import commands.
#[derive(Serialize, Deserialize, JsonSchema)]
pub struct ExportedFeed {
    pub title: Option<String>,
    pub url: FeedUrl,
    pub requirement: Option<Requirement>,
    pub category: Option<Category<'static>>,
}
impl From<query::export_subscription::ExportSubscriptionOutputFeedsNodes> for ExportedFeed {
    fn from(v: query::export_subscription::ExportSubscriptionOutputFeedsNodes) -> Self {
        Self {
            title: v.title,
            url: v.url,
            // Unknown requirement variants are dropped from the export.
            requirement: v.requirement.and_then(|r| match r {
                query::export_subscription::Requirement::MUST => Some(Requirement::Must),
                query::export_subscription::Requirement::SHOULD => Some(Requirement::Should),
                query::export_subscription::Requirement::MAY => Some(Requirement::May),
                query::export_subscription::Requirement::Other(_) => None,
            }),
            category: v.category,
        }
    }
}
impl From<ExportedFeed> for mutation::subscribe_feed::SubscribeFeedInput {
    // Convert an exported feed back into a subscribe request for import.
    fn from(feed: ExportedFeed) -> Self {
        Self {
            url: feed.url,
            requirement: feed.requirement.map(|r| match r {
                Requirement::Must => mutation::subscribe_feed::Requirement::MUST,
                Requirement::Should => mutation::subscribe_feed::Requirement::SHOULD,
                Requirement::May => mutation::subscribe_feed::Requirement::MAY,
            }),
            category: feed.category,
        }
    }
}
/// Parse an rfc3339 timestamp into utc time.
///
/// # Panics
/// Panics when `t` is not valid rfc3339; callers pass timestamps produced
/// by the api, which are expected to be well formed.
fn parse_time(t: impl AsRef<str>) -> Time {
    let parsed = DateTime::parse_from_rfc3339(t.as_ref()).expect("invalid rfc3339 time");
    parsed.with_timezone(&chrono::Utc)
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_term/src/types/github.rs | crates/synd_term/src/types/github.rs | use std::{fmt::Display, ops::Deref, str::FromStr};
use either::Either;
use octocrab::models::{self, activity::Subject};
use ratatui::{
style::{Color, Stylize},
text::Span,
};
use serde::{Deserialize, Serialize};
use synd_feed::types::Category;
use url::Url;
use crate::{
client::github::{issue_query, pull_request_query},
config::Categories,
types::Time,
ui::{self, icon},
};
// Re-exported octocrab identifier types.
pub(crate) type ThreadId = octocrab::models::ThreadId;
pub(crate) type NotificationId = octocrab::models::NotificationId;
/// Define a newtype id wrapper around a primitive, with `Display`,
/// `Deref` and an `into_inner` accessor.
macro_rules! new_id {
    ($id:ident, $pri:ty) => {
        #[derive(Debug, Clone, Copy, PartialEq, Eq)]
        pub(crate) struct $id($pri);
        impl Display for $id {
            fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                self.0.fmt(f)
            }
        }
        impl Deref for $id {
            type Target = $pri;
            fn deref(&self) -> &Self::Target {
                &self.0
            }
        }
        impl $id {
            pub(crate) fn into_inner(self) -> $pri {
                self.0
            }
        }
    };
}
new_id!(IssueId, i64);
new_id!(PullRequestId, i64);
/// Whether a repository is publicly visible.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize, Serialize)]
pub(crate) enum RepoVisibility {
    Public,
    Private,
}
/// Owner/name pair identifying a repository.
#[derive(Debug, Clone)]
pub(crate) struct RepositoryKey {
    pub(crate) name: String,
    pub(crate) owner: String,
}
/// Repository a notification belongs to.
#[derive(Debug, Clone)]
pub(crate) struct Repository {
    pub(crate) name: String,
    pub(crate) owner: String,
    pub(crate) visibility: RepoVisibility,
}
/// Ties a subject id (issue or pull request) back to its notification and
/// repository so that detail fetches can be correlated.
#[derive(Debug, Clone)]
pub(crate) struct NotificationContext<ID> {
    pub(crate) id: ID,
    pub(crate) notification_id: NotificationId,
    pub(crate) repository_key: RepositoryKey,
}
// Context for a notification whose subject is either an issue (left) or a
// pull request (right).
pub(crate) type IssueOrPullRequest =
    Either<NotificationContext<IssueId>, NotificationContext<PullRequestId>>;
/// Kind of resource a notification points at.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub(crate) enum SubjectType {
    Issue,
    PullRequest,
    Ci,
    Release,
    Discussion,
}
/// Additional information fetched from api
#[derive(Debug, Clone)]
pub(crate) enum SubjectContext {
    Issue(IssueContext),
    PullRequest(PullRequestContext),
}
/// Why the user received a notification.
///
/// `https://docs.github.com/en/rest/activity/notifications?apiVersion=2022-11-28#about-notification-reasons`
#[derive(Clone, Debug, PartialEq, Eq, Deserialize, Serialize)]
pub(crate) enum Reason {
    Assign,
    Author,
    CiActivity,
    ManuallySubscribed,
    Mention,
    TeamMention,
    ReviewRequested,
    WatchingRepo,
    // Reason string not recognized; preserved verbatim.
    Other(String),
}
/// A github notification enriched with locally derived state.
#[derive(Clone, Debug)]
pub(crate) struct Notification {
    pub(crate) id: NotificationId,
    // Parsed from the thread api url; `None` when the url shape differs.
    pub(crate) thread_id: Option<ThreadId>,
    pub(crate) reason: Reason,
    #[allow(unused)]
    pub(crate) unread: bool,
    pub(crate) updated_at: Time,
    pub(crate) last_read_at: Option<Time>,
    pub(crate) repository: Repository,
    // Filled in later by a separate subject detail fetch.
    pub(crate) subject_context: Option<SubjectContext>,
    categories: Vec<Category<'static>>,
    // `None` when the subject type string is unrecognized.
    subject_type: Option<SubjectType>,
    subject: Subject,
}
impl From<models::activity::Notification> for Notification {
    fn from(
        models::activity::Notification {
            id,
            repository,
            subject,
            reason,
            unread,
            updated_at,
            last_read_at,
            url,
            ..
        }: models::activity::Notification,
    ) -> Self {
        // Split "owner/repo" into its two parts, degrading gracefully
        // (empty owner) when the api response does not match that shape.
        let (owner, name) = if let Some(full_name) = repository.full_name {
            let mut s = full_name.splitn(2, '/');
            if let (Some(owner), Some(repo)) = (s.next(), s.next()) {
                (owner.to_owned(), repo.to_owned())
            } else {
                tracing::warn!("Unexpected repository full_name: `{full_name}`");
                (String::new(), repository.name)
            }
        } else {
            tracing::warn!("Repository full_name not found");
            (String::new(), repository.name)
        };
        let repository = Repository {
            name,
            owner,
            // Missing `private` is treated as a public repository.
            visibility: if repository.private.unwrap_or(false) {
                RepoVisibility::Private
            } else {
                RepoVisibility::Public
            },
        };
        // Assume url is like "https://api.github.com/notifications/threads/11122223333"
        let thread_id = url
            .path_segments()
            .and_then(|mut seg| seg.nth(2))
            .and_then(|id| id.parse::<u64>().ok())
            .map(ThreadId::from);
        let categories = vec![ui::default_category().clone()];
        // Classify the subject by its type string (case-insensitive);
        // unknown combinations are logged and left as `None`.
        let subject_type = match subject.r#type.as_str() {
            typ if typ.eq_ignore_ascii_case("issue") => Some(SubjectType::Issue),
            typ if typ.eq_ignore_ascii_case("pullrequest") => Some(SubjectType::PullRequest),
            typ if typ.eq_ignore_ascii_case("checksuite") && reason == "ci_activity" => {
                Some(SubjectType::Ci)
            }
            typ if typ.eq_ignore_ascii_case("release") => Some(SubjectType::Release),
            typ if typ.eq_ignore_ascii_case("discussion") => Some(SubjectType::Discussion),
            _ => {
                tracing::warn!("Unknown url: {url:?} reason: {reason} subject: `{subject:?}`");
                None
            }
        };
        // Map the api reason string onto the typed enum, preserving
        // unrecognized values verbatim.
        let reason = match reason.as_str() {
            "assign" => Reason::Assign,
            "author" => Reason::Author,
            "ci_activity" => Reason::CiActivity,
            "manual" => Reason::ManuallySubscribed,
            "mention" => Reason::Mention,
            "team_mention" => Reason::TeamMention,
            "review_requested" => Reason::ReviewRequested,
            "subscribed" => Reason::WatchingRepo,
            other => Reason::Other(other.to_owned()),
        };
        Self {
            id,
            thread_id,
            reason,
            unread,
            updated_at,
            last_read_at,
            repository,
            categories,
            subject,
            subject_type,
            subject_context: None,
        }
    }
}
impl Notification {
pub(crate) fn subject_type(&self) -> Option<SubjectType> {
self.subject_type
}
pub(crate) fn subject_icon(&'_ self) -> Span<'_> {
match self.subject_type() {
Some(SubjectType::Issue) => match self.subject_context {
Some(SubjectContext::Issue(ref issue)) => match issue.state {
IssueState::Open => {
if matches!(issue.state_reason, Some(IssueStateReason::ReOpened)) {
Span::from(icon!(issuereopened)).green()
} else {
Span::from(icon!(issueopen)).green()
}
}
IssueState::Closed => {
if matches!(issue.state_reason, Some(IssueStateReason::NotPlanned)) {
Span::from(icon!(issuenotplanned)).gray()
} else {
Span::from(icon!(issueclosed)).light_magenta()
}
}
},
_ => Span::from(icon!(issueopen)),
},
Some(SubjectType::PullRequest) => match self.subject_context {
Some(SubjectContext::PullRequest(ref pr)) => match pr.state {
PullRequestState::Open => {
if pr.is_draft {
Span::from(icon!(pullrequestdraft)).gray()
} else {
Span::from(icon!(pullrequest)).green()
}
}
PullRequestState::Merged => {
Span::from(icon!(pullrequestmerged)).light_magenta()
}
PullRequestState::Closed => Span::from(icon!(pullrequestclosed)).red(),
},
_ => Span::from(icon!(pullrequest)),
},
Some(SubjectType::Ci) => Span::from(icon!(cross)).red(),
Some(SubjectType::Release) => Span::from(icon!(tag)).green(),
Some(SubjectType::Discussion) => Span::from(icon!(discussion)),
None => Span::from(" "),
}
}
pub(crate) fn title(&self) -> &str {
&self.subject.title
}
pub(crate) fn browser_url(&self) -> Option<Url> {
let mut url = self.base_url();
match self.subject_type()? {
SubjectType::Issue => {
// construct like "https://github.com/ymgyt/syndicationd/issues/{issue-id}#issumecomment-{commentid}"
url.path_segments_mut()
.unwrap()
.extend(["issues", &self.issue_id()?.to_string()]);
if let Some(commend_id) = self.comment_id() {
url.set_fragment(Some(&format!("issuecomment-{commend_id}")));
}
Some(url)
}
SubjectType::PullRequest => {
// construct like "https://github.com/ymgyt/syndicationd/pull/{pr-id}#pullrequestreview-123"
url.path_segments_mut()
.unwrap()
.extend(["pull", &self.pull_request_id()?.to_string()]);
// How to get PR review comment id?
Some(url)
}
SubjectType::Ci => {
// In th UI, it transitions to the failed actions
// but I don't know how to identify which action failed
url.path_segments_mut().unwrap().extend(["actions"]);
Some(url)
}
SubjectType::Release => {
// Since the release ID is stored in the subject.url, obtaining the release information might help determine the specific destination
url.path_segments_mut().unwrap().extend(["releases"]);
Some(url)
}
SubjectType::Discussion => {
url.path_segments_mut().unwrap().extend(["discussions"]);
Some(url)
}
}
}
pub(crate) fn context(&self) -> Option<IssueOrPullRequest> {
match self.subject_type()? {
SubjectType::Issue => Some(Either::Left(NotificationContext {
id: self.issue_id()?,
notification_id: self.id,
repository_key: self.repository_key().clone(),
})),
SubjectType::PullRequest => Some(Either::Right(NotificationContext {
id: self.pull_request_id()?,
notification_id: self.id,
repository_key: self.repository_key().clone(),
})),
// Currently ignore ci, release, discussion
_ => None,
}
}
pub(crate) fn author(&self) -> Option<String> {
match self.subject_context {
Some(SubjectContext::Issue(ref issue)) => issue.author.clone(),
Some(SubjectContext::PullRequest(ref pr)) => pr.author.clone(),
_ => None,
}
}
pub(crate) fn body(&self) -> Option<String> {
match self.subject_context {
Some(SubjectContext::Issue(ref issue)) => Some(issue.body.clone()),
Some(SubjectContext::PullRequest(ref pr)) => Some(pr.body.clone()),
_ => None,
}
}
pub(crate) fn last_comment(&self) -> Option<Comment> {
match self.subject_context {
Some(SubjectContext::Issue(ref issue)) => issue.last_comment.clone(),
Some(SubjectContext::PullRequest(ref pr)) => pr.last_comment.clone(),
_ => None,
}
}
pub(crate) fn issue_id(&self) -> Option<IssueId> {
// Assume url is like "https://api.github.com/repos/ymgyt/synd/issues/123"
let mut segments = self.subject.url.as_ref()?.path_segments()?.skip(3);
(segments.next() == Some("issues"))
.then(|| segments.next())?
.and_then(|id| id.parse().ok())
.map(IssueId)
}
pub(crate) fn pull_request_id(&self) -> Option<PullRequestId> {
// Assume url is like "https://api.github.com/repos/ymgyt/synd/pulls/123"
let mut segments = self.subject.url.as_ref()?.path_segments()?.skip(3);
(segments.next() == Some("pulls"))
.then(|| segments.next())?
.and_then(|id| id.parse().ok())
.map(PullRequestId)
}
fn repository_key(&self) -> RepositoryKey {
RepositoryKey {
name: self.repository.name.clone(),
owner: self.repository.owner.clone(),
}
}
fn comment_id(&self) -> Option<String> {
// Assume url is like "https://api.github.com/repos/ymgyt/synd/issues/comments/123"
let mut segments = self
.subject
.latest_comment_url
.as_ref()?
.path_segments()?
.skip(4);
(segments.next() == Some("comments"))
.then(|| segments.next())?
.map(ToString::to_string)
}
// Return https://github.com/{owner}/{repo}
fn base_url(&self) -> Url {
let mut url = Url::parse("https://github.com").unwrap();
url.path_segments_mut().unwrap().extend([
self.repository.owner.as_str(),
self.repository.name.as_str(),
]);
url
}
pub(crate) fn categories(&self) -> impl Iterator<Item = &Category<'static>> {
self.categories.iter()
}
pub(crate) fn update_categories(&mut self, config: &Categories) {
self.categories.clear();
if let Some(category) = config.lookup(&self.repository.owner) {
self.categories.push(category);
}
if let Some(category) = config.lookup(&self.repository.name) {
self.categories.push(category);
}
if let Some(topics) = self.topics().map(|topics| {
topics
.filter_map(|topic| config.lookup(topic))
.collect::<Vec<_>>()
}) {
self.categories.extend(topics);
}
if self.categories.is_empty() {
self.categories.push(ui::default_category().clone());
}
}
fn topics(&self) -> Option<impl Iterator<Item = &str>> {
match self.subject_context {
Some(SubjectContext::Issue(ref issue)) => Some(issue.topics.iter().map(String::as_str)),
Some(SubjectContext::PullRequest(ref pr)) => Some(pr.topics.iter().map(String::as_str)),
_ => None,
}
}
pub(crate) fn labels(&self) -> Option<impl Iterator<Item = &Label>> {
match self.subject_context {
Some(SubjectContext::Issue(ref issue)) => {
if issue.labels.is_empty() {
None
} else {
Some(issue.labels.iter())
}
}
Some(SubjectContext::PullRequest(ref pr)) => {
if pr.labels.is_empty() {
None
} else {
Some(pr.labels.iter())
}
}
_ => None,
}
}
}
#[derive(Debug, Clone)]
pub(crate) struct Comment {
pub(crate) author: String,
pub(crate) body: String,
}
#[derive(Debug, Clone)]
pub(crate) struct Label {
pub(crate) name: String,
pub(crate) color: Option<Color>,
pub(crate) luminance: Option<f64>,
}
#[derive(Debug, Clone)]
pub(crate) enum IssueState {
Open,
Closed,
}
#[derive(Debug, Clone)]
pub(crate) enum IssueStateReason {
ReOpened,
NotPlanned,
Completed,
}
#[derive(Debug, Clone)]
pub(crate) struct IssueContext {
author: Option<String>,
#[allow(unused)]
topics: Vec<String>,
state: IssueState,
state_reason: Option<IssueStateReason>,
body: String,
last_comment: Option<Comment>,
labels: Vec<Label>,
}
impl From<issue_query::ResponseData> for IssueContext {
fn from(data: issue_query::ResponseData) -> Self {
let repo = data
.repository
.expect("ResponseData does not have repository");
let topics: Vec<String> = repo
.repository_topics
.nodes
.unwrap_or_default()
.into_iter()
.filter_map(|node| node.map(|node| node.topic.name))
.collect();
let issue = repo.issue.expect("ResponseData does not have issue");
let author: Option<String> = issue.author.map(|author| author.login);
let state = match issue.state {
issue_query::IssueState::OPEN | issue_query::IssueState::Other(_) => IssueState::Open,
issue_query::IssueState::CLOSED => IssueState::Closed,
};
let state_reason = match issue.state_reason {
Some(issue_query::IssueStateReason::REOPENED) => Some(IssueStateReason::ReOpened),
Some(issue_query::IssueStateReason::NOT_PLANNED) => Some(IssueStateReason::NotPlanned),
Some(issue_query::IssueStateReason::COMPLETED) => Some(IssueStateReason::Completed),
_ => None,
};
let body = issue.body_text;
let last_comment: Option<Comment> = issue
.comments
.nodes
.unwrap_or_default()
.into_iter()
.find_map(|node| {
node.map(|node| Comment {
author: node.author.map(|author| author.login).unwrap_or_default(),
body: node.body_text,
})
});
let labels = issue
.labels
.and_then(|labels| labels.nodes)
.unwrap_or_default()
.into_iter()
.flatten()
.map(|label| Label {
name: label.name,
color: Color::from_str(&format!("#{}", label.color)).ok(),
luminance: luminance(&label.color),
})
.collect();
Self {
author,
topics,
state,
state_reason,
body,
last_comment,
labels,
}
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize, Serialize)]
pub(crate) enum PullRequestState {
Open,
Merged,
Closed,
}
#[derive(Debug, Clone)]
pub(crate) struct PullRequestContext {
author: Option<String>,
#[allow(unused)]
topics: Vec<String>,
pub(crate) state: PullRequestState,
is_draft: bool,
body: String,
last_comment: Option<Comment>,
labels: Vec<Label>,
}
impl From<pull_request_query::ResponseData> for PullRequestContext {
fn from(data: pull_request_query::ResponseData) -> Self {
let repo = data
.repository
.expect("ResponseData does not have repository");
let topics: Vec<String> = repo
.repository_topics
.nodes
.unwrap_or_default()
.into_iter()
.filter_map(|node| node.map(|node| node.topic.name))
.collect();
let pr = repo
.pull_request
.expect("ResponseData does not have pull request");
let author: Option<String> = pr.author.map(|author| author.login);
let state = match pr.state {
pull_request_query::PullRequestState::OPEN
| pull_request_query::PullRequestState::Other(_) => PullRequestState::Open,
pull_request_query::PullRequestState::CLOSED => PullRequestState::Closed,
pull_request_query::PullRequestState::MERGED => PullRequestState::Merged,
};
let is_draft = pr.is_draft;
let body = pr.body_text;
let last_comment: Option<Comment> = pr
.comments
.nodes
.unwrap_or_default()
.into_iter()
.find_map(|node| {
node.map(|node| Comment {
author: node.author.map(|author| author.login).unwrap_or_default(),
body: node.body_text,
})
});
let labels = pr
.labels
.and_then(|labels| labels.nodes)
.unwrap_or_default()
.into_iter()
.flatten()
.map(|label| Label {
name: label.name,
color: Color::from_str(&format!("#{}", label.color)).ok(),
luminance: luminance(&label.color),
})
.collect();
Self {
author,
topics,
state,
is_draft,
body,
last_comment,
labels,
}
}
}
// Assume color is "RRGGBB" in hex format
#[allow(clippy::cast_lossless)]
fn luminance(color: &str) -> Option<f64> {
if color.len() != 6 {
return None;
}
let r = u8::from_str_radix(&color[..2], 16).ok()? as f64;
let g = u8::from_str_radix(&color[2..4], 16).ok()? as f64;
let b = u8::from_str_radix(&color[4..], 16).ok()? as f64;
Some((0.2126 * r + 0.7152 * g + 0.0722 * b) / 255.)
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_term/src/types/page_info.rs | crates/synd_term/src/types/page_info.rs | use crate::client::synd_api::query;
#[derive(Debug, Clone)]
pub struct PageInfo {
pub has_next_page: bool,
pub end_cursor: Option<String>,
}
impl From<query::entries::PageInfo> for PageInfo {
fn from(v: query::entries::PageInfo) -> Self {
Self {
has_next_page: v.has_next_page,
end_cursor: v.end_cursor,
}
}
}
impl From<query::export_subscription::ExportSubscriptionOutputFeedsPageInfo> for PageInfo {
fn from(v: query::export_subscription::ExportSubscriptionOutputFeedsPageInfo) -> Self {
Self {
has_next_page: v.has_next_page,
end_cursor: v.end_cursor,
}
}
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_term/src/terminal/integration_backend.rs | crates/synd_term/src/terminal/integration_backend.rs | use ratatui::backend::TestBackend;
pub type Buffer = ratatui::buffer::Buffer;
pub type TerminalBackend = TestBackend;
pub fn new_backend() -> TerminalBackend {
TestBackend::new(10, 10)
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_term/src/terminal/backend.rs | crates/synd_term/src/terminal/backend.rs | use ratatui::backend::CrosstermBackend;
pub type TerminalBackend = CrosstermBackend<std::io::Stdout>;
pub fn new_backend() -> TerminalBackend {
CrosstermBackend::new(std::io::stdout())
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_term/src/terminal/mod.rs | crates/synd_term/src/terminal/mod.rs | use crossterm::{
ExecutableCommand,
event::{EnableFocusChange, EventStream},
terminal::{self, EnterAlternateScreen, LeaveAlternateScreen},
};
use futures_util::{Stream, future::Either, stream};
use ratatui::Frame;
use std::io::{self, IsTerminal};
#[cfg(not(feature = "integration"))]
mod backend;
#[cfg(not(feature = "integration"))]
pub use backend::{TerminalBackend, new_backend};
#[cfg(feature = "integration")]
mod integration_backend;
#[cfg(feature = "integration")]
pub use integration_backend::{Buffer, TerminalBackend, new_backend};
/// Provide terminal manipulation operations.
pub struct Terminal {
backend: ratatui::Terminal<TerminalBackend>,
}
impl Terminal {
/// Construct Terminal with default backend
pub fn new() -> anyhow::Result<Self> {
let backend = new_backend();
Ok(Terminal::with(ratatui::Terminal::new(backend)?))
}
pub fn with(backend: ratatui::Terminal<TerminalBackend>) -> Self {
Self { backend }
}
/// Initialize terminal
pub fn init(&mut self) -> io::Result<()> {
terminal::enable_raw_mode()?;
crossterm::execute!(io::stdout(), EnterAlternateScreen, EnableFocusChange)?;
let panic_hook = std::panic::take_hook();
std::panic::set_hook(Box::new(move |panic| {
Self::restore_backend().expect("Failed to reset terminal");
panic_hook(panic);
}));
self.backend.hide_cursor().ok();
self.backend.clear().ok();
Ok(())
}
/// Reset terminal
pub fn restore(&mut self) -> io::Result<()> {
Self::restore_backend()?;
self.backend.show_cursor().ok();
Ok(())
}
fn restore_backend() -> io::Result<()> {
terminal::disable_raw_mode()?;
io::stdout().execute(LeaveAlternateScreen)?;
Ok(())
}
pub fn render<F>(&mut self, f: F) -> anyhow::Result<()>
where
F: FnOnce(&mut Frame),
{
self.backend.draw(f)?;
Ok(())
}
pub fn force_redraw(&mut self) {
self.backend.clear().unwrap();
}
#[cfg(feature = "integration")]
pub fn buffer(&self) -> &Buffer {
self.backend.backend().buffer()
}
}
pub fn event_stream() -> impl Stream<Item = std::io::Result<crossterm::event::Event>> + Unpin {
// When tests are run with nix(crane), /dev/tty is not available
// In such cases, executing `EventStream::new()` will cause a panic.
// Currently, this issue only arises during testing with nix, so an empty stream that does not panic is returned
// https://github.com/crossterm-rs/crossterm/blob/fce58c879a748f3159216f68833100aa16141ab0/src/terminal/sys/file_descriptor.rs#L74
// https://github.com/crossterm-rs/crossterm/blob/fce58c879a748f3159216f68833100aa16141ab0/src/event/read.rs#L39
let is_terminal = std::io::stdout().is_terminal();
if is_terminal {
Either::Left(EventStream::new())
} else {
Either::Right(stream::empty())
}
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_term/src/keymap/default.rs | crates/synd_term/src/keymap/default.rs | use crate::keymap::{KeymapsConfig, macros::keymap};
pub fn default() -> KeymapsConfig {
let login = keymap!({
"enter" => authenticate,
"k" | "up" => move_up_authentication_provider,
"j" | "down" => move_down_authentication_provider,
});
let tabs = keymap!({
"tab" => move_right_tab_selection,
"backtab" => move_left_tab_selection,
});
let entries = keymap!({
"k" | "up" => move_up_entry,
"j" | "down" => move_down_entry,
"r" => reload_entries,
"enter" => open_entry,
"space" => browse_entry,
"g" => {
"g" => move_entry_first,
"e" => move_entry_last,
},
});
let subscription = keymap!({
"a" => prompt_feed_subscription,
"e" => prompt_feed_edition,
"d" => prompt_feed_unsubscription,
"k" | "up" => move_up_subscribed_feed,
"j" | "down" => move_down_subscribed_feed,
"r" => reload_subscription,
"enter" => open_feed,
"g" => {
"g" => move_subscribed_feed_first,
"e" => move_subscribed_feed_last,
},
});
let gh_notification = keymap!({
"k" | "up" => move_up_gh_notification,
"j" | "down" => move_down_gh_notification,
"enter" => open_gh_notification,
"A-enter" => open_gh_notification_with_done,
"r" => reload_gh_notifications,
"d" => mark_gh_notification_as_done,
"S-d" => mark_gh_notification_as_done_all,
"u" => unsubscribe_gh_thread,
"g" => {
"g" => move_gh_notification_first,
"e" => move_gh_notification_last,
},
"f" => open_gh_notification_filter_popup,
});
let gh_notification_filter_popup = keymap!({
"u" => {
"n" => toggle_gh_notification_filter_popup_include_unread,
},
"c" => {
"l" => toggle_gh_notification_filter_popup_pr_closed,
},
"p" => {
"a" => toggle_gh_notification_filter_popup_participating,
"u" => toggle_gh_notification_filter_popup_visibility_public,
"r" => toggle_gh_notification_filter_popup_visibility_private,
},
"o" => {
"p" => toggle_gh_notification_filter_popup_pr_open,
},
"m" => {
"e" => toggle_gh_notification_filter_popup_reason_mentioned,
"r" => toggle_gh_notification_filter_popup_pr_merged,
},
"r" => {
"e" => toggle_gh_notification_filter_popup_reason_review,
},
"esc" | "enter" => close_gh_notification_filter_popup,
});
let filter = keymap!({
"h" | "left" => move_filter_requirement_left,
"l" | "right" => move_filter_requirement_right,
"c" => activate_category_filtering,
"/" => activate_search_filtering,
"esc" => deactivate_filtering,
});
let unsubscribe_popup = keymap!({
"h" | "left" => move_feed_unsubscription_popup_selection_left,
"l" | "right" => move_feed_unsubscription_popup_selection_right,
"enter" => select_feed_unsubscription_popup,
"esc" => cancel_feed_unsubscription_popup,
});
let global = keymap!({
"q" | "C-c" => quit ,
"S-t" => rotate_theme,
});
KeymapsConfig {
login,
tabs,
entries,
subscription,
gh_notification,
gh_notification_filter_popup,
filter,
unsubscribe_popup,
global,
}
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_term/src/keymap/macros.rs | crates/synd_term/src/keymap/macros.rs | macro_rules! keymap {
( @count $token:tt ) => { () };
( @trie $cmd:ident ) => { $crate::keymap::KeyTrie::Command($crate::command::Command::$cmd()) };
(@trie
{ $( $($key:literal)|+ => $value:tt, )+ }
) => {
keymap!({ $( $($key)|+ => $value, )+ })
};
(
{ $( $($key:literal)|+ => $value:tt, )+ }
) => {
{
// https://danielkeep.github.io/tlborm/book/blk-counting.html#slice-length
let capacity = <[()]>::len(&[
$(
$( keymap!(@count $key) ),*
),*
]);
let mut map = ::std::collections::HashMap::with_capacity(capacity);
$(
$(
let key_event = $crate::keymap::parse($key).unwrap();
let trie = keymap!(@trie $value );
map.insert(key_event, trie);
)*
)*
let node = $crate::keymap::KeyTrieNode { map };
$crate::keymap::KeyTrie::Node(node)
}
};
}
pub(crate) use keymap;
#[macro_export]
macro_rules! key {
( enter ) => {
crossterm::event::Event::Key(crossterm::event::KeyEvent::from(
crossterm::event::KeyCode::Enter,
))
};
( tab ) => {
crossterm::event::Event::Key(crossterm::event::KeyEvent::from(
crossterm::event::KeyCode::Tab,
))
};
( esc ) => {
crossterm::event::Event::Key(crossterm::event::KeyEvent::from(
crossterm::event::KeyCode::Esc,
))
};
( backspace ) => {
crossterm::event::Event::Key(crossterm::event::KeyEvent::from(
crossterm::event::KeyCode::Backspace,
))
};
( $char:literal ) => {
crossterm::event::Event::Key(crossterm::event::KeyEvent::from(
crossterm::event::KeyCode::Char($char),
))
};
}
#[macro_export]
macro_rules! shift {
( $char:literal ) => {{
let mut k = crossterm::event::KeyEvent::from(crossterm::event::KeyCode::Char($char));
k.modifiers.insert(crossterm::event::KeyModifiers::SHIFT);
crossterm::event::Event::Key(k)
}};
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_term/src/keymap/mod.rs | crates/synd_term/src/keymap/mod.rs | use std::{collections::HashMap, ops::ControlFlow};
use anyhow::{anyhow, bail};
use crossterm::event::{KeyCode, KeyEvent, KeyModifiers};
mod default;
pub mod macros;
use crate::{application::event::KeyEventResult, command::Command};
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(crate) enum KeymapId {
Global = 0,
Login = 1,
Tabs = 2,
Entries = 3,
Subscription = 4,
GhNotification = 5,
Filter = 6,
CategoryFiltering = 7,
UnsubscribePopupSelection = 8,
GhNotificationFilterPopup = 9,
}
#[derive(Debug)]
pub(crate) struct Keymap {
#[allow(dead_code)]
id: KeymapId,
enable: bool,
trie: KeyTrie,
pending_keys: Vec<KeyEvent>,
}
impl Keymap {
/// Construct a `Keymap`
pub fn new(id: KeymapId, trie: KeyTrie) -> Self {
Self {
id,
enable: false,
trie,
pending_keys: Vec::with_capacity(2),
}
}
pub fn from_map(id: KeymapId, map: HashMap<KeyEvent, KeyTrie>) -> Self {
Self::new(id, KeyTrie::Node(KeyTrieNode { map }))
}
fn search(&mut self, event: &KeyEvent) -> Option<Command> {
let first = self.pending_keys.first().unwrap_or(event);
let trie = match self.trie.search(&[*first]) {
Some(KeyTrie::Command(cmd)) => return Some(cmd),
Some(KeyTrie::Node(node)) => KeyTrie::Node(node),
None => return None,
};
self.pending_keys.push(*event);
match trie.search(&self.pending_keys[1..]) {
Some(KeyTrie::Command(cmd)) => {
self.pending_keys.drain(..);
Some(cmd)
}
Some(KeyTrie::Node(_)) => None,
_ => {
self.pending_keys.drain(..);
None
}
}
}
}
pub(crate) struct KeymapsConfig {
pub login: KeyTrie,
pub tabs: KeyTrie,
pub entries: KeyTrie,
pub subscription: KeyTrie,
pub gh_notification: KeyTrie,
pub gh_notification_filter_popup: KeyTrie,
pub filter: KeyTrie,
pub unsubscribe_popup: KeyTrie,
pub global: KeyTrie,
}
impl Default for KeymapsConfig {
fn default() -> Self {
default::default()
}
}
#[derive(Debug)]
pub(crate) struct Keymaps {
keymaps: Vec<Keymap>,
}
impl Keymaps {
pub fn new(config: KeymapsConfig) -> Self {
// order is matter
let keymaps = vec![
Keymap::new(KeymapId::Global, config.global),
Keymap::new(KeymapId::Login, config.login),
Keymap::new(KeymapId::Tabs, config.tabs),
Keymap::new(KeymapId::Entries, config.entries),
Keymap::new(KeymapId::Subscription, config.subscription),
Keymap::new(KeymapId::GhNotification, config.gh_notification),
Keymap::new(KeymapId::Filter, config.filter),
Keymap::new(KeymapId::CategoryFiltering, KeyTrie::default()),
Keymap::new(
KeymapId::UnsubscribePopupSelection,
config.unsubscribe_popup,
),
Keymap::new(
KeymapId::GhNotificationFilterPopup,
config.gh_notification_filter_popup,
),
];
Self { keymaps }
}
pub fn enable(&mut self, id: KeymapId) -> &mut Self {
self.keymaps[id as usize].enable = true;
self
}
pub fn disable(&mut self, id: KeymapId) -> &mut Self {
self.keymaps[id as usize].enable = false;
self
}
pub fn update(&mut self, id: KeymapId, keymap: Keymap) {
let mut keymap = keymap;
keymap.enable = true;
self.keymaps[id as usize] = keymap;
}
pub fn search(&mut self, event: &KeyEvent) -> KeyEventResult {
match self
.keymaps
.iter_mut()
.rev()
.filter(|k| k.enable)
.try_for_each(|keymap| match keymap.search(event) {
Some(command) => {
ControlFlow::Break(KeyEventResult::consumed(command).should_render(true))
}
None => ControlFlow::Continue(()),
}) {
ControlFlow::Break(r) => r,
ControlFlow::Continue(()) => KeyEventResult::Ignored,
}
}
}
impl Default for Keymaps {
fn default() -> Self {
Self::new(KeymapsConfig::default())
}
}
#[expect(clippy::large_enum_variant)]
#[derive(Clone, Debug)]
pub(crate) enum KeyTrie {
Command(Command),
Node(KeyTrieNode),
}
impl KeyTrie {
pub fn search(&self, keys: &[KeyEvent]) -> Option<KeyTrie> {
let mut trie = self;
for key in keys {
trie = match trie {
KeyTrie::Command(_) => return Some(trie.clone()),
KeyTrie::Node(trie) => trie.map.get(key)?,
}
}
Some(trie.clone())
}
}
impl Default for KeyTrie {
fn default() -> Self {
KeyTrie::Node(KeyTrieNode {
map: HashMap::new(),
})
}
}
#[derive(Clone, Debug)]
pub struct KeyTrieNode {
map: HashMap<KeyEvent, KeyTrie>,
}
fn parse(s: &str) -> anyhow::Result<KeyEvent> {
let mut tokens: Vec<_> = s.split('-').collect();
let code = match tokens.pop().ok_or_else(|| anyhow!("no token"))? {
"enter" => KeyCode::Enter,
"tab" => KeyCode::Tab,
"backtab" => KeyCode::BackTab,
"left" => KeyCode::Left,
"right" => KeyCode::Right,
"up" => KeyCode::Up,
"down" => KeyCode::Down,
"esc" => KeyCode::Esc,
"space" => KeyCode::Char(' '),
single if single.chars().count() == 1 => KeyCode::Char(single.chars().next().unwrap()),
undefined => bail!("`{undefined}` is not implemented yet"),
};
let mut modifiers = KeyModifiers::NONE;
for token in tokens {
let modifier = match token {
"C" => KeyModifiers::CONTROL,
"S" => KeyModifiers::SHIFT,
"A" => KeyModifiers::ALT,
undefined => bail!("`{undefined}` modifier is not implemented yet"),
};
modifiers.insert(modifier);
}
// Handling special case
if code == KeyCode::BackTab {
modifiers.insert(KeyModifiers::SHIFT);
}
Ok(KeyEvent::new(code, modifiers))
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_term/src/config/parse.rs | crates/synd_term/src/config/parse.rs | pub(crate) mod flag {
use std::time::Duration;
use synd_stdx::time::humantime::{DurationError, parse_duration};
pub(crate) fn parse_duration_opt(s: &str) -> Result<Duration, DurationError> {
parse_duration(s)
}
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_term/src/config/categories.rs | crates/synd_term/src/config/categories.rs | use std::{collections::HashMap, path::Path};
use anyhow::Context;
use ratatui::style::Color;
use serde::{Deserialize, Serialize};
use synd_feed::types::Category;
#[derive(Clone, Deserialize, Debug)]
pub struct Categories {
categories: HashMap<String, Entry>,
#[serde(skip)]
aliases: HashMap<String, String>,
}
impl Categories {
pub fn default_toml() -> Self {
let s = include_str!("../../categories.toml");
let mut c: Self = toml::from_str(s).unwrap();
c.update_aliases();
c
}
pub fn load(path: impl AsRef<Path>) -> anyhow::Result<Self> {
let path = path.as_ref();
let buf =
std::fs::read_to_string(path).with_context(|| format!("path: {}", path.display()))?;
let mut c: Self = toml::from_str(&buf)?;
c.update_aliases();
Ok(c)
}
pub fn icon(&self, category: &Category<'_>) -> Option<&Icon> {
self.categories
.get(category.as_str())
.map(|entry| &entry.icon)
}
pub fn normalize(&self, category: Category<'static>) -> Category<'static> {
match self.aliases.get(category.as_str()) {
Some(normalized) => Category::new(normalized.to_owned()).unwrap_or(category),
None => category,
}
}
fn update_aliases(&mut self) {
let new_map = self.categories.iter().fold(
HashMap::with_capacity(self.categories.len()),
|mut m, (category, entry)| {
entry.aliases.iter().for_each(|alias| {
m.insert(alias.to_lowercase(), category.to_lowercase());
});
m
},
);
self.aliases = new_map;
}
pub(crate) fn lookup(&self, category: &str) -> Option<Category<'static>> {
let normalized = match self.aliases.get(category) {
Some(normalized) => normalized,
None => category,
};
if self.categories.contains_key(normalized) {
Category::new(normalized.to_owned()).ok()
} else {
None
}
}
pub(super) fn merge(&mut self, other: HashMap<String, Entry>) {
self.categories.extend(other);
self.update_aliases();
}
}
#[derive(Clone, Debug, Serialize, Deserialize)]
pub(super) struct Entry {
icon: Icon,
#[serde(default)]
aliases: Vec<String>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Icon {
symbol: String,
color: Option<IconColor>,
}
impl Icon {
pub fn new(symbol: impl Into<String>) -> Self {
Self {
symbol: symbol.into(),
color: None,
}
}
#[must_use]
pub fn with_color(self, color: IconColor) -> Self {
Self {
color: Some(color),
..self
}
}
pub fn symbol(&self) -> &str {
self.symbol.as_str()
}
pub fn color(&self) -> Option<Color> {
self.color.as_ref().and_then(IconColor::color)
}
}
#[derive(Clone, Debug, Serialize, Deserialize, Default)]
pub struct IconColor {
rgb: Option<u32>,
// https://docs.rs/ratatui/latest/ratatui/style/enum.Color.html#variant.Red
name: Option<String>,
#[serde(skip)]
color: Option<Color>,
}
impl IconColor {
pub fn new(color: Color) -> Self {
Self {
rgb: None,
name: None,
color: Some(color),
}
}
}
impl IconColor {
fn color(&self) -> Option<Color> {
self.color.or(self
.rgb
.as_ref()
.map(|rgb| Color::from_u32(*rgb))
.or(self.name.as_ref().and_then(|s| s.parse().ok())))
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn should_parse_default_toml() {
let c = Categories::default_toml();
let icon = c.icon(&Category::new("rust").unwrap()).unwrap();
assert_eq!(icon.symbol(), "");
assert_eq!(icon.color(), Some(Color::Rgb(247, 76, 0)));
}
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_term/src/config/file.rs | crates/synd_term/src/config/file.rs | use std::{collections::HashMap, io, path::PathBuf, time::Duration};
use serde::{Deserialize, Serialize};
use thiserror::Error;
use url::Url;
use crate::{cli::Palette, config::categories};
#[derive(Debug, Serialize, Deserialize)]
pub struct CacheEntry {
pub(super) directory: Option<PathBuf>,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct LogEntry {
pub(super) path: Option<PathBuf>,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct ThemeEntry {
pub(super) name: Option<Palette>,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct FeedEntry {
pub(super) entries_limit: Option<usize>,
pub(super) browser: Option<FeedBrowserEntry>,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct FeedBrowserEntry {
pub(super) command: Option<PathBuf>,
pub(super) args: Option<Vec<String>>,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct ApiEntry {
pub(super) endpoint: Option<Url>,
#[serde(
default,
deserialize_with = "synd_stdx::time::humantime::de::parse_duration_opt"
)]
pub(super) timeout: Option<Duration>,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct GithubEntry {
pub(super) enable: Option<bool>,
pub(super) pat: Option<String>,
}
#[derive(Error, Debug)]
pub enum ConfigFileError {
#[error(transparent)]
Io(#[from] io::Error),
#[error(transparent)]
Deserialize(#[from] toml::de::Error),
}
#[derive(Debug, Serialize, Deserialize)]
pub struct ConfigFile {
pub(super) cache: Option<CacheEntry>,
pub(super) log: Option<LogEntry>,
pub(super) theme: Option<ThemeEntry>,
pub(super) api: Option<ApiEntry>,
pub(super) feed: Option<FeedEntry>,
pub(super) github: Option<GithubEntry>,
pub(super) categories: Option<HashMap<String, categories::Entry>>,
}
impl ConfigFile {
pub(super) fn new<R: io::Read>(mut src: R) -> Result<Self, ConfigFileError> {
let mut buf = String::new();
src.read_to_string(&mut buf)?;
toml::from_str(&buf).map_err(ConfigFileError::from)
}
}
pub static INIT_CONFIG: &str = r#"
[cache]
# Cache directory
# directory = "path/to/dir"
[log]
# Log file path
# path = "path/to/log"
[theme]
# Theme name
# The available themes can be found by `synd --help`
# name = "ferra"
[api]
# Backend api endpoint
# endpoint = "https://api.syndicationd.ymgyt.io"
# Client timeout duration
# timeout = "30s"
[feed]
# Feed entries to fetch
# entries_limit = 200
# Command to browse feed
# browser = { command = "", args = [] }
[github]
# Enable github notification feature
# enable = true
# Github Personal access token(PAT) to browse notifications
# pat = "ghp_xxxx"
"#;
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn deserialize() {
let src = r#"
[cache]
directory = "/tmp/synd/cache"
[log]
path = "/tmp/synd/synd.log"
[theme]
name = "ferra"
[api]
endpoint = "https://api.syndicationd.ymgyt.io"
timeout = "30s"
[feed]
entries_limit = 100
browser = { command = "w3m", args = ["--foo", "--bar"] }
[github]
enable = true
pat = "ghp_xxxx"
[categories.rust]
icon = { symbol = "S", color = { rgb = 0xF74C00 }}
aliases = ["rs"]
"#;
let config = ConfigFile::new(src.as_bytes()).unwrap();
insta::assert_debug_snapshot!("deserialized_config", config);
}
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_term/src/config/mod.rs | crates/synd_term/src/config/mod.rs | use std::{path::PathBuf, sync::OnceLock};
use directories::ProjectDirs;
mod categories;
pub use categories::{Categories, Icon, IconColor};
mod file;
pub use file::INIT_CONFIG;
pub(crate) mod parse;
mod resolver;
pub use resolver::ConfigResolver;
pub mod api {
pub const ENDPOINT: &str = "https://api.syndicationd.ymgyt.io:6100";
}
pub mod env {
macro_rules! env_key {
($key:expr) => {
concat!("SYND", "_", $key)
};
}
/// Log directive
pub const LOG_DIRECTIVE: &str = env_key!("LOG");
pub const ENDPOINT: &str = env_key!("ENDPOINT");
pub const CLIENT_TIMEOUT: &str = env_key!("CLIENT_TIMEOUT");
pub const CONFIG_FILE: &str = env_key!("CONFIG_FILE");
pub const LOG_FILE: &str = env_key!("LOG_FILE");
pub const CACHE_DIR: &str = env_key!("CACHE_DIR");
pub const THEME: &str = env_key!("THEME");
pub const FEED_ENTRIES_LIMIT: &str = env_key!("ENTRIES_LIMIT");
pub const FEED_BROWSER: &str = env_key!("BROWSER");
pub const FEED_BROWSER_ARGS: &str = env_key!("BROWSER_ARGS");
pub const ENABLE_GITHUB: &str = env_key!("ENABLE_GH");
pub const GITHUB_PAT: &str = env_key!("GH_PAT");
}
pub mod client {
use std::time::Duration;
pub const DEFAULT_TIMEOUT: Duration = Duration::from_secs(30);
pub const USER_AGENT: &str = concat!(env!("CARGO_PKG_NAME"), "/", env!("CARGO_PKG_VERSION"));
/// Number of entries to fetch in one request
pub const DEFAULT_ENTRIES_PER_PAGINATION: i64 = 200;
/// Number of feeds to fetch in one request
pub const DEFAULT_FEEDS_PER_PAGINATION: i64 = 50;
}
pub mod credential {
use std::time::Duration;
/// The `Duration` considered as expired before actually performing the refresh process
pub const EXPIRE_MARGIN: Duration = Duration::from_secs(60);
pub const FALLBACK_EXPIRE: Duration = Duration::from_secs(60 * 15);
}
pub mod feed {
use std::path::PathBuf;
/// Default entries limit to fetch
pub const DEFAULT_ENTRIES_LIMIT: usize = 200;
pub fn default_brower_command() -> PathBuf {
PathBuf::new()
}
}
pub mod cache {
use std::path::Path;
/// Credential cache file name
pub const CREDENTIAL_FILE: &str = "credential.json";
pub const GH_NOTIFICATION_FILTER_OPTION_FILE: &str = "gh_notification_filter_options.json";
pub fn dir() -> &'static Path {
super::project_dirs().cache_dir()
}
}
pub(crate) mod github {
use std::time::Duration;
/// GitHub pagination rest api is 1 origin
pub(crate) const INITIAL_PAGE_NUM: u8 = 1;
pub(crate) const NOTIFICATION_PER_PAGE: u8 = 40;
pub(crate) const CLIENT_TIMEOUT: Duration = Duration::from_secs(30);
}
pub(crate) mod theme {
use crate::cli::Palette;
pub(crate) const DEFAULT_PALETTE: Palette = Palette::Ferra;
}
pub fn log_path() -> PathBuf {
project_dirs().data_dir().join("synd.log")
}
pub fn config_path() -> PathBuf {
project_dirs().config_dir().join("config.toml")
}
fn project_dirs() -> &'static ProjectDirs {
static PROJECT_DIRS: OnceLock<ProjectDirs> = OnceLock::new();
PROJECT_DIRS.get_or_init(|| {
// Prioritizing consistency with Linux, the qualifier and organization have not been specified
ProjectDirs::from("", "", "syndicationd").expect("Failed to get project dirs")
})
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_term/src/config/resolver.rs | crates/synd_term/src/config/resolver.rs | use std::{
io::{self, ErrorKind},
path::PathBuf,
time::Duration,
};
use synd_stdx::{
conf::Entry,
fs::{FileSystem, fsimpl},
};
use thiserror::Error;
use url::Url;
use crate::{
cli::{self, ApiOptions, FeedOptions, GithubOptions},
config::{
self, Categories,
file::{ConfigFile, ConfigFileError},
},
ui::theme::Palette,
};
/// `ConfigResolver` is responsible for resolving the application's configration
/// while taking priority into account.
/// Specifically, it takes the following elements into account
/// with the first elements having the highest priority
/// * command line arguments
/// * environment variables
/// * configuration file
/// * default values
#[derive(Debug)]
pub struct ConfigResolver {
config_file: PathBuf,
log_file: Entry<PathBuf>,
cache_dir: Entry<PathBuf>,
api_endpoint: Entry<Url>,
api_timeout: Entry<Duration>,
feed_entries_limit: Entry<usize>,
feed_browser_command: Entry<PathBuf>,
feed_browser_args: Entry<Vec<String>>,
github_enable: Entry<bool>,
github_pat: Entry<String>,
palette: Entry<Palette>,
categories: Categories,
}
impl ConfigResolver {
pub fn builder() -> ConfigResolverBuilder {
ConfigResolverBuilder::default()
}
pub fn config_file(&self) -> PathBuf {
self.config_file.clone()
}
pub fn log_file(&self) -> PathBuf {
self.log_file.resolve_ref().clone()
}
pub fn cache_dir(&self) -> PathBuf {
self.cache_dir.resolve_ref().clone()
}
pub fn api_endpoint(&self) -> Url {
self.api_endpoint.resolve_ref().clone()
}
pub fn api_timeout(&self) -> Duration {
self.api_timeout.resolve()
}
pub fn feed_entries_limit(&self) -> usize {
self.feed_entries_limit.resolve()
}
pub fn feed_browser_command(&self) -> PathBuf {
self.feed_browser_command.resolve_ref().clone()
}
pub fn feed_browser_args(&self) -> Vec<String> {
self.feed_browser_args.resolve_ref().clone()
}
pub fn is_github_enable(&self) -> bool {
self.github_enable.resolve()
}
pub fn github_pat(&self) -> String {
self.github_pat.resolve_ref().clone()
}
pub fn palette(&self) -> Palette {
self.palette.resolve_ref().clone()
}
pub fn categories(&self) -> Categories {
self.categories.clone()
}
}
impl ConfigResolver {
/// performs validation based on the relationshsips between the various settings.
fn validate(self) -> Result<Self, ConfigResolverBuildError> {
if self.github_enable.resolve() && self.github_pat.resolve_ref().is_empty() {
return Err(ConfigResolverBuildError::ValidateConfigFile(
"github pat is required for github feature".into(),
));
}
Ok(self)
}
}
#[derive(Error, Debug)]
pub enum ConfigResolverBuildError {
#[error("failed to open {path} {err}")]
ConfigFileOpen { path: String, err: io::Error },
#[error(transparent)]
ConfigFileLoad(#[from] ConfigFileError),
#[error("invalid configration: {0}")]
ValidateConfigFile(String),
}
#[derive(Default)]
pub struct ConfigResolverBuilder<FS = fsimpl::FileSystem> {
config_file: Option<PathBuf>,
log_file_flag: Option<PathBuf>,
cache_dir_flag: Option<PathBuf>,
api_flags: Option<ApiOptions>,
feed_flags: Option<FeedOptions>,
github_flags: Option<GithubOptions>,
palette_flag: Option<cli::Palette>,
fs: FS,
}
impl ConfigResolverBuilder {
#[must_use]
pub fn config_file(self, config_file: Option<PathBuf>) -> Self {
Self {
config_file,
..self
}
}
#[must_use]
pub fn log_file(self, log_file_flag: Option<PathBuf>) -> Self {
Self {
log_file_flag,
..self
}
}
#[must_use]
pub fn cache_dir(self, cache_dir_flag: Option<PathBuf>) -> Self {
Self {
cache_dir_flag,
..self
}
}
#[must_use]
pub fn api_options(self, api_options: ApiOptions) -> Self {
Self {
api_flags: Some(api_options),
..self
}
}
#[must_use]
pub fn feed_options(self, feed_options: FeedOptions) -> Self {
Self {
feed_flags: Some(feed_options),
..self
}
}
#[must_use]
pub fn github_options(self, github_options: GithubOptions) -> Self {
Self {
github_flags: Some(github_options),
..self
}
}
#[must_use]
pub fn palette(self, palette: Option<cli::Palette>) -> Self {
Self {
palette_flag: palette,
..self
}
}
pub fn build(self) -> ConfigResolver {
self.try_build().expect("failed to build config resolver")
}
pub fn try_build(self) -> Result<ConfigResolver, ConfigResolverBuildError> {
let (mut config_file, config_path) = if let Some(path) = self.config_file {
// If a configuration file path is explicitly specified, search for that file
// and return an error if it is not found.
match self.fs.open_file(&path) {
Ok(f) => (Some(ConfigFile::new(f)?), path),
Err(err) => {
return Err(ConfigResolverBuildError::ConfigFileOpen {
path: path.display().to_string(),
err,
});
}
}
// If the path is not specified, builder search for the default path
// but will not return an error even if it is not found.
} else {
let default_path = config::config_path();
match self.fs.open_file(&default_path) {
Ok(f) => (Some(ConfigFile::new(f)?), default_path),
Err(err) => match err.kind() {
ErrorKind::NotFound => {
tracing::debug!(path = %default_path.display(), "default config file not found");
(None, default_path)
}
_ => {
return Err(ConfigResolverBuildError::ConfigFileOpen {
path: default_path.display().to_string(),
err,
});
}
},
}
};
// construct categories
let mut categories = Categories::default_toml();
if let Some(user_defined) = config_file.as_mut().and_then(|c| c.categories.take()) {
categories.merge(user_defined);
}
let ConfigResolverBuilder {
api_flags:
Some(ApiOptions {
endpoint,
client_timeout,
}),
feed_flags:
Some(FeedOptions {
entries_limit,
browser,
browser_args,
}),
github_flags:
Some(GithubOptions {
enable_github_notification,
github_pat,
}),
log_file_flag,
cache_dir_flag,
palette_flag,
..
} = self
else {
panic!()
};
let resolver = ConfigResolver {
config_file: config_path,
log_file: Entry::with_default(config::log_path())
.with_file(
config_file
.as_mut()
.and_then(|c| c.log.as_mut())
.and_then(|log| log.path.take()),
)
.with_flag(log_file_flag),
cache_dir: Entry::with_default(config::cache::dir().to_owned())
.with_file(
config_file
.as_mut()
.and_then(|c| c.cache.as_mut())
.and_then(|cache| cache.directory.take()),
)
.with_flag(cache_dir_flag),
api_endpoint: Entry::with_default(Url::parse(config::api::ENDPOINT).unwrap())
.with_file(
config_file
.as_mut()
.and_then(|c| c.api.as_mut())
.and_then(|api| api.endpoint.take()),
)
.with_flag(endpoint),
api_timeout: Entry::with_default(config::client::DEFAULT_TIMEOUT)
.with_file(
config_file
.as_mut()
.and_then(|c| c.api.as_mut())
.and_then(|api| api.timeout.take()),
)
.with_flag(client_timeout),
feed_entries_limit: Entry::with_default(config::feed::DEFAULT_ENTRIES_LIMIT)
.with_file(
config_file
.as_mut()
.and_then(|c| c.feed.as_mut())
.and_then(|feed| feed.entries_limit),
)
.with_flag(entries_limit),
feed_browser_command: Entry::with_default(config::feed::default_brower_command())
.with_file(
config_file
.as_mut()
.and_then(|c| c.feed.as_mut())
.and_then(|feed| feed.browser.as_mut())
.and_then(|brower| brower.command.take()),
)
.with_flag(browser),
feed_browser_args: Entry::with_default(Vec::new())
.with_file(
config_file
.as_mut()
.and_then(|c| c.feed.as_mut())
.and_then(|feed| feed.browser.as_mut())
.and_then(|brower| brower.args.take()),
)
.with_flag(browser_args),
github_enable: Entry::with_default(false)
.with_file(
config_file
.as_mut()
.and_then(|c| c.github.as_mut())
.and_then(|gh| gh.enable.take()),
)
.with_flag(enable_github_notification),
github_pat: Entry::with_default(String::new())
.with_file(
config_file
.as_mut()
.and_then(|c| c.github.as_mut())
.and_then(|gh| gh.pat.take()),
)
.with_flag(github_pat),
palette: Entry::with_default(config::theme::DEFAULT_PALETTE.into())
.with_file(
config_file
.as_mut()
.and_then(|c| c.theme.as_mut())
.and_then(|theme| theme.name.take())
.map(Into::into),
)
.with_flag(palette_flag.map(Into::into)),
categories,
};
resolver.validate()
}
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_term/src/cli/port.rs | crates/synd_term/src/cli/port.rs | use std::{path::PathBuf, time::Duration};
use anyhow::anyhow;
use url::Url;
use crate::{
application::{Cache, Clock, JwtService, SystemClock},
auth,
client::synd_api::Client,
};
pub(super) struct PortContext {
pub(super) client: Client,
}
impl PortContext {
pub(super) async fn new(endpoint: Url, cache_dir: PathBuf) -> anyhow::Result<Self> {
let mut client = Client::new(endpoint, Duration::from_secs(10))?;
let jwt_service = JwtService::new();
let cache = Cache::new(cache_dir);
let restore = auth::Restore {
jwt_service: &jwt_service,
cache: &cache,
now: SystemClock.now(),
persist_when_refreshed: false,
};
let credential = restore
.restore()
.await
.map_err(|_| anyhow!("You are not authenticated, try login in first"))?;
client.set_credential(credential);
Ok(Self { client })
}
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_term/src/cli/mod.rs | crates/synd_term/src/cli/mod.rs | use std::{path::PathBuf, time::Duration};
use clap::{Parser, Subcommand};
use serde::{Deserialize, Serialize};
use url::Url;
use crate::{config, ui::theme};
mod command;
mod port;
#[derive(Copy, Clone, PartialEq, Eq, Debug, clap::ValueEnum, Serialize, Deserialize)]
#[serde(rename_all(deserialize = "kebab-case"))]
pub enum Palette {
Dracula,
Eldritch,
Ferra,
SolarizedDark,
Helix,
}
impl From<Palette> for theme::Palette {
fn from(p: Palette) -> Self {
match p {
Palette::Dracula => theme::Palette::dracula(),
Palette::Eldritch => theme::Palette::eldritch(),
Palette::Ferra => theme::Palette::ferra(),
Palette::SolarizedDark => theme::Palette::solarized_dark(),
Palette::Helix => theme::Palette::helix(),
}
}
}
#[derive(Parser, Debug)]
#[command(version, propagate_version = true, name = "synd")]
pub struct Args {
/// Configuration file path
#[arg(long, short = 'c', env = config::env::CONFIG_FILE)]
pub config: Option<PathBuf>,
/// Log file path
#[arg(long, env = config::env::LOG_FILE)]
pub log: Option<PathBuf>,
/// Cache directory
#[arg(long, env = config::env::CACHE_DIR)]
pub cache_dir: Option<PathBuf>,
/// Color theme
#[arg(value_enum, long = "theme", env = config::env::THEME, value_name = "THEME")]
pub palette: Option<Palette>,
#[command(subcommand)]
pub command: Option<Command>,
#[command(flatten)]
pub api: ApiOptions,
#[command(flatten)]
pub feed: FeedOptions,
#[command(flatten)]
pub github: GithubOptions,
#[arg(hide = true, long = "dry-run", hide_long_help = true)]
pub dry_run: bool,
}
#[derive(clap::Args, Debug)]
#[command(next_help_heading = "Api options")]
pub struct ApiOptions {
/// `synd_api` endpoint
#[arg(long, global = true, env = config::env::ENDPOINT)]
pub endpoint: Option<Url>,
/// Client timeout(ex. 30s)
#[arg(long, value_parser = config::parse::flag::parse_duration_opt, env = config::env::CLIENT_TIMEOUT)]
pub client_timeout: Option<Duration>,
}
#[derive(clap::Args, Debug)]
#[command(next_help_heading = "Feed options")]
pub struct FeedOptions {
/// Feed entries limit to fetch
#[arg(long, aliases = ["max-entries"], env = config::env::FEED_ENTRIES_LIMIT)]
pub entries_limit: Option<usize>,
/// Browser command to open feed entry
#[arg(long, env = config::env::FEED_BROWSER)]
pub browser: Option<PathBuf>,
/// Args for launching the browser command
#[arg(long, env = config::env::FEED_BROWSER_ARGS)]
pub browser_args: Option<Vec<String>>,
}
#[derive(clap::Args, Debug)]
#[command(next_help_heading = "GitHub options")]
pub struct GithubOptions {
/// Enable GitHub notification feature
#[arg(
long,
short = 'G',
visible_alias = "enable-gh",
env = config::env::ENABLE_GITHUB,
)]
pub enable_github_notification: Option<bool>,
/// GitHub personal access token to fetch notifications
#[arg(
long,
env = config::env::GITHUB_PAT,
hide_env_values = true,
)]
pub github_pat: Option<String>,
}
#[derive(Subcommand, Debug)]
pub enum Command {
#[command(alias = "clear")]
Clean(command::clean::CleanCommand),
Check(command::check::CheckCommand),
Export(command::export::ExportCommand),
Import(command::import::ImportCommand),
Config(command::config::ConfigCommand),
}
pub fn parse() -> Args {
Args::parse()
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_term/src/cli/command/check.rs | crates/synd_term/src/cli/command/check.rs | use std::{io, path::Path, process::ExitCode, time::Duration};
use anyhow::Context;
use clap::Args;
use synd_o11y::health_check::Health;
use crate::{client::synd_api::Client, config::ConfigResolver};
#[derive(Copy, Clone, PartialEq, Eq, Debug, clap::ValueEnum)]
pub enum CheckFormat {
Human,
Json,
}
/// Check application conditions
#[derive(Args, Debug)]
pub struct CheckCommand {
#[arg(value_enum, long, default_value_t = CheckFormat::Human)]
pub format: CheckFormat,
}
impl CheckCommand {
#[allow(clippy::unused_self)]
pub async fn run(self, config: ConfigResolver) -> ExitCode {
if let Err(err) = self.check(config).await {
tracing::error!("{err:?}");
ExitCode::from(1)
} else {
ExitCode::SUCCESS
}
}
async fn check(self, config: ConfigResolver) -> anyhow::Result<()> {
let Self { format } = self;
let client = Client::new(config.api_endpoint(), Duration::from_secs(10))?;
let api_health = client
.health()
.await
.context("api health check")
.inspect_err(|err| eprintln!("{err:?}"))
.ok();
let cache_dir = config.cache_dir();
let log_path = config.log_file();
let config_path = config.config_file();
match format {
CheckFormat::Human => {
Self::print(
io::stdout(),
api_health,
&config_path,
&cache_dir,
log_path.as_path(),
)?;
}
CheckFormat::Json => {
let health = match api_health {
Some(health) => serde_json::json!(&health),
None => serde_json::json!("unknown"),
};
println!(
"{}",
serde_json::json!({
"api": health,
"config": config_path.display().to_string(),
"cache": cache_dir.display().to_string(),
"log": log_path.display().to_string(),
})
);
}
}
Ok(())
}
fn print(
mut writer: impl io::Write,
health: Option<Health>,
config_path: &Path,
cache_dir: &Path,
log_path: &Path,
) -> io::Result<()> {
let w = &mut writer;
writeln!(
w,
" Api Health: {}",
health
.as_ref()
.map_or("unknown".into(), |h| h.status.to_string())
)?;
writeln!(
w,
"Api Version: {}",
health.and_then(|h| h.version).unwrap_or("unknown".into())
)?;
writeln!(w, " Config: {}", config_path.display())?;
writeln!(w, " Cache: {}", cache_dir.display())?;
writeln!(w, " Log: {}", log_path.display())?;
Ok(())
}
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_term/src/cli/command/mod.rs | crates/synd_term/src/cli/command/mod.rs | pub mod check;
pub mod clean;
pub mod config;
pub mod export;
pub mod import;
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_term/src/cli/command/export.rs | crates/synd_term/src/cli/command/export.rs | use std::{path::PathBuf, process::ExitCode};
use clap::Args;
use schemars::JsonSchema;
use serde::Serialize;
use url::Url;
use crate::{cli::port::PortContext, config, types::ExportedFeed};
#[derive(Serialize, JsonSchema)]
struct Export {
feeds: Vec<ExportedFeed>,
}
/// Export subscribed feeds
#[derive(Args, Debug)]
pub struct ExportCommand {
/// Print exported data json schema
#[arg(
long,
default_value_t = false,
action = clap::ArgAction::SetTrue,
visible_alias = "print-json-schema",
)]
print_schema: bool,
/// Cache directory
#[arg(
long,
default_value = config::cache::dir().to_path_buf().into_os_string(),
)]
cache_dir: PathBuf,
}
impl ExportCommand {
pub async fn run(self, endpoint: Url) -> ExitCode {
let err = if self.print_schema {
Self::print_json_schema()
} else {
self.export(endpoint).await
};
if let Err(err) = err {
tracing::error!("{err:?}");
ExitCode::from(1)
} else {
ExitCode::SUCCESS
}
}
fn print_json_schema() -> anyhow::Result<()> {
let schema = schemars::schema_for!(Export);
serde_json::to_writer_pretty(std::io::stdout(), &schema).map_err(anyhow::Error::from)
}
async fn export(self, endpoint: Url) -> anyhow::Result<()> {
let cx = PortContext::new(endpoint, self.cache_dir).await?;
let mut after = None;
let mut exported_feeds = Vec::new();
loop {
let response = cx.client.export_subscription(after.take(), 50).await?;
exported_feeds.extend(response.feeds);
if !response.page_info.has_next_page {
break;
}
after = response.page_info.end_cursor;
}
let output = Export {
feeds: exported_feeds,
};
serde_json::to_writer_pretty(std::io::stdout(), &output)?;
Ok(())
}
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_term/src/cli/command/import.rs | crates/synd_term/src/cli/command/import.rs | use std::{
io,
path::{Path, PathBuf},
process::ExitCode,
time::Duration,
};
use clap::Args;
use either::Either;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use url::Url;
use crate::{
cli::port::PortContext,
client::synd_api::{
Client, SubscribeFeedError, SyndApiError, mutation::subscribe_feed::SubscribeFeedInput,
},
config,
types::{self, ExportedFeed},
};
#[derive(Serialize, Deserialize, JsonSchema)]
struct Input {
feeds: Vec<ExportedFeed>,
}
/// Import subscribed feeds
#[derive(Args, Debug)]
pub struct ImportCommand {
/// Print json schema for import data
#[arg(
long,
default_value_t = false,
action = clap::ArgAction::SetTrue,
visible_alias = "print-json-schema",
)]
print_schema: bool,
/// Cache directory
#[arg(
long,
default_value = config::cache::dir().to_path_buf().into_os_string(),
)]
cache_dir: PathBuf,
/// Path to input file, '-' means stdin.
#[arg()]
input: Option<PathBuf>,
}
impl ImportCommand {
pub async fn run(self, endpoint: Url) -> ExitCode {
let err = if self.print_schema {
Self::print_json_schema()
} else {
self.import(endpoint).await
};
if let Err(err) = err {
tracing::error!("{err:?}");
ExitCode::from(1)
} else {
ExitCode::SUCCESS
}
}
fn print_json_schema() -> anyhow::Result<()> {
let schema = schemars::schema_for!(Input);
serde_json::to_writer_pretty(std::io::stdout(), &schema).map_err(anyhow::Error::from)
}
async fn import(self, endpoint: Url) -> anyhow::Result<()> {
let input = match self.input {
Some(input) => Self::read_input(input.as_path())?,
None => {
anyhow::bail!("input file path required")
}
};
let cx = PortContext::new(endpoint, self.cache_dir).await?;
let import = Import {
client: cx.client,
input,
out: io::stdout(),
interval: Duration::from_millis(500),
};
import.import().await
}
fn read_input(path: &Path) -> anyhow::Result<Input> {
let src = if path == Path::new("-") {
Either::Left(std::io::stdin())
} else {
Either::Right(std::fs::File::open(path)?)
};
serde_json::from_reader(src).map_err(anyhow::Error::from)
}
}
#[cfg_attr(test, mockall::automock)]
trait SubscribeFeed {
async fn subscribe_feed(&self, input: SubscribeFeedInput) -> Result<types::Feed, SyndApiError>;
}
impl SubscribeFeed for Client {
async fn subscribe_feed(&self, input: SubscribeFeedInput) -> Result<types::Feed, SyndApiError> {
Client::subscribe_feed(self, input).await
}
}
/// Represents import process
struct Import<Client, Out> {
client: Client,
input: Input,
out: Out,
interval: Duration,
}
impl<Client, Out> Import<Client, Out>
where
Client: SubscribeFeed,
Out: io::Write,
{
async fn import(self) -> anyhow::Result<()> {
let Import {
client,
input,
mut out,
interval,
} = self;
let max_category_width = input
.feeds
.iter()
.map(|f| {
f.category
.as_ref()
.map_or(0, |c| c.as_str().chars().count())
})
.max()
.unwrap_or(0);
let feeds_count = input.feeds.len();
let mut ok: usize = 0;
let mut interval = tokio::time::interval(interval);
for feed in input.feeds {
interval.tick().await;
let url = feed.url.clone();
match client.subscribe_feed(SubscribeFeedInput::from(feed)).await {
Ok(imported) => {
writeln!(
&mut out,
"OK {req:<6} {category:<cat_width$} {url}",
req = imported.requirement(),
category = imported.category(),
cat_width = max_category_width,
url = imported.url,
)?;
ok = ok.saturating_add(1);
}
Err(SyndApiError::SubscribeFeed(SubscribeFeedError::FeedUnavailable {
feed_url,
message,
})) => {
writeln!(&mut out, "ERROR {feed_url} {message}",)?;
}
Err(err) => {
writeln!(&mut out, "ERROR {url} {err}")?;
}
}
}
writeln!(&mut out, "{ok}/{feeds_count} feeds successfully subscribed")?;
Ok(())
}
}
#[cfg(test)]
mod tests {
use std::time::{Duration, Instant};
use super::*;
use fake::{Fake as _, Faker};
use synd_feed::types::{Category, FeedUrl, Requirement};
#[tokio::test]
async fn usecase() {
let url_ok1: FeedUrl = "https://ok1.ymgyt.io/feed.xml".try_into().unwrap();
let url_ok2: FeedUrl = "https://ok2.ymgyt.io/feed.xml".try_into().unwrap();
let url_unavailable: FeedUrl = "https://err_unavailable.ymgyt.io/feed.xml"
.try_into()
.unwrap();
let cat_rust = Category::new("rust").unwrap();
let cat_long = Category::new("longcategory").unwrap();
let input = Input {
feeds: vec![
ExportedFeed {
title: Some(String::from("ok1")),
url: url_ok1.clone(),
requirement: Some(Requirement::Must),
category: Some(cat_rust.clone()),
},
ExportedFeed {
title: Some(String::from("err unuvailable")),
url: url_unavailable.clone(),
requirement: Some(Requirement::Must),
category: Some(cat_rust.clone()),
},
ExportedFeed {
title: Some(String::from("ok2")),
url: url_ok2.clone(),
requirement: Some(Requirement::Should),
category: Some(cat_long.clone()),
},
],
};
let base_feed: types::Feed = Faker.fake();
let interval = Duration::from_millis(100);
let mut prev = None;
let mut client = MockSubscribeFeed::new();
client.expect_subscribe_feed().returning(move |input| {
let now = Instant::now();
if let Some(prev) = prev {
assert!(
// Dut to insability in the CI execution
// the interval assertion has been relaxed
now.duration_since(prev) >= (interval - Duration::from_millis(50)),
"the interval between requests is too short"
);
}
prev = Some(now);
match input.url.as_str() {
"https://ok1.ymgyt.io/feed.xml" => Ok(base_feed
.clone()
.with_url(url_ok1.clone())
.with_requirement(Requirement::Must)
.with_category(cat_rust.clone())),
"https://ok2.ymgyt.io/feed.xml" => Ok(base_feed
.clone()
.with_url(url_ok2.clone())
.with_requirement(Requirement::Should)
.with_category(cat_long.clone())),
"https://err_unavailable.ymgyt.io/feed.xml" => Err(SyndApiError::SubscribeFeed(
SubscribeFeedError::FeedUnavailable {
feed_url: url_unavailable.clone(),
message: "server return 500 error".into(),
},
)),
_ => panic!(),
}
});
let mut out = Vec::new();
let import = Import {
client,
input,
out: &mut out,
interval,
};
import.import().await.unwrap();
let buf = String::from_utf8_lossy(out.as_slice());
insta::with_settings!({
description => "import command output"
}, {
insta::assert_snapshot!("import_usecase",buf);
});
}
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_term/src/cli/command/clean.rs | crates/synd_term/src/cli/command/clean.rs | use std::{
io::ErrorKind,
path::{Path, PathBuf},
process::ExitCode,
};
use anyhow::Context;
use clap::Args;
use synd_stdx::fs::FileSystem;
use crate::{application::Cache, config};
/// Clean cache and logs
#[derive(Args, Debug)]
pub struct CleanCommand {
/// Cache directory
#[arg(
long,
default_value = config::cache::dir().to_path_buf().into_os_string(),
)]
cache_dir: PathBuf,
}
impl CleanCommand {
#[allow(clippy::unused_self)]
pub fn run<FS>(self, fs: &FS) -> ExitCode
where
FS: FileSystem + Clone,
{
ExitCode::from(self.clean(fs, config::log_path().as_path()))
}
fn clean<FS>(self, fs: &FS, log: &Path) -> u8
where
FS: FileSystem + Clone,
{
if let Err(err) = self.try_clean(fs, log) {
tracing::error!("{err}");
1
} else {
0
}
}
fn try_clean<FS>(self, fs: &FS, log: &Path) -> anyhow::Result<()>
where
FS: FileSystem + Clone,
{
let CleanCommand { cache_dir } = self;
let cache = Cache::with(&cache_dir, fs.clone());
cache
.clean()
.map_err(anyhow::Error::from)
.with_context(|| format!("path: {}", cache_dir.display()))?;
// remove log
match fs.remove_file(log) {
Ok(()) => {
tracing::info!("Remove {}", log.display());
}
Err(err) => match err.kind() {
ErrorKind::NotFound => {}
_ => {
return Err(anyhow::Error::from(err))
.with_context(|| format!("path: {}", log.display()));
}
},
}
Ok(())
}
}
#[cfg(test)]
mod tests {
use std::io;
use synd_stdx::fs::fsimpl;
use tempfile::{NamedTempFile, TempDir};
use crate::filesystem::mock::MockFileSystem;
use super::*;
#[test]
fn remove_log_file() {
let clean = CleanCommand {
cache_dir: TempDir::new().unwrap().keep(),
};
let log_file = NamedTempFile::new().unwrap();
let exit_code = clean.clean(&fsimpl::FileSystem::new(), log_file.path());
assert_eq!(exit_code, 0);
assert!(!log_file.path().exists());
}
#[test]
fn ignore_log_file_not_found() {
let clean = CleanCommand {
cache_dir: TempDir::new().unwrap().keep(),
};
let log_file = Path::new("./not_exists");
let fs = MockFileSystem::default().with_remove_errors(log_file, io::ErrorKind::NotFound);
let exit_code = clean.clean(&fs, log_file);
assert_eq!(exit_code, 0);
}
#[test]
fn exit_code_on_permission_error() {
let clean = CleanCommand {
cache_dir: TempDir::new().unwrap().keep(),
};
let log_file = Path::new("./not_allowed");
let fs =
MockFileSystem::default().with_remove_errors(log_file, io::ErrorKind::PermissionDenied);
let exit_code = clean.clean(&fs, log_file);
assert_eq!(exit_code, 1);
}
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_term/src/cli/command/config/mod.rs | crates/synd_term/src/cli/command/config/mod.rs | use std::process::ExitCode;
use clap::{Args, Subcommand};
mod init;
/// Manage configurations
#[derive(Args, Debug)]
pub struct ConfigCommand {
#[command(subcommand)]
pub command: ConfigSubcommand,
}
#[derive(Subcommand, Debug)]
pub enum ConfigSubcommand {
Init(init::ConfigInitCommand),
}
impl ConfigCommand {
pub fn run(self) -> ExitCode {
let ConfigCommand { command } = self;
match command {
ConfigSubcommand::Init(init) => init.run(),
}
}
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_term/src/cli/command/config/init.rs | crates/synd_term/src/cli/command/config/init.rs | use std::process::ExitCode;
use clap::Args;
use crate::config;
/// Print configuration template
#[derive(Args, Debug)]
pub struct ConfigInitCommand {}
impl ConfigInitCommand {
#[allow(clippy::unused_self)]
pub fn run(self) -> ExitCode {
print!("{}", config::INIT_CONFIG.trim_start().trim_end());
ExitCode::SUCCESS
}
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_term/src/client/mod.rs | crates/synd_term/src/client/mod.rs | pub mod github;
pub mod synd_api;
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_term/src/client/github/mod.rs | crates/synd_term/src/client/github/mod.rs | use graphql_client::GraphQLQuery;
use octocrab::Octocrab;
use serde::{Deserialize, Serialize};
use thiserror::Error;
use crate::{
config,
types::github::{
IssueContext, IssueId, Notification, NotificationContext, NotificationId,
PullRequestContext, PullRequestId, RepositoryKey, ThreadId,
},
};
#[derive(Debug, Error)]
pub enum GithubError {
#[error("invalid credential. please make sure a valid PAT is set")]
BadCredential,
// https://docs.github.com/en/rest/using-the-rest-api/rate-limits-for-the-rest-api?apiVersion=2022-11-28#about-secondary-rate-limits
#[error("secondary rate limits exceeded")]
SecondaryRateLimit,
#[error("not found: {0}")]
NotFound(String),
#[error("graphql: {0:?}")]
Graphql(Vec<graphql_client::Error>),
#[error("github api error: {0}")]
Api(Box<octocrab::Error>),
}
impl From<octocrab::Error> for GithubError {
fn from(err: octocrab::Error) -> Self {
match &err {
octocrab::Error::GitHub { source, .. } => match source.status_code.as_u16() {
401 => GithubError::BadCredential,
403 if source.message.contains("secondary rate limit") => {
GithubError::SecondaryRateLimit
}
_ => GithubError::Api(Box::new(err)),
},
_ => GithubError::Api(Box::new(err)),
}
}
}
impl From<Vec<graphql_client::Error>> for GithubError {
fn from(err: Vec<graphql_client::Error>) -> Self {
GithubError::Graphql(err)
}
}
#[derive(Clone)]
pub struct GithubClient {
client: Octocrab,
}
impl GithubClient {
pub fn new(pat: impl Into<String>) -> Result<Self, GithubError> {
let pat = pat.into();
if pat.is_empty() {
return Err(GithubError::BadCredential);
}
let timeout = Some(config::github::CLIENT_TIMEOUT);
let octo = Octocrab::builder()
.personal_token(pat)
.set_connect_timeout(timeout)
.set_read_timeout(timeout)
.set_write_timeout(timeout)
.build()
.unwrap();
Ok(Self::with(octo))
}
#[must_use]
pub fn with(client: Octocrab) -> Self {
Self { client }
}
pub(crate) async fn mark_thread_as_done(&self, id: NotificationId) -> Result<(), GithubError> {
self.client
.activity()
.notifications()
.mark_as_read(id)
.await
.map_err(GithubError::from)
}
pub(crate) async fn unsubscribe_thread(&self, id: ThreadId) -> Result<(), GithubError> {
// The reasons for not using the `set_thread_subscription` method of `NotificationHandler` are twofold:
// 1. Since the API require the PUT method, but it is implemented using GET, it results in a "Not found" error.
// 2. During the deserialization of the `ThreadSubscription` response type, an empty string is assigned to the reason, causing an error when deserializing the `Reason` enum.
// https://github.com/XAMPPRocky/octocrab/pull/661
#[derive(serde::Serialize)]
struct Inner {
ignored: bool,
}
#[derive(serde::Deserialize)]
struct Response {}
let thread = id;
let ignored = true;
let route = format!("/notifications/threads/{thread}/subscription");
let body = Inner { ignored };
self.client
.put::<Response, _, _>(route, Some(&body))
.await?;
Ok(())
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize, Serialize)]
pub(crate) enum FetchNotificationInclude {
/// Fetch only unread notifications
OnlyUnread,
All,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize, Serialize)]
pub(crate) enum FetchNotificationParticipating {
/// Fetch only participating notifications
OnlyParticipating,
All,
}
#[derive(Debug, Clone)]
pub(crate) struct FetchNotificationsParams {
pub(crate) page: u8,
pub(crate) include: FetchNotificationInclude,
pub(crate) participating: FetchNotificationParticipating,
}
impl GithubClient {
#[tracing::instrument(skip(self))]
pub(crate) async fn fetch_notifications(
&self,
FetchNotificationsParams {
page,
include,
participating,
}: FetchNotificationsParams,
) -> Result<Vec<Notification>, GithubError> {
let mut page = self
.client
.activity()
.notifications()
.list()
.participating(participating == FetchNotificationParticipating::OnlyParticipating)
.all(include == FetchNotificationInclude::All)
.page(page) // 1 Origin
.per_page(config::github::NOTIFICATION_PER_PAGE)
.send()
.await?;
let notifications: Vec<_> = page
.take_items()
.into_iter()
.map(Notification::from)
.collect();
tracing::debug!(
"Fetch {} github notifications: {page:?}",
notifications.len()
);
Ok(notifications)
}
}
#[derive(GraphQLQuery)]
#[graphql(
schema_path = "src/client/github/schema.json",
query_path = "src/client/github/issue_query.gql",
variables_derives = "Clone, Debug",
response_derives = "Clone, Debug"
)]
pub(crate) struct IssueQuery;
impl GithubClient {
pub(crate) async fn fetch_issue(
&self,
NotificationContext {
id,
repository_key: RepositoryKey { name, owner },
..
}: NotificationContext<IssueId>,
) -> Result<IssueContext, GithubError> {
let response: octocrab::Result<graphql_client::Response<issue_query::ResponseData>> = self
.client
.graphql(&IssueQuery::build_query(issue_query::Variables {
repository_owner: owner,
repository_name: name,
issue_number: id.into_inner(),
}))
.await;
match response {
Ok(response) => match (response.data, response.errors) {
(_, Some(errors)) => {
tracing::error!("{errors:?}");
Err(err::handle_gql_error(errors))
}
(Some(data), _) => Ok(IssueContext::from(data)),
_ => unreachable!(),
},
Err(error) => Err(GithubError::from(error)),
}
}
}
#[derive(GraphQLQuery)]
#[graphql(
schema_path = "src/client/github/schema.json",
query_path = "src/client/github/pull_request_query.gql",
variables_derives = "Clone, Debug",
response_derives = "Clone, Debug"
)]
pub(crate) struct PullRequestQuery;
impl GithubClient {
pub(crate) async fn fetch_pull_request(
&self,
NotificationContext {
id,
repository_key: RepositoryKey { name, owner },
..
}: NotificationContext<PullRequestId>,
) -> Result<PullRequestContext, GithubError> {
let response: octocrab::Result<graphql_client::Response<pull_request_query::ResponseData>> =
self.client
.graphql(&PullRequestQuery::build_query(
pull_request_query::Variables {
repository_owner: owner,
repository_name: name,
pull_request_number: id.into_inner(),
},
))
.await;
match response {
Ok(response) => match (response.data, response.errors) {
(_, Some(errors)) => {
tracing::error!("{errors:?}");
Err(err::handle_gql_error(errors))
}
(Some(data), _) => Ok(PullRequestContext::from(data)),
_ => unreachable!(),
},
Err(error) => Err(GithubError::from(error)),
}
}
}
mod err {
use graphql_client::PathFragment;
use crate::client::github::GithubError;
pub(super) fn handle_gql_error(mut errors: Vec<graphql_client::Error>) -> GithubError {
if errors.len() == 1 && is_repository_not_found(&errors[0]) {
return GithubError::NotFound(errors.swap_remove(0).message);
}
GithubError::from(errors)
}
pub(super) fn is_repository_not_found(err: &graphql_client::Error) -> bool {
err.message.starts_with("Could not resolve to a Repository")
&& err.path.as_ref().is_some_and(|p| {
p.iter()
.any(|p| matches!(p, PathFragment::Key(key) if key == "repository"))
})
}
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_term/src/client/synd_api/scalar.rs | crates/synd_term/src/client/synd_api/scalar.rs | pub type Category = synd_feed::types::Category<'static>;
pub type FeedUrl = synd_feed::types::FeedUrl;
pub type Rfc3339Time = String;
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_term/src/client/synd_api/mod.rs | crates/synd_term/src/client/synd_api/mod.rs | use std::{fmt::Debug, time::Duration};
use anyhow::anyhow;
use graphql_client::{GraphQLQuery, Response};
use reqwest::header::{self, HeaderValue};
use serde::{Serialize, de::DeserializeOwned};
use synd_o11y::{health_check::Health, opentelemetry::extension::*};
use thiserror::Error;
use tracing::Span;
use url::Url;
use crate::{
auth::{Credential, Verified},
client::synd_api::payload::ExportSubscriptionPayload,
config, types,
};
use self::query::subscription::SubscriptionOutput;
mod scalar;
pub use scalar::*;
#[path = "generated/mutation.rs"]
pub mod mutation;
pub mod payload;
#[path = "generated/query.rs"]
pub mod query;
#[derive(Error, Debug)]
pub enum SubscribeFeedError {
#[error("invalid feed url: `{feed_url}` ({message})`")]
InvalidFeedUrl { feed_url: FeedUrl, message: String },
#[error("{feed_url} {message}")]
FeedUnavailable { feed_url: FeedUrl, message: String },
}
#[derive(Error, Debug)]
pub enum SyndApiError {
#[error("unauthorized")]
Unauthorized { url: Option<Url> },
#[error(transparent)]
BuildRequest(#[from] reqwest::Error),
#[error("graphql error: {errors:?}")]
Graphql { errors: Vec<graphql_client::Error> },
#[error(transparent)]
SubscribeFeed(SubscribeFeedError),
#[error(transparent)]
Internal(#[from] anyhow::Error),
}
/// synd-api client
#[derive(Clone)]
pub struct Client {
#[expect(clippy::struct_field_names)]
client: reqwest::Client,
credential: Option<HeaderValue>,
endpoint: Url,
}
impl Client {
const GRAPHQL: &'static str = "/graphql";
const HEALTH_CHECK: &'static str = "/health";
pub fn new(endpoint: Url, timeout: Duration) -> anyhow::Result<Self> {
let client = reqwest::ClientBuilder::new()
.user_agent(config::client::USER_AGENT)
.timeout(timeout)
.connect_timeout(Duration::from_secs(10))
// this client specifically targets the syndicationd api, so accepts self signed certificates
.danger_accept_invalid_certs(true)
.build()?;
Ok(Self {
client,
endpoint,
credential: None,
})
}
pub(crate) fn set_credential(&mut self, cred: Verified<Credential>) {
let mut token = HeaderValue::try_from(match cred.into_inner() {
Credential::Github { access_token } => format!("github {access_token}"),
Credential::Google { id_token, .. } => format!("google {id_token}"),
})
.unwrap();
token.set_sensitive(true);
self.credential = Some(token);
}
#[tracing::instrument(skip(self))]
pub async fn fetch_subscription(
&self,
after: Option<String>,
first: Option<i64>,
) -> Result<SubscriptionOutput, SyndApiError> {
let var = query::subscription::Variables { after, first };
let request = query::Subscription::build_query(var);
let response: query::subscription::ResponseData = self.request(&request).await?;
Ok(response.output)
}
#[tracing::instrument(skip(self))]
pub async fn subscribe_feed(
&self,
input: mutation::subscribe_feed::SubscribeFeedInput,
) -> Result<types::Feed, SyndApiError> {
use crate::client::synd_api::mutation::subscribe_feed::ResponseCode;
let url = input.url.clone();
let var = mutation::subscribe_feed::Variables {
subscribe_input: input,
};
let request = mutation::SubscribeFeed::build_query(var);
let response: mutation::subscribe_feed::ResponseData = self.request(&request).await?;
match response.subscribe_feed {
mutation::subscribe_feed::SubscribeFeedSubscribeFeed::SubscribeFeedSuccess(success) => {
Ok(types::Feed::from(success.feed))
}
mutation::subscribe_feed::SubscribeFeedSubscribeFeed::SubscribeFeedError(err) => {
match err.status.code {
ResponseCode::OK => unreachable!(),
ResponseCode::INVALID_FEED_URL => Err(SyndApiError::SubscribeFeed(
SubscribeFeedError::InvalidFeedUrl {
feed_url: url,
message: err.message,
},
)),
ResponseCode::FEED_UNAVAILABLE => Err(SyndApiError::SubscribeFeed(
SubscribeFeedError::FeedUnavailable {
feed_url: url,
message: err.message,
},
)),
err_code => Err(SyndApiError::Internal(anyhow::anyhow!(
"Unexpected subscribe_feed error code: {err_code:?}"
))),
}
}
}
}
#[tracing::instrument(skip(self))]
pub async fn unsubscribe_feed(&self, url: FeedUrl) -> Result<(), SyndApiError> {
let var = mutation::unsubscribe_feed::Variables {
unsubscribe_input: mutation::unsubscribe_feed::UnsubscribeFeedInput { url },
};
let request = mutation::UnsubscribeFeed::build_query(var);
let response: mutation::unsubscribe_feed::ResponseData = self.request(&request).await?;
match response.unsubscribe_feed {
mutation::unsubscribe_feed::UnsubscribeFeedUnsubscribeFeed::UnsubscribeFeedSuccess(
_,
) => Ok(()),
mutation::unsubscribe_feed::UnsubscribeFeedUnsubscribeFeed::UnsubscribeFeedError(
err,
) => Err(SyndApiError::Internal(anyhow!(
"Failed to mutate unsubscribe_feed {err:?}"
))),
}
}
#[tracing::instrument(skip(self))]
pub async fn fetch_entries(
&self,
after: Option<String>,
first: i64,
) -> Result<payload::FetchEntriesPayload, SyndApiError> {
tracing::debug!("Fetch entries...");
let var = query::entries::Variables { after, first };
let request = query::Entries::build_query(var);
let response: query::entries::ResponseData = self.request(&request).await?;
tracing::debug!("Got response");
Ok(response.output.into())
}
#[tracing::instrument(skip(self))]
pub async fn export_subscription(
&self,
after: Option<String>,
first: i64,
) -> anyhow::Result<ExportSubscriptionPayload> {
let var = query::export_subscription::Variables { after, first };
let request = query::ExportSubscription::build_query(var);
let response: query::export_subscription::ResponseData = self.request(&request).await?;
Ok(response.output.into())
}
#[tracing::instrument(skip_all, err(Display))]
async fn request<Body, ResponseData>(&self, body: &Body) -> Result<ResponseData, SyndApiError>
where
Body: Serialize + Debug + ?Sized,
ResponseData: DeserializeOwned + Debug,
{
let mut request = self
.client
.post(self.endpoint.join(Self::GRAPHQL).unwrap())
.header(
header::AUTHORIZATION,
self.credential
.as_ref()
.expect("Credential not configured. this is a BUG")
.clone(),
)
.json(body)
.build()
.map_err(SyndApiError::BuildRequest)?;
synd_o11y::opentelemetry::http::inject_with_baggage(
&Span::current().context(),
request.headers_mut(),
std::iter::once(synd_o11y::request_id_key_value()),
);
tracing::debug!(url = request.url().as_str(), "Send request");
let response: Response<ResponseData> = self
.client
.execute(request)
.await?
.error_for_status()
.map_err(|err| match err.status().map(|s| s.as_u16()) {
Some(401) => SyndApiError::Unauthorized {
url: err.url().cloned(),
},
_ => SyndApiError::Internal(anyhow::Error::from(err)),
})?
.json()
.await?;
match (response.data, response.errors) {
(_, Some(errors)) if !errors.is_empty() => Err(SyndApiError::Graphql { errors }),
(Some(data), _) => Ok(data),
_ => Err(SyndApiError::Internal(anyhow!(
"Unexpected error. response does not contain data and errors"
))),
}
}
// call health check api
pub async fn health(&self) -> anyhow::Result<Health> {
self.client
.get(self.endpoint.join(Self::HEALTH_CHECK).unwrap())
.send()
.await?
.error_for_status()?
.json()
.await
.map_err(anyhow::Error::from)
}
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_term/src/client/synd_api/payload.rs | crates/synd_term/src/client/synd_api/payload.rs | use crate::{client::synd_api::query, types};
#[derive(Debug, Clone)]
pub struct FetchEntriesPayload {
pub entries: Vec<types::Entry>,
pub page_info: types::PageInfo,
}
impl From<query::entries::EntriesOutput> for FetchEntriesPayload {
fn from(v: query::entries::EntriesOutput) -> Self {
let page_info = v.entries.page_info.into();
let entries = v.entries.nodes.into_iter().map(Into::into).collect();
Self { entries, page_info }
}
}
pub struct ExportSubscriptionPayload {
pub feeds: Vec<types::ExportedFeed>,
pub page_info: types::PageInfo,
}
impl From<query::export_subscription::ExportSubscriptionOutput> for ExportSubscriptionPayload {
fn from(v: query::export_subscription::ExportSubscriptionOutput) -> Self {
Self {
feeds: v.feeds.nodes.into_iter().map(Into::into).collect(),
page_info: v.feeds.page_info.into(),
}
}
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_term/src/client/synd_api/generated/mutation.rs | crates/synd_term/src/client/synd_api/generated/mutation.rs | #![allow(clippy::all, warnings)]
pub struct SubscribeFeed;
pub mod subscribe_feed {
#![allow(dead_code)]
use std::result::Result;
pub const OPERATION_NAME: &str = "SubscribeFeed";
pub const QUERY: &str = "mutation SubscribeFeed($subscribeInput: SubscribeFeedInput!) {\n subscribeFeed(input: $subscribeInput) {\n __typename\n ... on SubscribeFeedSuccess {\n feed {\n ...Feed\n }\n status {\n code\n }\n }\n ... on SubscribeFeedError {\n status {\n code\n }\n message\n }\n }\n}\n\nmutation UnsubscribeFeed($unsubscribeInput: UnsubscribeFeedInput!) {\n unsubscribeFeed(input: $unsubscribeInput) {\n __typename\n ... on UnsubscribeFeedSuccess {\n status {\n code\n }\n }\n ... on UnsubscribeFeedError {\n status {\n code\n }\n }\n }\n}\n\nfragment Feed on Feed {\n id\n type\n title\n url\n updated\n websiteUrl\n description\n generator\n requirement\n category\n entries(first: 20) {\n nodes {\n ...EntryMeta\n }\n }\n links {\n nodes {\n ...Link\n }\n }\n authors {\n nodes\n }\n}\n\nfragment EntryMeta on Entry {\n title\n published\n updated\n summary\n}\n\nfragment Link on Link {\n href\n rel\n mediaType\n title \n}\n";
use super::*;
use serde::{Deserialize, Serialize};
#[allow(dead_code)]
type Boolean = bool;
#[allow(dead_code)]
type Float = f64;
#[allow(dead_code)]
type Int = i64;
#[allow(dead_code)]
type ID = String;
type Category = crate::client::synd_api::scalar::Category;
type FeedUrl = crate::client::synd_api::scalar::FeedUrl;
type Rfc3339Time = crate::client::synd_api::scalar::Rfc3339Time;
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum FeedType {
ATOM,
JSON,
RSS0,
RSS1,
RSS2,
Other(String),
}
impl ::serde::Serialize for FeedType {
fn serialize<S: serde::Serializer>(&self, ser: S) -> Result<S::Ok, S::Error> {
ser.serialize_str(match *self {
FeedType::ATOM => "ATOM",
FeedType::JSON => "JSON",
FeedType::RSS0 => "RSS0",
FeedType::RSS1 => "RSS1",
FeedType::RSS2 => "RSS2",
FeedType::Other(ref s) => &s,
})
}
}
impl<'de> ::serde::Deserialize<'de> for FeedType {
fn deserialize<D: ::serde::Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
let s: String = ::serde::Deserialize::deserialize(deserializer)?;
match s.as_str() {
"ATOM" => Ok(FeedType::ATOM),
"JSON" => Ok(FeedType::JSON),
"RSS0" => Ok(FeedType::RSS0),
"RSS1" => Ok(FeedType::RSS1),
"RSS2" => Ok(FeedType::RSS2),
_ => Ok(FeedType::Other(s)),
}
}
}
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum Requirement {
MUST,
SHOULD,
MAY,
Other(String),
}
impl ::serde::Serialize for Requirement {
fn serialize<S: serde::Serializer>(&self, ser: S) -> Result<S::Ok, S::Error> {
ser.serialize_str(match *self {
Requirement::MUST => "MUST",
Requirement::SHOULD => "SHOULD",
Requirement::MAY => "MAY",
Requirement::Other(ref s) => &s,
})
}
}
impl<'de> ::serde::Deserialize<'de> for Requirement {
fn deserialize<D: ::serde::Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
let s: String = ::serde::Deserialize::deserialize(deserializer)?;
match s.as_str() {
"MUST" => Ok(Requirement::MUST),
"SHOULD" => Ok(Requirement::SHOULD),
"MAY" => Ok(Requirement::MAY),
_ => Ok(Requirement::Other(s)),
}
}
}
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum ResponseCode {
OK,
UNAUTHORIZED,
INVALID_FEED_URL,
FEED_UNAVAILABLE,
INTERNAL_ERROR,
Other(String),
}
impl ::serde::Serialize for ResponseCode {
fn serialize<S: serde::Serializer>(&self, ser: S) -> Result<S::Ok, S::Error> {
ser.serialize_str(match *self {
ResponseCode::OK => "OK",
ResponseCode::UNAUTHORIZED => "UNAUTHORIZED",
ResponseCode::INVALID_FEED_URL => "INVALID_FEED_URL",
ResponseCode::FEED_UNAVAILABLE => "FEED_UNAVAILABLE",
ResponseCode::INTERNAL_ERROR => "INTERNAL_ERROR",
ResponseCode::Other(ref s) => &s,
})
}
}
impl<'de> ::serde::Deserialize<'de> for ResponseCode {
fn deserialize<D: ::serde::Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
let s: String = ::serde::Deserialize::deserialize(deserializer)?;
match s.as_str() {
"OK" => Ok(ResponseCode::OK),
"UNAUTHORIZED" => Ok(ResponseCode::UNAUTHORIZED),
"INVALID_FEED_URL" => Ok(ResponseCode::INVALID_FEED_URL),
"FEED_UNAVAILABLE" => Ok(ResponseCode::FEED_UNAVAILABLE),
"INTERNAL_ERROR" => Ok(ResponseCode::INTERNAL_ERROR),
_ => Ok(ResponseCode::Other(s)),
}
}
}
#[derive(Serialize, Debug, Clone, PartialEq, Eq)]
pub struct SubscribeFeedInput {
pub url: FeedUrl,
pub requirement: Option<Requirement>,
pub category: Option<Category>,
}
#[derive(Serialize, Debug, Clone, PartialEq, Eq)]
pub struct Variables {
#[serde(rename = "subscribeInput")]
pub subscribe_input: SubscribeFeedInput,
}
impl Variables {}
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct Feed {
pub id: ID,
#[serde(rename = "type")]
pub type_: FeedType,
pub title: Option<String>,
pub url: FeedUrl,
pub updated: Option<Rfc3339Time>,
#[serde(rename = "websiteUrl")]
pub website_url: Option<String>,
pub description: Option<String>,
pub generator: Option<String>,
pub requirement: Option<Requirement>,
pub category: Option<Category>,
pub entries: FeedEntries,
pub links: FeedLinks,
pub authors: FeedAuthors,
}
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct FeedEntries {
pub nodes: Vec<FeedEntriesNodes>,
}
pub type FeedEntriesNodes = EntryMeta;
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct FeedLinks {
pub nodes: Vec<FeedLinksNodes>,
}
pub type FeedLinksNodes = Link;
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct FeedAuthors {
pub nodes: Vec<String>,
}
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct EntryMeta {
pub title: Option<String>,
pub published: Option<Rfc3339Time>,
pub updated: Option<Rfc3339Time>,
pub summary: Option<String>,
}
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct Link {
pub href: String,
pub rel: Option<String>,
#[serde(rename = "mediaType")]
pub media_type: Option<String>,
pub title: Option<String>,
}
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct ResponseData {
#[serde(rename = "subscribeFeed")]
pub subscribe_feed: SubscribeFeedSubscribeFeed,
}
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
#[serde(tag = "__typename")]
pub enum SubscribeFeedSubscribeFeed {
SubscribeFeedSuccess(SubscribeFeedSubscribeFeedOnSubscribeFeedSuccess),
SubscribeFeedError(SubscribeFeedSubscribeFeedOnSubscribeFeedError),
}
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct SubscribeFeedSubscribeFeedOnSubscribeFeedSuccess {
pub feed: SubscribeFeedSubscribeFeedOnSubscribeFeedSuccessFeed,
pub status: SubscribeFeedSubscribeFeedOnSubscribeFeedSuccessStatus,
}
pub type SubscribeFeedSubscribeFeedOnSubscribeFeedSuccessFeed = Feed;
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct SubscribeFeedSubscribeFeedOnSubscribeFeedSuccessStatus {
pub code: ResponseCode,
}
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct SubscribeFeedSubscribeFeedOnSubscribeFeedError {
pub status: SubscribeFeedSubscribeFeedOnSubscribeFeedErrorStatus,
pub message: String,
}
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct SubscribeFeedSubscribeFeedOnSubscribeFeedErrorStatus {
pub code: ResponseCode,
}
}
impl graphql_client::GraphQLQuery for SubscribeFeed {
type Variables = subscribe_feed::Variables;
type ResponseData = subscribe_feed::ResponseData;
fn build_query(variables: Self::Variables) -> ::graphql_client::QueryBody<Self::Variables> {
graphql_client::QueryBody {
variables,
query: subscribe_feed::QUERY,
operation_name: subscribe_feed::OPERATION_NAME,
}
}
}
pub struct UnsubscribeFeed;
pub mod unsubscribe_feed {
#![allow(dead_code)]
use std::result::Result;
pub const OPERATION_NAME: &str = "UnsubscribeFeed";
pub const QUERY: &str = "mutation SubscribeFeed($subscribeInput: SubscribeFeedInput!) {\n subscribeFeed(input: $subscribeInput) {\n __typename\n ... on SubscribeFeedSuccess {\n feed {\n ...Feed\n }\n status {\n code\n }\n }\n ... on SubscribeFeedError {\n status {\n code\n }\n message\n }\n }\n}\n\nmutation UnsubscribeFeed($unsubscribeInput: UnsubscribeFeedInput!) {\n unsubscribeFeed(input: $unsubscribeInput) {\n __typename\n ... on UnsubscribeFeedSuccess {\n status {\n code\n }\n }\n ... on UnsubscribeFeedError {\n status {\n code\n }\n }\n }\n}\n\nfragment Feed on Feed {\n id\n type\n title\n url\n updated\n websiteUrl\n description\n generator\n requirement\n category\n entries(first: 20) {\n nodes {\n ...EntryMeta\n }\n }\n links {\n nodes {\n ...Link\n }\n }\n authors {\n nodes\n }\n}\n\nfragment EntryMeta on Entry {\n title\n published\n updated\n summary\n}\n\nfragment Link on Link {\n href\n rel\n mediaType\n title \n}\n";
use super::*;
use serde::{Deserialize, Serialize};
#[allow(dead_code)]
type Boolean = bool;
#[allow(dead_code)]
type Float = f64;
#[allow(dead_code)]
type Int = i64;
#[allow(dead_code)]
type ID = String;
type FeedUrl = crate::client::synd_api::scalar::FeedUrl;
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum ResponseCode {
OK,
UNAUTHORIZED,
INVALID_FEED_URL,
FEED_UNAVAILABLE,
INTERNAL_ERROR,
Other(String),
}
impl ::serde::Serialize for ResponseCode {
fn serialize<S: serde::Serializer>(&self, ser: S) -> Result<S::Ok, S::Error> {
ser.serialize_str(match *self {
ResponseCode::OK => "OK",
ResponseCode::UNAUTHORIZED => "UNAUTHORIZED",
ResponseCode::INVALID_FEED_URL => "INVALID_FEED_URL",
ResponseCode::FEED_UNAVAILABLE => "FEED_UNAVAILABLE",
ResponseCode::INTERNAL_ERROR => "INTERNAL_ERROR",
ResponseCode::Other(ref s) => &s,
})
}
}
impl<'de> ::serde::Deserialize<'de> for ResponseCode {
fn deserialize<D: ::serde::Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
let s: String = ::serde::Deserialize::deserialize(deserializer)?;
match s.as_str() {
"OK" => Ok(ResponseCode::OK),
"UNAUTHORIZED" => Ok(ResponseCode::UNAUTHORIZED),
"INVALID_FEED_URL" => Ok(ResponseCode::INVALID_FEED_URL),
"FEED_UNAVAILABLE" => Ok(ResponseCode::FEED_UNAVAILABLE),
"INTERNAL_ERROR" => Ok(ResponseCode::INTERNAL_ERROR),
_ => Ok(ResponseCode::Other(s)),
}
}
}
#[derive(Serialize, Debug, Clone, PartialEq, Eq)]
pub struct UnsubscribeFeedInput {
pub url: FeedUrl,
}
#[derive(Serialize, Debug, Clone, PartialEq, Eq)]
pub struct Variables {
#[serde(rename = "unsubscribeInput")]
pub unsubscribe_input: UnsubscribeFeedInput,
}
impl Variables {}
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct ResponseData {
#[serde(rename = "unsubscribeFeed")]
pub unsubscribe_feed: UnsubscribeFeedUnsubscribeFeed,
}
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
#[serde(tag = "__typename")]
pub enum UnsubscribeFeedUnsubscribeFeed {
UnsubscribeFeedSuccess(UnsubscribeFeedUnsubscribeFeedOnUnsubscribeFeedSuccess),
UnsubscribeFeedError(UnsubscribeFeedUnsubscribeFeedOnUnsubscribeFeedError),
}
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct UnsubscribeFeedUnsubscribeFeedOnUnsubscribeFeedSuccess {
pub status: UnsubscribeFeedUnsubscribeFeedOnUnsubscribeFeedSuccessStatus,
}
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct UnsubscribeFeedUnsubscribeFeedOnUnsubscribeFeedSuccessStatus {
pub code: ResponseCode,
}
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct UnsubscribeFeedUnsubscribeFeedOnUnsubscribeFeedError {
pub status: UnsubscribeFeedUnsubscribeFeedOnUnsubscribeFeedErrorStatus,
}
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct UnsubscribeFeedUnsubscribeFeedOnUnsubscribeFeedErrorStatus {
pub code: ResponseCode,
}
}
impl graphql_client::GraphQLQuery for UnsubscribeFeed {
type Variables = unsubscribe_feed::Variables;
type ResponseData = unsubscribe_feed::ResponseData;
fn build_query(variables: Self::Variables) -> ::graphql_client::QueryBody<Self::Variables> {
graphql_client::QueryBody {
variables,
query: unsubscribe_feed::QUERY,
operation_name: unsubscribe_feed::OPERATION_NAME,
}
}
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_term/src/client/synd_api/generated/query.rs | crates/synd_term/src/client/synd_api/generated/query.rs | #![allow(clippy::all, warnings)]
pub struct Subscription;
pub mod subscription {
#![allow(dead_code)]
use std::result::Result;
pub const OPERATION_NAME: &str = "Subscription";
pub const QUERY: &str = "query Subscription($after: String, $first: Int) {\n output: subscription {\n feeds(after: $after, first: $first) {\n nodes {\n ...Feed\n }\n pageInfo {\n ...PageInfo\n }\n errors {\n url\n errorMessage\n }\n }\n }\n}\n\nfragment Feed on Feed {\n id\n type\n title\n url\n updated\n websiteUrl\n description\n generator\n requirement\n category\n entries(first: 10) {\n nodes {\n ...EntryMeta\n }\n }\n links {\n nodes {\n ...Link\n }\n }\n authors {\n nodes\n }\n}\n\nfragment EntryMeta on Entry {\n title\n published\n updated\n summary\n}\n\nfragment Link on Link {\n href\n rel\n mediaType\n title \n}\n\nquery Entries($after: String, $first: Int!) {\n output: subscription {\n entries(after: $after, first: $first) {\n nodes {\n ...Entry\n }\n pageInfo {\n ...PageInfo\n }\n }\n }\n}\n\nfragment Entry on Entry {\n title\n published\n updated\n summary\n websiteUrl\n feed {\n ...FeedMeta\n }\n}\n\nfragment FeedMeta on FeedMeta {\n title\n url\n requirement\n category\n}\n\nfragment PageInfo on PageInfo {\n hasNextPage\n endCursor\n}\n\nquery ExportSubscription($after: String, $first: Int!) {\n output: subscription {\n feeds(after: $after, first: $first) {\n pageInfo {\n hasNextPage\n endCursor\n }\n nodes {\n title\n url\n description\n requirement\n category\n }\n }\n }\n}\n";
use super::*;
use serde::{Deserialize, Serialize};
#[allow(dead_code)]
type Boolean = bool;
#[allow(dead_code)]
type Float = f64;
#[allow(dead_code)]
type Int = i64;
#[allow(dead_code)]
type ID = String;
type Category = crate::client::synd_api::scalar::Category;
type FeedUrl = crate::client::synd_api::scalar::FeedUrl;
type Rfc3339Time = crate::client::synd_api::scalar::Rfc3339Time;
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum FeedType {
ATOM,
JSON,
RSS0,
RSS1,
RSS2,
Other(String),
}
impl ::serde::Serialize for FeedType {
fn serialize<S: serde::Serializer>(&self, ser: S) -> Result<S::Ok, S::Error> {
ser.serialize_str(match *self {
FeedType::ATOM => "ATOM",
FeedType::JSON => "JSON",
FeedType::RSS0 => "RSS0",
FeedType::RSS1 => "RSS1",
FeedType::RSS2 => "RSS2",
FeedType::Other(ref s) => &s,
})
}
}
impl<'de> ::serde::Deserialize<'de> for FeedType {
fn deserialize<D: ::serde::Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
let s: String = ::serde::Deserialize::deserialize(deserializer)?;
match s.as_str() {
"ATOM" => Ok(FeedType::ATOM),
"JSON" => Ok(FeedType::JSON),
"RSS0" => Ok(FeedType::RSS0),
"RSS1" => Ok(FeedType::RSS1),
"RSS2" => Ok(FeedType::RSS2),
_ => Ok(FeedType::Other(s)),
}
}
}
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum Requirement {
MUST,
SHOULD,
MAY,
Other(String),
}
impl ::serde::Serialize for Requirement {
fn serialize<S: serde::Serializer>(&self, ser: S) -> Result<S::Ok, S::Error> {
ser.serialize_str(match *self {
Requirement::MUST => "MUST",
Requirement::SHOULD => "SHOULD",
Requirement::MAY => "MAY",
Requirement::Other(ref s) => &s,
})
}
}
impl<'de> ::serde::Deserialize<'de> for Requirement {
fn deserialize<D: ::serde::Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
let s: String = ::serde::Deserialize::deserialize(deserializer)?;
match s.as_str() {
"MUST" => Ok(Requirement::MUST),
"SHOULD" => Ok(Requirement::SHOULD),
"MAY" => Ok(Requirement::MAY),
_ => Ok(Requirement::Other(s)),
}
}
}
#[derive(Serialize, Debug, Clone, PartialEq, Eq)]
pub struct Variables {
pub after: Option<String>,
pub first: Option<Int>,
}
impl Variables {}
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct Feed {
pub id: ID,
#[serde(rename = "type")]
pub type_: FeedType,
pub title: Option<String>,
pub url: FeedUrl,
pub updated: Option<Rfc3339Time>,
#[serde(rename = "websiteUrl")]
pub website_url: Option<String>,
pub description: Option<String>,
pub generator: Option<String>,
pub requirement: Option<Requirement>,
pub category: Option<Category>,
pub entries: FeedEntries,
pub links: FeedLinks,
pub authors: FeedAuthors,
}
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct FeedEntries {
pub nodes: Vec<FeedEntriesNodes>,
}
pub type FeedEntriesNodes = EntryMeta;
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct FeedLinks {
pub nodes: Vec<FeedLinksNodes>,
}
pub type FeedLinksNodes = Link;
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct FeedAuthors {
pub nodes: Vec<String>,
}
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct EntryMeta {
pub title: Option<String>,
pub published: Option<Rfc3339Time>,
pub updated: Option<Rfc3339Time>,
pub summary: Option<String>,
}
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct Link {
pub href: String,
pub rel: Option<String>,
#[serde(rename = "mediaType")]
pub media_type: Option<String>,
pub title: Option<String>,
}
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct PageInfo {
#[serde(rename = "hasNextPage")]
pub has_next_page: Boolean,
#[serde(rename = "endCursor")]
pub end_cursor: Option<String>,
}
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct ResponseData {
pub output: SubscriptionOutput,
}
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct SubscriptionOutput {
pub feeds: SubscriptionOutputFeeds,
}
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct SubscriptionOutputFeeds {
pub nodes: Vec<SubscriptionOutputFeedsNodes>,
#[serde(rename = "pageInfo")]
pub page_info: SubscriptionOutputFeedsPageInfo,
pub errors: Vec<SubscriptionOutputFeedsErrors>,
}
pub type SubscriptionOutputFeedsNodes = Feed;
pub type SubscriptionOutputFeedsPageInfo = PageInfo;
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct SubscriptionOutputFeedsErrors {
pub url: FeedUrl,
#[serde(rename = "errorMessage")]
pub error_message: String,
}
}
impl graphql_client::GraphQLQuery for Subscription {
type Variables = subscription::Variables;
type ResponseData = subscription::ResponseData;
fn build_query(variables: Self::Variables) -> ::graphql_client::QueryBody<Self::Variables> {
graphql_client::QueryBody {
variables,
query: subscription::QUERY,
operation_name: subscription::OPERATION_NAME,
}
}
}
pub struct Entries;
pub mod entries {
#![allow(dead_code)]
use std::result::Result;
pub const OPERATION_NAME: &str = "Entries";
pub const QUERY: &str = "query Subscription($after: String, $first: Int) {\n output: subscription {\n feeds(after: $after, first: $first) {\n nodes {\n ...Feed\n }\n pageInfo {\n ...PageInfo\n }\n errors {\n url\n errorMessage\n }\n }\n }\n}\n\nfragment Feed on Feed {\n id\n type\n title\n url\n updated\n websiteUrl\n description\n generator\n requirement\n category\n entries(first: 10) {\n nodes {\n ...EntryMeta\n }\n }\n links {\n nodes {\n ...Link\n }\n }\n authors {\n nodes\n }\n}\n\nfragment EntryMeta on Entry {\n title\n published\n updated\n summary\n}\n\nfragment Link on Link {\n href\n rel\n mediaType\n title \n}\n\nquery Entries($after: String, $first: Int!) {\n output: subscription {\n entries(after: $after, first: $first) {\n nodes {\n ...Entry\n }\n pageInfo {\n ...PageInfo\n }\n }\n }\n}\n\nfragment Entry on Entry {\n title\n published\n updated\n summary\n websiteUrl\n feed {\n ...FeedMeta\n }\n}\n\nfragment FeedMeta on FeedMeta {\n title\n url\n requirement\n category\n}\n\nfragment PageInfo on PageInfo {\n hasNextPage\n endCursor\n}\n\nquery ExportSubscription($after: String, $first: Int!) {\n output: subscription {\n feeds(after: $after, first: $first) {\n pageInfo {\n hasNextPage\n endCursor\n }\n nodes {\n title\n url\n description\n requirement\n category\n }\n }\n }\n}\n";
use super::*;
use serde::{Deserialize, Serialize};
#[allow(dead_code)]
type Boolean = bool;
#[allow(dead_code)]
type Float = f64;
#[allow(dead_code)]
type Int = i64;
#[allow(dead_code)]
type ID = String;
type Category = crate::client::synd_api::scalar::Category;
type FeedUrl = crate::client::synd_api::scalar::FeedUrl;
type Rfc3339Time = crate::client::synd_api::scalar::Rfc3339Time;
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum Requirement {
MUST,
SHOULD,
MAY,
Other(String),
}
impl ::serde::Serialize for Requirement {
fn serialize<S: serde::Serializer>(&self, ser: S) -> Result<S::Ok, S::Error> {
ser.serialize_str(match *self {
Requirement::MUST => "MUST",
Requirement::SHOULD => "SHOULD",
Requirement::MAY => "MAY",
Requirement::Other(ref s) => &s,
})
}
}
impl<'de> ::serde::Deserialize<'de> for Requirement {
fn deserialize<D: ::serde::Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
let s: String = ::serde::Deserialize::deserialize(deserializer)?;
match s.as_str() {
"MUST" => Ok(Requirement::MUST),
"SHOULD" => Ok(Requirement::SHOULD),
"MAY" => Ok(Requirement::MAY),
_ => Ok(Requirement::Other(s)),
}
}
}
#[derive(Serialize, Debug, Clone, PartialEq, Eq)]
pub struct Variables {
pub after: Option<String>,
pub first: Int,
}
impl Variables {}
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct Entry {
pub title: Option<String>,
pub published: Option<Rfc3339Time>,
pub updated: Option<Rfc3339Time>,
pub summary: Option<String>,
#[serde(rename = "websiteUrl")]
pub website_url: Option<String>,
pub feed: EntryFeed,
}
pub type EntryFeed = FeedMeta;
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct FeedMeta {
pub title: Option<String>,
pub url: FeedUrl,
pub requirement: Option<Requirement>,
pub category: Option<Category>,
}
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct PageInfo {
#[serde(rename = "hasNextPage")]
pub has_next_page: Boolean,
#[serde(rename = "endCursor")]
pub end_cursor: Option<String>,
}
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct ResponseData {
pub output: EntriesOutput,
}
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct EntriesOutput {
pub entries: EntriesOutputEntries,
}
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct EntriesOutputEntries {
pub nodes: Vec<EntriesOutputEntriesNodes>,
#[serde(rename = "pageInfo")]
pub page_info: EntriesOutputEntriesPageInfo,
}
pub type EntriesOutputEntriesNodes = Entry;
pub type EntriesOutputEntriesPageInfo = PageInfo;
}
impl graphql_client::GraphQLQuery for Entries {
type Variables = entries::Variables;
type ResponseData = entries::ResponseData;
fn build_query(variables: Self::Variables) -> ::graphql_client::QueryBody<Self::Variables> {
graphql_client::QueryBody {
variables,
query: entries::QUERY,
operation_name: entries::OPERATION_NAME,
}
}
}
pub struct ExportSubscription;
pub mod export_subscription {
#![allow(dead_code)]
use std::result::Result;
pub const OPERATION_NAME: &str = "ExportSubscription";
pub const QUERY: &str = "query Subscription($after: String, $first: Int) {\n output: subscription {\n feeds(after: $after, first: $first) {\n nodes {\n ...Feed\n }\n pageInfo {\n ...PageInfo\n }\n errors {\n url\n errorMessage\n }\n }\n }\n}\n\nfragment Feed on Feed {\n id\n type\n title\n url\n updated\n websiteUrl\n description\n generator\n requirement\n category\n entries(first: 10) {\n nodes {\n ...EntryMeta\n }\n }\n links {\n nodes {\n ...Link\n }\n }\n authors {\n nodes\n }\n}\n\nfragment EntryMeta on Entry {\n title\n published\n updated\n summary\n}\n\nfragment Link on Link {\n href\n rel\n mediaType\n title \n}\n\nquery Entries($after: String, $first: Int!) {\n output: subscription {\n entries(after: $after, first: $first) {\n nodes {\n ...Entry\n }\n pageInfo {\n ...PageInfo\n }\n }\n }\n}\n\nfragment Entry on Entry {\n title\n published\n updated\n summary\n websiteUrl\n feed {\n ...FeedMeta\n }\n}\n\nfragment FeedMeta on FeedMeta {\n title\n url\n requirement\n category\n}\n\nfragment PageInfo on PageInfo {\n hasNextPage\n endCursor\n}\n\nquery ExportSubscription($after: String, $first: Int!) {\n output: subscription {\n feeds(after: $after, first: $first) {\n pageInfo {\n hasNextPage\n endCursor\n }\n nodes {\n title\n url\n description\n requirement\n category\n }\n }\n }\n}\n";
use super::*;
use serde::{Deserialize, Serialize};
#[allow(dead_code)]
type Boolean = bool;
#[allow(dead_code)]
type Float = f64;
#[allow(dead_code)]
type Int = i64;
#[allow(dead_code)]
type ID = String;
type Category = crate::client::synd_api::scalar::Category;
type FeedUrl = crate::client::synd_api::scalar::FeedUrl;
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum Requirement {
MUST,
SHOULD,
MAY,
Other(String),
}
impl ::serde::Serialize for Requirement {
fn serialize<S: serde::Serializer>(&self, ser: S) -> Result<S::Ok, S::Error> {
ser.serialize_str(match *self {
Requirement::MUST => "MUST",
Requirement::SHOULD => "SHOULD",
Requirement::MAY => "MAY",
Requirement::Other(ref s) => &s,
})
}
}
impl<'de> ::serde::Deserialize<'de> for Requirement {
fn deserialize<D: ::serde::Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
let s: String = ::serde::Deserialize::deserialize(deserializer)?;
match s.as_str() {
"MUST" => Ok(Requirement::MUST),
"SHOULD" => Ok(Requirement::SHOULD),
"MAY" => Ok(Requirement::MAY),
_ => Ok(Requirement::Other(s)),
}
}
}
#[derive(Serialize, Debug, Clone, PartialEq, Eq)]
pub struct Variables {
pub after: Option<String>,
pub first: Int,
}
impl Variables {}
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct ResponseData {
pub output: ExportSubscriptionOutput,
}
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct ExportSubscriptionOutput {
pub feeds: ExportSubscriptionOutputFeeds,
}
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct ExportSubscriptionOutputFeeds {
#[serde(rename = "pageInfo")]
pub page_info: ExportSubscriptionOutputFeedsPageInfo,
pub nodes: Vec<ExportSubscriptionOutputFeedsNodes>,
}
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct ExportSubscriptionOutputFeedsPageInfo {
#[serde(rename = "hasNextPage")]
pub has_next_page: Boolean,
#[serde(rename = "endCursor")]
pub end_cursor: Option<String>,
}
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct ExportSubscriptionOutputFeedsNodes {
pub title: Option<String>,
pub url: FeedUrl,
pub description: Option<String>,
pub requirement: Option<Requirement>,
pub category: Option<Category>,
}
}
impl graphql_client::GraphQLQuery for ExportSubscription {
type Variables = export_subscription::Variables;
type ResponseData = export_subscription::ResponseData;
fn build_query(variables: Self::Variables) -> ::graphql_client::QueryBody<Self::Variables> {
graphql_client::QueryBody {
variables,
query: export_subscription::QUERY,
operation_name: export_subscription::OPERATION_NAME,
}
}
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_term/src/ui/icon.rs | crates/synd_term/src/ui/icon.rs | #[rustfmt::skip]
macro_rules! icon {
(browse) => { "" };
(feeds) => { "" };
(feedsoff) => { "" };
(entries) => { "" };
(category) => { "" };
(calendar) => { "" };
(chat) => { "" };
(check) => { "" };
(comment) => { "" };
(cross) => { "" };
(discussion) => { "" };
(entry) => { "" };
(filter) => { "" };
(github) => { "" };
(google) => { "" };
(issueopen) => { "" };
(issuereopened) => { "" };
(issuenotplanned) => { "" };
(issueclosed) => { "" };
(label) => { "" };
(requirement) => { "" };
(open) => { "" };
(pullrequest) => { "" };
(pullrequestmerged) => { "" };
(pullrequestclosed) => { "" };
(pullrequestdraft) => { "" };
(repository) => { "" };
(search) => { "" };
(summary) => { "" };
(tag) => { "" };
(unread) => { "" };
}
pub(crate) use icon;
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_term/src/ui/theme.rs | crates/synd_term/src/ui/theme.rs | use ratatui::style::{Color, Modifier, Style};
#[derive(Clone)]
pub struct Theme {
pub name: &'static str,
pub base: Style,
pub application_title: Style,
pub login: LoginTheme,
pub tabs: Style,
pub tabs_selected: Style,
pub prompt: PromptTheme,
pub subscription: SubscriptionTheme,
pub entries: EntriesTheme,
pub error: ErrorTheme,
pub default_icon_fg: Color,
pub requirement: RequirementLabelTheme,
pub selection_popup: SelectionPopup,
}
#[derive(Clone)]
pub struct LoginTheme {
pub title: Style,
pub selected_auth_provider_item: Style,
}
#[derive(Clone)]
pub struct ErrorTheme {
pub message: Style,
}
#[derive(Clone)]
pub struct PromptTheme {
pub key: Style,
pub key_desc: Style,
pub background: Style,
}
#[derive(Clone)]
pub struct SubscriptionTheme {
pub background: Style,
pub header: Style,
pub selected_feed: Style,
}
#[derive(Clone)]
pub struct EntriesTheme {
pub header: Style,
pub selected_entry: Style,
pub summary: Style,
}
#[derive(Clone)]
pub struct RequirementLabelTheme {
pub must: Color,
pub should: Color,
pub may: Color,
pub fg: Color,
}
#[derive(Clone)]
pub struct SelectionPopup {
pub highlight: Style,
}
#[derive(Clone, Debug)]
pub struct Palette {
name: &'static str,
bg: Color,
fg: Color,
fg_inactive: Color,
fg_focus: Color,
error: Color,
}
impl Palette {
pub fn dracula() -> Self {
Self {
name: "dracula",
bg: Color::Rgb(0x28, 0x2a, 0x36),
fg: Color::Rgb(0xf8, 0xf8, 0xf2),
fg_inactive: Color::Rgb(0x62, 0x72, 0xa4),
fg_focus: Color::Rgb(0xff, 0x79, 0xc6),
error: Color::Rgb(0xff, 0x55, 0x55),
}
}
pub fn eldritch() -> Self {
Self {
name: "eldritch",
bg: Color::Rgb(0x21, 0x23, 0x37),
fg: Color::Rgb(0xeb, 0xfa, 0xfa),
fg_inactive: Color::Rgb(0x70, 0x81, 0xd0),
fg_focus: Color::Rgb(0x37, 0xf4, 0x99),
error: Color::Rgb(0xf1, 0x6c, 0x75),
}
}
pub fn helix() -> Self {
Self {
name: "helix",
bg: Color::Rgb(0x3b, 0x22, 0x4c),
fg: Color::Rgb(0xa4, 0xa0, 0xe8),
fg_inactive: Color::Rgb(0x69, 0x7c, 0x81),
fg_focus: Color::Rgb(0xff, 0xff, 0xff),
error: Color::Rgb(0xf4, 0x78, 0x68),
}
}
pub fn ferra() -> Self {
Self {
name: "ferra",
bg: Color::Rgb(0x2b, 0x29, 0x2d),
fg: Color::Rgb(0xfe, 0xcd, 0xb2),
fg_inactive: Color::Rgb(0x6F, 0x5D, 0x63),
fg_focus: Color::Rgb(0xff, 0xa0, 0x7a),
error: Color::Rgb(0xe0, 0x6b, 0x75),
}
}
pub fn solarized_dark() -> Self {
Self {
name: "solarized_dark",
bg: Color::Rgb(0x00, 0x2b, 0x36),
fg: Color::Rgb(0x93, 0xa1, 0xa1),
fg_inactive: Color::Rgb(0x58, 0x6e, 0x75),
fg_focus: Color::Rgb(0x26, 0x8b, 0xd2),
error: Color::Rgb(0xdc, 0x32, 0x2f),
}
}
}
impl Theme {
#[allow(clippy::needless_pass_by_value)]
pub fn with_palette(p: Palette) -> Self {
let Palette {
name,
bg,
fg,
fg_inactive,
fg_focus,
error,
} = p;
Self {
name,
base: Style::new().bg(bg).fg(fg),
application_title: Style::new().fg(fg).bg(bg).add_modifier(Modifier::BOLD),
login: LoginTheme {
title: Style::new().add_modifier(Modifier::BOLD),
selected_auth_provider_item: Style::new().add_modifier(Modifier::BOLD),
},
tabs: Style::new().fg(fg),
tabs_selected: Style::new().fg(fg_focus).bold(),
prompt: PromptTheme {
key: Style::new().fg(fg_inactive).bg(bg),
key_desc: Style::new().fg(fg_inactive).bg(bg),
background: Style::new().bg(bg),
},
subscription: SubscriptionTheme {
background: Style::new().bg(bg),
header: Style::new().add_modifier(Modifier::BOLD | Modifier::UNDERLINED),
selected_feed: Style::new().fg(fg_focus).add_modifier(Modifier::BOLD),
},
entries: EntriesTheme {
header: Style::new().add_modifier(Modifier::BOLD | Modifier::UNDERLINED),
selected_entry: Style::new().fg(fg_focus).add_modifier(Modifier::BOLD),
summary: Style::new().fg(fg),
},
error: ErrorTheme {
message: Style::new().fg(error).bg(bg),
},
default_icon_fg: fg,
requirement: RequirementLabelTheme {
must: bg,
should: bg,
may: bg,
fg,
},
selection_popup: SelectionPopup {
highlight: Style::new().bg(Color::Yellow).fg(bg),
},
}
}
}
impl Default for Theme {
fn default() -> Self {
Theme::with_palette(Palette::ferra())
}
}
impl Theme {
pub(crate) fn contrast_fg_from_luminance(&self, luminance: f64) -> Color {
if luminance > 0.5 {
self.base.bg.unwrap_or_default()
} else {
self.base.fg.unwrap_or_default()
}
}
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_term/src/ui/extension.rs | crates/synd_term/src/ui/extension.rs | use ratatui::{
buffer::Buffer,
prelude::{Constraint, Direction, Layout, Rect},
};
pub(super) trait RectExt {
/// Create centered Rect
fn centered(self, percent_x: u16, percent_y: u16) -> Rect;
/// Reset this area
fn reset(&self, buf: &mut Buffer);
}
impl RectExt for Rect {
fn centered(self, percent_x: u16, percent_y: u16) -> Rect {
// get vertically centered rect
let layout = Layout::default()
.direction(Direction::Vertical)
.constraints([
Constraint::Percentage((100 - percent_y) / 2),
Constraint::Percentage(percent_y),
Constraint::Percentage((100 - percent_y) / 2),
])
.split(self);
// then centered horizontally
Layout::default()
.direction(Direction::Horizontal)
.constraints([
Constraint::Percentage((100 - percent_x) / 2),
Constraint::Percentage(percent_x),
Constraint::Percentage((100 - percent_x) / 2),
])
.split(layout[1])[1]
}
fn reset(&self, buf: &mut Buffer) {
for x in self.x..(self.x + self.width) {
for y in self.y..(self.y + self.height) {
buf.cell_mut((x, y)).unwrap().reset();
}
}
}
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_term/src/ui/mod.rs | crates/synd_term/src/ui/mod.rs | use std::{str::FromStr, sync::OnceLock};
use ratatui::style::{Color, Modifier};
use synd_feed::types::{Category, Requirement};
use crate::{
application::{InFlight, TerminalFocus},
config::{Categories, Icon, IconColor},
types::Time,
ui::{components::tabs::Tab, theme::Theme},
};
pub mod components;
pub mod extension;
pub mod theme;
pub mod widgets;
mod icon;
pub(crate) use icon::icon;
pub const UNKNOWN_SYMBOL: &str = "-";
pub const TABLE_HIGHLIGHT_SYMBOL: &str = " ";
pub const DEFAULT_REQUIREMNET: Requirement = Requirement::Should;
pub fn default_category() -> &'static Category<'static> {
static DEFAULT_CATEGORY: OnceLock<Category<'static>> = OnceLock::new();
DEFAULT_CATEGORY.get_or_init(|| Category::new("default").unwrap())
}
pub fn default_icon() -> &'static Icon {
static DEFAULT_ICON: OnceLock<Icon> = OnceLock::new();
DEFAULT_ICON.get_or_init(|| {
Icon::new("").with_color(IconColor::new(Color::from_str("dark gray").unwrap()))
})
}
pub struct Context<'a> {
pub theme: &'a Theme,
pub in_flight: &'a InFlight,
pub categories: &'a Categories,
pub(crate) now: Time,
pub(crate) focus: TerminalFocus,
pub(crate) tab: Tab,
}
impl Context<'_> {
fn table_highlight_modifier(&self) -> Modifier {
match self.focus {
TerminalFocus::Gained => Modifier::empty(),
TerminalFocus::Lost => Modifier::DIM,
}
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn default_icon_is_not_empty() {
assert!(!default_icon().symbol().is_empty());
}
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_term/src/ui/components/tabs.rs | crates/synd_term/src/ui/components/tabs.rs | use ratatui::{
prelude::{Buffer, Constraint, Layout, Rect},
text::Span,
widgets::{Paragraph, Tabs as TuiTabs, Widget},
};
use crate::{
application::{Direction, Features, IndexOutOfRange},
ui::{Context, icon},
};
#[derive(PartialEq, Eq, Debug, Clone, Copy)]
pub enum Tab {
Entries,
Feeds,
GitHub,
}
impl Tab {
fn width(self) -> u16 {
match self {
Tab::Entries => 7,
Tab::Feeds => 5,
Tab::GitHub => 6,
}
}
}
pub struct Tabs {
pub selected: usize,
pub tabs: Vec<Tab>,
}
impl Tabs {
const PADDING: &'static str = " ";
pub fn new(features: &'_ Features) -> Self {
let mut tabs = vec![Tab::Entries, Tab::Feeds];
if features.enable_github_notification {
tabs.insert(0, Tab::GitHub);
}
Self { selected: 0, tabs }
}
pub fn current(&self) -> Tab {
self.tabs[self.selected]
}
pub fn move_selection(&mut self, direction: Direction) -> Tab {
self.selected = direction.apply(self.selected, self.tabs.len(), IndexOutOfRange::Wrapping);
self.current()
}
fn width(&self) -> u16 {
#[allow(clippy::cast_possible_truncation)]
self.tabs.iter().fold(0, |width, tab| {
width + tab.width() + (Self::PADDING.len() as u16) + 2
})
}
}
impl Tabs {
pub fn render(&self, area: Rect, buf: &mut Buffer, cx: &Context<'_>) {
let area = Rect {
x: area.x + 2,
width: area.width.saturating_sub(3),
..area
};
let horizontal = Layout::horizontal([Constraint::Min(0), Constraint::Length(self.width())]);
let [title, tabs] = horizontal.areas(area);
Paragraph::new(Span::styled("Syndicationd", cx.theme.application_title)).render(title, buf);
TuiTabs::new(self.tabs.iter().map(|tab| match tab {
Tab::Entries => concat!(icon!(entries), " Entries"),
Tab::Feeds => concat!(icon!(feeds), " Feeds"),
Tab::GitHub => concat!(icon!(github), " GitHub"),
}))
.style(cx.theme.tabs)
.divider("")
.padding(Self::PADDING, "")
.select(self.selected)
.highlight_style(cx.theme.tabs_selected)
.render(tabs, buf);
}
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_term/src/ui/components/status.rs | crates/synd_term/src/ui/components/status.rs | use std::borrow::Cow;
use ratatui::{
prelude::{Alignment, Buffer, Constraint, Layout, Rect},
text::{Line, Span},
widgets::{Paragraph, StatefulWidget, Widget, Wrap},
};
use crate::{
application::RequestId,
ui::{
Context, icon,
widgets::throbber::{
Throbber, ThrobberState,
throbber::{self, WhichUse},
},
},
};
use super::tabs::Tab;
pub struct StatusLine {
error_message: Option<String>,
}
impl StatusLine {
pub fn new() -> Self {
Self {
error_message: None,
}
}
pub fn set_error_message(&mut self, msg: String) {
self.error_message = Some(msg);
}
pub fn clear_error_message(&mut self) {
self.error_message = None;
}
}
impl StatusLine {
pub fn render(&self, area: Rect, buf: &mut Buffer, cx: &Context<'_>, tab: Option<Tab>) {
match self.error_message.as_ref() {
Some(error_message) => Self::render_error(area, buf, cx, error_message),
None => Self::render_prompt(area, buf, cx, tab),
}
}
#[allow(clippy::cast_possible_truncation)]
fn render_prompt(area: Rect, buf: &mut Buffer, cx: &Context<'_>, tab: Option<Tab>) {
let pre_keys = &[
("Tab", ""),
("j/k", ""),
("gg", ""),
("ge", ""),
("c", icon!(category)),
("/", icon!(search)),
][..];
let suf_keys = &[("r", ""), ("q", "")][..];
let per_tab_keys = match tab {
Some(Tab::Feeds) => pre_keys
.iter()
.chain(&[
("h/l", icon!(requirement)),
("Ent", icon!(open)),
("a", ""),
("e", ""),
("d", ""),
])
.chain(suf_keys),
Some(Tab::Entries) => pre_keys
.iter()
.chain(&[
("h/l", icon!(requirement)),
("Ent", icon!(open)),
("Sp", icon!(browse)),
])
.chain(suf_keys),
Some(Tab::GitHub) => pre_keys
.iter()
.chain(&[
("f", icon!(filter)),
("Ent", icon!(open)),
("d", icon!(check)),
("u", ""),
])
.chain(suf_keys),
// Imply login
None => [("j/k", "")][..]
.iter()
.chain(&[("Ent", "")])
.chain(&[("q", "")][..]),
};
let spans = per_tab_keys
.flat_map(|(key, desc)| {
let desc = Span::styled(format!("{key}:{desc} "), cx.theme.prompt.key_desc);
[desc]
})
.collect::<Vec<_>>();
let area = {
if let Some(in_flight) = cx.in_flight.recent_in_flight() {
let label = match in_flight {
RequestId::DeviceFlowDeviceAuthorize => {
Cow::Borrowed("Request device authorization")
}
RequestId::DeviceFlowPollAccessToken => Cow::Borrowed("Polling..."),
RequestId::FetchEntries => Cow::Borrowed("Fetch entries..."),
RequestId::FetchSubscription => Cow::Borrowed("Fetch subscription..."),
RequestId::FetchGithubNotifications { page } => {
Cow::Owned(format!("Fetch github notifications(page: {page})..."))
}
RequestId::FetchGithubIssue { id } => {
Cow::Owned(format!("Fetch github issue(#{id})..."))
}
RequestId::FetchGithubPullRequest { id } => {
Cow::Owned(format!("Fetch github pull request(#{id})..."))
}
RequestId::SubscribeFeed => Cow::Borrowed("Subscribe feed..."),
RequestId::UnsubscribeFeed => Cow::Borrowed("Unsubscribe feed..."),
RequestId::MarkGithubNotificationAsDone { id } => {
Cow::Owned(format!("Mark notification({id}) as done..."))
}
RequestId::UnsubscribeGithubThread => Cow::Borrowed("Unsubscribe thread..."),
};
let horizontal = Layout::horizontal([
Constraint::Length(label.len() as u16 + 1),
Constraint::Fill(1),
]);
let [in_flight_area, area] = horizontal.areas(area);
let mut throbber_state = ThrobberState::default();
throbber_state.calc_step(cx.in_flight.throbber_step());
let throbber = Throbber::default()
.label(label)
.throbber_set(throbber::BRAILLE_EIGHT_DOUBLE)
.use_type(WhichUse::Spin);
throbber.render(in_flight_area, buf, &mut throbber_state);
area
} else {
area
}
};
Paragraph::new(Line::from(spans))
.alignment(Alignment::Center)
.style(cx.theme.prompt.background)
.render(area, buf);
}
fn render_error(area: Rect, buf: &mut Buffer, cx: &Context<'_>, error_message: &str) {
let line = Line::from(error_message);
Paragraph::new(line)
.alignment(Alignment::Left)
.wrap(Wrap { trim: true })
.style(cx.theme.error.message)
.render(area, buf);
}
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_term/src/ui/components/root.rs | crates/synd_term/src/ui/components/root.rs | use ratatui::{
prelude::{Buffer, Constraint, Layout, Rect},
widgets::{Block, Widget},
};
use crate::ui::{
Context,
components::{Components, filter::FilterContext, tabs::Tab},
};
pub struct Root<'a> {
components: &'a Components,
cx: Context<'a>,
}
impl<'a> Root<'a> {
pub fn new(components: &'a Components, cx: Context<'a>) -> Self {
Self { components, cx }
}
fn render_browse(&self, area: Rect, buf: &mut Buffer) {
let cx = &self.cx;
let layout = Layout::vertical([
Constraint::Length(1),
Constraint::Length(2),
Constraint::Min(0),
Constraint::Length(1),
]);
let [tabs_area, filter_area, content_area, prompt_area] = layout.areas(area);
self.components.tabs.render(tabs_area, buf, cx);
self.components.filter.render(
filter_area,
buf,
&FilterContext {
ui: cx,
gh_options: self.components.gh_notifications.filter_options(),
},
);
match cx.tab {
Tab::Feeds => self.components.subscription.render(content_area, buf, cx),
Tab::Entries => self.components.entries.render(content_area, buf, cx),
Tab::GitHub => self
.components
.gh_notifications
.render(content_area, buf, cx),
}
self.components
.prompt
.render(prompt_area, buf, cx, Some(self.components.tabs.current()));
}
}
impl Widget for Root<'_> {
fn render(self, area: Rect, buf: &mut Buffer) {
// Background
Block::new().style(self.cx.theme.base).render(area, buf);
if self.components.auth.should_render() {
let [auth_area, prompt_area] =
Layout::vertical([Constraint::Min(0), Constraint::Length(1)]).areas(area);
self.components.auth.render(auth_area, buf, &self.cx);
self.components
.prompt
.render(prompt_area, buf, &self.cx, None);
} else {
self.render_browse(area, buf);
}
}
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
ymgyt/syndicationd | https://github.com/ymgyt/syndicationd/blob/956bec58fcb6dbd9e8740d925649772b1670ead9/crates/synd_term/src/ui/components/entries.rs | crates/synd_term/src/ui/components/entries.rs | use std::borrow::Cow;
use crate::{
application::{Direction, Populate},
client::synd_api::payload,
types::{self, RequirementExt, TimeExt},
ui::{
self, Context,
components::{collections::FilterableVec, filter::FeedFilterer},
icon,
widgets::{scrollbar::Scrollbar, table::Table},
},
};
use ratatui::{
prelude::{Alignment, Buffer, Constraint, Layout, Rect},
style::Stylize,
text::{Line, Span, Text},
widgets::{Block, BorderType, Borders, Cell, Padding, Paragraph, Row, Widget, Wrap},
};
use synd_feed::types::FeedUrl;
#[allow(clippy::struct_field_names)]
pub(crate) struct Entries {
entries: FilterableVec<types::Entry, FeedFilterer>,
}
impl Entries {
pub(crate) fn new() -> Self {
Self {
entries: FilterableVec::new(),
}
}
/// Return entries count
pub(crate) fn count(&self) -> usize {
self.entries.len()
}
pub(crate) fn update_entries(
&mut self,
populate: Populate,
payload: payload::FetchEntriesPayload,
) {
self.entries.update(populate, payload.entries);
}
pub(crate) fn update_filterer(&mut self, filterer: FeedFilterer) {
self.entries.update_filter(filterer);
}
pub(crate) fn remove_unsubscribed_entries(&mut self, url: &FeedUrl) {
self.entries.retain(|entry| &entry.feed_url != url);
}
pub(crate) fn move_selection(&mut self, direction: Direction) {
self.entries.move_selection(direction);
}
pub(crate) fn move_first(&mut self) {
self.entries.move_first();
}
pub(crate) fn move_last(&mut self) {
self.entries.move_last();
}
pub(crate) fn entries(&self) -> &[types::Entry] {
self.entries.as_unfiltered_slice()
}
pub(crate) fn selected_entry_website_url(&self) -> Option<&str> {
self.entries
.selected()
.and_then(|entry| entry.website_url.as_deref())
}
fn selected_entry(&self) -> Option<&types::Entry> {
self.entries.selected()
}
}
impl Entries {
pub fn render(&self, area: Rect, buf: &mut Buffer, cx: &Context<'_>) {
let vertical = Layout::vertical([Constraint::Fill(2), Constraint::Fill(1)]);
let [entries_area, detail_area] = vertical.areas(area);
self.render_entries(entries_area, buf, cx);
self.render_detail(detail_area, buf, cx);
}
fn render_entries(&self, area: Rect, buf: &mut Buffer, cx: &Context<'_>) {
let entries_area = Block::new().padding(Padding::top(1)).inner(area);
let (header, widths, rows) = self.entry_rows(cx);
Table::builder()
.header(header)
.widths(widths)
.rows(rows)
.theme(&cx.theme.entries)
.selected_idx(self.entries.selected_index())
.highlight_modifier(cx.table_highlight_modifier())
.build()
.render(entries_area, buf);
let header_rows = 2;
#[allow(clippy::cast_possible_truncation)]
let scrollbar_area = Rect {
y: area.y + header_rows, // table header
height: area
.height
.saturating_sub(header_rows)
.min(self.entries.len() as u16),
..area
};
Scrollbar {
content_length: self.entries.len(),
position: self.entries.selected_index(),
}
.render(scrollbar_area, buf, cx);
}
fn entry_rows<'a>(
&'a self,
cx: &'a Context<'_>,
) -> (
Row<'a>,
impl IntoIterator<Item = Constraint>,
impl IntoIterator<Item = Row<'a>>,
) {
let (n, m) = {
if self.entries.is_empty() {
(Cow::Borrowed("-"), Cow::Borrowed("-"))
} else {
(
Cow::Owned((self.entries.selected_index() + 1).to_string()),
Cow::Owned(self.entries.len().to_string()),
)
}
};
let header = Row::new([
Cell::from("Published"),
Cell::from(format!("Entry {n}/{m}")),
Cell::from("Feed"),
Cell::from("Req"),
]);
let constraints = [
Constraint::Length(10),
Constraint::Fill(2),
Constraint::Fill(1),
Constraint::Length(4),
];
let row = |entry: &'a types::Entry| {
let title = entry.title.as_deref().unwrap_or(ui::UNKNOWN_SYMBOL);
let published = entry
.published
.as_ref()
.or(entry.updated.as_ref())
.map_or_else(|| ui::UNKNOWN_SYMBOL.to_string(), TimeExt::local_ymd);
let category = entry.category();
let icon = cx
.categories
.icon(category)
.unwrap_or_else(|| ui::default_icon());
let feed_title = entry.feed_title.as_deref().unwrap_or(ui::UNKNOWN_SYMBOL);
let requirement = entry.requirement().label(&cx.theme.requirement);
Row::new([
Cell::from(Span::from(published)),
Cell::from(Line::from(vec![
Span::from(icon.symbol()).fg(icon.color().unwrap_or(cx.theme.default_icon_fg)),
Span::from(" "),
Span::from(title),
])),
Cell::from(Span::from(feed_title)),
Cell::from(Line::from(vec![requirement, Span::from(" ")])),
])
};
(header, constraints, self.entries.iter().map(row))
}
fn render_detail(&self, area: Rect, buf: &mut Buffer, cx: &Context<'_>) {
let block = Block::new()
.padding(Padding::horizontal(2))
.borders(Borders::TOP)
.border_type(BorderType::Plain);
let inner = block.inner(area);
Widget::render(block, area, buf);
let Some(entry) = self.selected_entry() else {
return;
};
let vertical = Layout::vertical([
Constraint::Length(1),
Constraint::Length(1),
Constraint::Length(1),
Constraint::Length(1),
Constraint::Length(1),
Constraint::Min(0),
]);
let [
title_area,
url_area,
published_area,
_,
summary_heading_area,
summary_area,
] = vertical.areas(inner);
Line::from(vec![
Span::from(concat!(icon!(entry), " Entry")).bold(),
Span::from(" "),
Span::from(entry.title.as_deref().unwrap_or(ui::UNKNOWN_SYMBOL)),
])
.render(title_area, buf);
Line::from(vec![
Span::from(concat!(icon!(open), " URL")).bold(),
Span::from(" "),
Span::from(entry.website_url.as_deref().unwrap_or_default()),
])
.render(url_area, buf);
Line::from(vec![
Span::from(concat!(icon!(calendar), " Published")).bold(),
Span::from(" "),
Span::from(
entry
.published
.as_ref()
.or(entry.updated.as_ref())
.map_or_else(|| ui::UNKNOWN_SYMBOL.to_string(), TimeExt::local_ymd_hm),
),
])
.render(published_area, buf);
let Some(summary) = entry.summary_text(inner.width.into()) else {
return;
};
Line::from(
Span::from(concat!(icon!(summary), " Summary"))
.bold()
.underlined(),
)
.render(summary_heading_area, buf);
let paragraph = Paragraph::new(Text::from(summary))
.wrap(Wrap { trim: false })
.style(cx.theme.entries.summary)
.alignment(Alignment::Left);
Widget::render(paragraph, summary_area, buf);
}
}
| rust | Apache-2.0 | 956bec58fcb6dbd9e8740d925649772b1670ead9 | 2026-01-04T20:11:26.769763Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.