repo
stringlengths
6
65
file_url
stringlengths
81
311
file_path
stringlengths
6
227
content
stringlengths
0
32.8k
language
stringclasses
1 value
license
stringclasses
7 values
commit_sha
stringlengths
40
40
retrieved_at
stringdate
2026-01-04 15:31:58
2026-01-04 20:25:31
truncated
bool
2 classes
qdrant/qdrant
https://github.com/qdrant/qdrant/blob/f937d0260ffd7705c6f34d9c25da1e27ebf5ddfd/lib/edge/python/src/types/filter/nested.rs
lib/edge/python/src/types/filter/nested.rs
use bytemuck::TransparentWrapper; use derive_more::Into; use pyo3::prelude::*; use segment::json_path::JsonPath; use segment::types::{Filter, Nested, NestedCondition}; use crate::repr::*; use crate::types::*; #[pyclass(name = "NestedCondition")] #[derive(Clone, Debug, Into, TransparentWrapper)] #[repr(transparent)] pub struct PyNestedCondition(pub NestedCondition); #[pyclass_repr] #[pymethods] impl PyNestedCondition { #[new] pub fn new(key: PyJsonPath, filter: PyFilter) -> Self { Self(NestedCondition { nested: Nested { key: JsonPath::from(key), filter: Filter::from(filter), }, }) } #[getter] pub fn key(&self) -> &PyJsonPath { PyJsonPath::wrap_ref(&self.0.nested.key) } #[getter] pub fn filter(&self) -> &PyFilter { PyFilter::wrap_ref(&self.0.nested.filter) } pub fn __repr__(&self) -> String { self.repr() } } impl PyNestedCondition { fn _getters(self) { // Every field should have a getter method let NestedCondition { nested: Nested { key: _, filter: _ }, } = self.0; } }
rust
Apache-2.0
f937d0260ffd7705c6f34d9c25da1e27ebf5ddfd
2026-01-04T15:34:51.524868Z
false
qdrant/qdrant
https://github.com/qdrant/qdrant/blob/f937d0260ffd7705c6f34d9c25da1e27ebf5ddfd/lib/edge/python/src/types/filter/mod.rs
lib/edge/python/src/types/filter/mod.rs
pub mod condition; pub mod field_condition; pub mod geo; pub mod r#match; pub mod min_should; pub mod nested; pub mod range; pub mod value_count; use bytemuck::{TransparentWrapper, TransparentWrapperAlloc as _}; use derive_more::Into; use pyo3::prelude::*; use segment::types::{Filter, MinShould}; pub use self::condition::*; pub use self::field_condition::*; pub use self::geo::*; pub use self::r#match::*; pub use self::min_should::*; pub use self::nested::*; pub use self::range::*; pub use self::value_count::*; use crate::repr::*; #[pyclass(name = "Filter")] #[derive(Clone, Debug, Into, TransparentWrapper)] #[repr(transparent)] pub struct PyFilter(pub Filter); #[pyclass_repr] #[pymethods] impl PyFilter { #[new] #[pyo3(signature = (must=None, should=None, must_not=None, min_should=None))] pub fn new( must: Option<Vec<PyCondition>>, should: Option<Vec<PyCondition>>, must_not: Option<Vec<PyCondition>>, min_should: Option<PyMinShould>, ) -> Self { Self(Filter { must: must.map(PyCondition::peel_vec), should: should.map(PyCondition::peel_vec), must_not: must_not.map(PyCondition::peel_vec), min_should: min_should.map(MinShould::from), }) } #[getter] pub fn must(&self) -> Option<&[PyCondition]> { self.0 .must .as_ref() .map(|must| PyCondition::wrap_slice(must)) } #[getter] pub fn should(&self) -> Option<&[PyCondition]> { self.0 .should .as_ref() .map(|should| PyCondition::wrap_slice(should)) } #[getter] pub fn must_not(&self) -> Option<&[PyCondition]> { self.0 .must_not .as_ref() .map(|must_not| PyCondition::wrap_slice(must_not)) } #[getter] pub fn min_should(&self) -> Option<PyMinShould> { self.0.min_should.clone().map(PyMinShould) } pub fn __repr__(&self) -> String { self.repr() } } impl PyFilter { fn _getters(self) { // Every field should have a getter method let Filter { must: _, should: _, must_not: _, min_should: _, } = self.0; } } impl<'py> IntoPyObject<'py> for &PyFilter { type Target = PyFilter; type Output = Bound<'py, Self::Target>; type Error = PyErr; fn 
into_pyobject(self, py: Python<'py>) -> PyResult<Self::Output> { IntoPyObject::into_pyobject(self.clone(), py) } }
rust
Apache-2.0
f937d0260ffd7705c6f34d9c25da1e27ebf5ddfd
2026-01-04T15:34:51.524868Z
false
qdrant/qdrant
https://github.com/qdrant/qdrant/blob/f937d0260ffd7705c6f34d9c25da1e27ebf5ddfd/lib/edge/python/src/types/filter/min_should.rs
lib/edge/python/src/types/filter/min_should.rs
use bytemuck::{TransparentWrapper as _, TransparentWrapperAlloc as _}; use derive_more::Into; use pyo3::prelude::*; use segment::types::MinShould; use crate::repr::*; use crate::types::filter::condition::PyCondition; #[pyclass(name = "MinShould")] #[derive(Clone, Debug, Into)] pub struct PyMinShould(pub MinShould); #[pyclass_repr] #[pymethods] impl PyMinShould { #[new] pub fn new(conditions: Vec<PyCondition>, min_count: usize) -> Self { Self(MinShould { conditions: PyCondition::peel_vec(conditions), min_count, }) } #[getter] pub fn conditions(&self) -> &[PyCondition] { PyCondition::wrap_slice(&self.0.conditions) } #[getter] pub fn min_count(&self) -> usize { self.0.min_count } fn __repr__(&self) -> String { self.repr() } } impl PyMinShould { fn _getters(self) { // Every field should have a getter method let MinShould { conditions: _, min_count: _, } = self.0; } }
rust
Apache-2.0
f937d0260ffd7705c6f34d9c25da1e27ebf5ddfd
2026-01-04T15:34:51.524868Z
false
qdrant/qdrant
https://github.com/qdrant/qdrant/blob/f937d0260ffd7705c6f34d9c25da1e27ebf5ddfd/lib/edge/python/src/types/filter/condition.rs
lib/edge/python/src/types/filter/condition.rs
use std::fmt; use bytemuck::TransparentWrapper; use derive_more::Into; use pyo3::IntoPyObjectExt as _; use pyo3::prelude::*; use segment::json_path::JsonPath; use segment::types::*; use segment::utils::maybe_arc::MaybeArc; use crate::repr::*; use crate::types::*; #[derive(Clone, Debug, Into, TransparentWrapper)] #[repr(transparent)] pub struct PyCondition(pub Condition); impl FromPyObject<'_, '_> for PyCondition { type Error = PyErr; fn extract(condition: Borrowed<'_, '_, PyAny>) -> PyResult<Self> { #[derive(FromPyObject)] #[expect(clippy::large_enum_variant)] enum Helper { Field(PyFieldCondition), IsEmpty(PyIsEmptyCondition), IsNull(PyIsNullCondition), HasId(PyHasIdCondition), HasVector(PyHasVectorCondition), Nested(PyNestedCondition), Filter(PyFilter), } let condition = match condition.extract()? { Helper::Field(field) => Condition::Field(field.into()), Helper::IsEmpty(is_empty) => Condition::IsEmpty(is_empty.into()), Helper::IsNull(is_null) => Condition::IsNull(is_null.into()), Helper::HasId(has_id) => Condition::HasId(has_id.into()), Helper::HasVector(has_vector) => Condition::HasVector(has_vector.into()), Helper::Nested(nested) => Condition::Nested(nested.into()), Helper::Filter(filter) => Condition::Filter(filter.into()), }; Ok(Self(condition)) } } impl<'py> IntoPyObject<'py> for PyCondition { type Target = PyAny; type Output = Bound<'py, PyAny>; type Error = PyErr; // Infallible fn into_pyobject(self, py: Python<'py>) -> PyResult<Self::Output> { match self.0 { Condition::Field(field) => PyFieldCondition(field).into_bound_py_any(py), Condition::IsEmpty(is_empty) => PyIsEmptyCondition(is_empty).into_bound_py_any(py), Condition::IsNull(is_null) => PyIsNullCondition(is_null).into_bound_py_any(py), Condition::HasId(has_id) => PyHasIdCondition(has_id).into_bound_py_any(py), Condition::HasVector(has_vector) => { PyHasVectorCondition(has_vector).into_bound_py_any(py) } Condition::Nested(nested) => PyNestedCondition(nested).into_bound_py_any(py), 
Condition::Filter(filter) => PyFilter(filter).into_bound_py_any(py), Condition::CustomIdChecker(_) => { unreachable!("CustomIdChecker condition is not expected in Python bindings") } } } } impl<'py> IntoPyObject<'py> for &PyCondition { type Target = PyAny; type Output = Bound<'py, PyAny>; type Error = PyErr; // Infallible fn into_pyobject(self, py: Python<'py>) -> PyResult<Self::Output> { IntoPyObject::into_pyobject(self.clone(), py) } } impl Repr for PyCondition { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { match &self.0 { Condition::Field(field) => PyFieldCondition::wrap_ref(field).fmt(f), Condition::IsEmpty(is_empty) => PyIsEmptyCondition::wrap_ref(is_empty).fmt(f), Condition::IsNull(is_null) => PyIsNullCondition::wrap_ref(is_null).fmt(f), Condition::HasId(has_id) => PyHasIdCondition::wrap_ref(has_id).fmt(f), Condition::HasVector(has_vector) => PyHasVectorCondition::wrap_ref(has_vector).fmt(f), Condition::Nested(nested) => PyNestedCondition::wrap_ref(nested).fmt(f), Condition::Filter(filter) => PyFilter::wrap_ref(filter).fmt(f), Condition::CustomIdChecker(_) => { unreachable!("CustomIdChecker condition is not expected in Python bindings") } } } } #[pyclass(name = "IsEmptyCondition")] #[derive(Clone, Debug, Into, TransparentWrapper)] #[repr(transparent)] pub struct PyIsEmptyCondition(pub IsEmptyCondition); #[pyclass_repr] #[pymethods] impl PyIsEmptyCondition { #[new] pub fn new(key: PyJsonPath) -> Self { Self(IsEmptyCondition { is_empty: PayloadField { key: JsonPath::from(key), }, }) } #[getter] pub fn key(&self) -> &PyJsonPath { PyJsonPath::wrap_ref(&self.0.is_empty.key) } pub fn __repr__(&self) -> String { self.repr() } } impl PyIsEmptyCondition { fn _getters(self) { // Every field should have a getter method let IsEmptyCondition { is_empty: PayloadField { key: _ }, } = self.0; } } #[pyclass(name = "IsNullCondition")] #[derive(Clone, Debug, Into, TransparentWrapper)] #[repr(transparent)] pub struct PyIsNullCondition(pub IsNullCondition); 
#[pyclass_repr] #[pymethods] impl PyIsNullCondition { #[new] pub fn new(key: PyJsonPath) -> Self { Self(IsNullCondition { is_null: PayloadField { key: JsonPath::from(key), }, }) } #[getter] pub fn key(&self) -> &PyJsonPath { PyJsonPath::wrap_ref(&self.0.is_null.key) } pub fn __repr__(&self) -> String { self.repr() } } impl PyIsNullCondition { fn _getters(self) { // Every field should have a getter method let IsNullCondition { is_null: PayloadField { key: _ }, } = self.0; } } #[pyclass(name = "HasIdCondition")] #[derive(Clone, Debug, Into, TransparentWrapper)] #[repr(transparent)] pub struct PyHasIdCondition(pub HasIdCondition); #[pyclass_repr] #[pymethods] impl PyHasIdCondition { #[new] pub fn new(point_ids: ahash::HashSet<PyPointId>) -> Self { Self(HasIdCondition { has_id: MaybeArc::NoArc(ahash::AHashSet::from(PyPointId::peel_set(point_ids))), }) } #[getter] pub fn point_ids(&self) -> &ahash::HashSet<PyPointId> { PyPointId::wrap_set_ref(&self.0.has_id) } pub fn __repr__(&self) -> String { self.repr() } } impl PyHasIdCondition { fn _getters(self) { // Every field should have a getter method let HasIdCondition { has_id: _point_ids } = self.0; } } #[pyclass(name = "HasVectorCondition")] #[derive(Clone, Debug, Into, TransparentWrapper)] #[repr(transparent)] pub struct PyHasVectorCondition(pub HasVectorCondition); #[pyclass_repr] #[pymethods] impl PyHasVectorCondition { #[new] pub fn new(vector: VectorNameBuf) -> Self { Self(HasVectorCondition { has_vector: vector }) } #[getter] pub fn vector(&self) -> &str { &self.0.has_vector } pub fn __repr__(&self) -> String { self.repr() } } impl PyHasVectorCondition { fn _getters(self) { // Every field should have a getter method let HasVectorCondition { has_vector: _vector, } = self.0; } }
rust
Apache-2.0
f937d0260ffd7705c6f34d9c25da1e27ebf5ddfd
2026-01-04T15:34:51.524868Z
false
qdrant/qdrant
https://github.com/qdrant/qdrant/blob/f937d0260ffd7705c6f34d9c25da1e27ebf5ddfd/lib/edge/python/src/types/query/mod.rs
lib/edge/python/src/types/query/mod.rs
use std::fmt; use bytemuck::{TransparentWrapper, TransparentWrapperAlloc as _}; use derive_more::Into; use ordered_float::OrderedFloat; use pyo3::prelude::*; use segment::data_types::vectors::{NamedQuery, VectorInternal}; use segment::vector_storage::query::*; use shard::query::query_enum::QueryEnum; use crate::repr::*; use crate::types::*; #[derive(Clone, Debug, Into, TransparentWrapper)] #[repr(transparent)] pub struct PyQuery(pub QueryEnum); impl FromPyObject<'_, '_> for PyQuery { type Error = PyErr; fn extract(query: Borrowed<'_, '_, PyAny>) -> PyResult<Self> { let query = match query.extract()? { PyQueryInterface::Nearest { query, using } => QueryEnum::Nearest(NamedQuery { query: VectorInternal::from(query), using, }), PyQueryInterface::RecommendBestScore { query, using } => { QueryEnum::RecommendBestScore(NamedQuery { query: RecoQuery::from(query), using, }) } PyQueryInterface::RecommendSumScores { query, using } => { QueryEnum::RecommendSumScores(NamedQuery { query: RecoQuery::from(query), using, }) } PyQueryInterface::Discover { query, using } => QueryEnum::Discover(NamedQuery { query: DiscoveryQuery::from(query), using, }), PyQueryInterface::Context { query, using } => QueryEnum::Context(NamedQuery { query: ContextQuery::from(query), using, }), PyQueryInterface::FeedbackNaive { query, using } => { QueryEnum::FeedbackNaive(NamedQuery { query: NaiveFeedbackQuery::from(query), using, }) } }; Ok(Self(query)) } } impl<'py> IntoPyObject<'py> for PyQuery { type Target = PyQueryInterface; type Output = Bound<'py, Self::Target>; type Error = PyErr; // Infallible? 
fn into_pyobject(self, py: Python<'py>) -> PyResult<Self::Output> { let query = match self.0 { QueryEnum::Nearest(NamedQuery { query, using }) => PyQueryInterface::Nearest { query: PyNamedVectorInternal(query), using, }, QueryEnum::RecommendBestScore(NamedQuery { query, using }) => { PyQueryInterface::RecommendBestScore { query: PyRecommendQuery(query), using, } } QueryEnum::RecommendSumScores(NamedQuery { query, using }) => { PyQueryInterface::RecommendSumScores { query: PyRecommendQuery(query), using, } } QueryEnum::Discover(NamedQuery { query, using }) => PyQueryInterface::Discover { query: PyDiscoverQuery(query), using, }, QueryEnum::Context(NamedQuery { query, using }) => PyQueryInterface::Context { query: PyContextQuery(query), using, }, QueryEnum::FeedbackNaive(NamedQuery { query, using }) => { PyQueryInterface::FeedbackNaive { query: PyFeedbackNaiveQuery(query), using, } } }; Bound::new(py, query) } } impl<'py> IntoPyObject<'py> for &PyQuery { type Target = PyQueryInterface; type Output = Bound<'py, Self::Target>; type Error = PyErr; // Infallible fn into_pyobject(self, py: Python<'py>) -> PyResult<Self::Output> { IntoPyObject::into_pyobject(self.clone(), py) } } impl Repr for PyQuery { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { let (repr, query, using): (_, &dyn Repr, _) = match &self.0 { QueryEnum::Nearest(NamedQuery { query, using }) => { ("Nearest", PyNamedVectorInternal::wrap_ref(query), using) } QueryEnum::RecommendBestScore(NamedQuery { query, using }) => ( "RecommendBestScore", PyRecommendQuery::wrap_ref(query), using, ), QueryEnum::RecommendSumScores(NamedQuery { query, using }) => ( "RecommendSumScores", PyRecommendQuery::wrap_ref(query), using, ), QueryEnum::Discover(NamedQuery { query, using }) => { ("Discover", PyDiscoverQuery::wrap_ref(query), using) } QueryEnum::Context(NamedQuery { query, using }) => { ("Context", PyContextQuery::wrap_ref(query), using) } QueryEnum::FeedbackNaive(NamedQuery { query, using }) => ( "FeedbackNaive", 
PyFeedbackNaiveQuery::wrap_ref(query), using, ), }; f.complex_enum::<PyQueryInterface>(repr, &[("query", query), ("using", using)]) } } #[pyclass(name = "Query")] #[derive(Clone, Debug)] pub enum PyQueryInterface { #[pyo3(constructor = (query, using = None))] Nearest { query: PyNamedVectorInternal, using: Option<String>, }, #[pyo3(constructor = (query, using = None))] RecommendBestScore { query: PyRecommendQuery, using: Option<String>, }, #[pyo3(constructor = (query, using = None))] RecommendSumScores { query: PyRecommendQuery, using: Option<String>, }, #[pyo3(constructor = (query, using = None))] Discover { query: PyDiscoverQuery, using: Option<String>, }, #[pyo3(constructor = (query, using = None))] Context { query: PyContextQuery, using: Option<String>, }, #[pyo3(constructor = (query, using = None))] FeedbackNaive { query: PyFeedbackNaiveQuery, using: Option<String>, }, } #[pymethods] impl PyQueryInterface { pub fn __repr__(&self) -> String { self.repr() } } impl Repr for PyQueryInterface { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { let (repr, query, using): (_, &dyn Repr, _) = match self { PyQueryInterface::Nearest { query, using } => ("Nearest", query, using), PyQueryInterface::RecommendBestScore { query, using } => { ("RecommendBestScore", query, using) } PyQueryInterface::RecommendSumScores { query, using } => { ("RecommendSumScores", query, using) } PyQueryInterface::Discover { query, using } => ("Discover", query, using), PyQueryInterface::Context { query, using } => ("Context", query, using), PyQueryInterface::FeedbackNaive { query, using } => ("FeedbackNaive", query, using), }; f.complex_enum::<Self>(repr, &[("query", query), ("using", using)]) } } #[pyclass(name = "RecommendQuery")] #[derive(Clone, Debug, Into, TransparentWrapper)] #[repr(transparent)] pub struct PyRecommendQuery(RecoQuery<VectorInternal>); #[pyclass_repr] #[pymethods] impl PyRecommendQuery { #[new] pub fn new( positives: Vec<PyNamedVectorInternal>, negatives: 
Vec<PyNamedVectorInternal>, ) -> Self { Self(RecoQuery { positives: PyNamedVectorInternal::peel_vec(positives), negatives: PyNamedVectorInternal::peel_vec(negatives), }) } #[getter] pub fn positives(&self) -> &[PyNamedVectorInternal] { PyNamedVectorInternal::wrap_slice(&self.0.positives) } #[getter] pub fn negatives(&self) -> &[PyNamedVectorInternal] { PyNamedVectorInternal::wrap_slice(&self.0.negatives) } pub fn __repr__(&self) -> String { self.repr() } } impl PyRecommendQuery { fn _getters(self) { // Every field should have a getter method let RecoQuery { positives: _, negatives: _, } = self.0; } } #[pyclass(name = "DiscoverQuery")] #[derive(Clone, Debug, Into, TransparentWrapper)] #[repr(transparent)] pub struct PyDiscoverQuery(DiscoveryQuery<VectorInternal>); #[pyclass_repr] #[pymethods] impl PyDiscoverQuery { #[new] pub fn new(target: PyNamedVectorInternal, pairs: Vec<PyContextPair>) -> Self { Self(DiscoveryQuery { target: VectorInternal::from(target), pairs: PyContextPair::peel_vec(pairs), }) } #[getter] pub fn target(&self) -> &PyNamedVectorInternal { PyNamedVectorInternal::wrap_ref(&self.0.target) } #[getter] pub fn pairs(&self) -> &[PyContextPair] { PyContextPair::wrap_slice(&self.0.pairs) } pub fn __repr__(&self) -> String { self.repr() } } impl PyDiscoverQuery { fn _getters(self) { // Every field should have a getter method let DiscoveryQuery { target: _, pairs: _, } = self.0; } } #[pyclass(name = "ContextQuery")] #[derive(Clone, Debug, Into, TransparentWrapper)] #[repr(transparent)] pub struct PyContextQuery(ContextQuery<VectorInternal>); #[pyclass_repr] #[pymethods] impl PyContextQuery { #[new] pub fn new(pairs: Vec<PyContextPair>) -> Self { Self(ContextQuery { pairs: PyContextPair::peel_vec(pairs), }) } #[getter] pub fn pairs(&self) -> &[PyContextPair] { PyContextPair::wrap_slice(&self.0.pairs) } pub fn __repr__(&self) -> String { self.repr() } } impl PyContextQuery { fn _getters(self) { // Every field should have a getter method let ContextQuery { 
pairs: _ } = self.0; } } #[pyclass(name = "ContextPair")] #[derive(Clone, Debug, Into, TransparentWrapper)] #[repr(transparent)] pub struct PyContextPair(ContextPair<VectorInternal>); #[pyclass_repr] #[pymethods] impl PyContextPair { #[new] pub fn new(positive: PyNamedVectorInternal, negative: PyNamedVectorInternal) -> Self { Self(ContextPair { positive: VectorInternal::from(positive), negative: VectorInternal::from(negative), }) } #[getter] pub fn positive(&self) -> &PyNamedVectorInternal { PyNamedVectorInternal::wrap_ref(&self.0.positive) } #[getter] pub fn negative(&self) -> &PyNamedVectorInternal { PyNamedVectorInternal::wrap_ref(&self.0.negative) } pub fn __repr__(&self) -> String { self.repr() } } impl PyContextPair { fn _getters(self) { // Every field should have a getter method let ContextPair { positive: _, negative: _, } = self.0; } } impl<'py> IntoPyObject<'py> for &PyContextPair { type Target = PyContextPair; type Output = Bound<'py, Self::Target>; type Error = PyErr; // Infallible fn into_pyobject(self, py: Python<'py>) -> PyResult<Self::Output> { IntoPyObject::into_pyobject(self.clone(), py) } } #[pyclass(name = "FeedbackNaiveQuery")] #[derive(Clone, Debug, Into, TransparentWrapper)] #[repr(transparent)] pub struct PyFeedbackNaiveQuery(NaiveFeedbackQuery<VectorInternal>); #[pyclass_repr] #[pymethods] impl PyFeedbackNaiveQuery { #[new] pub fn new( target: PyNamedVectorInternal, feedback: Vec<PyFeedbackItem>, strategy: PyNaiveFeedbackCoefficients, ) -> Self { Self(NaiveFeedbackQuery { target: VectorInternal::from(target), feedback: PyFeedbackItem::peel_vec(feedback), coefficients: NaiveFeedbackCoefficients::from(strategy), }) } #[getter] pub fn target(&self) -> &PyNamedVectorInternal { PyNamedVectorInternal::wrap_ref(&self.0.target) } #[getter] pub fn feedback(&self) -> &[PyFeedbackItem] { PyFeedbackItem::wrap_slice(&self.0.feedback) } #[getter] pub fn coefficients(&self) -> PyNaiveFeedbackCoefficients { PyNaiveFeedbackCoefficients(self.0.coefficients) 
} pub fn __repr__(&self) -> String { self.repr() } } impl PyFeedbackNaiveQuery { fn _getters(self) { // Every field should have a getter method let NaiveFeedbackQuery { target: _, feedback: _, coefficients: _, } = self.0; } } #[pyclass(name = "FeedbackItem")] #[derive(Clone, Debug, Into, TransparentWrapper)] #[repr(transparent)] pub struct PyFeedbackItem(FeedbackItem<VectorInternal>); #[pyclass_repr] #[pymethods] impl PyFeedbackItem { #[new] pub fn new(vector: PyNamedVectorInternal, score: f32) -> Self { Self(FeedbackItem { vector: VectorInternal::from(vector), score: OrderedFloat(score), }) } #[getter] pub fn vector(&self) -> &PyNamedVectorInternal { PyNamedVectorInternal::wrap_ref(&self.0.vector) } #[getter] pub fn score(&self) -> f32 { self.0.score.into_inner() } pub fn __repr__(&self) -> String { self.repr() } } impl PyFeedbackItem { fn _getters(self) { // Every field should have a getter method let FeedbackItem { vector: _, score: _, } = self.0; } } impl<'py> IntoPyObject<'py> for &PyFeedbackItem { type Target = PyFeedbackItem; type Output = Bound<'py, Self::Target>; type Error = PyErr; // Infallible fn into_pyobject(self, py: Python<'py>) -> PyResult<Self::Output> { IntoPyObject::into_pyobject(self.clone(), py) } } #[pyclass(name = "NaiveFeedbackStrategy")] #[derive(Copy, Clone, Debug, Into)] pub struct PyNaiveFeedbackCoefficients(NaiveFeedbackCoefficients); #[pyclass_repr] #[pymethods] impl PyNaiveFeedbackCoefficients { #[new] pub fn new(a: f32, b: f32, c: f32) -> Self { Self(NaiveFeedbackCoefficients { a: OrderedFloat(a), b: OrderedFloat(b), c: OrderedFloat(c), }) } #[getter] pub fn a(&self) -> f32 { self.0.a.into_inner() } #[getter] pub fn b(&self) -> f32 { self.0.b.into_inner() } #[getter] pub fn c(&self) -> f32 { self.0.c.into_inner() } pub fn __repr__(&self) -> String { self.repr() } } impl PyNaiveFeedbackCoefficients { fn _getters(self) { // Every field should have a getter method let NaiveFeedbackCoefficients { a: _, b: _, c: _ } = self.0; } }
rust
Apache-2.0
f937d0260ffd7705c6f34d9c25da1e27ebf5ddfd
2026-01-04T15:34:51.524868Z
false
qdrant/qdrant
https://github.com/qdrant/qdrant/blob/f937d0260ffd7705c6f34d9c25da1e27ebf5ddfd/lib/edge/python/src/types/formula/expression_interface.rs
lib/edge/python/src/types/formula/expression_interface.rs
use std::fmt; use bytemuck::TransparentWrapper; use pyo3::prelude::*; use crate::repr::*; use crate::*; #[pyclass(name = "Expression")] #[derive(Clone, Debug)] pub enum PyExpressionInterface { Constant { val: f32, }, Variable { var: String, }, Condition { cond: Boxed<PyCondition>, }, GeoDistance { origin: PyGeoPoint, to: PyJsonPath, }, Datetime { date_time: String, }, DatetimeKey { path: PyJsonPath, }, Mult { exprs: Vec<PyExpression>, }, Sum { exprs: Vec<PyExpression>, }, Neg { expr: Boxed<PyExpression>, }, Div { left: Boxed<PyExpression>, right: Boxed<PyExpression>, by_zero_default: Option<f32>, }, Sqrt { expr: Boxed<PyExpression>, }, Pow { base: Boxed<PyExpression>, exponent: Boxed<PyExpression>, }, Exp { expr: Boxed<PyExpression>, }, Log10 { expr: Boxed<PyExpression>, }, Ln { expr: Boxed<PyExpression>, }, Abs { expr: Boxed<PyExpression>, }, Decay { kind: PyDecayKind, x: Boxed<PyExpression>, target: Option<Boxed<PyExpression>>, midpoint: Option<f32>, scale: Option<f32>, }, } impl Repr for PyExpressionInterface { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { let (repr, fields): (_, &[(_, &dyn Repr)]) = match self { PyExpressionInterface::Constant { val } => ("Constant", &[("val", val)]), PyExpressionInterface::Variable { var } => ("Variable", &[("var", var)]), PyExpressionInterface::Condition { cond } => ("Condition", &[("cond", cond)]), PyExpressionInterface::GeoDistance { origin, to } => { ("GeoDistance", &[("origin", origin), ("to", to)]) } PyExpressionInterface::Datetime { date_time } => { ("Datetime", &[("date_time", date_time)]) } PyExpressionInterface::DatetimeKey { path } => ("DatetimeKey", &[("path", path)]), PyExpressionInterface::Mult { exprs } => ("Mult", &[("exprs", exprs)]), PyExpressionInterface::Sum { exprs } => ("Sum", &[("exprs", exprs)]), PyExpressionInterface::Neg { expr } => ("Neg", &[("expr", expr)]), PyExpressionInterface::Div { left, right, by_zero_default, } => ( "Div", &[ ("left", left), ("right", right), ("by_zero_default", 
by_zero_default), ], ), PyExpressionInterface::Sqrt { expr } => ("Sqrt", &[("expr", expr)]), PyExpressionInterface::Pow { base, exponent } => { ("Pow", &[("base", base), ("exponent", exponent)]) } PyExpressionInterface::Exp { expr } => ("Exp", &[("expr", expr)]), PyExpressionInterface::Log10 { expr } => ("Log10", &[("expr", expr)]), PyExpressionInterface::Ln { expr } => ("Ln", &[("expr", expr)]), PyExpressionInterface::Abs { expr } => ("Abs", &[("expr", expr)]), PyExpressionInterface::Decay { kind, x, target, midpoint, scale, } => ( "Decay", &[ ("kind", kind), ("x", x), ("target", target), ("midpoint", midpoint), ("scale", scale), ], ), }; f.complex_enum::<Self>(repr, fields) } } #[derive(Clone, Debug)] pub struct Boxed<T>(Box<T>); impl<T> Boxed<T> { pub fn from_box<U>(boxed: Box<U>) -> Self where T: TransparentWrapper<U>, { Self(T::wrap_box(boxed)) } pub fn into_box<U>(self) -> Box<U> where T: TransparentWrapper<U>, { T::peel_box(self.0) } pub fn from_inner(inner: T) -> Self { Self(Box::new(inner)) } pub fn into_inner(self) -> T { *self.0 } } impl<'a, 'py, T> FromPyObject<'a, 'py> for Boxed<T> where T: FromPyObject<'a, 'py>, { type Error = T::Error; fn extract(any: Borrowed<'a, 'py, PyAny>) -> Result<Self, Self::Error> { any.extract().map(Boxed::from_inner) } } impl<'py, T> IntoPyObject<'py> for Boxed<T> where T: IntoPyObject<'py>, { type Target = T::Target; type Output = T::Output; type Error = T::Error; fn into_pyobject(self, py: Python<'py>) -> Result<Self::Output, Self::Error> { self.into_inner().into_pyobject(py) } } impl<T: Repr> Repr for Boxed<T> { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { self.0.fmt(f) } }
rust
Apache-2.0
f937d0260ffd7705c6f34d9c25da1e27ebf5ddfd
2026-01-04T15:34:51.524868Z
false
qdrant/qdrant
https://github.com/qdrant/qdrant/blob/f937d0260ffd7705c6f34d9c25da1e27ebf5ddfd/lib/edge/python/src/types/formula/expression.rs
lib/edge/python/src/types/formula/expression.rs
//! Conversions between the internal `ExpressionInternal` tree and its
//! Python-facing `Expression` (`PyExpressionInterface`) counterpart.

use std::fmt;

use bytemuck::TransparentWrapper;
use derive_more::Into;
use pyo3::prelude::*;
use shard::query::formula::ExpressionInternal;

use crate::repr::*;
use crate::*;

/// Transparent wrapper over the internal expression tree; all Python
/// conversions go through [`PyExpressionInterface`].
#[derive(Clone, Debug, Into, TransparentWrapper)]
#[repr(transparent)]
pub struct PyExpression(ExpressionInternal);

// Python -> Rust: extract the interface enum, then map variant-by-variant
// onto the internal representation.
impl FromPyObject<'_, '_> for PyExpression {
    type Error = PyErr;

    fn extract(helper: Borrowed<'_, '_, PyAny>) -> PyResult<Self> {
        let expr = match helper.extract()? {
            PyExpressionInterface::Constant { val } => ExpressionInternal::Constant(val),
            PyExpressionInterface::Variable { var } => ExpressionInternal::Variable(var),
            PyExpressionInterface::Condition { cond } => {
                ExpressionInternal::Condition(cond.into_box())
            }
            PyExpressionInterface::GeoDistance { origin, to } => ExpressionInternal::GeoDistance {
                origin: origin.into(),
                to: to.into(),
            },
            PyExpressionInterface::Datetime { date_time } => {
                ExpressionInternal::Datetime(date_time)
            }
            PyExpressionInterface::DatetimeKey { path } => {
                ExpressionInternal::DatetimeKey(path.into())
            }
            PyExpressionInterface::Mult { exprs } => {
                ExpressionInternal::Mult(PyExpression::peel_vec(exprs))
            }
            PyExpressionInterface::Sum { exprs } => {
                ExpressionInternal::Sum(PyExpression::peel_vec(exprs))
            }
            PyExpressionInterface::Neg { expr } => ExpressionInternal::Neg(expr.into_box()),
            PyExpressionInterface::Div {
                left,
                right,
                by_zero_default,
            } => ExpressionInternal::Div {
                left: left.into_box(),
                right: right.into_box(),
                by_zero_default,
            },
            PyExpressionInterface::Sqrt { expr } => ExpressionInternal::Sqrt(expr.into_box()),
            PyExpressionInterface::Pow { base, exponent } => ExpressionInternal::Pow {
                base: base.into_box(),
                exponent: exponent.into_box(),
            },
            PyExpressionInterface::Exp { expr } => ExpressionInternal::Exp(expr.into_box()),
            PyExpressionInterface::Log10 { expr } => ExpressionInternal::Log10(expr.into_box()),
            PyExpressionInterface::Ln { expr } => ExpressionInternal::Ln(expr.into_box()),
            PyExpressionInterface::Abs { expr } => ExpressionInternal::Abs(expr.into_box()),
            PyExpressionInterface::Decay {
                kind,
                x,
                target,
                midpoint,
                scale,
            } => ExpressionInternal::Decay {
                kind: kind.into(),
                x: x.into_box(),
                target: target.map(Boxed::into_box),
                midpoint,
                scale,
            },
        };

        Ok(Self(expr))
    }
}

// Rust -> Python: rebuild the interface enum and hand it to PyO3.
impl<'py> IntoPyObject<'py> for PyExpression {
    type Target = PyExpressionInterface;
    type Output = Bound<'py, Self::Target>;
    type Error = PyErr;

    fn into_pyobject(self, py: Python<'py>) -> PyResult<Self::Output> {
        let helper = match self.0 {
            ExpressionInternal::Constant(val) => PyExpressionInterface::Constant { val },
            ExpressionInternal::Variable(var) => PyExpressionInterface::Variable { var },
            ExpressionInternal::Condition(cond) => PyExpressionInterface::Condition {
                cond: Boxed::from_box(cond),
            },
            ExpressionInternal::GeoDistance { origin, to } => PyExpressionInterface::GeoDistance {
                origin: PyGeoPoint(origin),
                to: PyJsonPath(to),
            },
            ExpressionInternal::Datetime(date_time) => {
                PyExpressionInterface::Datetime { date_time }
            }
            ExpressionInternal::DatetimeKey(path) => PyExpressionInterface::DatetimeKey {
                path: PyJsonPath(path),
            },
            ExpressionInternal::Mult(exprs) => PyExpressionInterface::Mult {
                exprs: PyExpression::wrap_vec(exprs),
            },
            ExpressionInternal::Sum(exprs) => PyExpressionInterface::Sum {
                exprs: PyExpression::wrap_vec(exprs),
            },
            ExpressionInternal::Neg(expr) => PyExpressionInterface::Neg {
                expr: Boxed::from_box(expr),
            },
            ExpressionInternal::Div {
                left,
                right,
                by_zero_default,
            } => PyExpressionInterface::Div {
                left: Boxed::from_box(left),
                right: Boxed::from_box(right),
                by_zero_default,
            },
            ExpressionInternal::Sqrt(expr) => PyExpressionInterface::Sqrt {
                expr: Boxed::from_box(expr),
            },
            ExpressionInternal::Pow { base, exponent } => PyExpressionInterface::Pow {
                base: Boxed::from_box(base),
                exponent: Boxed::from_box(exponent),
            },
            ExpressionInternal::Exp(expr) => PyExpressionInterface::Exp {
                expr: Boxed::from_box(expr),
            },
            ExpressionInternal::Log10(expr) => PyExpressionInterface::Log10 {
                expr: Boxed::from_box(expr),
            },
            ExpressionInternal::Ln(expr) => PyExpressionInterface::Ln {
                expr: Boxed::from_box(expr),
            },
            ExpressionInternal::Abs(expr) => PyExpressionInterface::Abs {
                expr: Boxed::from_box(expr),
            },
            ExpressionInternal::Decay {
                kind,
                x,
                target,
                midpoint,
                scale,
            } => PyExpressionInterface::Decay {
                kind: kind.into(),
                x: Boxed::from_box(x),
                target: target.map(Boxed::from_box),
                midpoint,
                scale,
            },
        };

        Bound::new(py, helper)
    }
}

// `repr()` renders under the *interface* enum's name, so Python users see the
// same class they construct, while borrowing the internal tree via the
// transparent `wrap_ref`/`wrap_slice` helpers.
impl Repr for PyExpression {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        let (repr, fields): (_, &[(_, &dyn Repr)]) = match &self.0 {
            ExpressionInternal::Constant(val) => ("Constant", &[("val", val)]),
            ExpressionInternal::Variable(var) => ("Variable", &[("var", var)]),
            ExpressionInternal::Condition(cond) => {
                ("Condition", &[("cond", PyCondition::wrap_ref(cond))])
            }
            ExpressionInternal::GeoDistance { origin, to } => (
                "GeoDistance",
                &[
                    ("origin", PyGeoPoint::wrap_ref(origin)),
                    ("to", PyJsonPath::wrap_ref(to)),
                ],
            ),
            ExpressionInternal::Datetime(date_time) => ("Datetime", &[("date_time", date_time)]),
            ExpressionInternal::DatetimeKey(path) => {
                ("DatetimeKey", &[("path", PyJsonPath::wrap_ref(path))])
            }
            ExpressionInternal::Mult(exprs) => {
                ("Mult", &[("exprs", &PyExpression::wrap_slice(exprs))])
            }
            ExpressionInternal::Sum(exprs) => {
                ("Sum", &[("exprs", &PyExpression::wrap_slice(exprs))])
            }
            ExpressionInternal::Neg(expr) => ("Neg", &[("expr", PyExpression::wrap_ref(expr))]),
            ExpressionInternal::Div {
                left,
                right,
                by_zero_default,
            } => (
                "Div",
                &[
                    ("left", PyExpression::wrap_ref(left)),
                    ("right", PyExpression::wrap_ref(right)),
                    ("by_zero_default", by_zero_default),
                ],
            ),
            ExpressionInternal::Sqrt(expr) => ("Sqrt", &[("expr", PyExpression::wrap_ref(expr))]),
            ExpressionInternal::Pow { base, exponent } => (
                "Pow",
                &[
                    ("base", PyExpression::wrap_ref(base)),
                    ("exponent", PyExpression::wrap_ref(exponent)),
                ],
            ),
            ExpressionInternal::Exp(expr) => ("Exp", &[("expr", PyExpression::wrap_ref(expr))]),
            ExpressionInternal::Log10(expr) => {
                ("Log10", &[("expr", PyExpression::wrap_ref(expr))])
            }
            ExpressionInternal::Ln(expr) => ("Ln", &[("expr", PyExpression::wrap_ref(expr))]),
            ExpressionInternal::Abs(expr) => ("Abs", &[("expr", PyExpression::wrap_ref(expr))]),
            ExpressionInternal::Decay {
                kind,
                x,
                target,
                midpoint,
                scale,
            } => (
                "Decay",
                &[
                    // `PyDecayKind` is `Copy`, so converting a fresh value here is cheap.
                    ("kind", &PyDecayKind::from(*kind)),
                    ("x", PyExpression::wrap_ref(x)),
                    (
                        "target",
                        &target.as_ref().map(|target| PyExpression::wrap_ref(target)),
                    ),
                    ("midpoint", midpoint),
                    ("scale", scale),
                ],
            ),
        };

        f.complex_enum::<PyExpressionInterface>(repr, fields)
    }
}
rust
Apache-2.0
f937d0260ffd7705c6f34d9c25da1e27ebf5ddfd
2026-01-04T15:34:51.524868Z
false
qdrant/qdrant
https://github.com/qdrant/qdrant/blob/f937d0260ffd7705c6f34d9c25da1e27ebf5ddfd/lib/edge/python/src/types/formula/mod.rs
lib/edge/python/src/types/formula/mod.rs
//! Python bindings for score-rescoring formulas: the `Formula` class and the
//! `DecayKind` enum.

pub mod expression;
pub mod expression_interface;

use std::collections::HashMap;
use std::fmt;

use bytemuck::TransparentWrapper;
use derive_more::Into;
use pyo3::exceptions::PyValueError;
use pyo3::prelude::*;
use segment::index::query_optimization::rescore_formula::parsed_formula::{
    DecayKind, ParsedFormula,
};
use shard::query::formula::{ExpressionInternal, FormulaInternal};

pub use self::expression::*;
pub use self::expression_interface::*;

use crate::repr::*;
use crate::types::PyValue;

/// Python `Formula` class wrapping a formula that has already been parsed
/// and validated.
#[pyclass(name = "Formula")]
#[derive(Clone, Debug, Into, TransparentWrapper)]
#[repr(transparent)]
pub struct PyFormula(pub ParsedFormula);

#[pyclass_repr]
#[pymethods]
impl PyFormula {
    /// Build a formula from an expression tree plus per-variable `defaults`.
    ///
    /// Raises `ValueError` if `ParsedFormula` rejects the input.
    #[new]
    pub fn new(formula: PyExpression, defaults: HashMap<String, PyValue>) -> PyResult<Self> {
        let formula = FormulaInternal {
            formula: ExpressionInternal::from(formula),
            defaults: PyValue::peel_map(defaults),
        };

        let formula = ParsedFormula::try_from(formula)
            .map_err(|err| PyValueError::new_err(format!("failed to parse formula: {err}")))?;

        Ok(Self(formula))
    }

    pub fn __repr__(&self) -> String {
        self.repr()
    }
}

/// Python `DecayKind` enum: the shape of the decay curve used by
/// `Expression.Decay`.
#[pyclass(name = "DecayKind")]
#[derive(Copy, Clone, Debug)]
pub enum PyDecayKind {
    /// Linear decay function
    Lin,
    /// Gaussian decay function
    Gauss,
    /// Exponential decay function
    Exp,
}

#[pymethods]
impl PyDecayKind {
    pub fn __repr__(&self) -> String {
        self.repr()
    }
}

impl Repr for PyDecayKind {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        let repr = match self {
            PyDecayKind::Lin => "Lin",
            PyDecayKind::Gauss => "Gauss",
            PyDecayKind::Exp => "Exp",
        };

        f.simple_enum::<Self>(repr)
    }
}

// Lossless 1:1 mapping to the internal enum, in both directions.
impl From<DecayKind> for PyDecayKind {
    fn from(decay_kind: DecayKind) -> Self {
        match decay_kind {
            DecayKind::Lin => PyDecayKind::Lin,
            DecayKind::Gauss => PyDecayKind::Gauss,
            DecayKind::Exp => PyDecayKind::Exp,
        }
    }
}

impl From<PyDecayKind> for DecayKind {
    fn from(decay_kind: PyDecayKind) -> Self {
        match decay_kind {
            PyDecayKind::Lin => DecayKind::Lin,
            PyDecayKind::Gauss => DecayKind::Gauss,
            PyDecayKind::Exp => DecayKind::Exp,
        }
    }
}
rust
Apache-2.0
f937d0260ffd7705c6f34d9c25da1e27ebf5ddfd
2026-01-04T15:34:51.524868Z
false
qdrant/qdrant
https://github.com/qdrant/qdrant/blob/f937d0260ffd7705c6f34d9c25da1e27ebf5ddfd/lib/edge/python/src/config/mod.rs
lib/edge/python/src/config/mod.rs
//! Python bindings for segment configuration: `SegmentConfig` and the
//! `PayloadStorageType` enum.

pub mod quantization;
pub mod sparse_vector_data;
pub mod vector_data;

use std::collections::HashMap;
use std::fmt;

use bytemuck::TransparentWrapper;
use derive_more::Into;
use pyo3::prelude::*;
use segment::types::*;

pub use self::quantization::*;
pub use self::sparse_vector_data::*;
pub use self::vector_data::*;

use crate::repr::*;

/// Python `SegmentConfig` class: dense and sparse vector configuration maps
/// (keyed by vector name) plus the payload storage backend.
#[pyclass(name = "SegmentConfig")]
#[derive(Clone, Debug, Into, TransparentWrapper)]
#[repr(transparent)]
pub struct PySegmentConfig(SegmentConfig);

#[pyclass_repr]
#[pymethods]
impl PySegmentConfig {
    #[new]
    pub fn new(
        vector_data: HashMap<String, PyVectorDataConfig>,
        sparse_vector_data: HashMap<String, PySparseVectorDataConfig>,
        payload_storage_type: PyPayloadStorageType,
    ) -> Self {
        Self(SegmentConfig {
            vector_data: PyVectorDataConfig::peel_map(vector_data),
            sparse_vector_data: PySparseVectorDataConfig::peel_map(sparse_vector_data),
            payload_storage_type: PayloadStorageType::from(payload_storage_type),
        })
    }

    #[getter]
    pub fn vector_data(&self) -> &HashMap<String, PyVectorDataConfig> {
        PyVectorDataConfig::wrap_map_ref(&self.0.vector_data)
    }

    #[getter]
    pub fn sparse_vector_data(&self) -> &HashMap<String, PySparseVectorDataConfig> {
        PySparseVectorDataConfig::wrap_map_ref(&self.0.sparse_vector_data)
    }

    #[getter]
    pub fn payload_storage_type(&self) -> PyPayloadStorageType {
        PyPayloadStorageType::from(self.0.payload_storage_type)
    }

    pub fn __repr__(&self) -> String {
        self.repr()
    }
}

impl PySegmentConfig {
    // Compile-time exhaustiveness check: this destructuring stops building if
    // `SegmentConfig` grows a field without a matching getter above.
    fn _getters(self) {
        // Every field should have a getter method
        let SegmentConfig {
            vector_data: _,
            sparse_vector_data: _,
            payload_storage_type: _,
        } = self.0;
    }
}

/// Python `PayloadStorageType` enum — only the mmap-backed variants are
/// exposed here.
#[pyclass(name = "PayloadStorageType")]
#[derive(Copy, Clone, Debug)]
pub enum PyPayloadStorageType {
    Mmap,
    InRamMmap,
}

#[pymethods]
impl PyPayloadStorageType {
    pub fn __repr__(&self) -> String {
        self.repr()
    }
}

impl Repr for PyPayloadStorageType {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        let repr = match self {
            PyPayloadStorageType::Mmap => "Mmap",
            PyPayloadStorageType::InRamMmap => "InRamMmap",
        };

        f.simple_enum::<Self>(repr)
    }
}

impl From<PayloadStorageType> for PyPayloadStorageType {
    fn from(storage_type: PayloadStorageType) -> Self {
        // Panics (via `unimplemented!`) for storage types Qdrant Edge does
        // not support; the allow silences the lint when the catch-all happens
        // to be unreachable for the compiled feature set.
        #[allow(unreachable_patterns)]
        match storage_type {
            PayloadStorageType::Mmap => PyPayloadStorageType::Mmap,
            PayloadStorageType::InRamMmap => PyPayloadStorageType::InRamMmap,
            _ => unimplemented!("RocksDB-backed storage types are not supported by Qdrant Edge"),
        }
    }
}

impl From<PyPayloadStorageType> for PayloadStorageType {
    fn from(storage_type: PyPayloadStorageType) -> Self {
        match storage_type {
            PyPayloadStorageType::Mmap => PayloadStorageType::Mmap,
            PyPayloadStorageType::InRamMmap => PayloadStorageType::InRamMmap,
        }
    }
}
rust
Apache-2.0
f937d0260ffd7705c6f34d9c25da1e27ebf5ddfd
2026-01-04T15:34:51.524868Z
false
qdrant/qdrant
https://github.com/qdrant/qdrant/blob/f937d0260ffd7705c6f34d9c25da1e27ebf5ddfd/lib/edge/python/src/config/vector_data.rs
lib/edge/python/src/config/vector_data.rs
//! Python bindings for per-vector dense storage configuration.

use std::collections::HashMap;
use std::{fmt, mem};

use bytemuck::TransparentWrapper;
use derive_more::Into;
use pyo3::IntoPyObjectExt as _;
use pyo3::prelude::*;
use segment::types::*;

use super::quantization::*;
use crate::repr::*;

/// Python `VectorDataConfig` class: configuration of a single named dense
/// vector (size, distance, storage, index, optional quantization etc.).
#[pyclass(name = "VectorDataConfig")]
#[derive(Clone, Debug, Into, TransparentWrapper)]
#[repr(transparent)]
pub struct PyVectorDataConfig(pub VectorDataConfig);

impl PyVectorDataConfig {
    /// Strip the wrapper from every map value without rebuilding the map.
    pub fn peel_map(map: HashMap<String, Self>) -> HashMap<String, VectorDataConfig>
    where
        Self: TransparentWrapper<VectorDataConfig>,
    {
        // SAFETY: `Self` is `#[repr(transparent)]` over `VectorDataConfig`
        // (enforced by the `TransparentWrapper` bound), so both map types
        // have identical layout.
        unsafe { mem::transmute(map) }
    }

    /// Borrow a map of raw configs as a map of wrapped configs.
    pub fn wrap_map_ref(map: &HashMap<String, VectorDataConfig>) -> &HashMap<String, Self>
    where
        Self: TransparentWrapper<VectorDataConfig>,
    {
        // SAFETY: same layout argument as in `peel_map`.
        unsafe { mem::transmute(map) }
    }
}

#[pyclass_repr]
#[pymethods]
impl PyVectorDataConfig {
    #[new]
    #[pyo3(signature = (size, distance, storage_type, index, quantization_config=None, multivector_config=None, datatype=None))]
    pub fn new(
        size: usize,
        distance: PyDistance,
        storage_type: PyVectorStorageType,
        index: PyIndexes,
        quantization_config: Option<PyQuantizationConfig>,
        multivector_config: Option<PyMultiVectorConfig>,
        datatype: Option<PyVectorStorageDatatype>,
    ) -> Self {
        Self(VectorDataConfig {
            size,
            distance: Distance::from(distance),
            storage_type: VectorStorageType::from(storage_type),
            index: Indexes::from(index),
            quantization_config: quantization_config.map(QuantizationConfig::from),
            multivector_config: multivector_config.map(MultiVectorConfig::from),
            datatype: datatype.map(VectorStorageDatatype::from),
        })
    }

    #[getter]
    pub fn size(&self) -> usize {
        self.0.size
    }

    #[getter]
    pub fn distance(&self) -> PyDistance {
        PyDistance::from(self.0.distance)
    }

    #[getter]
    pub fn storage_type(&self) -> PyVectorStorageType {
        PyVectorStorageType::from(self.0.storage_type)
    }

    #[getter]
    pub fn index(&self) -> PyIndexes {
        PyIndexes(self.0.index.clone())
    }

    #[getter]
    pub fn quantization_config(&self) -> Option<PyQuantizationConfig> {
        self.0.quantization_config.clone().map(PyQuantizationConfig)
    }

    #[getter]
    pub fn multivector_config(&self) -> Option<PyMultiVectorConfig> {
        self.0.multivector_config.map(PyMultiVectorConfig)
    }

    #[getter]
    pub fn datatype(&self) -> Option<PyVectorStorageDatatype> {
        self.0.datatype.map(PyVectorStorageDatatype::from)
    }

    pub fn __repr__(&self) -> String {
        self.repr()
    }
}

impl PyVectorDataConfig {
    // Compile-time exhaustiveness check: stops building if `VectorDataConfig`
    // grows a field without a matching getter above.
    fn _getters(self) {
        // Every field should have a getter method
        let VectorDataConfig {
            size: _,
            distance: _,
            storage_type: _,
            index: _,
            quantization_config: _,
            multivector_config: _,
            datatype: _,
        } = self.0;
    }
}

// By-reference conversion simply clones; the by-value conversion is derived
// by the `#[pyclass]` machinery.
impl<'py> IntoPyObject<'py> for &PyVectorDataConfig {
    type Target = PyVectorDataConfig;
    type Output = Bound<'py, Self::Target>;
    type Error = PyErr; // Infallible

    fn into_pyobject(self, py: Python<'py>) -> PyResult<Self::Output> {
        IntoPyObject::into_pyobject(self.clone(), py)
    }
}

/// Python `Distance` enum: similarity metric of a dense vector.
#[pyclass(name = "Distance")]
#[derive(Copy, Clone, Debug)]
pub enum PyDistance {
    Cosine,
    Euclid,
    Dot,
    Manhattan,
}

#[pymethods]
impl PyDistance {
    pub fn __repr__(&self) -> String {
        self.repr()
    }
}

impl Repr for PyDistance {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        let repr = match self {
            Self::Cosine => "Cosine",
            Self::Euclid => "Euclid",
            Self::Dot => "Dot",
            Self::Manhattan => "Manhattan",
        };

        f.simple_enum::<Self>(repr)
    }
}

// Lossless 1:1 mapping to the internal enum, in both directions.
impl From<Distance> for PyDistance {
    fn from(distance: Distance) -> Self {
        match distance {
            Distance::Cosine => PyDistance::Cosine,
            Distance::Euclid => PyDistance::Euclid,
            Distance::Dot => PyDistance::Dot,
            Distance::Manhattan => PyDistance::Manhattan,
        }
    }
}

impl From<PyDistance> for Distance {
    fn from(distance: PyDistance) -> Self {
        match distance {
            PyDistance::Cosine => Distance::Cosine,
            PyDistance::Euclid => Distance::Euclid,
            PyDistance::Dot => Distance::Dot,
            PyDistance::Manhattan => Distance::Manhattan,
        }
    }
}

/// Python `VectorStorageType` enum: where/how raw vectors are stored.
#[pyclass(name = "VectorStorageType")]
#[derive(Copy, Clone, Debug)]
pub enum PyVectorStorageType {
    Memory,
    Mmap,
    ChunkedMmap,
    InRamChunkedMmap,
}
#[pymethods]
impl PyVectorStorageType {
    pub fn __repr__(&self) -> String {
        self.repr()
    }
}

impl Repr for PyVectorStorageType {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        let repr = match self {
            Self::Memory => "Memory",
            Self::Mmap => "Mmap",
            Self::ChunkedMmap => "ChunkedMmap",
            Self::InRamChunkedMmap => "InRamChunkedMmap",
        };

        f.simple_enum::<Self>(repr)
    }
}

// Lossless 1:1 mapping to the internal enum, in both directions.
impl From<VectorStorageType> for PyVectorStorageType {
    fn from(storage_type: VectorStorageType) -> Self {
        match storage_type {
            VectorStorageType::Memory => PyVectorStorageType::Memory,
            VectorStorageType::Mmap => PyVectorStorageType::Mmap,
            VectorStorageType::ChunkedMmap => PyVectorStorageType::ChunkedMmap,
            VectorStorageType::InRamChunkedMmap => PyVectorStorageType::InRamChunkedMmap,
        }
    }
}

impl From<PyVectorStorageType> for VectorStorageType {
    fn from(storage_type: PyVectorStorageType) -> Self {
        match storage_type {
            PyVectorStorageType::Memory => VectorStorageType::Memory,
            PyVectorStorageType::Mmap => VectorStorageType::Mmap,
            PyVectorStorageType::ChunkedMmap => VectorStorageType::ChunkedMmap,
            PyVectorStorageType::InRamChunkedMmap => VectorStorageType::InRamChunkedMmap,
        }
    }
}

/// Index selection: on the Python side this is not a class of its own but
/// either a `PlainIndexConfig` or an `HnswIndexConfig` object.
#[derive(Clone, Debug, Into, TransparentWrapper)]
#[repr(transparent)]
pub struct PyIndexes(Indexes);

impl FromPyObject<'_, '_> for PyIndexes {
    type Error = PyErr;

    fn extract(indexes: Borrowed<'_, '_, PyAny>) -> PyResult<Self> {
        // Untagged helper: try each supported config class in turn.
        #[derive(FromPyObject)]
        enum Helper {
            Plain(PyPlainIndexConfig),
            Hnsw(PyHnswIndexConfig),
        }

        // Compile-time exhaustiveness check: stops building if `Indexes`
        // grows a variant that `Helper` does not cover.
        fn _variants(indexes: Indexes) {
            match indexes {
                Indexes::Plain {} => (),
                Indexes::Hnsw(_) => (),
            }
        }

        let indexes = match indexes.extract()? {
            Helper::Plain(_) => Indexes::Plain {},
            Helper::Hnsw(hnsw) => Indexes::Hnsw(HnswConfig::from(hnsw)),
        };

        Ok(Self(indexes))
    }
}

// Rust -> Python: emit the matching config class as an untyped object.
impl<'py> IntoPyObject<'py> for PyIndexes {
    type Target = PyAny;
    type Output = Bound<'py, Self::Target>;
    type Error = PyErr;

    fn into_pyobject(self, py: Python<'py>) -> PyResult<Self::Output> {
        match self.0 {
            Indexes::Plain {} => PyPlainIndexConfig.into_bound_py_any(py),
            Indexes::Hnsw(hnsw) => PyHnswIndexConfig(hnsw).into_bound_py_any(py),
        }
    }
}

// `repr()` delegates to whichever concrete config class the variant maps to.
impl Repr for PyIndexes {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        match &self.0 {
            Indexes::Plain {} => PyPlainIndexConfig.fmt(f),
            Indexes::Hnsw(hnsw) => PyHnswIndexConfig::wrap_ref(hnsw).fmt(f),
        }
    }
}

/// Python `PlainIndexConfig` class: marker for the brute-force (no index)
/// option; carries no fields.
#[pyclass(name = "PlainIndexConfig")]
#[derive(Copy, Clone, Debug, Default, Into)]
pub struct PyPlainIndexConfig;

#[pyclass_repr]
#[pymethods]
impl PyPlainIndexConfig {
    #[new]
    pub fn new() -> Self {
        Self
    }

    pub fn __repr__(&self) -> String {
        self.repr()
    }
}

/// Python `HnswIndexConfig` class: HNSW graph parameters.
#[pyclass(name = "HnswIndexConfig")]
#[derive(Copy, Clone, Debug, Into, TransparentWrapper)]
#[repr(transparent)]
pub struct PyHnswIndexConfig(HnswConfig);

#[pyclass_repr]
#[pymethods]
impl PyHnswIndexConfig {
    /// Builds the config; `max_indexing_threads` is fixed to `0` and not
    /// exposed to Python.
    #[new]
    #[pyo3(signature = (m, ef_construct, full_scan_threshold, on_disk=None, payload_m=None, inline_storage=None))]
    pub fn new(
        m: usize,
        ef_construct: usize,
        full_scan_threshold: usize,
        on_disk: Option<bool>,
        payload_m: Option<usize>,
        inline_storage: Option<bool>,
    ) -> Self {
        Self(HnswConfig {
            m,
            ef_construct,
            full_scan_threshold,
            max_indexing_threads: 0,
            on_disk,
            payload_m,
            inline_storage,
        })
    }

    #[getter]
    pub fn m(&self) -> usize {
        self.0.m
    }

    #[getter]
    pub fn ef_construct(&self) -> usize {
        self.0.ef_construct
    }

    #[getter]
    pub fn full_scan_threshold(&self) -> usize {
        self.0.full_scan_threshold
    }

    #[getter]
    pub fn on_disk(&self) -> Option<bool> {
        self.0.on_disk
    }

    #[getter]
    pub fn payload_m(&self) -> Option<usize> {
        self.0.payload_m
    }

    #[getter]
    pub fn inline_storage(&self) -> Option<bool> {
        self.0.inline_storage
    }

    pub fn __repr__(&self) -> String {
        self.repr()
    }
}

impl PyHnswIndexConfig {
    // Compile-time exhaustiveness check: stops building if `HnswConfig`
    // grows a field without a matching getter above.
    fn _getters(self) {
        // Every field should have a getter method
        let HnswConfig {
            m: _,
            ef_construct: _,
            full_scan_threshold: _,
            max_indexing_threads: _, // not relevant for Qdrant Edge
            on_disk: _,
            payload_m: _,
            inline_storage: _,
        } = self.0;
    }
}

/// Python `MultiVectorConfig` class: options for multi-vector points.
#[pyclass(name = "MultiVectorConfig")]
#[derive(Copy, Clone, Debug, Into, TransparentWrapper)]
#[repr(transparent)]
pub struct PyMultiVectorConfig(MultiVectorConfig);

#[pyclass_repr]
#[pymethods]
impl PyMultiVectorConfig {
    #[new]
    pub fn new(comparator: PyMultiVectorComparator) -> Self {
        Self(MultiVectorConfig {
            comparator: MultiVectorComparator::from(comparator),
        })
    }

    #[getter]
    pub fn comparator(&self) -> PyMultiVectorComparator {
        PyMultiVectorComparator::from(self.0.comparator)
    }

    pub fn __repr__(&self) -> String {
        self.repr()
    }
}

impl PyMultiVectorConfig {
    // Compile-time exhaustiveness check, as for the other config structs.
    fn _getters(self) {
        // Every field should have a getter method
        let MultiVectorConfig { comparator: _ } = self.0;
    }
}

/// Python `MultiVectorComparator` enum (currently only `MaxSim`).
#[pyclass(name = "MultiVectorComparator")]
#[derive(Copy, Clone, Debug)]
pub enum PyMultiVectorComparator {
    MaxSim,
}

#[pymethods]
impl PyMultiVectorComparator {
    pub fn __repr__(&self) -> String {
        self.repr()
    }
}

impl Repr for PyMultiVectorComparator {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        let repr = match self {
            Self::MaxSim => "MaxSim",
        };

        f.simple_enum::<Self>(repr)
    }
}

impl From<MultiVectorComparator> for PyMultiVectorComparator {
    fn from(comparator: MultiVectorComparator) -> Self {
        match comparator {
            MultiVectorComparator::MaxSim => PyMultiVectorComparator::MaxSim,
        }
    }
}

impl From<PyMultiVectorComparator> for MultiVectorComparator {
    fn from(comparator: PyMultiVectorComparator) -> Self {
        match comparator {
            PyMultiVectorComparator::MaxSim => MultiVectorComparator::MaxSim,
        }
    }
}

/// Python `VectorStorageDatatype` enum: element type of stored vectors.
#[pyclass(name = "VectorStorageDatatype")]
#[derive(Copy, Clone, Debug)]
pub enum PyVectorStorageDatatype {
    Float32,
    Float16,
    Uint8,
}

#[pymethods]
impl PyVectorStorageDatatype {
    pub fn __repr__(&self) -> String {
        self.repr()
    }
}

impl Repr for PyVectorStorageDatatype {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        let repr = match self {
            Self::Float32 => "Float32",
            Self::Float16 => "Float16",
            Self::Uint8 => "Uint8",
        };

        f.simple_enum::<Self>(repr)
    }
}

// Lossless 1:1 mapping to the internal enum, in both directions.
impl From<VectorStorageDatatype> for PyVectorStorageDatatype {
    fn from(datatype: VectorStorageDatatype) -> Self {
        match datatype {
            VectorStorageDatatype::Float32 => PyVectorStorageDatatype::Float32,
            VectorStorageDatatype::Float16 => PyVectorStorageDatatype::Float16,
            VectorStorageDatatype::Uint8 => PyVectorStorageDatatype::Uint8,
        }
    }
}

impl From<PyVectorStorageDatatype> for VectorStorageDatatype {
    fn from(datatype: PyVectorStorageDatatype) -> Self {
        match datatype {
            PyVectorStorageDatatype::Float32 => VectorStorageDatatype::Float32,
            PyVectorStorageDatatype::Float16 => VectorStorageDatatype::Float16,
            PyVectorStorageDatatype::Uint8 => VectorStorageDatatype::Uint8,
        }
    }
}
rust
Apache-2.0
f937d0260ffd7705c6f34d9c25da1e27ebf5ddfd
2026-01-04T15:34:51.524868Z
false
qdrant/qdrant
https://github.com/qdrant/qdrant/blob/f937d0260ffd7705c6f34d9c25da1e27ebf5ddfd/lib/edge/python/src/config/quantization.rs
lib/edge/python/src/config/quantization.rs
use std::fmt; use bytemuck::TransparentWrapper; use derive_more::Into; use pyo3::IntoPyObjectExt as _; use pyo3::prelude::*; use segment::types::*; use crate::repr::*; #[derive(Clone, Debug, Into, TransparentWrapper)] #[repr(transparent)] pub struct PyQuantizationConfig(pub QuantizationConfig); impl FromPyObject<'_, '_> for PyQuantizationConfig { type Error = PyErr; fn extract(conf: Borrowed<'_, '_, PyAny>) -> PyResult<Self> { #[derive(FromPyObject)] enum Helper { Scalar(PyScalarQuantizationConfig), Product(PyProductQuantizationConfig), Binary(PyBinaryQuantizationConfig), } let conf = match conf.extract()? { Helper::Scalar(scalar) => QuantizationConfig::Scalar(ScalarQuantization { scalar: ScalarQuantizationConfig::from(scalar), }), Helper::Product(product) => QuantizationConfig::Product(ProductQuantization { product: ProductQuantizationConfig::from(product), }), Helper::Binary(binary) => QuantizationConfig::Binary(BinaryQuantization { binary: BinaryQuantizationConfig::from(binary), }), }; Ok(Self(conf)) } } impl<'py> IntoPyObject<'py> for PyQuantizationConfig { type Target = PyAny; type Output = Bound<'py, Self::Target>; type Error = PyErr; fn into_pyobject(self, py: Python<'py>) -> PyResult<Self::Output> { match self.0 { QuantizationConfig::Scalar(ScalarQuantization { scalar }) => { PyScalarQuantizationConfig(scalar).into_bound_py_any(py) } QuantizationConfig::Product(ProductQuantization { product }) => { PyProductQuantizationConfig(product).into_bound_py_any(py) } QuantizationConfig::Binary(BinaryQuantization { binary }) => { PyBinaryQuantizationConfig(binary).into_bound_py_any(py) } } } } impl Repr for PyQuantizationConfig { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { match &self.0 { QuantizationConfig::Scalar(scalar) => { PyScalarQuantizationConfig::wrap_ref(&scalar.scalar).fmt(f) } QuantizationConfig::Product(product) => { PyProductQuantizationConfig::wrap_ref(&product.product).fmt(f) } QuantizationConfig::Binary(binary) => { 
PyBinaryQuantizationConfig::wrap_ref(&binary.binary).fmt(f) } } } } #[pyclass(name = "ScalarQuantizationConfig")] #[derive(Clone, Debug, Into, TransparentWrapper)] #[repr(transparent)] pub struct PyScalarQuantizationConfig(ScalarQuantizationConfig); #[pyclass_repr] #[pymethods] impl PyScalarQuantizationConfig { #[new] #[pyo3(signature = (r#type, quantile = None, always_ram = None))] pub fn new(r#type: PyScalarType, quantile: Option<f32>, always_ram: Option<bool>) -> Self { Self(ScalarQuantizationConfig { r#type: ScalarType::from(r#type), quantile, always_ram, }) } #[getter] pub fn r#type(&self) -> PyScalarType { PyScalarType::from(self.0.r#type) } #[getter] pub fn quantile(&self) -> Option<f32> { self.0.quantile } #[getter] pub fn always_ram(&self) -> Option<bool> { self.0.always_ram } pub fn __repr__(&self) -> String { self.repr() } } impl PyScalarQuantizationConfig { fn _getters(self) { // Every field should have a getter method let ScalarQuantizationConfig { r#type: _, quantile: _, always_ram: _, } = self.0; } } #[pyclass(name = "ScalarType")] #[derive(Copy, Clone, Debug)] pub enum PyScalarType { Int8, } #[pymethods] impl PyScalarType { pub fn __repr__(&self) -> String { self.repr() } } impl Repr for PyScalarType { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { let repr = match self { Self::Int8 => "Int8", }; f.simple_enum::<Self>(repr) } } impl From<ScalarType> for PyScalarType { fn from(scalar_type: ScalarType) -> Self { match scalar_type { ScalarType::Int8 => PyScalarType::Int8, } } } impl From<PyScalarType> for ScalarType { fn from(scalar_type: PyScalarType) -> Self { match scalar_type { PyScalarType::Int8 => ScalarType::Int8, } } } #[pyclass(name = "ProductQuantizationConfig")] #[derive(Clone, Debug, Into, TransparentWrapper)] #[repr(transparent)] pub struct PyProductQuantizationConfig(ProductQuantizationConfig); #[pyclass_repr] #[pymethods] impl PyProductQuantizationConfig { #[new] #[pyo3(signature = (compression, always_ram = None))] pub fn 
new(compression: PyCompressionRatio, always_ram: Option<bool>) -> Self { Self(ProductQuantizationConfig { compression: CompressionRatio::from(compression), always_ram, }) } #[getter] pub fn compression(&self) -> PyCompressionRatio { PyCompressionRatio::from(self.0.compression) } #[getter] pub fn always_ram(&self) -> Option<bool> { self.0.always_ram } pub fn __repr__(&self) -> String { self.repr() } } impl PyProductQuantizationConfig { fn _getters(self) { // Every field should have a getter method let ProductQuantizationConfig { compression: _, always_ram: _, } = self.0; } } #[pyclass(name = "CompressionRatio")] #[derive(Copy, Clone, Debug)] pub enum PyCompressionRatio { X4, X8, X16, X32, X64, } #[pymethods] impl PyCompressionRatio { pub fn __repr__(&self) -> String { self.repr() } } impl Repr for PyCompressionRatio { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { let repr = match self { Self::X4 => "X4", Self::X8 => "X8", Self::X16 => "X16", Self::X32 => "X32", Self::X64 => "X64", }; f.simple_enum::<Self>(repr) } } impl From<CompressionRatio> for PyCompressionRatio { fn from(compression: CompressionRatio) -> Self { match compression { CompressionRatio::X4 => PyCompressionRatio::X4, CompressionRatio::X8 => PyCompressionRatio::X8, CompressionRatio::X16 => PyCompressionRatio::X16, CompressionRatio::X32 => PyCompressionRatio::X32, CompressionRatio::X64 => PyCompressionRatio::X64, } } } impl From<PyCompressionRatio> for CompressionRatio { fn from(compression: PyCompressionRatio) -> Self { match compression { PyCompressionRatio::X4 => CompressionRatio::X4, PyCompressionRatio::X8 => CompressionRatio::X8, PyCompressionRatio::X16 => CompressionRatio::X16, PyCompressionRatio::X32 => CompressionRatio::X32, PyCompressionRatio::X64 => CompressionRatio::X64, } } } #[pyclass(name = "BinaryQuantizationConfig")] #[derive(Clone, Debug, Into, TransparentWrapper)] #[repr(transparent)] pub struct PyBinaryQuantizationConfig(BinaryQuantizationConfig); #[pyclass_repr] #[pymethods] 
impl PyBinaryQuantizationConfig { #[new] #[pyo3(signature = (always_ram = None, encoding = None, query_encoding = None))] pub fn new( always_ram: Option<bool>, encoding: Option<PyBinaryQuantizationEncoding>, query_encoding: Option<PyBinaryQuantizationQueryEncoding>, ) -> Self { Self(BinaryQuantizationConfig { always_ram, encoding: encoding.map(BinaryQuantizationEncoding::from), query_encoding: query_encoding.map(BinaryQuantizationQueryEncoding::from), }) } #[getter] pub fn always_ram(&self) -> Option<bool> { self.0.always_ram } #[getter] pub fn encoding(&self) -> Option<PyBinaryQuantizationEncoding> { self.0.encoding.map(PyBinaryQuantizationEncoding::from) } #[getter] pub fn query_encoding(&self) -> Option<PyBinaryQuantizationQueryEncoding> { self.0 .query_encoding .map(PyBinaryQuantizationQueryEncoding::from) } pub fn __repr__(&self) -> String { self.repr() } } impl PyBinaryQuantizationConfig { fn _getters(self) { // Every field should have a getter method let BinaryQuantizationConfig { always_ram: _, encoding: _, query_encoding: _, } = self.0; } } #[pyclass(name = "BinaryQuantizationEncoding")] #[derive(Copy, Clone, Debug)] pub enum PyBinaryQuantizationEncoding { OneBit, TwoBits, OneAndHalfBits, } #[pymethods] impl PyBinaryQuantizationEncoding { pub fn __repr__(&self) -> String { self.repr() } } impl Repr for PyBinaryQuantizationEncoding { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { let repr = match self { Self::OneBit => "OneBit", Self::TwoBits => "TwoBits", Self::OneAndHalfBits => "OneAndHalfBits", }; f.simple_enum::<Self>(repr) } } impl From<BinaryQuantizationEncoding> for PyBinaryQuantizationEncoding { fn from(encoding: BinaryQuantizationEncoding) -> Self { match encoding { BinaryQuantizationEncoding::OneBit => PyBinaryQuantizationEncoding::OneBit, BinaryQuantizationEncoding::TwoBits => PyBinaryQuantizationEncoding::TwoBits, BinaryQuantizationEncoding::OneAndHalfBits => { PyBinaryQuantizationEncoding::OneAndHalfBits } } } } impl 
From<PyBinaryQuantizationEncoding> for BinaryQuantizationEncoding { fn from(encoding: PyBinaryQuantizationEncoding) -> Self { match encoding { PyBinaryQuantizationEncoding::OneBit => BinaryQuantizationEncoding::OneBit, PyBinaryQuantizationEncoding::TwoBits => BinaryQuantizationEncoding::TwoBits, PyBinaryQuantizationEncoding::OneAndHalfBits => { BinaryQuantizationEncoding::OneAndHalfBits } } } } #[pyclass(name = "BinaryQuantizationQueryEncoding")] #[derive(Copy, Clone, Debug)] pub enum PyBinaryQuantizationQueryEncoding { Default, Binary, Scalar4Bits, Scalar8Bits, } #[pymethods] impl PyBinaryQuantizationQueryEncoding { pub fn __repr__(&self) -> String { self.repr() } } impl Repr for PyBinaryQuantizationQueryEncoding { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { let repr = match self { Self::Default => "Default", Self::Binary => "Binary", Self::Scalar4Bits => "Scalar4Bits", Self::Scalar8Bits => "Scalar8Bits", }; f.simple_enum::<Self>(repr) } } impl From<BinaryQuantizationQueryEncoding> for PyBinaryQuantizationQueryEncoding { fn from(encoding: BinaryQuantizationQueryEncoding) -> Self { match encoding { BinaryQuantizationQueryEncoding::Default => PyBinaryQuantizationQueryEncoding::Default, BinaryQuantizationQueryEncoding::Binary => PyBinaryQuantizationQueryEncoding::Binary, BinaryQuantizationQueryEncoding::Scalar4Bits => { PyBinaryQuantizationQueryEncoding::Scalar4Bits } BinaryQuantizationQueryEncoding::Scalar8Bits => { PyBinaryQuantizationQueryEncoding::Scalar8Bits } } } } impl From<PyBinaryQuantizationQueryEncoding> for BinaryQuantizationQueryEncoding { fn from(encoding: PyBinaryQuantizationQueryEncoding) -> Self { match encoding { PyBinaryQuantizationQueryEncoding::Default => BinaryQuantizationQueryEncoding::Default, PyBinaryQuantizationQueryEncoding::Binary => BinaryQuantizationQueryEncoding::Binary, PyBinaryQuantizationQueryEncoding::Scalar4Bits => { BinaryQuantizationQueryEncoding::Scalar4Bits } PyBinaryQuantizationQueryEncoding::Scalar8Bits => { 
BinaryQuantizationQueryEncoding::Scalar8Bits } } } }
rust
Apache-2.0
f937d0260ffd7705c6f34d9c25da1e27ebf5ddfd
2026-01-04T15:34:51.524868Z
false
qdrant/qdrant
https://github.com/qdrant/qdrant/blob/f937d0260ffd7705c6f34d9c25da1e27ebf5ddfd/lib/edge/python/src/config/sparse_vector_data.rs
lib/edge/python/src/config/sparse_vector_data.rs
use std::collections::HashMap; use std::{fmt, mem}; use bytemuck::TransparentWrapper; use derive_more::Into; use pyo3::prelude::*; use segment::data_types::modifier::Modifier; use segment::index::sparse_index::sparse_index_config::{SparseIndexConfig, SparseIndexType}; use segment::types::*; use super::vector_data::*; use crate::repr::*; #[pyclass(name = "SparseVectorDataConfig")] #[derive(Copy, Clone, Debug, Into, TransparentWrapper)] #[repr(transparent)] pub struct PySparseVectorDataConfig(pub SparseVectorDataConfig); impl PySparseVectorDataConfig { pub fn peel_map(map: HashMap<String, Self>) -> HashMap<String, SparseVectorDataConfig> where Self: TransparentWrapper<SparseVectorDataConfig>, { unsafe { mem::transmute(map) } } pub fn wrap_map_ref(map: &HashMap<String, SparseVectorDataConfig>) -> &HashMap<String, Self> where Self: TransparentWrapper<SparseVectorDataConfig>, { unsafe { mem::transmute(map) } } } #[pyclass_repr] #[pymethods] impl PySparseVectorDataConfig { #[new] pub fn new( index: PySparseIndexConfig, storage_type: PySparseVectorStorageType, modifier: Option<PyModifier>, ) -> Self { Self(SparseVectorDataConfig { index: SparseIndexConfig::from(index), storage_type: SparseVectorStorageType::from(storage_type), modifier: modifier.map(Modifier::from), }) } #[getter] pub fn index(&self) -> PySparseIndexConfig { PySparseIndexConfig(self.0.index) } #[getter] pub fn storage_type(&self) -> PySparseVectorStorageType { PySparseVectorStorageType::from(self.0.storage_type) } #[getter] pub fn modifier(&self) -> Option<PyModifier> { self.0.modifier.map(PyModifier::from) } pub fn __repr__(&self) -> String { self.repr() } } impl PySparseVectorDataConfig { fn _getters(self) { // Every field should have a getter method let SparseVectorDataConfig { index: _, storage_type: _, modifier: _, } = self.0; } } impl<'py> IntoPyObject<'py> for &PySparseVectorDataConfig { type Target = PySparseVectorDataConfig; type Output = Bound<'py, Self::Target>; type Error = PyErr; // 
Infallible fn into_pyobject(self, py: Python<'py>) -> PyResult<Self::Output> { IntoPyObject::into_pyobject(*self, py) } } #[pyclass(name = "SparseIndexConfig")] #[derive(Copy, Clone, Debug, Into, TransparentWrapper)] #[repr(transparent)] pub struct PySparseIndexConfig(SparseIndexConfig); #[pyclass_repr] #[pymethods] impl PySparseIndexConfig { #[new] pub fn new( full_scan_threshold: Option<usize>, index_type: PySparseIndexType, datatype: Option<PyVectorStorageDatatype>, ) -> Self { Self(SparseIndexConfig { full_scan_threshold, index_type: SparseIndexType::from(index_type), datatype: datatype.map(VectorStorageDatatype::from), }) } #[getter] pub fn full_scan_threshold(&self) -> Option<usize> { self.0.full_scan_threshold } #[getter] pub fn index_type(&self) -> PySparseIndexType { PySparseIndexType::from(self.0.index_type) } #[getter] pub fn datatype(&self) -> Option<PyVectorStorageDatatype> { self.0.datatype.map(PyVectorStorageDatatype::from) } pub fn __repr__(&self) -> String { self.repr() } } impl PySparseIndexConfig { fn _getters(self) { // Every field should have a getter method let SparseIndexConfig { full_scan_threshold: _, index_type: _, datatype: _, } = self.0; } } #[pyclass(name = "SparseIndexType")] #[derive(Copy, Clone, Debug)] pub enum PySparseIndexType { MutableRam, ImmutableRam, Mmap, } #[pymethods] impl PySparseIndexType { pub fn __repr__(&self) -> String { self.repr() } } impl Repr for PySparseIndexType { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { let repr = match self { Self::MutableRam => "MutableRam", Self::ImmutableRam => "ImmutableRam", Self::Mmap => "Mmap", }; f.simple_enum::<Self>(repr) } } impl From<SparseIndexType> for PySparseIndexType { fn from(index_type: SparseIndexType) -> Self { match index_type { SparseIndexType::MutableRam => PySparseIndexType::MutableRam, SparseIndexType::ImmutableRam => PySparseIndexType::ImmutableRam, SparseIndexType::Mmap => PySparseIndexType::Mmap, } } } impl From<PySparseIndexType> for SparseIndexType { 
fn from(index_type: PySparseIndexType) -> Self { match index_type { PySparseIndexType::MutableRam => SparseIndexType::MutableRam, PySparseIndexType::ImmutableRam => SparseIndexType::ImmutableRam, PySparseIndexType::Mmap => SparseIndexType::Mmap, } } } #[pyclass(name = "SparseVectorStorageType")] #[derive(Copy, Clone, Debug)] pub enum PySparseVectorStorageType { Mmap, } #[pymethods] impl PySparseVectorStorageType { pub fn __repr__(&self) -> String { self.repr() } } impl Repr for PySparseVectorStorageType { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { let repr = match self { Self::Mmap => "Mmap", }; f.simple_enum::<Self>(repr) } } impl From<SparseVectorStorageType> for PySparseVectorStorageType { fn from(storage_type: SparseVectorStorageType) -> Self { #[allow(unreachable_patterns)] #[allow(clippy::match_wildcard_for_single_variants)] match storage_type { SparseVectorStorageType::Mmap => PySparseVectorStorageType::Mmap, _ => unimplemented!("RocksDB-backed storage types are not supported by Qdrant Edge"), } } } impl From<PySparseVectorStorageType> for SparseVectorStorageType { fn from(storage_type: PySparseVectorStorageType) -> Self { match storage_type { PySparseVectorStorageType::Mmap => SparseVectorStorageType::Mmap, } } } #[pyclass(name = "Modifier")] #[derive(Copy, Clone, Debug)] pub enum PyModifier { None, Idf, } #[pymethods] impl PyModifier { pub fn __repr__(&self) -> String { self.repr() } } impl Repr for PyModifier { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { let repr = match self { Self::None => "None", Self::Idf => "Idf", }; f.simple_enum::<Self>(repr) } } impl From<Modifier> for PyModifier { fn from(modifier: Modifier) -> Self { match modifier { Modifier::None => PyModifier::None, Modifier::Idf => PyModifier::Idf, } } } impl From<PyModifier> for Modifier { fn from(modifier: PyModifier) -> Self { match modifier { PyModifier::None => Modifier::None, PyModifier::Idf => Modifier::Idf, } } }
rust
Apache-2.0
f937d0260ffd7705c6f34d9c25da1e27ebf5ddfd
2026-01-04T15:34:51.524868Z
false
qdrant/qdrant
https://github.com/qdrant/qdrant/blob/f937d0260ffd7705c6f34d9c25da1e27ebf5ddfd/lib/edge/src/scroll.rs
lib/edge/src/scroll.rs
use std::collections::HashSet; use std::sync::atomic::AtomicBool; use common::counter::hardware_accumulator::HwMeasurementAcc; use itertools::Itertools as _; use rand::distr::weighted::WeightedIndex; use rand::rngs::StdRng; use rand::{Rng as _, SeedableRng as _}; use segment::common::operation_error::{OperationError, OperationResult}; use segment::data_types::order_by::{Direction, OrderBy}; use segment::types::*; use shard::query::scroll::{QueryScrollRequestInternal, ScrollOrder}; use shard::retrieve::record_internal::RecordInternal; use shard::retrieve::retrieve_blocking::retrieve_blocking; use super::Shard; use crate::DEFAULT_EDGE_TIMEOUT; impl Shard { pub fn query_scroll( &self, request: &QueryScrollRequestInternal, ) -> OperationResult<Vec<ScoredPoint>> { let QueryScrollRequestInternal { limit, with_vector, filter, scroll_order, with_payload, } = request; let records = match scroll_order { ScrollOrder::ById => self.scroll_by_id( None, *limit, with_payload, with_vector, filter.as_ref(), HwMeasurementAcc::disposable(), )?, ScrollOrder::ByField(order_by) => self.scroll_by_field( *limit, with_payload, with_vector, filter.as_ref(), order_by, HwMeasurementAcc::disposable(), )?, ScrollOrder::Random => self.scroll_randomly( *limit, with_payload, with_vector, filter.as_ref(), HwMeasurementAcc::disposable(), )?, }; let point_results = records .into_iter() .map(|record| ScoredPoint { id: record.id, version: 0, score: 1.0, payload: record.payload, vector: record.vector, shard_key: record.shard_key, order_value: record.order_value, }) .collect(); Ok(point_results) } fn scroll_by_id( &self, offset: Option<ExtendedPointId>, limit: usize, with_payload_interface: &WithPayloadInterface, with_vector: &WithVector, filter: Option<&Filter>, hw_measurement_acc: HwMeasurementAcc, ) -> OperationResult<Vec<RecordInternal>> { let (non_appendable, appendable) = self.segments.read().split_segments(); let hw_counter = hw_measurement_acc.get_counter_cell(); let point_ids: Vec<_> = 
non_appendable .into_iter() .chain(appendable) .map(|segment| { segment.get().read().read_filtered( offset, Some(limit), filter, &AtomicBool::new(false), &hw_counter, ) }) .collect(); let point_ids = point_ids .into_iter() .flatten() .sorted() .dedup() .take(limit) .collect_vec(); let mut points = retrieve_blocking( self.segments.clone(), &point_ids, &WithPayload::from(with_payload_interface), with_vector, DEFAULT_EDGE_TIMEOUT, &AtomicBool::new(false), hw_measurement_acc, )?; let ordered_points = point_ids .iter() .filter_map(|point_id| points.remove(point_id)) .collect(); Ok(ordered_points) } fn scroll_by_field( &self, limit: usize, with_payload_interface: &WithPayloadInterface, with_vector: &WithVector, filter: Option<&Filter>, order_by: &OrderBy, hw_measurement_acc: HwMeasurementAcc, ) -> OperationResult<Vec<RecordInternal>> { let (non_appendable, appendable) = self.segments.read().split_segments(); let hw_counter = hw_measurement_acc.get_counter_cell(); let read_results: Vec<_> = non_appendable .into_iter() .chain(appendable) .map(|segment| { segment.get().read().read_ordered_filtered( Some(limit), filter, order_by, &AtomicBool::new(false), &hw_counter, ) }) .collect::<Result<_, _>>()?; let (order_values, point_ids): (Vec<_>, Vec<_>) = read_results .into_iter() .kmerge_by(|a, b| match order_by.direction() { Direction::Asc => a <= b, Direction::Desc => a >= b, }) .dedup() .take(limit) .unzip(); let points = retrieve_blocking( self.segments.clone(), &point_ids, &WithPayload::from(with_payload_interface), with_vector, DEFAULT_EDGE_TIMEOUT, &AtomicBool::new(false), hw_measurement_acc, )?; let ordered_points = point_ids .iter() .zip(order_values) .filter_map(|(point_id, value)| { let mut record = points.get(point_id).cloned()?; record.order_value = Some(value); Some(record) }) .collect(); Ok(ordered_points) } fn scroll_randomly( &self, limit: usize, with_payload_interface: &WithPayloadInterface, with_vector: &WithVector, filter: Option<&Filter>, hw_measurement_acc: 
HwMeasurementAcc, ) -> OperationResult<Vec<RecordInternal>> { let (non_appendable, appendable) = self.segments.read().split_segments(); let hw_counter = hw_measurement_acc.get_counter_cell(); let (point_count, mut point_ids): (Vec<_>, Vec<_>) = non_appendable .into_iter() .chain(appendable) .map(|segment| { let segment = segment.get(); let segment = segment.read(); let point_count = segment.available_point_count(); let point_ids = segment.read_random_filtered( limit, filter, &AtomicBool::new(false), &hw_counter, ); (point_count, point_ids) }) .unzip(); // Shortcut if all segments are empty if point_count.iter().all(|&count| count == 0) { return Ok(Vec::new()); } // Select points in a weighted fashion from each segment, depending on how many points each segment has. let distribution = WeightedIndex::new(point_count).map_err(|err| { OperationError::service_error(format!( "failed to create weighted index for random scroll: {err:?}" )) })?; let mut rng = StdRng::from_os_rng(); let mut random_point_ids = HashSet::with_capacity(limit); // Randomly sample points in two stages // // 1. This loop iterates <= LIMIT times, and either breaks early if we // have enough points, or if some of the segments are exhausted. // // 2. If the segments are exhausted, we will fill up the rest of the // points from other segments. In total, the complexity is guaranteed to // be O(limit). while random_point_ids.len() < limit { let segment_idx = rng.sample(&distribution); let segment_point_ids = &mut point_ids[segment_idx]; if let Some(point) = segment_point_ids.pop() { random_point_ids.insert(point); } else { // It seems that some segments are empty early, // so distribution does not make sense anymore. // This is only possible if segments size < limit. break; } } // If we still need more points, we will get them from the rest of the segments. // This is a rare case, as it seems we don't have enough points in individual segments. 
// Therefore, we can ignore "proper" distribution, as it won't be accurate anyway. if random_point_ids.len() < limit { for point_id in point_ids.into_iter().flatten() { random_point_ids.insert(point_id); if random_point_ids.len() >= limit { break; } } } let random_point_ids: Vec<_> = random_point_ids.into_iter().collect(); let random_points = retrieve_blocking( self.segments.clone(), &random_point_ids, &WithPayload::from(with_payload_interface), with_vector, DEFAULT_EDGE_TIMEOUT, &AtomicBool::new(false), hw_measurement_acc, )? .into_values() .collect(); Ok(random_points) } }
rust
Apache-2.0
f937d0260ffd7705c6f34d9c25da1e27ebf5ddfd
2026-01-04T15:34:51.524868Z
false
qdrant/qdrant
https://github.com/qdrant/qdrant/blob/f937d0260ffd7705c6f34d9c25da1e27ebf5ddfd/lib/edge/src/lib.rs
lib/edge/src/lib.rs
pub mod query; pub mod retrieve; pub mod scroll; pub mod search; pub mod update; use std::num::NonZero; use std::path::{Path, PathBuf}; use std::sync::Arc; use std::sync::atomic::AtomicBool; use std::time::Duration; use common::save_on_disk::SaveOnDisk; use fs_err as fs; use parking_lot::Mutex; use segment::common::operation_error::{OperationError, OperationResult}; use segment::entry::SegmentEntry; use segment::segment_constructor::load_segment; use segment::types::SegmentConfig; use shard::operations::CollectionUpdateOperations; use shard::segment_holder::{LockedSegmentHolder, SegmentHolder}; use shard::wal::SerdeWal; use wal::WalOptions; #[derive(Debug)] pub struct Shard { _path: PathBuf, config: SegmentConfig, wal: Mutex<SerdeWal<CollectionUpdateOperations>>, segments: LockedSegmentHolder, } const WAL_PATH: &str = "wal"; const SEGMENTS_PATH: &str = "segments"; impl Shard { pub fn load(path: &Path, mut config: Option<SegmentConfig>) -> OperationResult<Self> { let wal_path = path.join(WAL_PATH); if !wal_path.exists() { fs::create_dir(&wal_path).map_err(|err| { OperationError::service_error(format!("failed to create WAL directory: {err}")) })?; } let wal: SerdeWal<CollectionUpdateOperations> = SerdeWal::new(&wal_path, default_wal_options()).map_err(|err| { OperationError::service_error(format!( "failed to open WAL {}: {err}", wal_path.display(), )) })?; let segments_path = path.join(SEGMENTS_PATH); if !segments_path.exists() { fs::create_dir(&segments_path).map_err(|err| { OperationError::service_error(format!("failed to create segments directory: {err}")) })?; } let segments_dir = fs::read_dir(&segments_path).map_err(|err| { OperationError::service_error(format!("failed to read segments directory: {err}")) })?; let mut segments = SegmentHolder::default(); for entry in segments_dir { let entry = entry.map_err(|err| { OperationError::service_error(format!( "failed to read entry in segments directory: {err}", )) })?; let segment_path = entry.path(); if 
!segment_path.is_dir() { log::warn!( "Skipping non-directory segment entry {}", segment_path.display(), ); continue; } if let Some(name) = segment_path.file_name() && let Some(name) = name.to_str() && name.starts_with(".") { log::warn!( "Skipping hidden segment directory {}", segment_path.display(), ); continue; } let segment = load_segment(&segment_path, &AtomicBool::new(false)).map_err(|err| { OperationError::service_error(format!( "failed to load segment {}: {err}", segment_path.display(), )) })?; let Some(mut segment) = segment else { fs::remove_dir_all(&segment_path).map_err(|err| { OperationError::service_error(format!( "failed to remove leftover segment: {err}", )) })?; continue; }; if let Some(config) = &config { if !config.is_compatible(segment.config()) { return Err(OperationError::service_error(format!( "segment {} is incompatible with provided config or previously loaded segments: \ expected {:?}, but received {:?}", segment_path.display(), config, segment.config(), ))); } } else { config = Some(segment.config().clone()); } segment.check_consistency_and_repair().map_err(|err| { OperationError::service_error(format!( "failed to repair segment {}: {err}", segment_path.display(), )) })?; segments.add_new(segment); } if !segments.has_appendable_segment() { let Some(config) = &config else { return Err(OperationError::service_error( "segment config is not provided and no segments were loaded", )); }; let payload_index_schema_path = path.join("payload_index.json"); let payload_index_schema = SaveOnDisk::load_or_init_default(&payload_index_schema_path) .map_err(|err| { OperationError::service_error(format!( "failed to initialize temporary payload index schema file {}: {err}", payload_index_schema_path.display(), )) })?; segments.create_appendable_segment( &segments_path, config.clone(), Arc::new(payload_index_schema), )?; debug_assert!(segments.has_appendable_segment()); } let shard = Self { _path: path.into(), config: config.expect("config was provided or at 
least one segment was loaded"), wal: parking_lot::Mutex::new(wal), segments: Arc::new(parking_lot::RwLock::new(segments)), }; Ok(shard) } pub fn config(&self) -> &SegmentConfig { &self.config } } fn default_wal_options() -> WalOptions { WalOptions { segment_capacity: 32 * 1024 * 1024, segment_queue_len: 0, retain_closed: NonZero::new(1).unwrap(), } } // Default timeout of 1h used as a placeholder in Edge pub(crate) const DEFAULT_EDGE_TIMEOUT: Duration = Duration::from_secs(3600);
rust
Apache-2.0
f937d0260ffd7705c6f34d9c25da1e27ebf5ddfd
2026-01-04T15:34:51.524868Z
false
qdrant/qdrant
https://github.com/qdrant/qdrant/blob/f937d0260ffd7705c6f34d9c25da1e27ebf5ddfd/lib/edge/src/update.rs
lib/edge/src/update.rs
use std::fmt; use common::counter::hardware_counter::HardwareCounterCell; use segment::common::operation_error::{OperationError, OperationResult}; use shard::operations::CollectionUpdateOperations; use shard::update::*; use crate::Shard; impl Shard { pub fn update(&self, operation: CollectionUpdateOperations) -> OperationResult<()> { let mut wal = self.wal.lock(); let operation_id = wal.write(&operation).map_err(service_error)?; let hw_counter = HardwareCounterCell::disposable(); let result = match operation { CollectionUpdateOperations::PointOperation(point_operation) => { process_point_operation(&self.segments, operation_id, point_operation, &hw_counter) } CollectionUpdateOperations::VectorOperation(vector_operation) => { process_vector_operation( &self.segments, operation_id, vector_operation, &hw_counter, ) } CollectionUpdateOperations::PayloadOperation(payload_operation) => { process_payload_operation( &self.segments, operation_id, payload_operation, &hw_counter, ) } CollectionUpdateOperations::FieldIndexOperation(index_operation) => { process_field_index_operation( &self.segments, operation_id, &index_operation, &hw_counter, ) } }; result.map(|_| ()) } } fn service_error(err: impl fmt::Display) -> OperationError { OperationError::service_error(err.to_string()) }
rust
Apache-2.0
f937d0260ffd7705c6f34d9c25da1e27ebf5ddfd
2026-01-04T15:34:51.524868Z
false
qdrant/qdrant
https://github.com/qdrant/qdrant/blob/f937d0260ffd7705c6f34d9c25da1e27ebf5ddfd/lib/edge/src/search.rs
lib/edge/src/search.rs
use std::cmp; use common::counter::hardware_accumulator::HwMeasurementAcc; use segment::common::operation_error::OperationResult; use segment::data_types::modifier::Modifier; use segment::data_types::vectors::QueryVector; use segment::types::{DEFAULT_FULL_SCAN_THRESHOLD, ScoredPoint, WithPayload}; use shard::common::stopping_guard::StoppingGuard; use shard::query::query_context::{fill_query_context, init_query_context}; use shard::search::CoreSearchRequest; use shard::search_result_aggregator::BatchResultAggregator; use crate::{DEFAULT_EDGE_TIMEOUT, Shard}; impl Shard { /// This method is DEPRECATED and should be replaced with query. pub fn search(&self, search: CoreSearchRequest) -> OperationResult<Vec<ScoredPoint>> { let is_stopped_guard = StoppingGuard::new(); let searches = [search]; let query_context = init_query_context( &searches, DEFAULT_FULL_SCAN_THRESHOLD, &is_stopped_guard, HwMeasurementAcc::disposable(), |vector_name| { self.config .sparse_vector_data .get(vector_name) .is_some_and(|v| v.modifier == Some(Modifier::Idf)) }, ); let [search] = searches; let Some(context) = fill_query_context( query_context, self.segments.clone(), DEFAULT_EDGE_TIMEOUT, &is_stopped_guard.get_is_stopped(), )? 
else { // No segments to search return Ok(vec![]); }; let segments: Vec<_> = self .segments .read() .non_appendable_then_appendable_segments() .collect(); let CoreSearchRequest { query, filter, params, limit, offset, with_payload, with_vector, score_threshold, } = search; let vector_name = query.get_vector_name().to_string(); let query_vector = QueryVector::from(query); let with_payload = WithPayload::from(with_payload.unwrap_or_default()); let with_vector = with_vector.unwrap_or_default(); let mut points_by_segment = Vec::with_capacity(segments.len()); for segment in segments { let batched_points = segment.get().read().search_batch( &vector_name, &[&query_vector], &with_payload, &with_vector, filter.as_ref(), offset + limit, params.as_ref(), &context.get_segment_query_context(), )?; debug_assert_eq!(batched_points.len(), 1); let [points] = batched_points .try_into() .expect("single batched search result"); points_by_segment.push(points); } let mut aggregator = BatchResultAggregator::new([offset + limit]); aggregator.update_point_versions(points_by_segment.iter().flatten()); for points in points_by_segment { aggregator.update_batch_results(0, points); } let [mut points] = aggregator .into_topk() .try_into() .expect("single batched search result"); let distance = self .config .vector_data .get(&vector_name) .expect("vector config exist") .distance; match &query_vector { QueryVector::Nearest(_) => { for point in &mut points { point.score = distance.postprocess_score(point.score); } } QueryVector::RecommendBestScore(_) => (), QueryVector::RecommendSumScores(_) => (), QueryVector::Discovery(_) => (), QueryVector::Context(_) => (), QueryVector::FeedbackNaive(_) => (), } if let Some(score_threshold) = score_threshold { debug_assert!( points.is_sorted_by(|left, right| distance.is_ordered(left.score, right.score)), ); let below_threshold = points .iter() .enumerate() .find(|(_, point)| !distance.check_threshold(point.score, score_threshold)); if let 
Some((below_threshold_idx, _)) = below_threshold { points.truncate(below_threshold_idx); } } let _ = points.drain(..cmp::min(points.len(), offset)); Ok(points) } }
rust
Apache-2.0
f937d0260ffd7705c6f34d9c25da1e27ebf5ddfd
2026-01-04T15:34:51.524868Z
false
qdrant/qdrant
https://github.com/qdrant/qdrant/blob/f937d0260ffd7705c6f34d9c25da1e27ebf5ddfd/lib/edge/src/query.rs
lib/edge/src/query.rs
use std::mem;
use std::sync::Arc;
use std::sync::atomic::AtomicBool;

use ahash::AHashSet;
use common::counter::hardware_accumulator::HwMeasurementAcc;
use ordered_float::OrderedFloat;
use segment::common::operation_error::{OperationError, OperationResult};
use segment::common::reciprocal_rank_fusion::rrf_scoring;
use segment::common::score_fusion::{ScoreFusion, score_fusion};
use segment::data_types::query_context::FormulaContext;
use segment::index::query_optimization::rescore_formula::parsed_formula::ParsedFormula;
use segment::types::{
    Filter, HasIdCondition, ScoredPoint, WithPayload, WithPayloadInterface, WithVector,
};
use shard::query::mmr::mmr_from_points_with_vector;
use shard::query::planned_query::*;
use shard::query::scroll::{QueryScrollRequestInternal, ScrollOrder};
use shard::query::*;
use shard::retrieve::retrieve_blocking::retrieve_blocking;
use shard::search::CoreSearchRequest;
use shard::search_result_aggregator::BatchResultAggregator;

use super::Shard;
use crate::DEFAULT_EDGE_TIMEOUT;

impl Shard {
    /// Execute a single `ShardQueryRequest` against this shard.
    ///
    /// The request is first compiled into a `PlannedQuery`; all planned core
    /// searches and scrolls are executed eagerly, and their results are then
    /// merged according to the root plans. Exactly one root plan (and thus one
    /// result list) is expected, since exactly one request is planned.
    pub fn query(&self, request: ShardQueryRequest) -> OperationResult<Vec<ScoredPoint>> {
        let planned_query = PlannedQuery::try_from(vec![request])?;

        let PlannedQuery {
            root_plans,
            searches,
            scrolls,
        } = planned_query;

        // Run every planned search/scroll up front; resolve_plan consumes the
        // results by index (see Source::SearchesIdx / Source::ScrollsIdx).
        let mut search_results = Vec::new();
        for search in &searches {
            search_results.push(self.search(search.clone())?);
        }

        let mut scroll_results = Vec::new();
        for scroll in &scrolls {
            scroll_results.push(self.query_scroll(scroll)?);
        }

        let mut scored_points_batch = Vec::new();
        for root_plan in root_plans {
            let scored_points = self.resolve_plan(
                root_plan,
                &mut search_results,
                &mut scroll_results,
                HwMeasurementAcc::disposable(),
            )?;
            scored_points_batch.push(scored_points)
        }

        // Only one request was planned above, so only one batch entry is valid.
        let [scored_points] = scored_points_batch
            .try_into()
            .map_err(|unconverted: Vec<_>| {
                OperationError::service_error(format!(
                    "unexpected scored points batch size: expected 1, received {}",
                    unconverted.len(),
                ))
            })?;

        Ok(scored_points)
    }

    /// Resolve one root plan: merge its prefetch tree, then enrich the final
    /// points with the requested payload and vectors.
    fn resolve_plan(
        &self,
        root_plan: RootPlan,
        search_results: &mut Vec<Vec<ScoredPoint>>,
        scroll_results: &mut Vec<Vec<ScoredPoint>>,
        hw_measurement_acc: HwMeasurementAcc,
    ) -> OperationResult<Vec<ScoredPoint>> {
        let RootPlan {
            merge_plan,
            with_payload,
            with_vector,
        } = root_plan;

        let results = self.recurse_prefetch(
            merge_plan,
            search_results,
            scroll_results,
            0,
            hw_measurement_acc.clone(),
        )?;

        // fill_with_payload_or_vectors works on a batch; wrap and unwrap the
        // single result list.
        let [result] = self
            .fill_with_payload_or_vectors(
                vec![results],
                with_payload,
                with_vector,
                hw_measurement_acc,
            )?
            .try_into()
            .map_err(|unconverted: Vec<_>| {
                OperationError::service_error(format!(
                    "expected single result after filling payload/vectors, got {}",
                    unconverted.len(),
                ))
            })?;

        Ok(result)
    }

    /// Depth-first resolution of a `MergePlan`: collect each source (a
    /// previously executed search/scroll, or a nested prefetch), then apply
    /// the plan's rescore stages, if any.
    fn recurse_prefetch(
        &self,
        merge_plan: MergePlan,
        search_results: &mut Vec<Vec<ScoredPoint>>,
        scroll_results: &mut Vec<Vec<ScoredPoint>>,
        depth: usize,
        hw_counter_acc: HwMeasurementAcc,
    ) -> OperationResult<Vec<ScoredPoint>> {
        let MergePlan {
            sources: merge_plan_sources,
            rescore_stages,
        } = merge_plan;

        let max_len = merge_plan_sources.len();
        let mut sources = Vec::with_capacity(max_len);

        // We need to preserve the order of the sources for some fusion strategies
        for source in merge_plan_sources {
            match source {
                Source::SearchesIdx(idx) => {
                    sources.push(take_prefetched_source(search_results, idx)?)
                }
                Source::ScrollsIdx(idx) => {
                    sources.push(take_prefetched_source(scroll_results, idx)?)
                }
                Source::Prefetch(merge_plan) => {
                    let merged = self.recurse_prefetch(
                        *merge_plan,
                        search_results,
                        scroll_results,
                        depth + 1,
                        hw_counter_acc.clone(),
                    )?;
                    sources.push(merged);
                }
            }
        }

        if let Some(rescore_stages) = rescore_stages {
            let RescoreStages {
                shard_level,
                collection_level,
            } = rescore_stages;

            let shard_stage_result = if let Some(rescore_params) = shard_level {
                vec![self.rescore(sources, rescore_params, hw_counter_acc.clone())?]
            } else {
                sources
            };

            let collection_result = if let Some(rescore_params) = collection_level {
                self.rescore(shard_stage_result, rescore_params, hw_counter_acc)?
            } else {
                // Only one shard result is expected at this point.
                shard_stage_result.into_iter().next().unwrap_or_default()
            };

            // In Edge, both shard-level and collection-level rescoring are handled the same way.
            Ok(collection_result)
        } else {
            // The sources here are passed to the next layer without any extra processing.
            // It should be a query without prefetches.
            debug_assert_eq!(depth, 0);
            debug_assert_eq!(sources.len(), 1);
            let [result] = sources.try_into().map_err(|unconverted: Vec<_>| {
                OperationError::service_error(format!(
                    "expected single source without rescore stages, got {}",
                    unconverted.len(),
                ))
            })?;
            Ok(result)
        }
    }

    /// Rescore the collected source result lists according to the requested
    /// `ScoringQuery` variant (fusion, order-by, vector search, formula,
    /// random sample, or MMR).
    fn rescore(
        &self,
        sources: Vec<Vec<ScoredPoint>>,
        rescore_params: RescoreParams,
        hw_counter_acc: HwMeasurementAcc,
    ) -> OperationResult<Vec<ScoredPoint>> {
        let RescoreParams {
            rescore,
            score_threshold,
            limit,
            params,
        } = rescore_params;

        match rescore {
            ScoringQuery::Fusion(fusion) => {
                let top_fused = Self::fusion_rescore(
                    sources.into_iter(),
                    fusion,
                    score_threshold.map(OrderedFloat::into_inner),
                    limit,
                );
                Ok(top_fused)
            }
            ScoringQuery::OrderBy(order_by) => {
                // create single scroll request for rescoring query
                let filter = filter_by_point_ids(&sources);

                // Note: score_threshold is not used in this case, as all results will have same score,
                // but different order_value
                let scroll_request = QueryScrollRequestInternal {
                    limit,
                    filter: Some(filter),
                    with_payload: false.into(),
                    with_vector: false.into(),
                    scroll_order: ScrollOrder::ByField(order_by),
                };

                self.query_scroll(&scroll_request)
            }
            ScoringQuery::Vector(query_enum) => {
                // create single search request for rescoring query
                let filter = filter_by_point_ids(&sources);

                let search_request = CoreSearchRequest {
                    query: query_enum,
                    filter: Some(filter),
                    params,
                    limit,
                    offset: 0,
                    with_payload: None,
                    with_vector: None,
                    score_threshold: score_threshold.map(OrderedFloat::into_inner),
                };

                self.search(search_request)
            }
            ScoringQuery::Formula(formula) => {
                self.rescore_with_formula(formula, sources, limit, hw_counter_acc)
            }
            ScoringQuery::Sample(sample) => match sample {
                SampleInternal::Random => {
                    // create single scroll request for rescoring query
                    let filter = filter_by_point_ids(&sources);

                    // Note: score_threshold is not used in this case, as all results will have same score and order_value
                    let scroll_request = QueryScrollRequestInternal {
                        limit,
                        filter: Some(filter),
                        with_payload: false.into(),
                        with_vector: false.into(),
                        scroll_order: ScrollOrder::Random,
                    };

                    self.query_scroll(&scroll_request)
                }
            },
            ScoringQuery::Mmr(mmr) => self.mmr_rescore(sources, mmr, limit, hw_counter_acc),
        }
    }

    /// Fuse several ranked lists into one (RRF or distribution-based score
    /// fusion), then apply the optional score threshold and the limit.
    ///
    /// The threshold uses `take_while`, i.e. it assumes the fused list is
    /// ordered by descending score — presumably guaranteed by the fusion
    /// helpers; TODO confirm.
    fn fusion_rescore(
        sources: impl Iterator<Item = Vec<ScoredPoint>>,
        fusion: FusionInternal,
        score_threshold: Option<f32>,
        limit: usize,
    ) -> Vec<ScoredPoint> {
        let fused = match fusion {
            FusionInternal::RrfK(k) => rrf_scoring(sources, k),
            FusionInternal::Dbsf => score_fusion(sources, ScoreFusion::dbsf()),
        };

        let top_fused: Vec<_> = if let Some(score_threshold) = score_threshold {
            fused
                .into_iter()
                .take_while(|point| point.score >= score_threshold)
                .take(limit)
                .collect()
        } else {
            fused.into_iter().take(limit).collect()
        };

        top_fused
    }

    /// Rescore prefetch results with a parsed formula, running the formula on
    /// every segment and aggregating the per-segment outputs into a single
    /// top-`limit` list.
    pub fn rescore_with_formula(
        &self,
        formula: ParsedFormula,
        prefetches_results: Vec<Vec<ScoredPoint>>,
        limit: usize,
        hw_measurement_acc: HwMeasurementAcc,
    ) -> OperationResult<Vec<ScoredPoint>> {
        let ctx = FormulaContext {
            formula,
            prefetches_results,
            limit,
            is_stopped: Arc::new(AtomicBool::new(false)),
        };
        let ctx = Arc::new(ctx);

        let hw_counter = hw_measurement_acc.get_counter_cell();

        let mut rescored_results = Vec::new();
        for segment in self
            .segments
            .read()
            .non_appendable_then_appendable_segments()
        {
            let rescored_result = segment
                .get()
                .read()
                .rescore_with_formula(ctx.clone(), &hw_counter)?;
            rescored_results.push(rescored_result);
        }

        // use aggregator with only one "batch"
        let mut aggregator = BatchResultAggregator::new(std::iter::once(limit));
        aggregator.update_point_versions(rescored_results.iter().flatten());
        aggregator.update_batch_results(0, rescored_results.into_iter().flatten());
        let top = aggregator.into_topk().into_iter().next().ok_or_else(|| {
            OperationError::service_error("expected first result of aggregator")
        })?;

        Ok(top)
    }

    /// Maximal Marginal Relevance rescoring
    ///
    /// Fetches the `mmr.using` vector for every source point, runs MMR with
    /// that vector's distance/multivector config, and strips the fetched
    /// vectors from the output again.
    fn mmr_rescore(
        &self,
        sources: Vec<Vec<ScoredPoint>>,
        mmr: MmrInternal,
        limit: usize,
        hw_measurement_acc: HwMeasurementAcc,
    ) -> OperationResult<Vec<ScoredPoint>> {
        let points_with_vector = self
            .fill_with_payload_or_vectors(
                sources,
                false.into(),
                WithVector::from(mmr.using.clone()),
                hw_measurement_acc.clone(),
            )?
            .into_iter()
            .flatten();

        let vector_data_config = self.config.vector_data.get(&mmr.using).ok_or_else(|| {
            OperationError::service_error(format!(
                "vector data config for vector {} not found",
                mmr.using,
            ))
        })?;

        // Even if we have fewer points than requested, still calculate MMR.
        let mut top_mmr = mmr_from_points_with_vector(
            points_with_vector,
            mmr,
            vector_data_config.distance,
            vector_data_config.multivector_config,
            limit,
            hw_measurement_acc,
        )?;

        // strip mmr vector. We will handle user-requested vectors at root level of request.
        for point in &mut top_mmr {
            point.vector = None;
        }

        Ok(top_mmr)
    }

    /// Enrich scored points with payload and/or vectors, if requested.
    ///
    /// No-op when neither payload nor vectors are needed. Points whose record
    /// can no longer be retrieved are dropped from the response.
    fn fill_with_payload_or_vectors(
        &self,
        query_response: ShardQueryResponse,
        with_payload: WithPayloadInterface,
        with_vector: WithVector,
        hw_measurement_acc: HwMeasurementAcc,
    ) -> OperationResult<ShardQueryResponse> {
        if !with_payload.is_required() && !with_vector.is_enabled() {
            return Ok(query_response);
        }

        // ids to retrieve (deduplication happens in the searcher)
        let point_ids: Vec<_> = query_response
            .iter()
            .flatten()
            .map(|scored_point| scored_point.id)
            .collect();

        let records_map = retrieve_blocking(
            self.segments.clone(),
            &point_ids,
            &WithPayload::from(with_payload),
            &with_vector,
            DEFAULT_EDGE_TIMEOUT,
            &AtomicBool::new(false),
            hw_measurement_acc,
        )?;

        // It might be possible, that we won't find all records,
        // so we need to re-collect the results
        let query_response: ShardQueryResponse = query_response
            .into_iter()
            .map(|points| {
                points
                    .into_iter()
                    .filter_map(|mut point| {
                        records_map.get(&point.id).map(|record| {
                            point.payload.clone_from(&record.payload);
                            point.vector.clone_from(&record.vector);
                            point
                        })
                    })
                    .collect()
            })
            .collect();

        Ok(query_response)
    }
}

/// Move the result list at `index` out of `items`, leaving a default (empty)
/// value in its place; errors if the index was never produced by the planner.
fn take_prefetched_source<T: Default>(items: &mut [T], index: usize) -> OperationResult<T> {
    let source = items.get_mut(index).ok_or_else(|| {
        OperationError::service_error(format!("prefetched source at index {index} does not exist"))
    })?;
    Ok(mem::take(source))
}

/// Extracts point ids from sources, and creates a filter to only include those ids.
fn filter_by_point_ids(points: &[Vec<ScoredPoint>]) -> Filter {
    let point_ids: AHashSet<_> = points.iter().flatten().map(|point| point.id).collect();

    // create filter for target point ids
    Filter::new_must(segment::types::Condition::HasId(HasIdCondition::from(
        point_ids,
    )))
}
rust
Apache-2.0
f937d0260ffd7705c6f34d9c25da1e27ebf5ddfd
2026-01-04T15:34:51.524868Z
false
qdrant/qdrant
https://github.com/qdrant/qdrant/blob/f937d0260ffd7705c6f34d9c25da1e27ebf5ddfd/lib/edge/src/retrieve.rs
lib/edge/src/retrieve.rs
use std::sync::atomic::AtomicBool;

use common::counter::hardware_accumulator::HwMeasurementAcc;
use segment::common::operation_error::OperationResult;
use segment::types::{ExtendedPointId, WithPayload, WithPayloadInterface, WithVector};
use shard::retrieve::record_internal::RecordInternal;
use shard::retrieve::retrieve_blocking::retrieve_blocking;

use crate::{DEFAULT_EDGE_TIMEOUT, Shard};

impl Shard {
    /// Fetch the records for `point_ids`, preserving the order of the input ids.
    ///
    /// Payload is included by default; vectors are excluded by default.
    /// Ids that cannot be found in the shard are silently skipped.
    pub fn retrieve(
        &self,
        point_ids: &[ExtendedPointId],
        with_payload: Option<WithPayloadInterface>,
        with_vector: Option<WithVector>,
    ) -> OperationResult<Vec<RecordInternal>> {
        // Apply the defaults: payload on, vectors off.
        let payload_selector =
            WithPayload::from(with_payload.unwrap_or(WithPayloadInterface::Bool(true)));
        let vector_selector = with_vector.unwrap_or(WithVector::Bool(false));

        let mut records_by_id = retrieve_blocking(
            self.segments.clone(),
            point_ids,
            &payload_selector,
            &vector_selector,
            DEFAULT_EDGE_TIMEOUT,
            &AtomicBool::new(false),
            HwMeasurementAcc::disposable(),
        )?;

        // Re-assemble in the order the caller asked for, dropping missing ids.
        let mut ordered = Vec::with_capacity(point_ids.len());
        for id in point_ids {
            if let Some(record) = records_by_id.remove(id) {
                ordered.push(record);
            }
        }

        Ok(ordered)
    }
}
rust
Apache-2.0
f937d0260ffd7705c6f34d9c25da1e27ebf5ddfd
2026-01-04T15:34:51.524868Z
false
qdrant/qdrant
https://github.com/qdrant/qdrant/blob/f937d0260ffd7705c6f34d9c25da1e27ebf5ddfd/lib/edge/examples/edge-cli.rs
lib/edge/examples/edge-cli.rs
use std::env;
use std::path::Path;

/// Example binary: open an Edge shard located at the path passed on the
/// command line.
fn main() -> anyhow::Result<()> {
    // Keep at most two arguments: exactly one is expected, and collecting a
    // possible second one makes the conversion below fail with a helpful error.
    let cli_args: Vec<_> = env::args().skip(1).take(2).collect();
    let [edge_shard_path] = cli_args
        .try_into()
        .map_err(|args| anyhow::format_err!("unexpected arguments {args:?}"))?;

    // Loading is the whole point of the example; the shard itself is unused.
    let _edge_shard = edge::Shard::load(Path::new(&edge_shard_path), None)?;

    Ok(())
}
rust
Apache-2.0
f937d0260ffd7705c6f34d9c25da1e27ebf5ddfd
2026-01-04T15:34:51.524868Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/.github/workflows/scripts/build_directory/src/lib.rs
.github/workflows/scripts/build_directory/src/lib.rs
use std::{
    error::Error,
    fs,
    path::{Path, PathBuf},
};

static URL_BASE: &str = "https://github.com/TheAlgorithms/Rust/blob/master";

/// Recursively collect the relative paths of all `.rs` files under `top_dir`,
/// skipping hidden/underscore-prefixed entries and `mod.rs`, sorted
/// lexicographically.
///
/// NOTE(review): assumes the walk starts at `"."` so every path begins with
/// `"./"` (stripped via `split_at(2)`), and that all names are valid UTF-8 —
/// both hold for this repo's CI usage.
fn good_filepaths(top_dir: &Path) -> Result<Vec<String>, Box<dyn Error>> {
    let mut good_fs = Vec::new();
    if top_dir.is_dir() {
        for entry in fs::read_dir(top_dir)? {
            let entry = entry?;
            let path = entry.path();
            // Convert the name once instead of unwrapping it per check.
            let file_name = entry.file_name();
            let file_name = file_name.to_str().unwrap();
            if file_name.starts_with('.') || file_name.starts_with('_') {
                continue;
            }
            if path.is_dir() {
                good_fs.append(&mut good_filepaths(&path)?);
            } else if file_name.ends_with(".rs") && file_name != "mod.rs" {
                // Drop the leading "./" produced by walking from the repo root.
                good_fs.push(
                    path.into_os_string()
                        .into_string()
                        .unwrap()
                        .split_at(2)
                        .1
                        .to_string(),
                );
            }
        }
    }
    good_fs.sort();
    Ok(good_fs)
}

/// Markdown list prefix for a given nesting depth: `"\n##"` at the top level,
/// otherwise `indent_count` spaces followed by `*`.
fn md_prefix(indent_count: usize) -> String {
    if indent_count > 0 {
        format!("{}*", " ".repeat(indent_count))
    } else {
        "\n##".to_string()
    }
}

/// Emit markdown headers for every path component of `new_path` that differs
/// from `old_path`, printing each line and accumulating it in the returned
/// string. Returns `(new_path, emitted_markdown)`.
fn print_path(old_path: String, new_path: String) -> (String, String) {
    let old_parts: Vec<&str> = old_path.split(std::path::MAIN_SEPARATOR).collect();
    let mut result = String::new();
    for (count, new_part) in new_path.split(std::path::MAIN_SEPARATOR).enumerate() {
        if count + 1 > old_parts.len() || old_parts[count] != new_part {
            // Format once; the same line is printed and accumulated.
            let line = format!("{} {}", md_prefix(count), to_title(new_part));
            println!("{}", line);
            result.push_str(&line);
            result.push('\n');
        }
    }
    (new_path, result)
}

/// Build the `DIRECTORY.md` body: a nested markdown list of links to every
/// algorithm source file under `top_dir`.
pub fn build_directory_md(top_dir: &Path) -> Result<String, Box<dyn Error>> {
    let mut old_path = String::new();
    let mut result = String::new();
    for filepath in good_filepaths(top_dir)? {
        let mut filepath = PathBuf::from(filepath);
        let filename = filepath.file_name().unwrap().to_owned();
        filepath.pop();
        let filepath = filepath.into_os_string().into_string().unwrap();
        if filepath != old_path {
            // Directory changed: emit headers for the new path components.
            let (path, headers) = print_path(old_path, filepath);
            old_path = path;
            result.push_str(&headers);
        }
        let url = get_addr(&format!("{}/{}", old_path, filename.to_string_lossy()));
        let indent = old_path.matches(std::path::MAIN_SEPARATOR).count() + 1;
        // `split` always yields at least one item, so `next()` cannot fail.
        let filename = to_title(filename.to_str().unwrap().split('.').next().unwrap());
        let line = format!("{} [{}]({})", md_prefix(indent), filename, url);
        println!("{}", line);
        result.push_str(&line);
        result.push('\n');
    }
    Ok(result)
}

/// Convert a snake_case file/directory name to Title Case: underscores become
/// spaces, and the first letter of each word is uppercased. A digit keeps the
/// "uppercase next letter" state pending (so `"a1b"` → `"A1B"`).
fn to_title(name: &str) -> String {
    let mut change = true;
    name.chars()
        .map(move |letter| {
            if change && !letter.is_numeric() {
                change = false;
                letter.to_uppercase().next().unwrap()
            } else if letter == '_' {
                change = true;
                ' '
            } else {
                if letter.is_numeric() || !letter.is_alphanumeric() {
                    change = true;
                }
                letter
            }
        })
        .collect::<String>()
}

/// Build the full GitHub URL for a repo-relative path, normalizing Windows
/// separators when built on Windows.
fn get_addr(addr: &str) -> String {
    if cfg!(windows) {
        format!("{}/{}", URL_BASE, switch_backslash(addr))
    } else {
        format!("{}/{}", URL_BASE, addr)
    }
}

// Function that changes '\' to '/' (for Windows builds only)
fn switch_backslash(addr: &str) -> String {
    addr.replace('\\', "/")
}
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/.github/workflows/scripts/build_directory/src/main.rs
.github/workflows/scripts/build_directory/src/main.rs
use std::{fs::File, io::Write, path::Path}; use build_directory::build_directory_md; fn main() -> Result<(), std::io::Error> { let mut file = File::create("DIRECTORY.md").unwrap(); // unwrap for panic match build_directory_md(Path::new(".")) { Ok(buf) => { file.write_all("# List of all files\n".as_bytes())?; file.write_all(buf.as_bytes())?; } Err(err) => { panic!("Error while creating string: {err}"); } } Ok(()) }
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/lib.rs
src/lib.rs
//! Crate root for TheAlgorithms/Rust: each `pub mod` below exposes one
//! category of algorithm implementations living in `src/<name>/`.

pub mod backtracking;
pub mod big_integer;
pub mod bit_manipulation;
pub mod ciphers;
pub mod compression;
pub mod conversions;
pub mod data_structures;
pub mod dynamic_programming;
pub mod financial;
pub mod general;
pub mod geometry;
pub mod graph;
pub mod greedy;
pub mod machine_learning;
pub mod math;
pub mod navigation;
pub mod number_theory;
pub mod searching;
pub mod signal_analysis;
pub mod sorting;
pub mod string;

#[cfg(test)]
mod tests {
    use super::sorting;

    /// Smoke test: `quick_sort` must leave both a descending and an
    /// already-sorted vector in sorted order.
    #[test]
    fn quick_sort() {
        //descending
        let mut ve1 = vec![6, 5, 4, 3, 2, 1];
        sorting::quick_sort(&mut ve1);
        assert!(sorting::is_sorted(&ve1));

        //pre-sorted
        let mut ve2 = vec![1, 2, 3, 4, 5, 6];
        sorting::quick_sort(&mut ve2);
        assert!(sorting::is_sorted(&ve2));
    }
}
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/string/duval_algorithm.rs
src/string/duval_algorithm.rs
//! Duval's algorithm: computes the standard (Chen–Fox–Lyndon) factorization
//! of a string into Lyndon words — words that are strictly smaller than all
//! of their proper suffixes. Runs in linear time and space.

/// Factorize `s` into its sequence of Lyndon words.
///
/// # Arguments
///
/// * `s` - The input string.
///
/// # Returns
///
/// The Lyndon factorization of `s`, in order, as owned strings.
///
/// # Time Complexity
///
/// O(n) in the length of the input.
pub fn duval_algorithm(s: &str) -> Vec<String> {
    let chars: Vec<char> = s.chars().collect();
    factorize_duval(&chars)
}

/// Core of Duval's algorithm over a character slice.
///
/// `head` marks the start of the unfactorized remainder. For each round we
/// scan forward with `scan`, keeping `mirror` as the position in the current
/// candidate word that `scan` is compared against; when the scan stops, the
/// repeated Lyndon word has length `scan - mirror` and is emitted as many
/// times as it fits before `head` passes `mirror`.
fn factorize_duval(chars: &[char]) -> Vec<String> {
    let n = chars.len();
    let mut factors: Vec<String> = Vec::new();
    let mut head = 0;

    while head < n {
        let mut scan = head + 1;
        let mut mirror = head;

        while scan < n && chars[mirror] <= chars[scan] {
            mirror = if chars[mirror] < chars[scan] {
                head
            } else {
                mirror + 1
            };
            scan += 1;
        }

        // The candidate word repeats with period `scan - mirror`.
        let word_len = scan - mirror;
        while head <= mirror {
            factors.push(chars[head..head + word_len].iter().collect());
            head += word_len;
        }
    }

    factors
}

#[cfg(test)]
mod test {
    use super::*;

    macro_rules! test_duval_algorithm {
        ($($name:ident: $inputs:expr,)*) => {
            $(
                #[test]
                fn $name() {
                    let (text, expected) = $inputs;
                    assert_eq!(duval_algorithm(text), expected);
                }
            )*
        }
    }

    test_duval_algorithm! {
        repeating_with_suffix: ("abcdabcdababc", ["abcd", "abcd", "ababc"]),
        single_repeating_char: ("aaa", ["a", "a", "a"]),
        single: ("ababb", ["ababb"]),
        unicode: ("അഅഅ", ["അ", "അ", "അ"]),
        empty_string: ("", Vec::<String>::new()),
        single_char: ("x", ["x"]),
        palindrome: ("racecar", ["r", "acecar"]),
        long_repeating: ("aaaaaa", ["a", "a", "a", "a", "a", "a"]),
        mixed_repeating: ("ababcbabc", ["ababcbabc"]),
        non_repeating_sorted: ("abcdefg", ["abcdefg"]),
        alternating_increasing: ("abababab", ["ab", "ab", "ab", "ab"]),
        long_repeating_lyndon: ("abcabcabcabc", ["abc", "abc", "abc", "abc"]),
        decreasing_order: ("zyxwvutsrqponm", ["z", "y", "x", "w", "v", "u", "t", "s", "r", "q", "p", "o", "n", "m"]),
        alphanumeric_mixed: ("a1b2c3a1", ["a", "1b2c3a", "1"]),
        special_characters: ("a@b#c$d", ["a", "@b", "#c$d"]),
        unicode_complex: ("αβγδ", ["αβγδ"]),
        long_string_performance: (&"a".repeat(1_000_000), vec!["a"; 1_000_000]),
        palindrome_repeating_prefix: ("abccba", ["abccb", "a"]),
        interrupted_lyndon: ("abcxabc", ["abcx", "abc"]),
    }
}
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/string/anagram.rs
src/string/anagram.rs
use std::collections::HashMap; /// Custom error type representing an invalid character found in the input. #[derive(Debug, PartialEq)] pub enum AnagramError { NonAlphabeticCharacter, } /// Checks if two strings are anagrams, ignoring spaces and case sensitivity. /// /// # Arguments /// /// * `s` - First input string. /// * `t` - Second input string. /// /// # Returns /// /// * `Ok(true)` if the strings are anagrams. /// * `Ok(false)` if the strings are not anagrams. /// * `Err(AnagramError)` if either string contains non-alphabetic characters. pub fn check_anagram(s: &str, t: &str) -> Result<bool, AnagramError> { let s_cleaned = clean_string(s)?; let t_cleaned = clean_string(t)?; Ok(char_count(&s_cleaned) == char_count(&t_cleaned)) } /// Cleans the input string by removing spaces and converting to lowercase. /// Returns an error if any non-alphabetic character is found. /// /// # Arguments /// /// * `s` - Input string to clean. /// /// # Returns /// /// * `Ok(String)` containing the cleaned string (no spaces, lowercase). /// * `Err(AnagramError)` if the string contains non-alphabetic characters. fn clean_string(s: &str) -> Result<String, AnagramError> { s.chars() .filter(|c| !c.is_whitespace()) .map(|c| { if c.is_alphabetic() { Ok(c.to_ascii_lowercase()) } else { Err(AnagramError::NonAlphabeticCharacter) } }) .collect() } /// Computes the histogram of characters in a string. /// /// # Arguments /// /// * `s` - Input string. /// /// # Returns /// /// * A `HashMap` where the keys are characters and values are their count. fn char_count(s: &str) -> HashMap<char, usize> { let mut res = HashMap::new(); for c in s.chars() { *res.entry(c).or_insert(0) += 1; } res } #[cfg(test)] mod tests { use super::*; macro_rules! test_cases { ($($name:ident: $test_case:expr,)*) => { $( #[test] fn $name() { let (s, t, expected) = $test_case; assert_eq!(check_anagram(s, t), expected); assert_eq!(check_anagram(t, s), expected); } )* } } test_cases! 
{ empty_strings: ("", "", Ok(true)), empty_and_non_empty: ("", "Ted Morgan", Ok(false)), single_char_same: ("z", "Z", Ok(true)), single_char_diff: ("g", "h", Ok(false)), valid_anagram_lowercase: ("cheater", "teacher", Ok(true)), valid_anagram_with_spaces: ("madam curie", "radium came", Ok(true)), valid_anagram_mixed_cases: ("Satan", "Santa", Ok(true)), valid_anagram_with_spaces_and_mixed_cases: ("Anna Madrigal", "A man and a girl", Ok(true)), new_york_times: ("New York Times", "monkeys write", Ok(true)), church_of_scientology: ("Church of Scientology", "rich chosen goofy cult", Ok(true)), mcdonalds_restaurants: ("McDonald's restaurants", "Uncle Sam's standard rot", Err(AnagramError::NonAlphabeticCharacter)), coronavirus: ("coronavirus", "carnivorous", Ok(true)), synonym_evil: ("evil", "vile", Ok(true)), synonym_gentleman: ("a gentleman", "elegant man", Ok(true)), antigram: ("restful", "fluster", Ok(true)), sentences: ("William Shakespeare", "I am a weakish speller", Ok(true)), part_of_speech_adj_to_verb: ("silent", "listen", Ok(true)), anagrammatized: ("Anagrams", "Ars magna", Ok(true)), non_anagram: ("rat", "car", Ok(false)), invalid_anagram_with_special_char: ("hello!", "world", Err(AnagramError::NonAlphabeticCharacter)), invalid_anagram_with_numeric_chars: ("test123", "321test", Err(AnagramError::NonAlphabeticCharacter)), invalid_anagram_with_symbols: ("check@anagram", "check@nagaram", Err(AnagramError::NonAlphabeticCharacter)), non_anagram_length_mismatch: ("abc", "abcd", Ok(false)), } }
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/string/reverse.rs
src/string/reverse.rs
/// Reverses the given string.
///
/// # Arguments
///
/// * `text` - A string slice holding the text to reverse.
///
/// # Returns
///
/// * A new `String` whose characters appear in reverse order.
pub fn reverse(text: &str) -> String {
    // Walk the characters back-to-front and append each one.
    let mut reversed = String::with_capacity(text.len());
    for c in text.chars().rev() {
        reversed.push(c);
    }
    reversed
}

#[cfg(test)]
mod tests {
    use super::*;

    macro_rules! test_cases {
        ($($name:ident: $test_case:expr,)*) => {
            $(
                #[test]
                fn $name() {
                    let (input, expected) = $test_case;
                    assert_eq!(reverse(input), expected);
                }
            )*
        };
    }

    test_cases! {
        test_simple_palindrome: ("racecar", "racecar"),
        test_non_palindrome: ("abcdef", "fedcba"),
        test_sentence_with_spaces: ("step on no pets", "step on no pets"),
        test_empty_string: ("", ""),
        test_single_character: ("a", "a"),
        test_leading_trailing_spaces: (" hello ", " olleh "),
        test_unicode_characters: ("你好", "好你"),
        test_mixed_content: ("a1b2c3!", "!3c2b1a"),
    }
}
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/string/lipogram.rs
src/string/lipogram.rs
use std::collections::HashSet;

/// Represents possible errors that can occur when checking for lipograms.
#[derive(Debug, PartialEq, Eq)]
pub enum LipogramError {
    /// Indicates that a non-alphabetic character was found in the input.
    NonAlphabeticCharacter,
    /// Indicates that a missing character is not in lowercase.
    NonLowercaseMissingChar,
}

/// Determine which lowercase ASCII letters never occur in `in_str`.
///
/// # Arguments
///
/// * `in_str` - The input text.
///
/// # Returns
///
/// A `HashSet<char>` of the letters `'a'..='z'` that are absent from `in_str`
/// (case-insensitively).
fn compute_missing(in_str: &str) -> HashSet<char> {
    let lowered = in_str.to_lowercase();
    let used: HashSet<char> = lowered.chars().filter(|c| c.is_ascii_alphabetic()).collect();
    ('a'..='z').filter(|c| !used.contains(c)).collect()
}

/// Check whether `lipogram_str` is a lipogram missing exactly `missing_chars`.
///
/// # Arguments
///
/// * `lipogram_str` - The text to examine.
/// * `missing_chars` - The set of letters expected to be absent.
///
/// # Returns
///
/// `Ok(true)` when the text's missing letters equal `missing_chars`,
/// `Ok(false)` when they differ, or a `LipogramError` when `missing_chars`
/// contains a non-lowercase entry or the text contains a character that is
/// neither alphabetic nor whitespace.
pub fn is_lipogram(
    lipogram_str: &str,
    missing_chars: &HashSet<char>,
) -> Result<bool, LipogramError> {
    // Validate the expected-missing set first, matching the original order of checks.
    if missing_chars.iter().any(|c| !c.is_lowercase()) {
        return Err(LipogramError::NonLowercaseMissingChar);
    }
    if lipogram_str
        .chars()
        .any(|c| !c.is_ascii_alphabetic() && !c.is_whitespace())
    {
        return Err(LipogramError::NonAlphabeticCharacter);
    }
    Ok(compute_missing(lipogram_str) == *missing_chars)
}

#[cfg(test)]
mod tests {
    use super::*;

    macro_rules! test_lipogram {
        ($($name:ident: $tc:expr,)*) => {
            $(
                #[test]
                fn $name() {
                    let (input, missing_chars, expected) = $tc;
                    assert_eq!(is_lipogram(input, &missing_chars), expected);
                }
            )*
        }
    }

    test_lipogram! {
        perfect_pangram: (
            "The quick brown fox jumps over the lazy dog",
            HashSet::from([]),
            Ok(true)
        ),
        lipogram_single_missing: (
            "The quick brown fox jumped over the lazy dog",
            HashSet::from(['s']),
            Ok(true)
        ),
        lipogram_multiple_missing: (
            "The brown fox jumped over the lazy dog",
            HashSet::from(['q', 'i', 'c', 'k', 's']),
            Ok(true)
        ),
        long_lipogram_single_missing: (
            "A jovial swain should not complain of any buxom fair who mocks his pain and thinks it gain to quiz his awkward air",
            HashSet::from(['e']),
            Ok(true)
        ),
        invalid_non_lowercase_chars: (
            "The quick brown fox jumped over the lazy dog",
            HashSet::from(['X']),
            Err(LipogramError::NonLowercaseMissingChar)
        ),
        invalid_non_alphabetic_input: (
            "The quick brown fox jumps over the lazy dog 123@!",
            HashSet::from([]),
            Err(LipogramError::NonAlphabeticCharacter)
        ),
    }
}
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/string/rabin_karp.rs
src/string/rabin_karp.rs
//! Rabin-Karp substring search: a rolling-hash scan that reports every
//! occurrence of a pattern inside a text.

const MOD: usize = 101;
const RADIX: usize = 256;

/// Find every starting index at which `pattern` occurs in `text`.
///
/// # Arguments
/// * `text` - The string to search in.
/// * `pattern` - The substring to search for.
///
/// # Returns
/// A vector with the starting index of every match, in increasing order.
pub fn rabin_karp(text: &str, pattern: &str) -> Vec<usize> {
    let (n, m) = (text.len(), pattern.len());
    if n == 0 || m == 0 || m > n {
        return vec![];
    }

    let pat_hash = compute_hash(pattern);

    // RADIX^(m-1) % MOD, used to subtract the outgoing character's contribution.
    let mut radix_pow = 1;
    for _ in 1..m {
        radix_pow = (radix_pow * RADIX) % MOD;
    }

    let mut window_hash = compute_hash(&text[..m]);
    let mut matches = vec![];
    for start in 0..=n - m {
        if start > 0 {
            window_hash = update_hash(text, start - 1, start + m - 1, window_hash, radix_pow);
        }
        // Hash equality can be a collision, so confirm with a direct compare.
        if window_hash == pat_hash && pattern[..] == text[start..start + m] {
            matches.push(start);
        }
    }
    matches
}

/// Hash a string with the Rabin-Karp polynomial formula, modulo `MOD`.
///
/// # Arguments
/// * `s` - The string to hash.
///
/// # Returns
/// The hash value of `s` modulo `MOD`.
fn compute_hash(s: &str) -> usize {
    s.bytes().fold(0, |acc, b| (acc * RADIX + b as usize) % MOD)
}

/// Roll the window hash one byte forward.
///
/// # Arguments
/// * `s` - The full text being searched.
/// * `old_idx` - Index of the byte leaving the window.
/// * `new_idx` - Index of the byte entering the window.
/// * `old_hash` - Hash of the previous window.
/// * `radix_pow` - Precomputed RADIX^(window_len - 1) % MOD.
///
/// # Returns
/// The hash of the shifted window.
fn update_hash(
    s: &str,
    old_idx: usize,
    new_idx: usize,
    old_hash: usize,
    radix_pow: usize,
) -> usize {
    let bytes = s.as_bytes();
    let outgoing = bytes[old_idx] as usize;
    let incoming = bytes[new_idx] as usize;
    // Remove the outgoing byte (adding MOD keeps the subtraction non-negative),
    // then shift and append the incoming byte.
    let without_old = (old_hash + MOD - (outgoing * radix_pow % MOD)) % MOD;
    (without_old * RADIX + incoming) % MOD
}

#[cfg(test)]
mod tests {
    use super::*;

    macro_rules! test_cases {
        ($($name:ident: $inputs:expr,)*) => {
            $(
                #[test]
                fn $name() {
                    let (text, pattern, expected) = $inputs;
                    assert_eq!(rabin_karp(text, pattern), expected);
                }
            )*
        };
    }

    test_cases! {
        single_match_at_start: ("hello world", "hello", vec![0]),
        single_match_at_end: ("hello world", "world", vec![6]),
        single_match_in_middle: ("abc def ghi", "def", vec![4]),
        multiple_matches: ("ababcabc", "abc", vec![2, 5]),
        overlapping_matches: ("aaaaa", "aaa", vec![0, 1, 2]),
        no_match: ("abcdefg", "xyz", vec![]),
        pattern_is_entire_string: ("abc", "abc", vec![0]),
        target_is_multiple_patterns: ("abcabcabc", "abc", vec![0, 3, 6]),
        empty_text: ("", "abc", vec![]),
        empty_pattern: ("abc", "", vec![]),
        empty_text_and_pattern: ("", "", vec![]),
        pattern_larger_than_text: ("abc", "abcd", vec![]),
        large_text_small_pattern: (&("a".repeat(1000) + "b"), "b", vec![1000]),
        single_char_match: ("a", "a", vec![0]),
        single_char_no_match: ("a", "b", vec![]),
        large_pattern_no_match: ("abc", "defghi", vec![]),
        repeating_chars: ("aaaaaa", "aa", vec![0, 1, 2, 3, 4]),
        special_characters: ("abc$def@ghi", "$def@", vec![3]),
        numeric_and_alphabetic_mix: ("abc123abc456", "123abc", vec![3]),
        case_sensitivity: ("AbcAbc", "abc", vec![]),
    }
}
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/string/suffix_array.rs
src/string/suffix_array.rs
// In computer science, a suffix array is a sorted array of all suffixes of a string. // It is a data structure used in, among others, full-text indices, data-compression algorithms, // and the field of bibliometrics. Source: https://en.wikipedia.org/wiki/Suffix_array use std::cmp::Ordering; #[derive(Clone)] struct Suffix { index: usize, rank: (i32, i32), } impl Suffix { fn cmp(&self, b: &Self) -> Ordering { let a = self; let ((a1, a2), (b1, b2)) = (a.rank, b.rank); match a1.cmp(&b1) { Ordering::Equal => { if a2 < b2 { Ordering::Less } else { Ordering::Greater } } o => o, } } } pub fn generate_suffix_array(txt: &str) -> Vec<usize> { let n = txt.len(); let mut suffixes: Vec<Suffix> = vec![ Suffix { index: 0, rank: (-1, -1) }; n ]; for (i, suf) in suffixes.iter_mut().enumerate() { suf.index = i; suf.rank.0 = (txt.chars().nth(i).expect("this should exist") as u32 - 'a' as u32) as i32; suf.rank.1 = if (i + 1) < n { (txt.chars().nth(i + 1).expect("this should exist") as u32 - 'a' as u32) as i32 } else { -1 } } suffixes.sort_by(|a, b| a.cmp(b)); let mut ind = vec![0; n]; let mut k = 4; while k < 2 * n { let mut rank = 0; let mut prev_rank = suffixes[0].rank.0; suffixes[0].rank.0 = rank; ind[suffixes[0].index] = 0; for i in 1..n { if suffixes[i].rank.0 == prev_rank && suffixes[i].rank.1 == suffixes[i - 1].rank.1 { prev_rank = suffixes[i].rank.0; suffixes[i].rank.0 = rank; } else { prev_rank = suffixes[i].rank.0; rank += 1; suffixes[i].rank.0 = rank; } ind[suffixes[i].index] = i; } for i in 0..n { let next_index = suffixes[i].index + (k / 2); suffixes[i].rank.1 = if next_index < n { suffixes[ind[next_index]].rank.0 } else { -1 } } suffixes.sort_by(|a, b| a.cmp(b)); k *= 2; } let mut suffix_arr = Vec::new(); for suf in suffixes { suffix_arr.push(suf.index); } suffix_arr } #[cfg(test)] mod tests { use super::*; #[test] fn test_suffix_array() { let a = generate_suffix_array("banana"); assert_eq!(a, vec![5, 3, 1, 0, 4, 2]); } }
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/string/levenshtein_distance.rs
src/string/levenshtein_distance.rs
//! Provides functions to calculate the Levenshtein distance between two strings.
//!
//! The Levenshtein distance is a measure of the similarity between two strings by calculating
//! the minimum number of single-character edits (insertions, deletions, or substitutions)
//! required to change one string into the other. Both implementations operate on Unicode
//! scalar values (`char`s), so a multi-byte character counts as a single edit.

use std::cmp::min;

/// Calculates the Levenshtein distance between two strings using a full-matrix
/// dynamic programming approach.
///
/// # Arguments
///
/// * `string1` - A reference to the first string.
/// * `string2` - A reference to the second string.
///
/// # Returns
///
/// The Levenshtein distance between the two input strings.
///
/// # Complexity
///
/// - Time complexity: O(nm),
/// - Space complexity: O(nm),
///
/// where n and m are the numbers of characters in `string1` and `string2`.
pub fn naive_levenshtein_distance(string1: &str, string2: &str) -> usize {
    let chars1: Vec<char> = string1.chars().collect();
    let chars2: Vec<char> = string2.chars().collect();
    let (n, m) = (chars1.len(), chars2.len());

    // dist[i][j] = distance between the first i chars of string1 and the
    // first j chars of string2.
    let mut dist = vec![vec![0usize; m + 1]; n + 1];
    for (i, row) in dist.iter_mut().enumerate() {
        // Turning a prefix into the empty string takes `i` deletions.
        row[0] = i;
    }
    for (j, cell) in dist[0].iter_mut().enumerate() {
        // Building a prefix from the empty string takes `j` insertions.
        *cell = j;
    }

    for i in 1..=n {
        for j in 1..=m {
            let cost = usize::from(chars1[i - 1] != chars2[j - 1]);
            dist[i][j] = (dist[i - 1][j - 1] + cost) // substitution (free on a match)
                .min(dist[i][j - 1] + 1) // insertion
                .min(dist[i - 1][j] + 1); // deletion
        }
    }
    dist[n][m]
}

/// Calculates the Levenshtein distance between two strings using an optimized
/// dynamic programming approach that keeps only a single row of the matrix.
///
/// # Arguments
///
/// * `string1` - The first string.
/// * `string2` - The second string.
///
/// # Returns
///
/// The Levenshtein distance between the two input strings.
/// For a detailed explanation, check the example on [Wikipedia](https://en.wikipedia.org/wiki/Levenshtein_distance).
///
/// Fix over the previous revision: the distance row used to be sized by the
/// byte length of `string1` while being indexed by character position, which
/// returned wrong results for non-ASCII input; it is now sized and indexed by
/// character count, matching the `chars()` iteration.
///
/// # Complexity
///
/// - Time complexity: O(nm),
/// - Space complexity: O(n),
///
/// where n and m are the numbers of characters in `string1` and `string2`.
pub fn optimized_levenshtein_distance(string1: &str, string2: &str) -> usize {
    let chars1: Vec<char> = string1.chars().collect();
    if chars1.is_empty() {
        return string2.chars().count();
    }
    let l1 = chars1.len();
    // prev_dist[j] = distance between string1's first j chars and the rows of
    // string2 processed so far; initially the distance to the empty string.
    let mut prev_dist: Vec<usize> = (0..=l1).collect();

    for (row, c2) in string2.chars().enumerate() {
        // Keeps a reference to matrix[i-1][j-1] (the top-left cell).
        let mut prev_substitution_cost = prev_dist[0];
        // Distance to the empty prefix of string1; `row` starts at 0.
        prev_dist[0] = row + 1;

        for (col, &c1) in chars1.iter().enumerate() {
            // "On the left" in the matrix (the value just computed).
            let deletion_cost = prev_dist[col] + 1;
            // "On the top" in the matrix (the previous row).
            let insertion_cost = prev_dist[col + 1] + 1;
            let substitution_cost = if c1 == c2 {
                // Characters match: carry the diagonal value unchanged.
                prev_substitution_cost
            } else {
                // Substitute the character.
                prev_substitution_cost + 1
            };
            // Save the old value at (i-1, j-1) before overwriting.
            prev_substitution_cost = prev_dist[col + 1];
            prev_dist[col + 1] = _min3(deletion_cost, insertion_cost, substitution_cost);
        }
    }
    prev_dist[l1]
}

/// Returns the smallest of three values.
#[inline]
fn _min3<T: Ord>(a: T, b: T, c: T) -> T {
    min(a, min(b, c))
}

#[cfg(test)]
mod tests {
    const LEVENSHTEIN_DISTANCE_TEST_CASES: &[(&str, &str, usize)] = &[
        ("", "", 0),
        ("Hello, World!", "Hello, World!", 0),
        ("", "Rust", 4),
        ("horse", "ros", 3),
        ("tan", "elephant", 6),
        ("execute", "intention", 8),
    ];

    macro_rules! levenshtein_distance_tests {
        ($function:ident) => {
            mod $function {
                use super::*;

                fn run_test_case(string1: &str, string2: &str, expected_distance: usize) {
                    assert_eq!(super::super::$function(string1, string2), expected_distance);
                    assert_eq!(super::super::$function(string2, string1), expected_distance);
                    assert_eq!(super::super::$function(string1, string1), 0);
                    assert_eq!(super::super::$function(string2, string2), 0);
                }

                #[test]
                fn test_levenshtein_distance() {
                    for &(string1, string2, expected_distance) in
                        LEVENSHTEIN_DISTANCE_TEST_CASES.iter()
                    {
                        run_test_case(string1, string2, expected_distance);
                    }
                }
            }
        };
    }

    levenshtein_distance_tests!(naive_levenshtein_distance);
    levenshtein_distance_tests!(optimized_levenshtein_distance);
}
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/string/isomorphism.rs
src/string/isomorphism.rs
//! Determines whether two strings are isomorphic.
//!
//! Two strings are isomorphic when a consistent one-to-one character mapping
//! turns one string into the other.

use std::collections::HashMap;

/// Returns `true` when `s` and `t` are isomorphic.
///
/// # Arguments
///
/// * `s` - The first string.
/// * `t` - The second string.
pub fn is_isomorphic(s: &str, t: &str) -> bool {
    let left: Vec<char> = s.chars().collect();
    let right: Vec<char> = t.chars().collect();
    if left.len() != right.len() {
        return false;
    }
    // The mapping must be consistent in both directions (a bijection), so two
    // maps are maintained and checked in lockstep.
    let mut forward = HashMap::new();
    let mut backward = HashMap::new();
    left.into_iter()
        .zip(right)
        .all(|(a, b)| bind(&mut forward, a, b) && bind(&mut backward, b, a))
}

/// Records `key -> value` in `map`, or verifies an existing binding.
///
/// Returns `false` when `key` is already bound to a different character.
fn bind(map: &mut HashMap<char, char>, key: char, value: char) -> bool {
    match map.get(&key) {
        Some(&bound) => bound == value,
        None => {
            map.insert(key, value);
            true
        }
    }
}

#[cfg(test)]
mod tests {
    use super::is_isomorphic;
    macro_rules! test_is_isomorphic {
        ($($name:ident: $inputs:expr,)*) => {
            $(
                #[test]
                fn $name() {
                    let (s, t, expected) = $inputs;
                    assert_eq!(is_isomorphic(s, t), expected);
                    assert_eq!(is_isomorphic(t, s), expected);
                    assert!(is_isomorphic(s, s));
                    assert!(is_isomorphic(t, t));
                }
            )*
        }
    }
    test_is_isomorphic! {
        isomorphic: ("egg", "add", true),
        isomorphic_long: ("abcdaabdcdbbabababacdadad", "AbCdAAbdCdbbAbAbAbACdAdAd", true),
        not_isomorphic: ("egg", "adc", false),
        non_isomorphic_long: ("abcdaabdcdbbabababacdadad", "AACdAAbdCdbbAbAbAbACdAdAd", false),
        isomorphic_unicode: ("天苍苍", "野茫茫", true),
        isomorphic_unicode_different_byte_size: ("abb", "野茫茫", true),
        empty: ("", "", true),
        different_length: ("abc", "abcd", false),
    }
}
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/string/run_length_encoding.rs
src/string/run_length_encoding.rs
/// Run-length encodes `target`: each maximal run of a character becomes the
/// run length followed by the character (e.g. "aaab" -> "3a1b").
///
/// A string that is empty after trimming whitespace encodes to "".
pub fn run_length_encoding(target: &str) -> String {
    if target.trim().is_empty() {
        return String::new();
    }
    let mut encoded = String::new();
    let mut chars = target.chars();
    // The guard above ensures at least one character exists.
    let mut current = chars.next().unwrap();
    let mut run_length: i32 = 1;
    for c in chars {
        if c == current {
            run_length += 1;
        } else {
            // The run ended: emit "<count><char>" and start a new run.
            encoded.push_str(&run_length.to_string());
            encoded.push(current);
            current = c;
            run_length = 1;
        }
    }
    // Emit the final run.
    encoded.push_str(&run_length.to_string());
    encoded.push(current);
    encoded
}

/// Decodes a string produced by [`run_length_encoding`]: every "<count><char>"
/// group expands to `count` copies of `char`.
///
/// A string that is empty after trimming whitespace decodes to "".
pub fn run_length_decoding(target: &str) -> String {
    if target.trim().is_empty() {
        return String::new();
    }
    let mut decoded = String::new();
    let mut count_buf = String::new();
    for c in target.chars() {
        count_buf.push(c);
        // As long as the buffer still parses as a number we are reading the
        // run length; the first character that breaks the parse is the symbol.
        if count_buf.parse::<i32>().is_err() {
            let symbol = count_buf.pop().unwrap();
            decoded.push_str(&symbol.to_string().repeat(count_buf.parse().unwrap()));
            count_buf.clear();
        }
    }
    decoded
}

#[cfg(test)]
mod tests {
    use super::*;

    macro_rules! test_run_length {
        ($($name:ident: $test_case:expr,)*) => {
            $(
                #[test]
                fn $name() {
                    let (raw, encoded) = $test_case;
                    let raw = raw.to_string();
                    assert_eq!(run_length_encoding(&raw), encoded);
                    assert_eq!(run_length_decoding(encoded), raw);
                }
            )*
        };
    }

    test_run_length! {
        empty_input: ("", ""),
        repeated_char: ("aaaaaaaaaa", "10a"),
        no_repeated: ("abcdefghijk", "1a1b1c1d1e1f1g1h1i1j1k"),
        regular_input: ("aaaaabbbcccccdddddddddd", "5a3b5c10d"),
        two_blocks_with_same_char: ("aaabbaaaa", "3a2b4a"),
        long_input: ("a".repeat(200) + "bbbcccccdddddddddd", "200a3b5c10d"),
    }
}
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/string/aho_corasick.rs
src/string/aho_corasick.rs
use std::cell::RefCell;
use std::collections::BTreeMap;
use std::collections::VecDeque;
use std::rc::{Rc, Weak};

// One node of the Aho-Corasick automaton: a trie node augmented with a
// suffix (failure) link and the lengths of the patterns that end here.
#[derive(Default)]
struct ACNode {
    // Outgoing trie edges, keyed by character.
    trans: BTreeMap<char, Rc<RefCell<ACNode>>>,
    suffix: Weak<RefCell<ACNode>>, // the suffix(fail) link
    lengths: Vec<usize>,           // lengths of matched patterns ended at this node
}

// Multi-pattern matcher: builds the automaton once from a word list, then
// finds all occurrences of every word in a single pass over the haystack.
#[derive(Default)]
pub struct AhoCorasick {
    root: Rc<RefCell<ACNode>>,
}

impl AhoCorasick {
    // Builds the automaton: inserts every word into a trie, then wires the
    // suffix (failure) links breadth-first.
    pub fn new(words: &[&str]) -> Self {
        let root = Rc::new(RefCell::new(ACNode::default()));
        for word in words {
            let mut cur = Rc::clone(&root);
            for c in word.chars() {
                // The inner Rc::clone detaches the temporary borrow of `cur`
                // so `cur` itself can be reassigned to the child node.
                cur = Rc::clone(Rc::clone(&cur).borrow_mut().trans.entry(c).or_default());
            }
            // `len()` is a byte length; `search` slices the haystack by byte
            // offsets, so the two stay consistent for UTF-8 patterns.
            cur.borrow_mut().lengths.push(word.len());
        }
        Self::build_suffix(Rc::clone(&root));
        Self { root }
    }

    // BFS over the trie assigning each node its suffix link: the deepest
    // proper suffix of its path that is also a path in the trie.
    fn build_suffix(root: Rc<RefCell<ACNode>>) {
        let mut q = VecDeque::new();
        q.push_back(Rc::clone(&root));
        while let Some(parent) = q.pop_front() {
            let parent = parent.borrow();
            for (c, child) in &parent.trans {
                q.push_back(Rc::clone(child));
                let mut child = child.borrow_mut();
                // Follow the parent's suffix chain until some node has a
                // `c`-edge, or the chain runs out at the root.
                let mut suffix = parent.suffix.upgrade();
                loop {
                    match &suffix {
                        None => {
                            // Chain exhausted: link to the root and inherit
                            // any patterns ending there.
                            child.lengths.extend(root.borrow().lengths.clone());
                            child.suffix = Rc::downgrade(&root);
                            break;
                        }
                        Some(node) => {
                            if node.borrow().trans.contains_key(c) {
                                let node = &node.borrow().trans[c];
                                // Inherit the linked node's matches so every
                                // pattern ending at this position is reported
                                // without walking the suffix chain at search
                                // time.
                                child.lengths.extend(node.borrow().lengths.clone());
                                child.suffix = Rc::downgrade(node);
                                break;
                            }
                            suffix = suffix.unwrap().borrow().suffix.upgrade();
                        }
                    }
                }
            }
        }
    }

    // Returns every pattern occurrence in `s` as sub-slices of `s`, ordered
    // by the position where each match ends.
    pub fn search<'a>(&self, s: &'a str) -> Vec<&'a str> {
        let mut ans = vec![];
        let mut cur = Rc::clone(&self.root);
        // Byte offset just past the character being processed; pattern
        // lengths are byte lengths, so the slicing below is well-formed.
        let mut position: usize = 0;
        for c in s.chars() {
            loop {
                if let Some(child) = Rc::clone(&cur).borrow().trans.get(&c) {
                    cur = Rc::clone(child);
                    break;
                }
                // No edge for `c`: fall back along suffix links; at the root
                // (whose suffix link is empty) the character is skipped.
                let suffix = cur.borrow().suffix.clone();
                match suffix.upgrade() {
                    Some(node) => cur = node,
                    None => break,
                }
            }
            position += c.len_utf8();
            // Report every pattern that ends at this position.
            for &len in &cur.borrow().lengths {
                ans.push(&s[position - len..position]);
            }
        }
        ans
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_aho_corasick() {
        let dict = ["abc", "abcd", "xyz", "acxy", "efg", "123", "678", "6543"];
        let ac = AhoCorasick::new(&dict);
        let res = ac.search("ababcxyzacxy12678acxy6543");
        assert_eq!(res, ["abc", "xyz", "acxy", "678", "acxy", "6543",]);
    }

    #[test]
    fn test_aho_corasick_with_utf8() {
        let dict = [
            "abc",
            "中文",
            "abc中",
            "abcd",
            "xyz",
            "acxy",
            "efg",
            "123",
            "678",
            "6543",
            "ハンバーガー",
        ];
        let ac = AhoCorasick::new(&dict);
        let res = ac.search("ababc中xyzacxy12678acxyハンバーガー6543中文");
        assert_eq!(
            res,
            [
                "abc",
                "abc中",
                "xyz",
                "acxy",
                "678",
                "acxy",
                "ハンバーガー",
                "6543",
                "中文"
            ]
        );
    }
}
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/string/autocomplete_using_trie.rs
src/string/autocomplete_using_trie.rs
/*
Prefix-based autocompletion backed by a trie.

word list => ["apple", "orange", "oregano"]
prefix    => "or"
matches   => ["orange", "oregano"]
*/

use std::collections::HashMap;

// Sentinel child key marking the end of a complete word.
const END: char = '#';

#[derive(Debug)]
struct Trie(HashMap<char, Box<Trie>>);

impl Trie {
    fn new() -> Self {
        Trie(HashMap::new())
    }

    /// Inserts `text` into the trie, terminating it with the END sentinel.
    fn insert(&mut self, text: &str) {
        let mut node = self;
        for ch in text.chars() {
            node = node.0.entry(ch).or_insert_with(|| Box::new(Trie::new()));
        }
        node.0.insert(END, Box::new(Trie::new()));
    }

    /// Returns every stored word that starts with `prefix`.
    fn find(&self, prefix: &str) -> Vec<String> {
        // Walk down to the node reached by the prefix; no node => no matches.
        let mut node = self;
        for ch in prefix.chars() {
            match node.0.get(&ch) {
                Some(child) => node = child,
                None => return vec![],
            }
        }
        Self::_elements(node)
            .into_iter()
            .map(|suffix| format!("{}{}", prefix, suffix))
            .collect()
    }

    /// Collects every word suffix reachable from `map`.
    fn _elements(map: &Trie) -> Vec<String> {
        let mut results = vec![];
        for (ch, child) in map.0.iter() {
            if ch == &END {
                // The sentinel contributes the empty suffix: a word ends here.
                results.push(String::new());
            } else {
                for suffix in Self::_elements(child) {
                    results.push(format!("{}{}", ch, suffix));
                }
            }
        }
        results
    }
}

/// Autocompletion dictionary: add words, then query by prefix.
pub struct Autocomplete {
    trie: Trie,
}

impl Autocomplete {
    fn new() -> Self {
        Self { trie: Trie::new() }
    }

    /// Adds every word in `words` to the dictionary.
    pub fn insert_words<T: AsRef<str>>(&mut self, words: &[T]) {
        words
            .iter()
            .for_each(|word| self.trie.insert(word.as_ref()));
    }

    /// Returns every stored word that begins with `prefix`.
    pub fn find_words(&self, prefix: &str) -> Vec<String> {
        self.trie.find(prefix)
    }
}

impl Default for Autocomplete {
    fn default() -> Self {
        Self::new()
    }
}

#[cfg(test)]
mod tests {
    use super::Autocomplete;

    #[test]
    fn test_autocomplete() {
        let words = vec!["apple", "orange", "oregano"];

        let mut auto_complete = Autocomplete::new();
        auto_complete.insert_words(&words);

        let prefix = "app";
        let mut auto_completed_words = auto_complete.find_words(prefix);

        let mut apple = vec!["apple"];
        apple.sort();

        auto_completed_words.sort();
        assert_eq!(auto_completed_words, apple);

        let prefix = "or";
        let mut auto_completed_words = auto_complete.find_words(prefix);

        let mut prefix_or = vec!["orange", "oregano"];
        prefix_or.sort();

        auto_completed_words.sort();
        assert_eq!(auto_completed_words, prefix_or);
    }
}
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/string/boyer_moore_search.rs
src/string/boyer_moore_search.rs
//! Boyer-Moore string search.
//!
//! Finds all occurrences of a pattern in a text, skipping ahead through the
//! text using the bad character rule (the good suffix rule is omitted here
//! for simplicity).

use std::collections::HashMap;

/// Builds the bad character table: each pattern character mapped to the
/// index of its last occurrence in the pattern.
///
/// # Arguments
/// * `pat` - The pattern as a slice of characters.
fn build_bad_char_table(pat: &[char]) -> HashMap<char, isize> {
    // Later pairs overwrite earlier ones, leaving the last occurrence.
    pat.iter()
        .enumerate()
        .map(|(i, &ch)| (ch, i as isize))
        .collect()
}

/// Computes the shift to apply after a full match, based on the character
/// immediately following the current window (bad character rule).
///
/// # Arguments
/// * `shift` - The current alignment of the pattern on the text.
/// * `pat_len` - The pattern length.
/// * `text_len` - The text length.
/// * `bad_char_table` - The pattern's bad character table.
/// * `text` - The text as a slice of characters.
fn calc_match_shift(
    shift: isize,
    pat_len: isize,
    text_len: isize,
    bad_char_table: &HashMap<char, isize>,
    text: &[char],
) -> isize {
    // The window ends at the end of the text: a minimal shift finishes the loop.
    if shift + pat_len >= text_len {
        return 1;
    }
    let next_ch = text[(shift + pat_len) as usize];
    match bad_char_table.get(&next_ch) {
        Some(&last) => pat_len - last,
        // A character absent from the pattern lets us jump past it entirely.
        None => pat_len + 1,
    }
}

/// Computes the shift to apply after a mismatch at pattern index `mis_idx`,
/// using the bad character rule.
///
/// # Arguments
/// * `mis_idx` - The index within the pattern where the mismatch occurred.
/// * `shift` - The current alignment of the pattern on the text.
/// * `text` - The text as a slice of characters.
/// * `bad_char_table` - The pattern's bad character table.
fn calc_mismatch_shift(
    mis_idx: isize,
    shift: isize,
    text: &[char],
    bad_char_table: &HashMap<char, isize>,
) -> isize {
    let mis_ch = text[(shift + mis_idx) as usize];
    let last = bad_char_table.get(&mis_ch).copied().unwrap_or(-1);
    // Never shift backwards or stand still.
    (mis_idx - last).max(1)
}

/// Finds all occurrences of `pat` in `text` with the Boyer-Moore algorithm.
///
/// # Arguments
/// * `text` - The text to search within.
/// * `pat` - The pattern to search for.
///
/// # Returns
/// A vector of starting indices where the pattern occurs in the text.
pub fn boyer_moore_search(text: &str, pat: &str) -> Vec<usize> {
    let text_len = text.len() as isize;
    let pat_len = pat.len() as isize;

    // Empty text/pattern or a pattern longer than the text never matches.
    if text_len == 0 || pat_len == 0 || pat_len > text_len {
        return Vec::new();
    }

    // Work on character vectors so signed index arithmetic stays simple.
    let pat: Vec<char> = pat.chars().collect();
    let text: Vec<char> = text.chars().collect();
    let bad_char_table = build_bad_char_table(&pat);

    let mut positions = Vec::new();
    let mut shift = 0;

    while shift <= text_len - pat_len {
        // Compare the pattern right-to-left under the current alignment.
        let mut j = pat_len - 1;
        while j >= 0 && pat[j as usize] == text[(shift + j) as usize] {
            j -= 1;
        }

        if j < 0 {
            // Full match: record it, then shift using the next character.
            positions.push(shift as usize);
            shift += calc_match_shift(shift, pat_len, text_len, &bad_char_table, &text);
        } else {
            // Mismatch at pattern index j: shift by the bad character rule.
            shift += calc_mismatch_shift(j, shift, &text, &bad_char_table);
        }
    }
    positions
}

#[cfg(test)]
mod tests {
    use super::*;

    macro_rules! boyer_moore_tests {
        ($($name:ident: $tc:expr,)*) => {
            $(
                #[test]
                fn $name() {
                    let (text, pattern, expected) = $tc;
                    assert_eq!(boyer_moore_search(text, pattern), expected);
                }
            )*
        };
    }

    boyer_moore_tests! {
        test_simple_match: ("AABCAB12AFAABCABFFEGABCAB", "ABCAB", vec![1, 11, 20]),
        test_no_match: ("AABCAB12AFAABCABFFEGABCAB", "FFF", vec![]),
        test_partial_match: ("AABCAB12AFAABCABFFEGABCAB", "CAB", vec![3, 13, 22]),
        test_empty_text: ("", "A", vec![]),
        test_empty_pattern: ("ABC", "", vec![]),
        test_both_empty: ("", "", vec![]),
        test_pattern_longer_than_text: ("ABC", "ABCDEFG", vec![]),
        test_single_character_text: ("A", "A", vec![0]),
        test_single_character_pattern: ("AAAA", "A", vec![0, 1, 2, 3]),
        test_case_sensitivity: ("ABCabcABC", "abc", vec![3]),
        test_overlapping_patterns: ("AAAAA", "AAA", vec![0, 1, 2]),
        test_special_characters: ("@!#$$%^&*", "$$", vec![3]),
        test_numerical_pattern: ("123456789123456", "456", vec![3, 12]),
        test_partial_overlap_no_match: ("ABCD", "ABCDE", vec![]),
        test_single_occurrence: ("XXXXXXXXXXXXXXXXXXPATTERNXXXXXXXXXXXXXXXXXX", "PATTERN", vec![18]),
        test_single_occurrence_with_noise: ("PATPATPATPATTERNPAT", "PATTERN", vec![9]),
    }
}
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/string/pangram.rs
src/string/pangram.rs
//! Pangram detection.
//!
//! A pangram is a sentence containing every letter of the alphabet at least
//! once. This module distinguishes non-pangrams, regular pangrams, and
//! perfect pangrams (each letter exactly once).

use std::collections::HashSet;

/// Classification of a string with respect to the pangram property.
#[derive(PartialEq, Debug)]
pub enum PangramStatus {
    NotPangram,
    Pangram,
    PerfectPangram,
}

/// Counts how many times each ASCII letter occurs in `pangram_str`,
/// case-insensitively; non-letters are ignored.
fn compute_letter_counts(pangram_str: &str) -> std::collections::HashMap<char, usize> {
    pangram_str
        .to_lowercase()
        .chars()
        .filter(|ch| ch.is_ascii_alphabetic())
        .fold(std::collections::HashMap::new(), |mut counts, ch| {
            *counts.entry(ch).or_insert(0) += 1;
            counts
        })
}

/// Classifies `pangram_str` as not a pangram, a pangram, or a perfect pangram.
///
/// # Arguments
///
/// * `pangram_str` - The string slice to classify.
///
/// # Returns
///
/// The corresponding `PangramStatus` variant.
pub fn is_pangram(pangram_str: &str) -> PangramStatus {
    let letter_counts = compute_letter_counts(pangram_str);

    // Every letter a-z must occur at least once.
    let used: HashSet<char> = letter_counts.keys().copied().collect();
    if ('a'..='z').any(|ch| !used.contains(&ch)) {
        return PangramStatus::NotPangram;
    }

    // A pangram where every letter occurs exactly once is perfect.
    if letter_counts.values().all(|&count| count == 1) {
        PangramStatus::PerfectPangram
    } else {
        PangramStatus::Pangram
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    macro_rules! pangram_tests {
        ($($name:ident: $tc:expr,)*) => {
            $(
                #[test]
                fn $name() {
                    let (input, expected) = $tc;
                    assert_eq!(is_pangram(input), expected);
                }
            )*
        };
    }

    pangram_tests! {
        test_not_pangram_simple: ("This is not a pangram", PangramStatus::NotPangram),
        test_not_pangram_day: ("today is a good day", PangramStatus::NotPangram),
        test_not_pangram_almost: ("this is almost a pangram but it does not have bcfghjkqwxy and the last letter", PangramStatus::NotPangram),
        test_pangram_standard: ("The quick brown fox jumps over the lazy dog", PangramStatus::Pangram),
        test_pangram_boxer: ("A mad boxer shot a quick, gloved jab to the jaw of his dizzy opponent", PangramStatus::Pangram),
        test_pangram_discotheques: ("Amazingly few discotheques provide jukeboxes", PangramStatus::Pangram),
        test_pangram_zebras: ("How vexingly quick daft zebras jump", PangramStatus::Pangram),
        test_perfect_pangram_jock: ("Mr. Jock, TV quiz PhD, bags few lynx", PangramStatus::PerfectPangram),
        test_empty_string: ("", PangramStatus::NotPangram),
        test_repeated_letter: ("aaaaa", PangramStatus::NotPangram),
        test_non_alphabetic: ("12345!@#$%", PangramStatus::NotPangram),
        test_mixed_case_pangram: ("ThE QuiCk BroWn FoX JumPs OveR tHe LaZy DoG", PangramStatus::Pangram),
        test_perfect_pangram_with_symbols: ("Mr. Jock, TV quiz PhD, bags few lynx!", PangramStatus::PerfectPangram),
        test_long_non_pangram: (&"a".repeat(1000), PangramStatus::NotPangram),
        test_near_pangram_missing_one_letter: ("The quick brown fox jumps over the lazy do", PangramStatus::NotPangram),
        test_near_pangram_missing_two_letters: ("The quick brwn f jumps ver the lazy dg", PangramStatus::NotPangram),
        test_near_pangram_with_special_characters: ("Th3 qu!ck brown f0x jumps 0v3r th3 l@zy d0g.", PangramStatus::NotPangram),
    }
}
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/string/knuth_morris_pratt.rs
src/string/knuth_morris_pratt.rs
//! Knuth-Morris-Pratt string matching algorithm implementation in Rust.
//!
//! This module contains the implementation of the KMP algorithm, which is used for finding
//! occurrences of a pattern string within a text string efficiently. The algorithm preprocesses
//! the pattern to create a partial match table, which allows for efficient searching.

/// Finds all occurrences of the pattern in the given string using the Knuth-Morris-Pratt algorithm.
///
/// # Arguments
///
/// * `string` - The string to search within.
/// * `pattern` - The pattern string to search for.
///
/// # Returns
///
/// A vector of starting indices where the pattern is found in the string. If the pattern or the
/// string is empty, an empty vector is returned.
///
/// Note: both inputs are decoded to `Vec<char>` first, so the returned indices are
/// *character* indices, not byte offsets (see the `unicode` test case).
pub fn knuth_morris_pratt(string: &str, pattern: &str) -> Vec<usize> {
    if string.is_empty() || pattern.is_empty() {
        return vec![];
    }
    let text_chars = string.chars().collect::<Vec<char>>();
    let pattern_chars = pattern.chars().collect::<Vec<char>>();
    let partial_match_table = build_partial_match_table(&pattern_chars);
    find_pattern(&text_chars, &pattern_chars, &partial_match_table)
}

/// Builds the partial match table (also known as "prefix table") for the given pattern.
///
/// The partial match table is used to skip characters while matching the pattern in the text.
/// Each entry at index `i` in the table indicates the length of the longest proper prefix of
/// the substring `pattern[0..i]` which is also a suffix of this substring.
///
/// # Arguments
///
/// * `pattern_chars` - The pattern string as a slice of characters.
///
/// # Returns
///
/// A vector representing the partial match table.
fn build_partial_match_table(pattern_chars: &[char]) -> Vec<usize> {
    // Entry 0 is always 0: a single character has no proper prefix/suffix.
    let mut partial_match_table = vec![0];
    pattern_chars
        .iter()
        .enumerate()
        .skip(1)
        .for_each(|(index, &char)| {
            // Fall back through progressively shorter borders until one can be
            // extended by the current character (or none remains).
            let mut length = partial_match_table[index - 1];
            while length > 0 && pattern_chars[length] != char {
                length = partial_match_table[length - 1];
            }
            partial_match_table.push(if pattern_chars[length] == char {
                length + 1
            } else {
                length
            });
        });
    partial_match_table
}

/// Finds all occurrences of the pattern in the given string using the precomputed partial match table.
///
/// This function iterates through the string and uses the partial match table to efficiently find
/// all starting indices of the pattern in the string.
///
/// # Arguments
///
/// * `text_chars` - The string to search within as a slice of characters.
/// * `pattern_chars` - The pattern string to search for as a slice of characters.
/// * `partial_match_table` - The precomputed partial match table for the pattern.
///
/// # Returns
///
/// A vector of starting indices where the pattern is found in the string.
fn find_pattern(
    text_chars: &[char],
    pattern_chars: &[char],
    partial_match_table: &[usize],
) -> Vec<usize> {
    let mut result_indices = vec![];
    let mut match_length = 0;
    text_chars
        .iter()
        .enumerate()
        .for_each(|(text_index, &text_char)| {
            // On a mismatch, retreat to the longest border that still matches.
            while match_length > 0 && text_char != pattern_chars[match_length] {
                match_length = partial_match_table[match_length - 1];
            }
            if text_char == pattern_chars[match_length] {
                match_length += 1;
            }
            if match_length == pattern_chars.len() {
                result_indices.push(text_index + 1 - match_length);
                // Continue from the longest border so overlapping matches are
                // also reported (see `lots_of_intricate_matches`).
                match_length = partial_match_table[match_length - 1];
            }
        });
    result_indices
}

#[cfg(test)]
mod tests {
    use super::*;

    macro_rules! test_knuth_morris_pratt {
        ($($name:ident: $inputs:expr,)*) => {
            $(
                #[test]
                fn $name() {
                    let (input, pattern, expected) = $inputs;
                    assert_eq!(knuth_morris_pratt(input, pattern), expected);
                }
            )*
        }
    }

    test_knuth_morris_pratt! {
        each_letter_matches: ("aaa", "a", vec![0, 1, 2]),
        a_few_seperate_matches: ("abababa", "ab", vec![0, 2, 4]),
        unicode: ("അഅഅ", "അ", vec![0, 1, 2]),
        unicode_no_match_but_similar_bytes: (
            &String::from_utf8(vec![224, 180, 133]).unwrap(),
            &String::from_utf8(vec![224, 180, 132]).unwrap(),
            vec![]
        ),
        one_match: ("ABC ABCDAB ABCDABCDABDE", "ABCDABD", vec![15]),
        lots_of_matches: ("aaabaabaaaaa", "aa", vec![0, 1, 4, 7, 8, 9, 10]),
        lots_of_intricate_matches: ("ababababa", "aba", vec![0, 2, 4, 6]),
        not_found0: ("abcde", "f", vec![]),
        not_found1: ("abcde", "ac", vec![]),
        not_found2: ("ababab", "bababa", vec![]),
        empty_string: ("", "abcdef", vec![]),
        empty_pattern: ("abcdef", "", vec![]),
        single_character_string: ("a", "a", vec![0]),
        single_character_pattern: ("abcdef", "d", vec![3]),
        pattern_at_start: ("abcdef", "abc", vec![0]),
        pattern_at_end: ("abcdef", "def", vec![3]),
        pattern_in_middle: ("abcdef", "cd", vec![2]),
        no_match_with_repeated_characters: ("aaaaaa", "b", vec![]),
        pattern_longer_than_string: ("abc", "abcd", vec![]),
        very_long_string: (&"a".repeat(10000), "a", (0..10000).collect::<Vec<usize>>()),
        very_long_pattern: (&"a".repeat(10000), &"a".repeat(9999), (0..2).collect::<Vec<usize>>()),
    }
}
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/string/jaro_winkler_distance.rs
src/string/jaro_winkler_distance.rs
// In computer science and statistics,
// the Jaro–Winkler distance is a string metric measuring an edit distance
// between two sequences.
// It is a variant proposed in 1990 by William E. Winkler
// of the Jaro distance metric (1989, Matthew A. Jaro).

/// Computes the Jaro–Winkler similarity of `str1` and `str2` in `[0.0, 1.0]`
/// (1.0 for identical inputs; 0.0 when either input is empty or nothing matches).
///
/// NOTE(review): the implementation indexes strings by *byte* ranges
/// (`s2[left..right]`, `str1[..bound]`) and mixes byte lengths with character
/// counts, so it is only guaranteed correct — and panic-free — for ASCII input.
/// Confirm before using on arbitrary UTF-8.
pub fn jaro_winkler_distance(str1: &str, str2: &str) -> f64 {
    if str1.is_empty() || str2.is_empty() {
        return 0.0;
    }

    // Collects the characters of `s1` that are "matched" in `s2`, i.e. found
    // inside the Jaro search window around the corresponding position.
    fn get_matched_characters(s1: &str, s2: &str) -> String {
        let mut s2 = s2.to_string();
        let mut matched: Vec<char> = Vec::new();

        // Window radius: half the length of the shorter string (byte lengths).
        let limit = std::cmp::min(s1.len(), s2.len()) / 2;

        for (i, l) in s1.chars().enumerate() {
            let left = std::cmp::max(0, i as i32 - limit as i32) as usize;
            let right = std::cmp::min(i + limit + 1, s2.len());
            if s2[left..right].contains(l) {
                matched.push(l);
                // Blank out the matched character so it cannot match twice.
                // NOTE(review): `find` locates the *first* occurrence of `l`,
                // which may lie outside the window just tested — confirm intended.
                let a = &s2[0..s2.find(l).expect("this exists")];
                let b = &s2[(s2.find(l).expect("this exists") + 1)..];
                s2 = format!("{a} {b}");
            }
        }
        matched.iter().collect::<String>()
    }

    let matching_1 = get_matched_characters(str1, str2);
    let matching_2 = get_matched_characters(str2, str1);
    let match_count = matching_1.len();

    // transposition: half the number of positions where the two matched
    // sequences disagree.
    let transpositions = {
        let mut count = 0;
        for (c1, c2) in matching_1.chars().zip(matching_2.chars()) {
            if c1 != c2 {
                count += 1;
            }
        }
        count / 2
    };

    let jaro: f64 = {
        if match_count == 0 {
            // Early-returns from the whole function, not just this block.
            return 0.0;
        }
        (1_f64 / 3_f64)
            * (match_count as f64 / str1.len() as f64
                + match_count as f64 / str2.len() as f64
                + (match_count - transpositions) as f64 / match_count as f64)
    };

    // Winkler bonus: reward up to 4 leading characters in common.
    let mut prefix_len = 0.0;
    let bound = std::cmp::min(std::cmp::min(str1.len(), str2.len()), 4);
    for (c1, c2) in str1[..bound].chars().zip(str2[..bound].chars()) {
        if c1 == c2 {
            prefix_len += 1.0;
        } else {
            break;
        }
    }
    jaro + (0.1 * prefix_len * (1.0 - jaro))
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_jaro_winkler_distance() {
        // Exact f64 comparisons: values are reproducible for these fixed inputs.
        let a = jaro_winkler_distance("hello", "world");
        assert_eq!(a, 0.4666666666666666);
        let a = jaro_winkler_distance("martha", "marhta");
        assert_eq!(a, 0.9611111111111111);
        let a = jaro_winkler_distance("martha", "marhat");
        assert_eq!(a, 0.9611111111111111);
        let a = jaro_winkler_distance("test", "test");
        assert_eq!(a, 1.0);
        let a = jaro_winkler_distance("test", "");
        assert_eq!(a, 0.0);
        let a = jaro_winkler_distance("hello world", "HeLLo W0rlD");
        assert_eq!(a, 0.6363636363636364);
    }
}
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/string/mod.rs
src/string/mod.rs
mod aho_corasick; mod anagram; mod autocomplete_using_trie; mod boyer_moore_search; mod burrows_wheeler_transform; mod duval_algorithm; mod hamming_distance; mod isogram; mod isomorphism; mod jaro_winkler_distance; mod knuth_morris_pratt; mod levenshtein_distance; mod lipogram; mod manacher; mod palindrome; mod pangram; mod rabin_karp; mod reverse; mod run_length_encoding; mod shortest_palindrome; mod suffix_array; mod suffix_array_manber_myers; mod suffix_tree; mod z_algorithm; pub use self::aho_corasick::AhoCorasick; pub use self::anagram::check_anagram; pub use self::autocomplete_using_trie::Autocomplete; pub use self::boyer_moore_search::boyer_moore_search; pub use self::burrows_wheeler_transform::{ burrows_wheeler_transform, inv_burrows_wheeler_transform, }; pub use self::duval_algorithm::duval_algorithm; pub use self::hamming_distance::hamming_distance; pub use self::isogram::is_isogram; pub use self::isomorphism::is_isomorphic; pub use self::jaro_winkler_distance::jaro_winkler_distance; pub use self::knuth_morris_pratt::knuth_morris_pratt; pub use self::levenshtein_distance::{naive_levenshtein_distance, optimized_levenshtein_distance}; pub use self::lipogram::is_lipogram; pub use self::manacher::manacher; pub use self::palindrome::is_palindrome; pub use self::pangram::is_pangram; pub use self::pangram::PangramStatus; pub use self::rabin_karp::rabin_karp; pub use self::reverse::reverse; pub use self::run_length_encoding::{run_length_decoding, run_length_encoding}; pub use self::shortest_palindrome::shortest_palindrome; pub use self::suffix_array::generate_suffix_array; pub use self::suffix_array_manber_myers::generate_suffix_array_manber_myers; pub use self::suffix_tree::{Node, SuffixTree}; pub use self::z_algorithm::match_pattern; pub use self::z_algorithm::z_array;
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/string/shortest_palindrome.rs
src/string/shortest_palindrome.rs
//! This module provides functions for finding the shortest palindrome
//! that can be formed by adding characters to the left of a given string.
//! References
//!
//! - [KMP](https://www.scaler.com/topics/data-structures/kmp-algorithm/)
//! - [Prefix Functions and KPM](https://oi-wiki.org/string/kmp/)

/// Finds the shortest palindrome that can be formed by adding characters
/// to the left of the given string `s`.
///
/// # Arguments
///
/// * `s` - A string slice that holds the input string.
///
/// # Returns
///
/// Returns a new string that is the shortest palindrome, formed by adding
/// the necessary characters to the beginning of `s`.
pub fn shortest_palindrome(s: &str) -> String {
    if s.is_empty() {
        return "".to_string();
    }

    let original_chars: Vec<char> = s.chars().collect();
    let suffix_table = compute_suffix(&original_chars);

    let mut reversed_chars: Vec<char> = s.chars().rev().collect();
    // The prefix of the original string matches the suffix of the reversed string.
    let prefix_match = compute_prefix_match(&original_chars, &reversed_chars, &suffix_table);

    // prefix_match[len-1] is the length of the longest prefix of `s` that is a
    // suffix of its reversal — i.e. the longest palindromic prefix. The rest of
    // `s` is what must be mirrored in front, so append it after the reversal.
    reversed_chars.append(&mut original_chars[prefix_match[original_chars.len() - 1]..].to_vec());
    reversed_chars.iter().collect()
}

/// Computes the suffix table used for the KMP (Knuth-Morris-Pratt) string
/// matching algorithm.
///
/// # Arguments
///
/// * `chars` - A slice of characters for which the suffix table is computed.
///
/// # Returns
///
/// Returns a vector of `usize` representing the suffix table. Each element
/// at index `i` indicates the longest proper suffix which is also a proper
/// prefix of the substring `chars[0..=i]`.
pub fn compute_suffix(chars: &[char]) -> Vec<usize> {
    let mut suffix = vec![0; chars.len()];
    for i in 1..chars.len() {
        let mut j = suffix[i - 1];
        while j > 0 && chars[j] != chars[i] {
            j = suffix[j - 1];
        }
        // Branchless extension: the cast adds 1 exactly when the border grows.
        suffix[i] = j + (chars[j] == chars[i]) as usize;
    }
    suffix
}

/// Computes the prefix matches of the original string against its reversed
/// version using the suffix table.
///
/// # Arguments
///
/// * `original` - A slice of characters representing the original string.
/// * `reversed` - A slice of characters representing the reversed string.
/// * `suffix` - A slice containing the suffix table computed for the original string.
///
/// # Returns
///
/// Returns a vector of `usize` where each element at index `i` indicates the
/// length of the longest prefix of `original` that matches a suffix of
/// `reversed[0..=i]`.
pub fn compute_prefix_match(original: &[char], reversed: &[char], suffix: &[usize]) -> Vec<usize> {
    let mut match_table = vec![0; original.len()];
    match_table[0] = usize::from(original[0] == reversed[0]);
    for i in 1..original.len() {
        let mut j = match_table[i - 1];
        while j > 0 && reversed[i] != original[j] {
            j = suffix[j - 1];
        }
        match_table[i] = j + usize::from(reversed[i] == original[j]);
    }
    match_table
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::string::is_palindrome;

    macro_rules! test_shortest_palindrome {
        ($($name:ident: $inputs:expr,)*) => {
            $(
                #[test]
                fn $name() {
                    let (input, expected) = $inputs;
                    assert!(is_palindrome(expected));
                    assert_eq!(shortest_palindrome(input), expected);
                    assert_eq!(shortest_palindrome(expected), expected);
                }
            )*
        }
    }

    test_shortest_palindrome! {
        empty: ("", ""),
        extend_left_1: ("aacecaaa", "aaacecaaa"),
        extend_left_2: ("abcd", "dcbabcd"),
        unicode_1: ("അ", "അ"),
        unicode_2: ("a牛", "牛a牛"),
        single_char: ("x", "x"),
        already_palindrome: ("racecar", "racecar"),
        extend_left_3: ("abcde", "edcbabcde"),
        extend_left_4: ("abca", "acbabca"),
        long_string: ("abcdefg", "gfedcbabcdefg"),
        repetitive: ("aaaaa", "aaaaa"),
        complex: ("abacdfgdcaba", "abacdgfdcabacdfgdcaba"),
    }
}
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/string/isogram.rs
src/string/isogram.rs
//! This module provides functionality to check if a given string is an isogram.
//! An isogram is a word or phrase in which no letter occurs more than once.

use std::collections::HashMap;

/// Enum representing possible errors that can occur while checking for isograms.
#[derive(Debug, PartialEq, Eq)]
pub enum IsogramError {
    /// Indicates that the input contains a non-alphabetic character.
    NonAlphabeticCharacter,
}

/// Tallies how often each ASCII letter occurs in `s`, case-insensitively.
///
/// Whitespace is skipped outright; any other non-alphabetic character aborts
/// the scan with `IsogramError::NonAlphabeticCharacter`.
fn count_letters(s: &str) -> Result<HashMap<char, usize>, IsogramError> {
    s.to_ascii_lowercase()
        .chars()
        .try_fold(HashMap::new(), |mut tally, ch| {
            if ch.is_ascii_alphabetic() {
                *tally.entry(ch).or_insert(0) += 1;
                Ok(tally)
            } else if ch.is_whitespace() {
                // Spaces never count for or against an isogram.
                Ok(tally)
            } else {
                Err(IsogramError::NonAlphabeticCharacter)
            }
        })
}

/// Checks whether the given input string is an isogram.
///
/// # Arguments
///
/// * `s` - The input to check for isogram properties.
///
/// # Return
///
/// - `Ok(true)` when no letter occurs more than once (ignoring case and spaces).
/// - `Ok(false)` when some letter repeats.
/// - `Err(IsogramError::NonAlphabeticCharacter)` for digits, punctuation, etc.
pub fn is_isogram(s: &str) -> Result<bool, IsogramError> {
    Ok(count_letters(s)?.values().all(|&occurrences| occurrences == 1))
}

#[cfg(test)]
mod tests {
    use super::*;

    macro_rules! isogram_tests {
        ($($name:ident: $case:expr,)*) => {
            $(
                #[test]
                fn $name() {
                    let (input, expected) = $case;
                    assert_eq!(is_isogram(input), expected);
                }
            )*
        };
    }

    isogram_tests! {
        isogram_simple: ("isogram", Ok(true)),
        isogram_case_insensitive: ("Isogram", Ok(true)),
        isogram_with_spaces: ("a b c d e", Ok(true)),
        isogram_mixed: ("Dermatoglyphics", Ok(true)),
        isogram_long: ("Subdermatoglyphic", Ok(true)),
        isogram_german_city: ("Malitzschkendorf", Ok(true)),
        perfect_pangram: ("Cwm fjord bank glyphs vext quiz", Ok(true)),
        isogram_sentences: ("The big dwarf only jumps", Ok(true)),
        isogram_french: ("Lampez un fort whisky", Ok(true)),
        isogram_portuguese: ("Velho traduz sim", Ok(true)),
        isogram_spanish: ("Centrifugadlos", Ok(true)),
        invalid_isogram_with_repeated_char: ("hello", Ok(false)),
        invalid_isogram_with_numbers: ("abc123", Err(IsogramError::NonAlphabeticCharacter)),
        invalid_isogram_with_special_char: ("abc!", Err(IsogramError::NonAlphabeticCharacter)),
        invalid_isogram_with_comma: ("Velho, traduz sim", Err(IsogramError::NonAlphabeticCharacter)),
        invalid_isogram_with_spaces: ("a b c d a", Ok(false)),
        invalid_isogram_with_repeated_phrase: ("abcabc", Ok(false)),
        isogram_empty_string: ("", Ok(true)),
        isogram_single_character: ("a", Ok(true)),
        invalid_isogram_multiple_same_characters: ("aaaa", Ok(false)),
        invalid_isogram_with_symbols: ("abc@#$%", Err(IsogramError::NonAlphabeticCharacter)),
    }
}
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/string/manacher.rs
src/string/manacher.rs
/// Returns the longest palindromic substring of `s` using Manacher's algorithm.
///
/// NOTE(review): `'#'` is interleaved as a sentinel between every character, so
/// the algorithm presumably assumes the input itself contains no `'#'` — the
/// final `replace('#', "")` would otherwise strip legitimate characters. Confirm
/// with callers.
pub fn manacher(s: String) -> String {
    let l = s.len();
    if l <= 1 {
        return s;
    }

    // MEMO: We need to detect odd palindrome as well,
    // therefore, inserting dummy string so that
    // we can find a pair with dummy center character.
    let mut chars: Vec<char> = Vec::with_capacity(s.len() * 2 + 1);
    for c in s.chars() {
        chars.push('#');
        chars.push(c);
    }
    chars.push('#');

    // List: storing the length of palindrome at each index of string
    let mut length_of_palindrome = vec![1usize; chars.len()];
    // Integer: Current checking palindrome's center index
    let mut current_center: usize = 0;
    // Integer: Right edge index existing the radius away from current center
    let mut right_from_current_center: usize = 0;

    for i in 0..chars.len() {
        // 1: Check if we are looking at right side of palindrome.
        if right_from_current_center > i && i > current_center {
            // 1-1: If so copy from the left side of palindrome.
            // If the value + index exceeds the right edge index, we should cut and check palindrome later #3.
            length_of_palindrome[i] = std::cmp::min(
                right_from_current_center - i,
                length_of_palindrome[2 * current_center - i],
            );
            // 1-2: Move the checking palindrome to new index if it exceeds the right edge.
            if length_of_palindrome[i] + i >= right_from_current_center {
                current_center = i;
                right_from_current_center = length_of_palindrome[i] + i;
                // 1-3: If radius exceeds the end of list, it means checking is over.
                // You will never get the larger value because the string will get only shorter.
                if right_from_current_center >= chars.len() - 1 {
                    break;
                }
            } else {
                // 1-4: If the checking index doesn't exceeds the right edge,
                // it means the length is just as same as the left side.
                // You don't need to check anymore.
                continue;
            }
        }

        // Integer: Current radius from checking index
        // If it's copied from left side and more than 1,
        // it means it's ensured so you don't need to check inside radius.
        let mut radius: usize = (length_of_palindrome[i] - 1) / 2;
        radius += 1;
        // 2: Checking palindrome.
        // Need to care about overflow usize.
        while i >= radius && i + radius <= chars.len() - 1 && chars[i - radius] == chars[i + radius]
        {
            length_of_palindrome[i] += 2;
            radius += 1;
        }
    }

    // 3: Find the maximum length and generate answer.
    let center_of_max = length_of_palindrome
        .iter()
        .enumerate()
        .max_by_key(|(_, &value)| value)
        .map(|(idx, _)| idx)
        .unwrap();
    let radius_of_max = (length_of_palindrome[center_of_max] - 1) / 2;
    let answer = &chars[(center_of_max - radius_of_max)..=(center_of_max + radius_of_max)]
        .iter()
        .collect::<String>();
    // Drop the sentinels to recover the palindrome in the original alphabet.
    answer.replace('#', "")
}

#[cfg(test)]
mod tests {
    use super::manacher;

    #[test]
    fn get_longest_palindrome_by_manacher() {
        assert_eq!(manacher("babad".to_string()), "aba".to_string());
        assert_eq!(manacher("cbbd".to_string()), "bb".to_string());
        assert_eq!(manacher("a".to_string()), "a".to_string());

        // Ties may resolve to either single character.
        let ac_ans = manacher("ac".to_string());
        assert!(ac_ans == *"a" || ac_ans == *"c");
    }
}
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/string/palindrome.rs
src/string/palindrome.rs
//! A module for checking if a given string is a palindrome.

/// Checks if the given string is a palindrome.
///
/// A palindrome is a sequence that reads the same backward as forward.
/// Non-alphanumeric characters are skipped and letters are compared after
/// ASCII lowercasing, so phrases such as "A man, a plan, a canal, Panama!"
/// qualify.
///
/// # Arguments
///
/// * `s` - A string slice that represents the input to be checked.
///
/// # Returns
///
/// * `true` if the string is a palindrome; otherwise, `false`.
pub fn is_palindrome(s: &str) -> bool {
    // Keep only the characters that participate in the comparison,
    // normalised to ASCII lowercase.
    let significant: Vec<char> = s
        .chars()
        .filter(|c| c.is_alphanumeric())
        .map(|c| c.to_ascii_lowercase())
        .collect();

    // Compare the first half against the mirrored second half; the middle
    // character of an odd-length sequence never needs checking.
    significant
        .iter()
        .zip(significant.iter().rev())
        .take(significant.len() / 2)
        .all(|(front, back)| front == back)
}

#[cfg(test)]
mod tests {
    use super::*;

    macro_rules! palindrome_tests {
        ($($name:ident: $case:expr,)*) => {
            $(
                #[test]
                fn $name() {
                    let (input, expected) = $case;
                    assert_eq!(is_palindrome(input), expected);
                }
            )*
        }
    }

    palindrome_tests! {
        odd_palindrome: ("madam", true),
        even_palindrome: ("deified", true),
        single_character_palindrome: ("x", true),
        single_word_palindrome: ("eye", true),
        case_insensitive_palindrome: ("RaceCar", true),
        mixed_case_and_punctuation_palindrome: ("A man, a plan, a canal, Panama!", true),
        mixed_case_and_space_palindrome: ("No 'x' in Nixon", true),
        empty_string: ("", true),
        pompeii_palindrome: ("Roma-Olima-Milo-Amor", true),
        napoleon_palindrome: ("Able was I ere I saw Elba", true),
        john_taylor_palindrome: ("Lewd did I live, & evil I did dwel", true),
        well_know_english_palindrome: ("Never odd or even", true),
        palindromic_phrase: ("Rats live on no evil star", true),
        names_palindrome: ("Hannah", true),
        prime_minister_of_cambodia: ("Lon Nol", true),
        japanese_novelist_and_manga_writer: ("Nisio Isin", true),
        actor: ("Robert Trebor", true),
        rock_vocalist: ("Ola Salo", true),
        pokemon_species: ("Girafarig", true),
        lychrel_num_56: ("121", true),
        universal_palindrome_date: ("02/02/2020", true),
        french_palindrome: ("une Slave valse nu", true),
        finnish_palindrome: ("saippuakivikauppias", true),
        non_palindrome_simple: ("hello", false),
        non_palindrome_with_punctuation: ("hello!", false),
        non_palindrome_mixed_case: ("Hello, World", false),
    }
}
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/string/z_algorithm.rs
src/string/z_algorithm.rs
//! This module provides functionalities to match patterns in strings
//! and compute the Z-array for a given input string.

/// Calculates the Z-value for a given substring of the input string
/// based on a specified pattern.
///
/// # Parameters
/// - `input_string`: A slice of elements that represents the input string.
/// - `pattern`: A slice of elements representing the pattern to match.
/// - `start_index`: The index in the input string to start checking for matches.
/// - `z_value`: The initial Z-value to be computed.
///
/// # Returns
/// The computed Z-value indicating the length of the matching prefix.
fn calculate_z_value<T: Eq>(
    input_string: &[T],
    pattern: &[T],
    start_index: usize,
    mut z_value: usize,
) -> usize {
    let size = input_string.len();
    let pattern_size = pattern.len();

    // Extend the match one element at a time; `z_value` may start non-zero
    // when a previous Z-box already guarantees a partial match.
    while (start_index + z_value) < size && z_value < pattern_size {
        if input_string[start_index + z_value] != pattern[z_value] {
            break;
        }
        z_value += 1;
    }
    z_value
}

/// Initializes the Z-array value based on a previous match and updates
/// it to optimize further calculations.
///
/// # Parameters
/// - `z_array`: A mutable slice of the Z-array to be updated.
/// - `i`: The current index in the input string.
/// - `match_end`: The index of the last character matched in the pattern.
/// - `last_match`: The index of the last match found.
///
/// # Returns
/// The initialized Z-array value for the current index.
fn initialize_z_array_from_previous_match(
    z_array: &[usize],
    i: usize,
    match_end: usize,
    last_match: usize,
) -> usize {
    // Mirror the value from inside the current Z-box, capped by how much of
    // the box is still ahead of `i`.
    std::cmp::min(z_array[i - last_match], match_end - i + 1)
}

/// Finds the starting indices of all full matches of the pattern
/// in the Z-array.
///
/// # Parameters
/// - `z_array`: A slice of the Z-array containing computed Z-values.
/// - `pattern_size`: The length of the pattern to find in the Z-array.
///
/// # Returns
/// A vector containing the starting indices of full matches.
fn find_full_matches(z_array: &[usize], pattern_size: usize) -> Vec<usize> {
    z_array
        .iter()
        .enumerate()
        .filter_map(|(idx, &z_value)| (z_value == pattern_size).then_some(idx))
        .collect()
}

/// Matches the occurrences of a pattern in an input string starting
/// from a specified index.
///
/// # Parameters
/// - `input_string`: A slice of elements to search within.
/// - `pattern`: A slice of elements that represents the pattern to match.
/// - `start_index`: The index in the input string to start the search.
/// - `only_full_matches`: If true, only full matches of the pattern will be returned.
///
/// # Returns
/// A vector containing the starting indices of the matches.
fn match_with_z_array<T: Eq>(
    input_string: &[T],
    pattern: &[T],
    start_index: usize,
    only_full_matches: bool,
) -> Vec<usize> {
    let size = input_string.len();
    let pattern_size = pattern.len();
    let mut last_match: usize = 0;
    let mut match_end: usize = 0;
    let mut z_array = vec![0usize; size];

    for i in start_index..size {
        // Reuse information from the current Z-box when `i` falls inside it.
        if i <= match_end {
            z_array[i] = initialize_z_array_from_previous_match(&z_array, i, match_end, last_match);
        }

        z_array[i] = calculate_z_value(input_string, pattern, i, z_array[i]);

        // Slide the Z-box forward whenever this match reaches further right.
        if i + z_array[i] > match_end + 1 {
            match_end = i + z_array[i] - 1;
            last_match = i;
        }
    }

    if only_full_matches {
        find_full_matches(&z_array, pattern_size)
    } else {
        z_array
    }
}

/// Constructs the Z-array for the given input string.
///
/// The Z-array is an array where the i-th element is the length of the longest
/// substring starting from s[i] that is also a prefix of s.
///
/// # Parameters
/// - `input`: A slice of the input string for which the Z-array is to be constructed.
///
/// # Returns
/// A vector representing the Z-array of the input string.
pub fn z_array<T: Eq>(input: &[T]) -> Vec<usize> {
    // Start at index 1 by convention, so z[0] stays 0.
    match_with_z_array(input, input, 1, false)
}

/// Matches the occurrences of a given pattern in an input string.
///
/// This function acts as a wrapper around `match_with_z_array` to provide a simpler
/// interface for pattern matching, returning only full matches.
///
/// # Parameters
/// - `input`: A slice of the input string where the pattern will be searched.
/// - `pattern`: A slice of the pattern to search for in the input string.
///
/// # Returns
/// A vector of indices where the pattern matches the input string.
pub fn match_pattern<T: Eq>(input: &[T], pattern: &[T]) -> Vec<usize> {
    match_with_z_array(input, pattern, 0, true)
}

#[cfg(test)]
mod tests {
    use super::*;

    macro_rules! test_match_pattern {
        ($($name:ident: ($input:expr, $pattern:expr, $expected:expr),)*) => {
            $(
                #[test]
                fn $name() {
                    let (input, pattern, expected) = ($input, $pattern, $expected);
                    assert_eq!(match_pattern(input.as_bytes(), pattern.as_bytes()), expected);
                }
            )*
        };
    }

    macro_rules! test_z_array_cases {
        ($($name:ident: ($input:expr, $expected:expr),)*) => {
            $(
                #[test]
                fn $name() {
                    let (input, expected) = ($input, $expected);
                    assert_eq!(z_array(input.as_bytes()), expected);
                }
            )*
        };
    }

    test_match_pattern! {
        simple_match: ("abcabcabc", "abc", vec![0, 3, 6]),
        no_match: ("abcdef", "xyz", vec![]),
        single_char_match: ("aaaaaa", "a", vec![0, 1, 2, 3, 4, 5]),
        overlapping_match: ("abababa", "aba", vec![0, 2, 4]),
        full_string_match: ("pattern", "pattern", vec![0]),
        empty_pattern: ("nonempty", " ", vec![]),
        pattern_larger_than_text: ("small", "largerpattern", vec![]),
        repeated_pattern_in_text: (
            "aaaaaaaa", "aaa", vec![0, 1, 2, 3, 4, 5]
        ),
        pattern_not_in_lipsum: (
            concat!(
                "lorem ipsum dolor sit amet, consectetur ",
                "adipiscing elit, sed do eiusmod tempor ",
                "incididunt ut labore et dolore magna aliqua"
            ),
            ";alksdjfoiwer",
            vec![]
        ),
        pattern_in_lipsum: (
            concat!(
                "lorem ipsum dolor sit amet, consectetur ",
                "adipiscing elit, sed do eiusmod tempor ",
                "incididunt ut labore et dolore magna aliqua"
            ),
            "m",
            vec![4, 10, 23, 68, 74, 110]
        ),
    }

    test_z_array_cases! {
        basic_z_array: ("aabaabab", vec![0, 1, 0, 4, 1, 0, 1, 0]),
        empty_string: ("", vec![]),
        single_char_z_array: ("a", vec![0]),
        repeated_char_z_array: ("aaaaaa", vec![0, 5, 4, 3, 2, 1]),
    }
}
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/string/burrows_wheeler_transform.rs
src/string/burrows_wheeler_transform.rs
/// Computes the Burrows-Wheeler transform of `input`.
///
/// Builds every rotation of the input, sorts them (case-insensitively, which is
/// this implementation's convention), and returns the last character of each
/// sorted rotation together with the row index of the original string —
/// everything needed to invert the transform.
///
/// NOTE(review): rotations are built with byte slicing (`input[i..]`), so the
/// function assumes input whose byte and char boundaries coincide (ASCII);
/// multi-byte UTF-8 would panic on a char boundary. The O(n^2) table is
/// inherent to this naive approach.
pub fn burrows_wheeler_transform(input: &str) -> (String, usize) {
    let len = input.len();

    // All rotations of the input.
    let mut table = Vec::<String>::with_capacity(len);
    for i in 0..len {
        table.push(input[i..].to_owned() + &input[..i]);
    }
    // NOTE(review): the forward sort compares whole rotations case-insensitively
    // while the inverse sorts single characters case-sensitively; round-tripping
    // is only exercised on single-case inputs in the tests below — confirm
    // before relying on mixed-case input.
    table.sort_by_key(|a| a.to_lowercase());

    let mut encoded = String::with_capacity(len);
    let mut index: usize = 0;
    for (i, item) in table.iter().enumerate().take(len) {
        encoded.push(item.chars().last().unwrap());
        if item.eq(&input) {
            // Remember which sorted row holds the untouched original string.
            index = i;
        }
    }

    (encoded, index)
}

/// Inverts the Burrows-Wheeler transform, rebuilding the original string from
/// the encoded text and the row index returned by `burrows_wheeler_transform`.
pub fn inv_burrows_wheeler_transform<T: AsRef<str>>(input: (T, usize)) -> String {
    let encoded = input.0.as_ref();

    // Pair every character with its row in the last column. A single pass with
    // `chars().enumerate()` replaces the previous O(n^2) `chars().nth(i)` probe
    // per index.
    let mut table: Vec<(usize, char)> = encoded.chars().enumerate().collect();

    // A *stable* sort by character recovers the first column of the rotation
    // table while each entry remembers its row in the last column. Stability
    // keeps equal characters in their original relative order, which the
    // permutation-following loop below depends on (`sort_by` is stable).
    table.sort_by(|a, b| a.1.cmp(&b.1));

    let mut decoded = String::with_capacity(table.len());
    let mut idx = input.1;
    for _ in 0..table.len() {
        decoded.push(table[idx].1);
        idx = table[idx].0;
    }

    decoded
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    //Ensure function stand-alone legitimacy
    fn stand_alone_function() {
        assert_eq!(
            burrows_wheeler_transform("CARROT"),
            ("CTRRAO".to_owned(), 1usize)
        );
        assert_eq!(inv_burrows_wheeler_transform(("CTRRAO", 1usize)), "CARROT");
        assert_eq!(
            burrows_wheeler_transform("THEALGORITHMS"),
            ("EHLTTRAHGOMSI".to_owned(), 11usize)
        );
        assert_eq!(
            inv_burrows_wheeler_transform(("EHLTTRAHGOMSI".to_string(), 11usize)),
            "THEALGORITHMS"
        );
        assert_eq!(
            burrows_wheeler_transform("!.!.!??.=::"),
            (":..!!?:=.?!".to_owned(), 0usize)
        );
        assert_eq!(
            inv_burrows_wheeler_transform((":..!!?:=.?!", 0usize)),
            "!.!.!??.=::"
        );
    }

    #[test]
    fn basic_characters() {
        assert_eq!(
            inv_burrows_wheeler_transform(burrows_wheeler_transform("CARROT")),
            "CARROT"
        );
        assert_eq!(
            inv_burrows_wheeler_transform(burrows_wheeler_transform("TOMATO")),
            "TOMATO"
        );
        assert_eq!(
            inv_burrows_wheeler_transform(burrows_wheeler_transform("THISISATEST")),
            "THISISATEST"
        );
        assert_eq!(
            inv_burrows_wheeler_transform(burrows_wheeler_transform("THEALGORITHMS")),
            "THEALGORITHMS"
        );
        assert_eq!(
            inv_burrows_wheeler_transform(burrows_wheeler_transform("RUST")),
            "RUST"
        );
    }

    #[test]
    fn special_characters() {
        assert_eq!(
            inv_burrows_wheeler_transform(burrows_wheeler_transform("!.!.!??.=::")),
            "!.!.!??.=::"
        );
        assert_eq!(
            inv_burrows_wheeler_transform(burrows_wheeler_transform("!{}{}(((&&%%!??.=::")),
            "!{}{}(((&&%%!??.=::"
        );
        assert_eq!(
            inv_burrows_wheeler_transform(burrows_wheeler_transform("//&$[]")),
            "//&$[]"
        );
    }

    #[test]
    fn empty() {
        assert_eq!(
            inv_burrows_wheeler_transform(burrows_wheeler_transform("")),
            ""
        );
    }
}
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/string/suffix_tree.rs
src/string/suffix_tree.rs
// In computer science, a suffix tree (also called PAT tree or, in an earlier form, position tree) // is a compressed trie containing all the suffixes of the given text as their keys and positions // in the text as their values. Suffix trees allow particularly fast implementations of many // important string operations. Source: https://en.wikipedia.org/wiki/Suffix_tree #[derive(Debug, PartialEq, Eq, Clone)] pub struct Node { pub sub: String, // substring of input string pub ch: Vec<usize>, // vector of child nodes } impl Node { fn new(sub: String, children: Vec<usize>) -> Self { Node { sub, ch: children.to_vec(), } } pub fn empty() -> Self { Node { sub: "".to_string(), ch: vec![], } } } pub struct SuffixTree { pub nodes: Vec<Node>, } impl SuffixTree { pub fn new(s: &str) -> Self { let mut suf_tree = SuffixTree { nodes: vec![Node::empty()], }; for i in 0..s.len() { let (_, substr) = s.split_at(i); suf_tree.add_suffix(substr); } suf_tree } fn add_suffix(&mut self, suf: &str) { let mut n = 0; let mut i = 0; while i < suf.len() { let b = suf.chars().nth(i); let mut x2 = 0; let mut n2: usize; loop { let children = &self.nodes[n].ch; if children.len() == x2 { n2 = self.nodes.len(); self.nodes.push(Node::new( { let (_, sub) = suf.split_at(i); sub.to_string() }, vec![], )); self.nodes[n].ch.push(n2); return; } n2 = children[x2]; if self.nodes[n2].sub.chars().next() == b { break; } x2 += 1; } let sub2 = self.nodes[n2].sub.clone(); let mut j = 0; while j < sub2.len() { if suf.chars().nth(i + j) != sub2.chars().nth(j) { let n3 = n2; n2 = self.nodes.len(); self.nodes.push(Node::new( { let (sub, _) = sub2.split_at(j); sub.to_string() }, vec![n3], )); let (_, temp_sub) = sub2.split_at(j); self.nodes[n3].sub = temp_sub.to_string(); self.nodes[n].ch[x2] = n2; break; } j += 1; } i += j; n = n2; } } } #[cfg(test)] mod tests { use super::*; #[test] fn test_suffix_tree() { let suf_tree = SuffixTree::new("banana$"); assert_eq!( suf_tree.nodes, vec![ Node { sub: "".to_string(), ch: 
vec![1, 8, 6, 10] }, Node { sub: "banana$".to_string(), ch: vec![] }, Node { sub: "na$".to_string(), ch: vec![] }, Node { sub: "na$".to_string(), ch: vec![] }, Node { sub: "na".to_string(), ch: vec![2, 5] }, Node { sub: "$".to_string(), ch: vec![] }, Node { sub: "na".to_string(), ch: vec![3, 7] }, Node { sub: "$".to_string(), ch: vec![] }, Node { sub: "a".to_string(), ch: vec![4, 9] }, Node { sub: "$".to_string(), ch: vec![] }, Node { sub: "$".to_string(), ch: vec![] } ] ); } }
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/string/suffix_array_manber_myers.rs
src/string/suffix_array_manber_myers.rs
pub fn generate_suffix_array_manber_myers(input: &str) -> Vec<usize> { if input.is_empty() { return Vec::new(); } let n = input.len(); let mut suffixes: Vec<(usize, &str)> = Vec::with_capacity(n); for (i, _suffix) in input.char_indices() { suffixes.push((i, &input[i..])); } suffixes.sort_by_key(|&(_, s)| s); let mut suffix_array: Vec<usize> = vec![0; n]; let mut rank = vec![0; n]; let mut cur_rank = 0; let mut prev_suffix = &suffixes[0].1; for (i, suffix) in suffixes.iter().enumerate() { if &suffix.1 != prev_suffix { cur_rank += 1; prev_suffix = &suffix.1; } rank[suffix.0] = cur_rank; suffix_array[i] = suffix.0; } let mut k = 1; let mut new_rank: Vec<usize> = vec![0; n]; while k < n { suffix_array.sort_by_key(|&x| (rank[x], rank[(x + k) % n])); let mut cur_rank = 0; let mut prev = suffix_array[0]; new_rank[prev] = cur_rank; for &suffix in suffix_array.iter().skip(1) { let next = suffix; if (rank[prev], rank[(prev + k) % n]) != (rank[next], rank[(next + k) % n]) { cur_rank += 1; } new_rank[next] = cur_rank; prev = next; } std::mem::swap(&mut rank, &mut new_rank); k <<= 1; } suffix_array } #[cfg(test)] mod tests { use super::*; #[test] fn test_suffix_array() { let input = "banana"; let expected_result = vec![5, 3, 1, 0, 4, 2]; assert_eq!(generate_suffix_array_manber_myers(input), expected_result); } #[test] fn test_empty_string() { let input = ""; let expected_result: Vec<usize> = Vec::new(); assert_eq!(generate_suffix_array_manber_myers(input), expected_result); } #[test] fn test_single_character() { let input = "a"; let expected_result = vec![0]; assert_eq!(generate_suffix_array_manber_myers(input), expected_result); } #[test] fn test_repeating_characters() { let input = "zzzzzz"; let expected_result = vec![5, 4, 3, 2, 1, 0]; assert_eq!(generate_suffix_array_manber_myers(input), expected_result); } #[test] fn test_long_string() { let input = "abcdefghijklmnopqrstuvwxyz"; let expected_result: Vec<usize> = (0..26).collect(); 
assert_eq!(generate_suffix_array_manber_myers(input), expected_result); } #[test] fn test_mix_of_characters() { let input = "abracadabra!"; let expected_result = vec![11, 10, 7, 0, 3, 5, 8, 1, 4, 6, 9, 2]; assert_eq!(generate_suffix_array_manber_myers(input), expected_result); } #[test] fn test_whitespace_characters() { let input = " hello world "; let expected_result = vec![12, 0, 6, 11, 2, 1, 10, 3, 4, 5, 8, 9, 7]; assert_eq!(generate_suffix_array_manber_myers(input), expected_result); } }
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/string/hamming_distance.rs
src/string/hamming_distance.rs
/// Error type for Hamming distance calculation. #[derive(Debug, PartialEq)] pub enum HammingDistanceError { InputStringsHaveDifferentLength, } /// Calculates the Hamming distance between two strings. /// /// The Hamming distance is defined as the number of positions at which the corresponding characters of the two strings are different. pub fn hamming_distance(string_a: &str, string_b: &str) -> Result<usize, HammingDistanceError> { if string_a.len() != string_b.len() { return Err(HammingDistanceError::InputStringsHaveDifferentLength); } let distance = string_a .chars() .zip(string_b.chars()) .filter(|(a, b)| a != b) .count(); Ok(distance) } #[cfg(test)] mod tests { use super::*; macro_rules! test_hamming_distance { ($($name:ident: $tc:expr,)*) => { $( #[test] fn $name() { let (str_a, str_b, expected) = $tc; assert_eq!(hamming_distance(str_a, str_b), expected); assert_eq!(hamming_distance(str_b, str_a), expected); } )* } } test_hamming_distance! { empty_inputs: ("", "", Ok(0)), different_length: ("0", "", Err(HammingDistanceError::InputStringsHaveDifferentLength)), length_1_inputs_identical: ("a", "a", Ok(0)), length_1_inputs_different: ("a", "b", Ok(1)), same_strings: ("rust", "rust", Ok(0)), regular_input_0: ("karolin", "kathrin", Ok(3)), regular_input_1: ("kathrin", "kerstin", Ok(4)), regular_input_2: ("00000", "11111", Ok(5)), different_case: ("x", "X", Ok(1)), strings_with_no_common_chars: ("abcd", "wxyz", Ok(4)), long_strings_one_diff: (&"a".repeat(1000), &("a".repeat(999) + "b"), Ok(1)), long_strings_many_diffs: (&("a".repeat(500) + &"b".repeat(500)), &("b".repeat(500) + &"a".repeat(500)), Ok(1000)), strings_with_special_chars_identical: ("!@#$%^", "!@#$%^", Ok(0)), strings_with_special_chars_diff: ("!@#$%^", "&*()_+", Ok(6)), } }
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/backtracking/n_queens.rs
src/backtracking/n_queens.rs
//! This module provides functionality to solve the N-Queens problem. //! //! The N-Queens problem is a classic chessboard puzzle where the goal is to //! place N queens on an NxN chessboard so that no two queens threaten each //! other. Queens can attack each other if they share the same row, column, or //! diagonal. //! //! This implementation solves the N-Queens problem using a backtracking algorithm. //! It starts with an empty chessboard and iteratively tries to place queens in //! different rows, ensuring they do not conflict with each other. If a valid //! solution is found, it's added to the list of solutions. /// Solves the N-Queens problem for a given size and returns a vector of solutions. /// /// # Arguments /// /// * `n` - The size of the chessboard (NxN). /// /// # Returns /// /// A vector containing all solutions to the N-Queens problem. pub fn n_queens_solver(n: usize) -> Vec<Vec<String>> { let mut solver = NQueensSolver::new(n); solver.solve() } /// Represents a solver for the N-Queens problem. struct NQueensSolver { // The size of the chessboard size: usize, // A 2D vector representing the chessboard where '.' denotes an empty space and 'Q' denotes a queen board: Vec<Vec<char>>, // A vector to store all valid solutions solutions: Vec<Vec<String>>, } impl NQueensSolver { /// Creates a new `NQueensSolver` instance with the given size. /// /// # Arguments /// /// * `size` - The size of the chessboard (N×N). /// /// # Returns /// /// A new `NQueensSolver` instance. fn new(size: usize) -> Self { NQueensSolver { size, board: vec![vec!['.'; size]; size], solutions: Vec::new(), } } /// Solves the N-Queens problem and returns a vector of solutions. /// /// # Returns /// /// A vector containing all solutions to the N-Queens problem. fn solve(&mut self) -> Vec<Vec<String>> { self.solve_helper(0); std::mem::take(&mut self.solutions) } /// Checks if it's safe to place a queen at the specified position (row, col). 
/// /// # Arguments /// /// * `row` - The row index of the position to check. /// * `col` - The column index of the position to check. /// /// # Returns /// /// `true` if it's safe to place a queen at the specified position, `false` otherwise. fn is_safe(&self, row: usize, col: usize) -> bool { // Check column and diagonals for i in 0..row { if self.board[i][col] == 'Q' || (col >= row - i && self.board[i][col - (row - i)] == 'Q') || (col + row - i < self.size && self.board[i][col + (row - i)] == 'Q') { return false; } } true } /// Recursive helper function to solve the N-Queens problem. /// /// # Arguments /// /// * `row` - The current row being processed. fn solve_helper(&mut self, row: usize) { if row == self.size { self.solutions .push(self.board.iter().map(|row| row.iter().collect()).collect()); return; } for col in 0..self.size { if self.is_safe(row, col) { self.board[row][col] = 'Q'; self.solve_helper(row + 1); self.board[row][col] = '.'; } } } } #[cfg(test)] mod tests { use super::*; macro_rules! test_n_queens_solver { ($($name:ident: $tc:expr,)*) => { $( #[test] fn $name() { let (n, expected_solutions) = $tc; let solutions = n_queens_solver(n); assert_eq!(solutions, expected_solutions); } )* }; } test_n_queens_solver! 
{ test_0_queens: (0, vec![Vec::<String>::new()]), test_1_queen: (1, vec![vec!["Q"]]), test_2_queens:(2, Vec::<Vec<String>>::new()), test_3_queens:(3, Vec::<Vec<String>>::new()), test_4_queens: (4, vec![ vec![".Q..", "...Q", "Q...", "..Q."], vec!["..Q.", "Q...", "...Q", ".Q.."], ]), test_5_queens:(5, vec![ vec!["Q....", "..Q..", "....Q", ".Q...", "...Q."], vec!["Q....", "...Q.", ".Q...", "....Q", "..Q.."], vec![".Q...", "...Q.", "Q....", "..Q..", "....Q"], vec![".Q...", "....Q", "..Q..", "Q....", "...Q."], vec!["..Q..", "Q....", "...Q.", ".Q...", "....Q"], vec!["..Q..", "....Q", ".Q...", "...Q.", "Q...."], vec!["...Q.", "Q....", "..Q..", "....Q", ".Q..."], vec!["...Q.", ".Q...", "....Q", "..Q..", "Q...."], vec!["....Q", ".Q...", "...Q.", "Q....", "..Q.."], vec!["....Q", "..Q..", "Q....", "...Q.", ".Q..."], ]), test_6_queens: (6, vec![ vec![".Q....", "...Q..", ".....Q", "Q.....", "..Q...", "....Q."], vec!["..Q...", ".....Q", ".Q....", "....Q.", "Q.....", "...Q.."], vec!["...Q..", "Q.....", "....Q.", ".Q....", ".....Q", "..Q..."], vec!["....Q.", "..Q...", "Q.....", ".....Q", "...Q..", ".Q...."], ]), } }
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/backtracking/parentheses_generator.rs
src/backtracking/parentheses_generator.rs
/// Generates all combinations of well-formed parentheses given a non-negative integer `n`. /// /// This function uses backtracking to generate all possible combinations of well-formed /// parentheses. The resulting combinations are returned as a vector of strings. /// /// # Arguments /// /// * `n` - A non-negative integer representing the number of pairs of parentheses. pub fn generate_parentheses(n: usize) -> Vec<String> { let mut result = Vec::new(); if n > 0 { generate("", 0, 0, n, &mut result); } result } /// Helper function for generating parentheses recursively. /// /// This function is called recursively to build combinations of well-formed parentheses. /// It tracks the number of open and close parentheses added so far and adds a new parenthesis /// if it's valid to do so. /// /// # Arguments /// /// * `current` - The current string of parentheses being built. /// * `open_count` - The count of open parentheses in the current string. /// * `close_count` - The count of close parentheses in the current string. /// * `n` - The total number of pairs of parentheses to be generated. /// * `result` - A mutable reference to the vector storing the generated combinations. fn generate( current: &str, open_count: usize, close_count: usize, n: usize, result: &mut Vec<String>, ) { if current.len() == (n * 2) { result.push(current.to_string()); return; } if open_count < n { let new_str = current.to_string() + "("; generate(&new_str, open_count + 1, close_count, n, result); } if close_count < open_count { let new_str = current.to_string() + ")"; generate(&new_str, open_count, close_count + 1, n, result); } } #[cfg(test)] mod tests { use super::*; macro_rules! generate_parentheses_tests { ($($name:ident: $test_case:expr,)*) => { $( #[test] fn $name() { let (n, expected_result) = $test_case; assert_eq!(generate_parentheses(n), expected_result); } )* }; } generate_parentheses_tests! 
{ test_generate_parentheses_0: (0, Vec::<String>::new()), test_generate_parentheses_1: (1, vec!["()"]), test_generate_parentheses_2: (2, vec!["(())", "()()"]), test_generate_parentheses_3: (3, vec!["((()))", "(()())", "(())()", "()(())", "()()()"]), test_generate_parentheses_4: (4, vec!["(((())))", "((()()))", "((())())", "((()))()", "(()(()))", "(()()())", "(()())()", "(())(())", "(())()()", "()((()))", "()(()())", "()(())()", "()()(())", "()()()()"]), } }
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/backtracking/sudoku.rs
src/backtracking/sudoku.rs
//! A Rust implementation of Sudoku solver using Backtracking. //! //! This module provides functionality to solve Sudoku puzzles using the backtracking algorithm. //! //! GeeksForGeeks: [Sudoku Backtracking](https://www.geeksforgeeks.org/sudoku-backtracking-7/) /// Solves a Sudoku puzzle. /// /// Given a partially filled Sudoku puzzle represented by a 9x9 grid, this function attempts to /// solve the puzzle using the backtracking algorithm. /// /// Returns the solved Sudoku board if a solution exists, or `None` if no solution is found. pub fn sudoku_solver(board: &[[u8; 9]; 9]) -> Option<[[u8; 9]; 9]> { let mut solver = SudokuSolver::new(*board); if solver.solve() { Some(solver.board) } else { None } } /// Represents a Sudoku puzzle solver. struct SudokuSolver { /// The Sudoku board represented by a 9x9 grid. board: [[u8; 9]; 9], } impl SudokuSolver { /// Creates a new Sudoku puzzle solver with the given board. fn new(board: [[u8; 9]; 9]) -> SudokuSolver { SudokuSolver { board } } /// Finds an empty cell in the Sudoku board. /// /// Returns the coordinates of an empty cell `(row, column)` if found, or `None` if all cells are filled. fn find_empty_cell(&self) -> Option<(usize, usize)> { // Find an empty cell in the board (returns None if all cells are filled) for row in 0..9 { for column in 0..9 { if self.board[row][column] == 0 { return Some((row, column)); } } } None } /// Checks whether a given value can be placed in a specific cell according to Sudoku rules. /// /// Returns `true` if the value can be placed in the cell, otherwise `false`. 
fn is_value_valid(&self, coordinates: (usize, usize), value: u8) -> bool { let (row, column) = coordinates; // Checks if the value to be added in the board is an acceptable value for the cell // Checking through the row for current_column in 0..9 { if self.board[row][current_column] == value { return false; } } // Checking through the column for current_row in 0..9 { if self.board[current_row][column] == value { return false; } } // Checking through the 3x3 block of the cell let start_row = row / 3 * 3; let start_column = column / 3 * 3; for current_row in start_row..start_row + 3 { for current_column in start_column..start_column + 3 { if self.board[current_row][current_column] == value { return false; } } } true } /// Solves the Sudoku puzzle recursively using backtracking. /// /// Returns `true` if a solution is found, otherwise `false`. fn solve(&mut self) -> bool { let empty_cell = self.find_empty_cell(); if let Some((row, column)) = empty_cell { for value in 1..=9 { if self.is_value_valid((row, column), value) { self.board[row][column] = value; if self.solve() { return true; } // Backtracking if the board cannot be solved using the current configuration self.board[row][column] = 0; } } } else { // If the board is complete return true; } // Returning false if the board cannot be solved using the current configuration false } } #[cfg(test)] mod tests { use super::*; macro_rules! test_sudoku_solver { ($($name:ident: $board:expr, $expected:expr,)*) => { $( #[test] fn $name() { let result = sudoku_solver(&$board); assert_eq!(result, $expected); } )* }; } test_sudoku_solver! 
{ test_sudoku_correct: [ [3, 0, 6, 5, 0, 8, 4, 0, 0], [5, 2, 0, 0, 0, 0, 0, 0, 0], [0, 8, 7, 0, 0, 0, 0, 3, 1], [0, 0, 3, 0, 1, 0, 0, 8, 0], [9, 0, 0, 8, 6, 3, 0, 0, 5], [0, 5, 0, 0, 9, 0, 6, 0, 0], [1, 3, 0, 0, 0, 0, 2, 5, 0], [0, 0, 0, 0, 0, 0, 0, 7, 4], [0, 0, 5, 2, 0, 6, 3, 0, 0], ], Some([ [3, 1, 6, 5, 7, 8, 4, 9, 2], [5, 2, 9, 1, 3, 4, 7, 6, 8], [4, 8, 7, 6, 2, 9, 5, 3, 1], [2, 6, 3, 4, 1, 5, 9, 8, 7], [9, 7, 4, 8, 6, 3, 1, 2, 5], [8, 5, 1, 7, 9, 2, 6, 4, 3], [1, 3, 8, 9, 4, 7, 2, 5, 6], [6, 9, 2, 3, 5, 1, 8, 7, 4], [7, 4, 5, 2, 8, 6, 3, 1, 9], ]), test_sudoku_incorrect: [ [6, 0, 3, 5, 0, 8, 4, 0, 0], [5, 2, 0, 0, 0, 0, 0, 0, 0], [0, 8, 7, 0, 0, 0, 0, 3, 1], [0, 0, 3, 0, 1, 0, 0, 8, 0], [9, 0, 0, 8, 6, 3, 0, 0, 5], [0, 5, 0, 0, 9, 0, 6, 0, 0], [1, 3, 0, 0, 0, 0, 2, 5, 0], [0, 0, 0, 0, 0, 0, 0, 7, 4], [0, 0, 5, 2, 0, 6, 3, 0, 0], ], None::<[[u8; 9]; 9]>, } }
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/backtracking/subset_sum.rs
src/backtracking/subset_sum.rs
//! This module provides functionality to check if there exists a subset of a given set of integers //! that sums to a target value. The implementation uses a recursive backtracking approach. /// Checks if there exists a subset of the given set that sums to the target value. pub fn has_subset_with_sum(set: &[isize], target: isize) -> bool { backtrack(set, set.len(), target) } fn backtrack(set: &[isize], remaining_items: usize, target: isize) -> bool { // Found a subset with the required sum if target == 0 { return true; } // No more elements to process if remaining_items == 0 { return false; } // Check if we can find a subset including or excluding the last element backtrack(set, remaining_items - 1, target) || backtrack(set, remaining_items - 1, target - set[remaining_items - 1]) } #[cfg(test)] mod tests { use super::*; macro_rules! has_subset_with_sum_tests { ($($name:ident: $test_case:expr,)*) => { $( #[test] fn $name() { let (set, target, expected) = $test_case; assert_eq!(has_subset_with_sum(set, target), expected); } )* } } has_subset_with_sum_tests! { test_small_set_with_sum: (&[3, 34, 4, 12, 5, 2], 9, true), test_small_set_without_sum: (&[3, 34, 4, 12, 5, 2], 30, false), test_consecutive_set_with_sum: (&[1, 2, 3, 4, 5, 6], 10, true), test_consecutive_set_without_sum: (&[1, 2, 3, 4, 5, 6], 22, false), test_large_set_with_sum: (&[5, 10, 12, 13, 15, 18, -1, 10, 50, -2, 3, 4], 30, true), test_empty_set: (&[], 0, true), test_empty_set_with_nonzero_sum: (&[], 10, false), test_single_element_equal_to_sum: (&[10], 10, true), test_single_element_not_equal_to_sum: (&[5], 10, false), test_negative_set_with_sum: (&[-7, -3, -2, 5, 8], 0, true), test_negative_sum: (&[1, 2, 3, 4, 5], -1, false), test_negative_sum_with_negatives: (&[-7, -3, -2, 5, 8], -4, true), test_negative_sum_with_negatives_no_solution: (&[-7, -3, -2, 5, 8], -14, false), test_even_inputs_odd_target: (&[2, 4, 6, 2, 8, -2, 10, 12, -24, 8, 12, 18], 3, false), } }
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/backtracking/permutations.rs
src/backtracking/permutations.rs
//! This module provides a function to generate all possible distinct permutations //! of a given collection of integers using a backtracking algorithm. /// Generates all possible distinct permutations of a given vector of integers. /// /// # Arguments /// /// * `nums` - A vector of integers. The input vector is sorted before generating /// permutations to handle duplicates effectively. /// /// # Returns /// /// A vector containing all possible distinct permutations of the input vector. pub fn permute(mut nums: Vec<isize>) -> Vec<Vec<isize>> { let mut permutations = Vec::new(); let mut current = Vec::new(); let mut used = vec![false; nums.len()]; nums.sort(); generate(&nums, &mut current, &mut used, &mut permutations); permutations } /// Helper function for the `permute` function to generate distinct permutations recursively. /// /// # Arguments /// /// * `nums` - A reference to the sorted slice of integers. /// * `current` - A mutable reference to the vector holding the current permutation. /// * `used` - A mutable reference to a vector tracking which elements are used. /// * `permutations` - A mutable reference to the vector holding all generated distinct permutations. fn generate( nums: &[isize], current: &mut Vec<isize>, used: &mut Vec<bool>, permutations: &mut Vec<Vec<isize>>, ) { if current.len() == nums.len() { permutations.push(current.clone()); return; } for idx in 0..nums.len() { if used[idx] { continue; } if idx > 0 && nums[idx] == nums[idx - 1] && !used[idx - 1] { continue; } current.push(nums[idx]); used[idx] = true; generate(nums, current, used, permutations); current.pop(); used[idx] = false; } } #[cfg(test)] mod tests { use super::*; macro_rules! permute_tests { ($($name:ident: $test_case:expr,)*) => { $( #[test] fn $name() { let (input, expected) = $test_case; assert_eq!(permute(input), expected); } )* } } permute_tests! 
{ test_permute_basic: (vec![1, 2, 3], vec![ vec![1, 2, 3], vec![1, 3, 2], vec![2, 1, 3], vec![2, 3, 1], vec![3, 1, 2], vec![3, 2, 1], ]), test_permute_empty: (Vec::<isize>::new(), vec![vec![]]), test_permute_single: (vec![1], vec![vec![1]]), test_permute_duplicates: (vec![1, 1, 2], vec![ vec![1, 1, 2], vec![1, 2, 1], vec![2, 1, 1], ]), test_permute_all_duplicates: (vec![1, 1, 1, 1], vec![ vec![1, 1, 1, 1], ]), test_permute_negative: (vec![-1, -2, -3], vec![ vec![-3, -2, -1], vec![-3, -1, -2], vec![-2, -3, -1], vec![-2, -1, -3], vec![-1, -3, -2], vec![-1, -2, -3], ]), test_permute_mixed: (vec![-1, 0, 1], vec![ vec![-1, 0, 1], vec![-1, 1, 0], vec![0, -1, 1], vec![0, 1, -1], vec![1, -1, 0], vec![1, 0, -1], ]), test_permute_larger: (vec![1, 2, 3, 4], vec![ vec![1, 2, 3, 4], vec![1, 2, 4, 3], vec![1, 3, 2, 4], vec![1, 3, 4, 2], vec![1, 4, 2, 3], vec![1, 4, 3, 2], vec![2, 1, 3, 4], vec![2, 1, 4, 3], vec![2, 3, 1, 4], vec![2, 3, 4, 1], vec![2, 4, 1, 3], vec![2, 4, 3, 1], vec![3, 1, 2, 4], vec![3, 1, 4, 2], vec![3, 2, 1, 4], vec![3, 2, 4, 1], vec![3, 4, 1, 2], vec![3, 4, 2, 1], vec![4, 1, 2, 3], vec![4, 1, 3, 2], vec![4, 2, 1, 3], vec![4, 2, 3, 1], vec![4, 3, 1, 2], vec![4, 3, 2, 1], ]), } }
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/backtracking/mod.rs
src/backtracking/mod.rs
mod all_combination_of_size_k; mod graph_coloring; mod hamiltonian_cycle; mod knight_tour; mod n_queens; mod parentheses_generator; mod permutations; mod rat_in_maze; mod subset_sum; mod sudoku; pub use all_combination_of_size_k::generate_all_combinations; pub use graph_coloring::generate_colorings; pub use hamiltonian_cycle::find_hamiltonian_cycle; pub use knight_tour::find_knight_tour; pub use n_queens::n_queens_solver; pub use parentheses_generator::generate_parentheses; pub use permutations::permute; pub use rat_in_maze::find_path_in_maze; pub use subset_sum::has_subset_with_sum; pub use sudoku::sudoku_solver;
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/backtracking/all_combination_of_size_k.rs
src/backtracking/all_combination_of_size_k.rs
//! This module provides a function to generate all possible combinations //! of `k` numbers out of `0...n-1` using a backtracking algorithm. /// Custom error type for combination generation. #[derive(Debug, PartialEq)] pub enum CombinationError { KGreaterThanN, InvalidZeroRange, } /// Generates all possible combinations of `k` numbers out of `0...n-1`. /// /// # Arguments /// /// * `n` - The upper limit of the range (`0` to `n-1`). /// * `k` - The number of elements in each combination. /// /// # Returns /// /// A `Result` containing a vector with all possible combinations of `k` numbers out of `0...n-1`, /// or a `CombinationError` if the input is invalid. pub fn generate_all_combinations(n: usize, k: usize) -> Result<Vec<Vec<usize>>, CombinationError> { if n == 0 && k > 0 { return Err(CombinationError::InvalidZeroRange); } if k > n { return Err(CombinationError::KGreaterThanN); } let mut combinations = vec![]; let mut current = vec![0; k]; backtrack(0, n, k, 0, &mut current, &mut combinations); Ok(combinations) } /// Helper function to generate combinations recursively. /// /// # Arguments /// /// * `start` - The current number to start the combination with. /// * `n` - The upper limit of the range (`0` to `n-1`). /// * `k` - The number of elements left to complete the combination. /// * `index` - The current index being filled in the combination. /// * `current` - A mutable reference to the current combination being constructed. /// * `combinations` - A mutable reference to the vector holding all combinations. fn backtrack( start: usize, n: usize, k: usize, index: usize, current: &mut Vec<usize>, combinations: &mut Vec<Vec<usize>>, ) { if index == k { combinations.push(current.clone()); return; } for num in start..=(n - k + index) { current[index] = num; backtrack(num + 1, n, k, index + 1, current, combinations); } } #[cfg(test)] mod tests { use super::*; macro_rules! 
combination_tests { ($($name:ident: $test_case:expr,)*) => { $( #[test] fn $name() { let (n, k, expected) = $test_case; assert_eq!(generate_all_combinations(n, k), expected); } )* } } combination_tests! { test_generate_4_2: (4, 2, Ok(vec![ vec![0, 1], vec![0, 2], vec![0, 3], vec![1, 2], vec![1, 3], vec![2, 3], ])), test_generate_4_3: (4, 3, Ok(vec![ vec![0, 1, 2], vec![0, 1, 3], vec![0, 2, 3], vec![1, 2, 3], ])), test_generate_5_3: (5, 3, Ok(vec![ vec![0, 1, 2], vec![0, 1, 3], vec![0, 1, 4], vec![0, 2, 3], vec![0, 2, 4], vec![0, 3, 4], vec![1, 2, 3], vec![1, 2, 4], vec![1, 3, 4], vec![2, 3, 4], ])), test_generate_5_1: (5, 1, Ok(vec![ vec![0], vec![1], vec![2], vec![3], vec![4], ])), test_empty: (0, 0, Ok(vec![vec![]])), test_generate_n_eq_k: (3, 3, Ok(vec![ vec![0, 1, 2], ])), test_generate_k_greater_than_n: (3, 4, Err(CombinationError::KGreaterThanN)), test_zero_range_with_nonzero_k: (0, 1, Err(CombinationError::InvalidZeroRange)), } }
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/backtracking/rat_in_maze.rs
src/backtracking/rat_in_maze.rs
//! This module contains the implementation of the Rat in Maze problem. //! //! The Rat in Maze problem is a classic algorithmic problem where the //! objective is to find a path from the starting position to the exit //! position in a maze. /// Enum representing various errors that can occur while working with mazes. #[derive(Debug, PartialEq, Eq)] pub enum MazeError { /// Indicates that the maze is empty (zero rows). EmptyMaze, /// Indicates that the starting position is out of bounds. OutOfBoundPos, /// Indicates an improper representation of the maze (e.g., non-rectangular maze). ImproperMazeRepr, } /// Finds a path through the maze starting from the specified position. /// /// # Arguments /// /// * `maze` - The maze represented as a vector of vectors where each /// inner vector represents a row in the maze grid. /// * `start_x` - The x-coordinate of the starting position. /// * `start_y` - The y-coordinate of the starting position. /// /// # Returns /// /// A `Result` where: /// - `Ok(Some(solution))` if a path is found and contains the solution matrix. /// - `Ok(None)` if no path is found. /// - `Err(MazeError)` for various error conditions such as out-of-bound start position or improper maze representation. /// /// # Solution Selection /// /// The function returns the first successful path it discovers based on the predefined order of moves. /// The order of moves is defined in the `MOVES` constant of the `Maze` struct. /// /// The backtracking algorithm explores each direction in this order. If multiple solutions exist, /// the algorithm returns the first path it finds according to this sequence. It recursively explores /// each direction, marks valid moves, and backtracks if necessary, ensuring that the solution is found /// efficiently and consistently. 
pub fn find_path_in_maze( maze: &[Vec<bool>], start_x: usize, start_y: usize, ) -> Result<Option<Vec<Vec<bool>>>, MazeError> { if maze.is_empty() { return Err(MazeError::EmptyMaze); } // Validate start position if start_x >= maze.len() || start_y >= maze[0].len() { return Err(MazeError::OutOfBoundPos); } // Validate maze representation (if necessary) if maze.iter().any(|row| row.len() != maze[0].len()) { return Err(MazeError::ImproperMazeRepr); } // If validations pass, proceed with finding the path let maze_instance = Maze::new(maze.to_owned()); Ok(maze_instance.find_path(start_x, start_y)) } /// Represents a maze. struct Maze { maze: Vec<Vec<bool>>, } impl Maze { /// Represents possible moves in the maze. const MOVES: [(isize, isize); 4] = [(0, 1), (1, 0), (0, -1), (-1, 0)]; /// Constructs a new Maze instance. /// # Arguments /// /// * `maze` - The maze represented as a vector of vectors where each /// inner vector represents a row in the maze grid. /// /// # Returns /// /// A new Maze instance. fn new(maze: Vec<Vec<bool>>) -> Self { Maze { maze } } /// Returns the width of the maze. /// /// # Returns /// /// The width of the maze. fn width(&self) -> usize { self.maze[0].len() } /// Returns the height of the maze. /// /// # Returns /// /// The height of the maze. fn height(&self) -> usize { self.maze.len() } /// Finds a path through the maze starting from the specified position. /// /// # Arguments /// /// * `start_x` - The x-coordinate of the starting position. /// * `start_y` - The y-coordinate of the starting position. /// /// # Returns /// /// A solution matrix if a path is found or None if not found. fn find_path(&self, start_x: usize, start_y: usize) -> Option<Vec<Vec<bool>>> { let mut solution = vec![vec![false; self.width()]; self.height()]; if self.solve(start_x as isize, start_y as isize, &mut solution) { Some(solution) } else { None } } /// Recursively solves the Rat in Maze problem using backtracking. 
/// /// # Arguments /// /// * `x` - The current x-coordinate. /// * `y` - The current y-coordinate. /// * `solution` - The current solution matrix. /// /// # Returns /// /// A boolean indicating whether a solution was found. fn solve(&self, x: isize, y: isize, solution: &mut [Vec<bool>]) -> bool { if x == (self.height() as isize - 1) && y == (self.width() as isize - 1) { solution[x as usize][y as usize] = true; return true; } if self.is_valid(x, y, solution) { solution[x as usize][y as usize] = true; for &(dx, dy) in &Self::MOVES { if self.solve(x + dx, y + dy, solution) { return true; } } // If none of the directions lead to the solution, backtrack solution[x as usize][y as usize] = false; return false; } false } /// Checks if a given position is valid in the maze. /// /// # Arguments /// /// * `x` - The x-coordinate of the position. /// * `y` - The y-coordinate of the position. /// * `solution` - The current solution matrix. /// /// # Returns /// /// A boolean indicating whether the position is valid. fn is_valid(&self, x: isize, y: isize, solution: &[Vec<bool>]) -> bool { x >= 0 && y >= 0 && x < self.height() as isize && y < self.width() as isize && self.maze[x as usize][y as usize] && !solution[x as usize][y as usize] } } #[cfg(test)] mod tests { use super::*; macro_rules! test_find_path_in_maze { ($($name:ident: $start_x:expr, $start_y:expr, $maze:expr, $expected:expr,)*) => { $( #[test] fn $name() { let solution = find_path_in_maze($maze, $start_x, $start_y); assert_eq!(solution, $expected); if let Ok(Some(expected_solution)) = &solution { assert_eq!(expected_solution[$start_x][$start_y], true); } } )* } } test_find_path_in_maze! 
{ maze_with_solution_5x5: 0, 0, &[ vec![true, false, true, false, false], vec![true, true, false, true, false], vec![false, true, true, true, false], vec![false, false, false, true, true], vec![false, true, false, false, true], ], Ok(Some(vec![ vec![true, false, false, false, false], vec![true, true, false, false, false], vec![false, true, true, true, false], vec![false, false, false, true, true], vec![false, false, false, false, true], ])), maze_with_solution_6x6: 0, 0, &[ vec![true, false, true, false, true, false], vec![true, true, false, true, false, true], vec![false, true, true, true, true, false], vec![false, false, false, true, true, true], vec![false, true, false, false, true, false], vec![true, true, true, true, true, true], ], Ok(Some(vec![ vec![true, false, false, false, false, false], vec![true, true, false, false, false, false], vec![false, true, true, true, true, false], vec![false, false, false, false, true, false], vec![false, false, false, false, true, false], vec![false, false, false, false, true, true], ])), maze_with_solution_8x8: 0, 0, &[ vec![true, false, false, false, false, false, false, true], vec![true, true, false, true, true, true, false, false], vec![false, true, true, true, false, false, false, false], vec![false, false, false, true, false, true, true, false], vec![false, true, false, true, true, true, false, true], vec![true, false, true, false, false, true, true, true], vec![false, false, true, true, true, false, true, true], vec![true, true, true, false, true, true, true, true], ], Ok(Some(vec![ vec![true, false, false, false, false, false, false, false], vec![true, true, false, false, false, false, false, false], vec![false, true, true, true, false, false, false, false], vec![false, false, false, true, false, false, false, false], vec![false, false, false, true, true, true, false, false], vec![false, false, false, false, false, true, true, true], vec![false, false, false, false, false, false, false, true], vec![false, false, 
false, false, false, false, false, true], ])), maze_without_solution_4x4: 0, 0, &[ vec![true, false, false, false], vec![true, true, false, false], vec![false, false, true, false], vec![false, false, false, true], ], Ok(None::<Vec<Vec<bool>>>), maze_with_solution_3x4: 0, 0, &[ vec![true, false, true, true], vec![true, true, true, false], vec![false, true, true, true], ], Ok(Some(vec![ vec![true, false, false, false], vec![true, true, true, false], vec![false, false, true, true], ])), maze_without_solution_3x4: 0, 0, &[ vec![true, false, true, true], vec![true, false, true, false], vec![false, true, false, true], ], Ok(None::<Vec<Vec<bool>>>), improper_maze_representation: 0, 0, &[ vec![true], vec![true, true], vec![true, true, true], vec![true, true, true, true] ], Err(MazeError::ImproperMazeRepr), out_of_bound_start: 0, 3, &[ vec![true, false, true], vec![true, true], vec![false, true, true], ], Err(MazeError::OutOfBoundPos), empty_maze: 0, 0, &[], Err(MazeError::EmptyMaze), maze_with_single_cell: 0, 0, &[ vec![true], ], Ok(Some(vec![ vec![true] ])), maze_with_one_row_and_multiple_columns: 0, 0, &[ vec![true, false, true, true, false] ], Ok(None::<Vec<Vec<bool>>>), maze_with_multiple_rows_and_one_column: 0, 0, &[ vec![true], vec![true], vec![false], vec![true], ], Ok(None::<Vec<Vec<bool>>>), maze_with_walls_surrounding_border: 0, 0, &[ vec![false, false, false], vec![false, true, false], vec![false, false, false], ], Ok(None::<Vec<Vec<bool>>>), maze_with_no_walls: 0, 0, &[ vec![true, true, true], vec![true, true, true], vec![true, true, true], ], Ok(Some(vec![ vec![true, true, true], vec![false, false, true], vec![false, false, true], ])), maze_with_going_back: 0, 0, &[ vec![true, true, true, true, true, true], vec![false, false, false, true, false, true], vec![true, true, true, true, false, false], vec![true, false, false, false, false, false], vec![true, false, false, false, true, true], vec![true, false, true, true, true, false], vec![true, false, true , false, 
true, false], vec![true, true, true, false, true, true], ], Ok(Some(vec![ vec![true, true, true, true, false, false], vec![false, false, false, true, false, false], vec![true, true, true, true, false, false], vec![true, false, false, false, false, false], vec![true, false, false, false, false, false], vec![true, false, true, true, true, false], vec![true, false, true , false, true, false], vec![true, true, true, false, true, true], ])), } }
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/backtracking/knight_tour.rs
src/backtracking/knight_tour.rs
//! This module contains the implementation of the Knight's Tour problem. //! //! The Knight's Tour is a classic chess problem where the objective is to move a knight to every square on a chessboard exactly once. /// Finds the Knight's Tour starting from the specified position. /// /// # Arguments /// /// * `size_x` - The width of the chessboard. /// * `size_y` - The height of the chessboard. /// * `start_x` - The x-coordinate of the starting position. /// * `start_y` - The y-coordinate of the starting position. /// /// # Returns /// /// A tour matrix if the tour was found or None if not found. /// The tour matrix returned is essentially the board field of the `KnightTour` /// struct `Vec<Vec<usize>>`. It represents the sequence of moves made by the /// knight on the chessboard, with each cell containing the order in which the knight visited that square. pub fn find_knight_tour( size_x: usize, size_y: usize, start_x: usize, start_y: usize, ) -> Option<Vec<Vec<usize>>> { let mut tour = KnightTour::new(size_x, size_y); tour.find_tour(start_x, start_y) } /// Represents the KnightTour struct which implements the Knight's Tour problem. struct KnightTour { board: Vec<Vec<usize>>, } impl KnightTour { /// Possible moves of the knight on the board const MOVES: [(isize, isize); 8] = [ (2, 1), (1, 2), (-1, 2), (-2, 1), (-2, -1), (-1, -2), (1, -2), (2, -1), ]; /// Constructs a new KnightTour instance with the given board size. /// # Arguments /// /// * `size_x` - The width of the chessboard. /// * `size_y` - The height of the chessboard. /// /// # Returns /// /// A new KnightTour instance. fn new(size_x: usize, size_y: usize) -> Self { let board = vec![vec![0; size_x]; size_y]; KnightTour { board } } /// Returns the width of the chessboard. fn size_x(&self) -> usize { self.board.len() } /// Returns the height of the chessboard. fn size_y(&self) -> usize { self.board[0].len() } /// Checks if the given position is safe to move to. 
/// /// # Arguments /// /// * `x` - The x-coordinate of the position. /// * `y` - The y-coordinate of the position. /// /// # Returns /// /// A boolean indicating whether the position is safe to move to. fn is_safe(&self, x: isize, y: isize) -> bool { x >= 0 && y >= 0 && x < self.size_x() as isize && y < self.size_y() as isize && self.board[x as usize][y as usize] == 0 } /// Recursively solves the Knight's Tour problem. /// /// # Arguments /// /// * `x` - The current x-coordinate of the knight. /// * `y` - The current y-coordinate of the knight. /// * `move_count` - The current move count. /// /// # Returns /// /// A boolean indicating whether a solution was found. fn solve_tour(&mut self, x: isize, y: isize, move_count: usize) -> bool { if move_count == self.size_x() * self.size_y() { return true; } for &(dx, dy) in &Self::MOVES { let next_x = x + dx; let next_y = y + dy; if self.is_safe(next_x, next_y) { self.board[next_x as usize][next_y as usize] = move_count + 1; if self.solve_tour(next_x, next_y, move_count + 1) { return true; } // Backtrack self.board[next_x as usize][next_y as usize] = 0; } } false } /// Finds the Knight's Tour starting from the specified position. /// /// # Arguments /// /// * `start_x` - The x-coordinate of the starting position. /// * `start_y` - The y-coordinate of the starting position. /// /// # Returns /// /// A tour matrix if the tour was found or None if not found. fn find_tour(&mut self, start_x: usize, start_y: usize) -> Option<Vec<Vec<usize>>> { if !self.is_safe(start_x as isize, start_y as isize) { return None; } self.board[start_x][start_y] = 1; if !self.solve_tour(start_x as isize, start_y as isize, 1) { return None; } Some(self.board.clone()) } } #[cfg(test)] mod tests { use super::*; macro_rules! 
test_find_knight_tour { ($($name:ident: $tc:expr,)*) => { $( #[test] fn $name() { let (size_x, size_y, start_x, start_y, expected) = $tc; if expected.is_some() { assert_eq!(expected.clone().unwrap()[start_x][start_y], 1) } assert_eq!(find_knight_tour(size_x, size_y, start_x, start_y), expected); } )* } } test_find_knight_tour! { test_knight_tour_5x5: (5, 5, 0, 0, Some(vec![ vec![1, 6, 15, 10, 21], vec![14, 9, 20, 5, 16], vec![19, 2, 7, 22, 11], vec![8, 13, 24, 17, 4], vec![25, 18, 3, 12, 23], ])), test_knight_tour_6x6: (6, 6, 0, 0, Some(vec![ vec![1, 16, 7, 26, 11, 14], vec![34, 25, 12, 15, 6, 27], vec![17, 2, 33, 8, 13, 10], vec![32, 35, 24, 21, 28, 5], vec![23, 18, 3, 30, 9, 20], vec![36, 31, 22, 19, 4, 29], ])), test_knight_tour_8x8: (8, 8, 0, 0, Some(vec![ vec![1, 60, 39, 34, 31, 18, 9, 64], vec![38, 35, 32, 61, 10, 63, 30, 17], vec![59, 2, 37, 40, 33, 28, 19, 8], vec![36, 49, 42, 27, 62, 11, 16, 29], vec![43, 58, 3, 50, 41, 24, 7, 20], vec![48, 51, 46, 55, 26, 21, 12, 15], vec![57, 44, 53, 4, 23, 14, 25, 6], vec![52, 47, 56, 45, 54, 5, 22, 13], ])), test_no_solution: (5, 5, 2, 1, None::<Vec<Vec<usize>>>), test_invalid_start_position: (8, 8, 10, 10, None::<Vec<Vec<usize>>>), } }
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/backtracking/hamiltonian_cycle.rs
src/backtracking/hamiltonian_cycle.rs
//! This module provides functionality to find a Hamiltonian cycle in a directed or undirected graph. //! Source: [Wikipedia](https://en.wikipedia.org/wiki/Hamiltonian_path_problem) /// Represents potential errors when finding hamiltonian cycle on an adjacency matrix. #[derive(Debug, PartialEq, Eq)] pub enum FindHamiltonianCycleError { /// Indicates that the adjacency matrix is empty. EmptyAdjacencyMatrix, /// Indicates that the adjacency matrix is not square. ImproperAdjacencyMatrix, /// Indicates that the starting vertex is out of bounds. StartOutOfBound, } /// Represents a graph using an adjacency matrix. struct Graph { /// The adjacency matrix representing the graph. adjacency_matrix: Vec<Vec<bool>>, } impl Graph { /// Creates a new graph with the provided adjacency matrix. /// /// # Arguments /// /// * `adjacency_matrix` - A square matrix where each element indicates /// the presence (`true`) or absence (`false`) of an edge /// between two vertices. /// /// # Returns /// /// A `Result` containing the graph if successful, or an `FindHamiltonianCycleError` if there is an issue with the matrix. fn new(adjacency_matrix: Vec<Vec<bool>>) -> Result<Self, FindHamiltonianCycleError> { // Check if the adjacency matrix is empty. if adjacency_matrix.is_empty() { return Err(FindHamiltonianCycleError::EmptyAdjacencyMatrix); } // Validate that the adjacency matrix is square. if adjacency_matrix .iter() .any(|row| row.len() != adjacency_matrix.len()) { return Err(FindHamiltonianCycleError::ImproperAdjacencyMatrix); } Ok(Self { adjacency_matrix }) } /// Returns the number of vertices in the graph. fn num_vertices(&self) -> usize { self.adjacency_matrix.len() } /// Determines if it is safe to include vertex `v` in the Hamiltonian cycle path. /// /// # Arguments /// /// * `v` - The index of the vertex being considered. /// * `visited` - A reference to the vector representing the visited vertices. /// * `path` - A reference to the current path being explored. 
/// * `pos` - The position of the current vertex being considered. /// /// # Returns /// /// `true` if it is safe to include `v` in the path, `false` otherwise. fn is_safe(&self, v: usize, visited: &[bool], path: &[Option<usize>], pos: usize) -> bool { // Check if the current vertex and the last vertex in the path are adjacent. if !self.adjacency_matrix[path[pos - 1].unwrap()][v] { return false; } // Check if the vertex has already been included in the path. !visited[v] } /// Recursively searches for a Hamiltonian cycle. /// /// This function is called by `find_hamiltonian_cycle`. /// /// # Arguments /// /// * `path` - A mutable vector representing the current path being explored. /// * `visited` - A mutable vector representing the visited vertices. /// * `pos` - The position of the current vertex being considered. /// /// # Returns /// /// `true` if a Hamiltonian cycle is found, `false` otherwise. fn hamiltonian_cycle_util( &self, path: &mut [Option<usize>], visited: &mut [bool], pos: usize, ) -> bool { if pos == self.num_vertices() { // Check if there is an edge from the last included vertex to the first vertex. return self.adjacency_matrix[path[pos - 1].unwrap()][path[0].unwrap()]; } for v in 0..self.num_vertices() { if self.is_safe(v, visited, path, pos) { path[pos] = Some(v); visited[v] = true; if self.hamiltonian_cycle_util(path, visited, pos + 1) { return true; } path[pos] = None; visited[v] = false; } } false } /// Attempts to find a Hamiltonian cycle in the graph, starting from the specified vertex. /// /// A Hamiltonian cycle visits every vertex exactly once and returns to the starting vertex. /// /// # Note /// This implementation may not find all possible Hamiltonian cycles. /// It stops as soon as it finds one valid cycle. If multiple Hamiltonian cycles exist, /// only one will be returned. 
/// /// # Returns /// /// `Ok(Some(path))` if a Hamiltonian cycle is found, where `path` is a vector /// containing the indices of vertices in the cycle, starting and ending with the same vertex. /// /// `Ok(None)` if no Hamiltonian cycle exists. fn find_hamiltonian_cycle( &self, start_vertex: usize, ) -> Result<Option<Vec<usize>>, FindHamiltonianCycleError> { // Validate the start vertex. if start_vertex >= self.num_vertices() { return Err(FindHamiltonianCycleError::StartOutOfBound); } // Initialize the path. let mut path = vec![None; self.num_vertices()]; // Start at the specified vertex. path[0] = Some(start_vertex); // Initialize the visited vector. let mut visited = vec![false; self.num_vertices()]; visited[start_vertex] = true; if self.hamiltonian_cycle_util(&mut path, &mut visited, 1) { // Complete the cycle by returning to the starting vertex. path.push(Some(start_vertex)); Ok(Some(path.into_iter().map(Option::unwrap).collect())) } else { Ok(None) } } } /// Attempts to find a Hamiltonian cycle in a graph represented by an adjacency matrix, starting from a specified vertex. pub fn find_hamiltonian_cycle( adjacency_matrix: Vec<Vec<bool>>, start_vertex: usize, ) -> Result<Option<Vec<usize>>, FindHamiltonianCycleError> { Graph::new(adjacency_matrix)?.find_hamiltonian_cycle(start_vertex) } #[cfg(test)] mod tests { use super::*; macro_rules! hamiltonian_cycle_tests { ($($name:ident: $test_case:expr,)*) => { $( #[test] fn $name() { let (adjacency_matrix, start_vertex, expected) = $test_case; let result = find_hamiltonian_cycle(adjacency_matrix, start_vertex); assert_eq!(result, expected); } )* }; } hamiltonian_cycle_tests! 
{ test_complete_graph: ( vec![ vec![false, true, true, true], vec![true, false, true, true], vec![true, true, false, true], vec![true, true, true, false], ], 0, Ok(Some(vec![0, 1, 2, 3, 0])) ), test_directed_graph_with_cycle: ( vec![ vec![false, true, false, false, false], vec![false, false, true, true, false], vec![true, false, false, true, true], vec![false, false, true, false, true], vec![true, true, false, false, false], ], 2, Ok(Some(vec![2, 3, 4, 0, 1, 2])) ), test_undirected_graph_with_cycle: ( vec![ vec![false, true, false, false, true], vec![true, false, true, false, false], vec![false, true, false, true, false], vec![false, false, true, false, true], vec![true, false, false, true, false], ], 2, Ok(Some(vec![2, 1, 0, 4, 3, 2])) ), test_directed_graph_no_cycle: ( vec![ vec![false, true, false, true, false], vec![false, false, true, true, false], vec![false, false, false, true, false], vec![false, false, false, false, true], vec![false, false, true, false, false], ], 0, Ok(None::<Vec<usize>>) ), test_undirected_graph_no_cycle: ( vec![ vec![false, true, false, false, false], vec![true, false, true, true, false], vec![false, true, false, true, true], vec![false, true, true, false, true], vec![false, false, true, true, false], ], 0, Ok(None::<Vec<usize>>) ), test_triangle_graph: ( vec![ vec![false, true, false], vec![false, false, true], vec![true, false, false], ], 1, Ok(Some(vec![1, 2, 0, 1])) ), test_tree_graph: ( vec![ vec![false, true, false, true, false], vec![true, false, true, true, false], vec![false, true, false, false, false], vec![true, true, false, false, true], vec![false, false, false, true, false], ], 0, Ok(None::<Vec<usize>>) ), test_empty_graph: ( vec![], 0, Err(FindHamiltonianCycleError::EmptyAdjacencyMatrix) ), test_improper_graph: ( vec![ vec![false, true], vec![true], vec![false, true, true], vec![true, true, true, false] ], 0, Err(FindHamiltonianCycleError::ImproperAdjacencyMatrix) ), test_start_out_of_bound: ( vec![ vec![false, true, 
true], vec![true, false, true], vec![true, true, false], ], 3, Err(FindHamiltonianCycleError::StartOutOfBound) ), test_complex_directed_graph: ( vec![ vec![false, true, false, true, false, false], vec![false, false, true, false, true, false], vec![false, false, false, true, false, false], vec![false, true, false, false, true, false], vec![false, false, true, false, false, true], vec![true, false, false, false, false, false], ], 0, Ok(Some(vec![0, 1, 2, 3, 4, 5, 0])) ), single_node_self_loop: ( vec![ vec![true], ], 0, Ok(Some(vec![0, 0])) ), single_node: ( vec![ vec![false], ], 0, Ok(None) ), } }
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/backtracking/graph_coloring.rs
src/backtracking/graph_coloring.rs
//! This module provides functionality for generating all possible colorings of a undirected (or directed) graph //! given a certain number of colors. It includes the GraphColoring struct and methods //! for validating color assignments and finding all valid colorings. /// Represents potential errors when coloring on an adjacency matrix. #[derive(Debug, PartialEq, Eq)] pub enum GraphColoringError { // Indicates that the adjacency matrix is empty EmptyAdjacencyMatrix, // Indicates that the adjacency matrix is not squared ImproperAdjacencyMatrix, } /// Generates all possible valid colorings of a graph. /// /// # Arguments /// /// * `adjacency_matrix` - A 2D vector representing the adjacency matrix of the graph. /// * `num_colors` - The number of colors available for coloring the graph. /// /// # Returns /// /// * A `Result` containing an `Option` with a vector of solutions or a `GraphColoringError` if /// there is an issue with the matrix. pub fn generate_colorings( adjacency_matrix: Vec<Vec<bool>>, num_colors: usize, ) -> Result<Option<Vec<Vec<usize>>>, GraphColoringError> { Ok(GraphColoring::new(adjacency_matrix)?.find_solutions(num_colors)) } /// A struct representing a graph coloring problem. struct GraphColoring { // The adjacency matrix of the graph adjacency_matrix: Vec<Vec<bool>>, // The current colors assigned to each vertex vertex_colors: Vec<usize>, // Vector of all valid color assignments for the vertices found during the search solutions: Vec<Vec<usize>>, } impl GraphColoring { /// Creates a new GraphColoring instance. /// /// # Arguments /// /// * `adjacency_matrix` - A 2D vector representing the adjacency matrix of the graph. /// /// # Returns /// /// * A new instance of GraphColoring or a `GraphColoringError` if the matrix is empty or non-square. 
fn new(adjacency_matrix: Vec<Vec<bool>>) -> Result<Self, GraphColoringError> { let num_vertices = adjacency_matrix.len(); // Check if the adjacency matrix is empty if num_vertices == 0 { return Err(GraphColoringError::EmptyAdjacencyMatrix); } // Check if the adjacency matrix is square if adjacency_matrix.iter().any(|row| row.len() != num_vertices) { return Err(GraphColoringError::ImproperAdjacencyMatrix); } Ok(GraphColoring { adjacency_matrix, vertex_colors: vec![usize::MAX; num_vertices], solutions: Vec::new(), }) } /// Returns the number of vertices in the graph. fn num_vertices(&self) -> usize { self.adjacency_matrix.len() } /// Checks if a given color can be assigned to a vertex without causing a conflict. /// /// # Arguments /// /// * `vertex` - The index of the vertex to be colored. /// * `color` - The color to be assigned to the vertex. /// /// # Returns /// /// * `true` if the color can be assigned to the vertex, `false` otherwise. fn is_color_valid(&self, vertex: usize, color: usize) -> bool { for neighbor in 0..self.num_vertices() { // Check outgoing edges from vertex and incoming edges to vertex if (self.adjacency_matrix[vertex][neighbor] || self.adjacency_matrix[neighbor][vertex]) && self.vertex_colors[neighbor] == color { return false; } } true } /// Recursively finds all valid colorings for the graph. /// /// # Arguments /// /// * `vertex` - The current vertex to be colored. /// * `num_colors` - The number of colors available for coloring the graph. fn find_colorings(&mut self, vertex: usize, num_colors: usize) { if vertex == self.num_vertices() { self.solutions.push(self.vertex_colors.clone()); return; } for color in 0..num_colors { if self.is_color_valid(vertex, color) { self.vertex_colors[vertex] = color; self.find_colorings(vertex + 1, num_colors); self.vertex_colors[vertex] = usize::MAX; } } } /// Finds all solutions for the graph coloring problem. /// /// # Arguments /// /// * `num_colors` - The number of colors available for coloring the graph. 
/// /// # Returns /// /// * A `Result` containing an `Option` with a vector of solutions or a `GraphColoringError`. fn find_solutions(&mut self, num_colors: usize) -> Option<Vec<Vec<usize>>> { self.find_colorings(0, num_colors); if self.solutions.is_empty() { None } else { Some(std::mem::take(&mut self.solutions)) } } } #[cfg(test)] mod tests { use super::*; macro_rules! test_graph_coloring { ($($name:ident: $test_case:expr,)*) => { $( #[test] fn $name() { let (adjacency_matrix, num_colors, expected) = $test_case; let actual = generate_colorings(adjacency_matrix, num_colors); assert_eq!(actual, expected); } )* }; } test_graph_coloring! { test_complete_graph_with_3_colors: ( vec![ vec![false, true, true, true], vec![true, false, true, false], vec![true, true, false, true], vec![true, false, true, false], ], 3, Ok(Some(vec![ vec![0, 1, 2, 1], vec![0, 2, 1, 2], vec![1, 0, 2, 0], vec![1, 2, 0, 2], vec![2, 0, 1, 0], vec![2, 1, 0, 1], ])) ), test_linear_graph_with_2_colors: ( vec![ vec![false, true, false, false], vec![true, false, true, false], vec![false, true, false, true], vec![false, false, true, false], ], 2, Ok(Some(vec![ vec![0, 1, 0, 1], vec![1, 0, 1, 0], ])) ), test_incomplete_graph_with_insufficient_colors: ( vec![ vec![false, true, true], vec![true, false, true], vec![true, true, false], ], 1, Ok(None::<Vec<Vec<usize>>>) ), test_empty_graph: ( vec![], 1, Err(GraphColoringError::EmptyAdjacencyMatrix) ), test_non_square_matrix: ( vec![ vec![false, true, true], vec![true, false, true], ], 3, Err(GraphColoringError::ImproperAdjacencyMatrix) ), test_single_vertex_graph: ( vec![ vec![false], ], 1, Ok(Some(vec![ vec![0], ])) ), test_bipartite_graph_with_2_colors: ( vec![ vec![false, true, false, true], vec![true, false, true, false], vec![false, true, false, true], vec![true, false, true, false], ], 2, Ok(Some(vec![ vec![0, 1, 0, 1], vec![1, 0, 1, 0], ])) ), test_large_graph_with_3_colors: ( vec![ vec![false, true, true, false, true, true, false, true, true, false], 
vec![true, false, true, true, false, true, true, false, true, true], vec![true, true, false, true, true, false, true, true, false, true], vec![false, true, true, false, true, true, false, true, true, false], vec![true, false, true, true, false, true, true, false, true, true], vec![true, true, false, true, true, false, true, true, false, true], vec![false, true, true, false, true, true, false, true, true, false], vec![true, false, true, true, false, true, true, false, true, true], vec![true, true, false, true, true, false, true, true, false, true], vec![false, true, true, false, true, true, false, true, true, false], ], 3, Ok(Some(vec![ vec![0, 1, 2, 0, 1, 2, 0, 1, 2, 0], vec![0, 2, 1, 0, 2, 1, 0, 2, 1, 0], vec![1, 0, 2, 1, 0, 2, 1, 0, 2, 1], vec![1, 2, 0, 1, 2, 0, 1, 2, 0, 1], vec![2, 0, 1, 2, 0, 1, 2, 0, 1, 2], vec![2, 1, 0, 2, 1, 0, 2, 1, 0, 2], ])) ), test_disconnected_graph: ( vec![ vec![false, false, false], vec![false, false, false], vec![false, false, false], ], 2, Ok(Some(vec![ vec![0, 0, 0], vec![0, 0, 1], vec![0, 1, 0], vec![0, 1, 1], vec![1, 0, 0], vec![1, 0, 1], vec![1, 1, 0], vec![1, 1, 1], ])) ), test_no_valid_coloring: ( vec![ vec![false, true, true], vec![true, false, true], vec![true, true, false], ], 2, Ok(None::<Vec<Vec<usize>>>) ), test_more_colors_than_nodes: ( vec![ vec![true, true], vec![true, true], ], 3, Ok(Some(vec![ vec![0, 1], vec![0, 2], vec![1, 0], vec![1, 2], vec![2, 0], vec![2, 1], ])) ), test_no_coloring_with_zero_colors: ( vec![ vec![true], ], 0, Ok(None::<Vec<Vec<usize>>>) ), test_complete_graph_with_3_vertices_and_3_colors: ( vec![ vec![false, true, true], vec![true, false, true], vec![true, true, false], ], 3, Ok(Some(vec![ vec![0, 1, 2], vec![0, 2, 1], vec![1, 0, 2], vec![1, 2, 0], vec![2, 0, 1], vec![2, 1, 0], ])) ), test_directed_graph_with_3_colors: ( vec![ vec![false, true, false, true], vec![false, false, true, false], vec![true, false, false, true], vec![true, false, false, false], ], 3, Ok(Some(vec![ vec![0, 1, 2, 1], 
vec![0, 2, 1, 2], vec![1, 0, 2, 0], vec![1, 2, 0, 2], vec![2, 0, 1, 0], vec![2, 1, 0, 1], ])) ), test_directed_graph_no_valid_coloring: ( vec![ vec![false, true, false, true], vec![false, false, true, true], vec![true, false, false, true], vec![true, false, false, false], ], 3, Ok(None::<Vec<Vec<usize>>>) ), test_large_directed_graph_with_3_colors: ( vec![ vec![false, true, false, false, true, false, false, true, false, false], vec![false, false, true, false, false, true, false, false, true, false], vec![false, false, false, true, false, false, true, false, false, true], vec![true, false, false, false, true, false, false, true, false, false], vec![false, true, false, false, false, true, false, false, true, false], vec![false, false, true, false, false, false, true, false, false, true], vec![true, false, false, false, true, false, false, true, false, false], vec![false, true, false, false, false, true, false, false, true, false], vec![false, false, true, false, false, false, true, false, false, true], vec![true, false, false, false, true, false, false, true, false, false], ], 3, Ok(Some(vec![ vec![0, 1, 2, 1, 2, 0, 1, 2, 0, 1], vec![0, 2, 1, 2, 1, 0, 2, 1, 0, 2], vec![1, 0, 2, 0, 2, 1, 0, 2, 1, 0], vec![1, 2, 0, 2, 0, 1, 2, 0, 1, 2], vec![2, 0, 1, 0, 1, 2, 0, 1, 2, 0], vec![2, 1, 0, 1, 0, 2, 1, 0, 2, 1] ])) ), } }
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/graph/lowest_common_ancestor.rs
src/graph/lowest_common_ancestor.rs
/* Note: We will assume that here tree vertices are numbered from 1 to n. If a tree is not enumerated that way or its vertices are not represented using numbers, it can trivially be converted using Depth First Search manually or by using `src/graph/graph_enumeration.rs` Here we implement two different algorithms: - The online one is implemented using Sparse Table and has O(n.lg(n)) time complexity and memory usage. It answers each query in O(lg(n)). - The offline algorithm was discovered by Robert Tarjan. At first each query should be determined and saved. Then, vertices are visited in Depth First Search order and queries are answered using Disjoint Set Union algorithm. The time complexity is O(n.alpha(n) + q) and memory usage is O(n + q), but time complexity can be considered to be O(n + q), because alpha(n) < 5 for n < 10 ^ 600 */ use super::DisjointSetUnion; pub struct LowestCommonAncestorOnline { // Make members public to allow the user to fill them themself. pub parents_sparse_table: Vec<Vec<usize>>, pub height: Vec<usize>, } impl LowestCommonAncestorOnline { // Should be called once as: // fill_sparse_table(tree_root, 0, 0, adjacency_list) #[inline] fn get_parent(&self, v: usize, i: usize) -> usize { self.parents_sparse_table[v][i] } #[inline] fn num_parents(&self, v: usize) -> usize { self.parents_sparse_table[v].len() } pub fn new(num_vertices: usize) -> Self { let mut pars = vec![vec![0]; num_vertices + 1]; pars[0].clear(); LowestCommonAncestorOnline { parents_sparse_table: pars, height: vec![0; num_vertices + 1], } } pub fn fill_sparse_table( &mut self, vertex: usize, parent: usize, height: usize, adj: &[Vec<usize>], ) { self.parents_sparse_table[vertex][0] = parent; self.height[vertex] = height; let mut level = 1; let mut current_parent = parent; while self.num_parents(current_parent) >= level { current_parent = self.get_parent(current_parent, level - 1); level += 1; self.parents_sparse_table[vertex].push(current_parent); } for &child in 
adj[vertex].iter() { if child == parent { // It isn't a child! continue; } self.fill_sparse_table(child, vertex, height + 1, adj); } } pub fn get_ancestor(&self, mut v: usize, mut u: usize) -> usize { if self.height[v] < self.height[u] { std::mem::swap(&mut v, &mut u); } // Bring v up to so that it has the same height as u let height_diff = self.height[v] - self.height[u]; for i in 0..63 { let bit = 1 << i; if bit > height_diff { break; } if height_diff & bit != 0 { v = self.get_parent(v, i); } } if u == v { return u; } // `self.num_parents` of u and v should be equal for i in (0..self.num_parents(v)).rev() { let nv = self.get_parent(v, i); let nu = self.get_parent(u, i); if nv != nu { u = nu; v = nv; } } self.get_parent(v, 0) } } #[derive(Clone, Copy)] pub struct LCAQuery { other: usize, query_id: usize, } #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub struct QueryAnswer { query_id: usize, answer: usize, } pub struct LowestCommonAncestorOffline { pub queries: Vec<Vec<LCAQuery>>, dsu: DisjointSetUnion, /* The LSB of dsu_parent[v] determines whether it was visited or not. The rest of the number determines the vertex that represents a particular set in DSU. */ dsu_parent: Vec<u64>, } impl LowestCommonAncestorOffline { pub fn new(num_vertices: usize) -> Self { LowestCommonAncestorOffline { queries: vec![vec![]; num_vertices + 1], dsu: DisjointSetUnion::new(num_vertices), dsu_parent: vec![0; num_vertices + 1], } } pub fn add_query(&mut self, u: usize, v: usize, query_id: usize) { // We should add this query to both vertices, and it will be answered // the second time it is seen in DFS. 
self.queries[u].push(LCAQuery { other: v, query_id }); if u == v { return; } self.queries[v].push(LCAQuery { other: u, query_id }); } fn calculate_answers( &mut self, vertex: usize, parent: usize, adj: &[Vec<usize>], answers: &mut Vec<QueryAnswer>, ) { self.dsu_parent[vertex] = (vertex as u64) << 1; for &child in adj[vertex].iter() { if child == parent { continue; } self.calculate_answers(child, vertex, adj, answers); self.dsu.merge(child, vertex); let set = self.dsu.find_set(vertex); self.dsu_parent[set] = ((vertex as u64) << 1) | (self.dsu_parent[set] & 1); } self.dsu_parent[vertex] |= 0b1; for &query in self.queries[vertex].iter() { if self.dsu_parent[query.other] & 1 != 0 { // It has been visited answers.push(QueryAnswer { query_id: query.query_id, answer: (self.dsu_parent[self.dsu.find_set(query.other)] >> 1) as usize, }); } } } pub fn answer_queries(&mut self, root: usize, adj: &[Vec<usize>]) -> Vec<QueryAnswer> { let mut answers = Vec::new(); self.calculate_answers(root, 0, adj, &mut answers); answers } } #[cfg(test)] mod tests { use super::*; #[test] fn small_binary_tree() { let num_verts = 127; let mut tree: Vec<Vec<usize>> = vec![vec![]; num_verts + 1]; for i in 1..=num_verts >> 1 { let left_child = i << 1; let right_child = left_child + 1; tree[i].push(left_child); tree[i].push(right_child); tree[left_child].push(i); tree[right_child].push(i); } let mut online_answers: Vec<QueryAnswer> = Vec::new(); let mut online = LowestCommonAncestorOnline::new(num_verts); let mut offline = LowestCommonAncestorOffline::new(num_verts); let mut query_id = 314; // A random number, doesn't matter online.fill_sparse_table(1, 0, 0, &tree); for i in 1..=num_verts { for j in 1..i { // Query every possible pair online_answers.push(QueryAnswer { query_id, answer: online.get_ancestor(i, j), }); offline.add_query(i, j, query_id); query_id += 1; } } let mut offline_answers = offline.answer_queries(1, &tree); offline_answers.sort_unstable_by(|a1, a2| a1.query_id.cmp(&a2.query_id)); 
assert_eq!(offline_answers, online_answers); } }
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/graph/bellman_ford.rs
src/graph/bellman_ford.rs
use std::collections::BTreeMap; use std::ops::Add; use std::ops::Neg; type Graph<V, E> = BTreeMap<V, BTreeMap<V, E>>; // performs the Bellman-Ford algorithm on the given graph from the given start // the graph is an undirected graph // // if there is a negative weighted loop it returns None // else it returns a map that for each reachable vertex associates the distance and the predecessor // since the start has no predecessor but is reachable, map[start] will be None pub fn bellman_ford< V: Ord + Copy, E: Ord + Copy + Add<Output = E> + Neg<Output = E> + std::ops::Sub<Output = E>, >( graph: &Graph<V, E>, start: &V, ) -> Option<BTreeMap<V, Option<(V, E)>>> { let mut ans: BTreeMap<V, Option<(V, E)>> = BTreeMap::new(); ans.insert(*start, None); for _ in 1..(graph.len()) { for (u, edges) in graph { let dist_u = match ans.get(u) { Some(Some((_, d))) => Some(*d), Some(None) => None, None => continue, }; for (v, d) in edges { match ans.get(v) { Some(Some((_, dist))) // if this is a longer path, do nothing if match dist_u { Some(dist_u) => dist_u + *d >= *dist, None => d >= dist, } => {} Some(None) => { match dist_u { // if dist_u + d < 0 there is a negative loop going by start // else it's just a longer path Some(dist_u) if dist_u >= -*d => {} // negative self edge or negative loop _ => { if *d > *d + *d { return None; } } }; } // it's a shorter path: either dist_v was infinite or it was longer than dist_u + d _ => { ans.insert( *v, Some(( *u, match dist_u { Some(dist) => dist + *d, None => *d, }, )), ); } } } } } for (u, edges) in graph { for (v, d) in edges { match (ans.get(u), ans.get(v)) { (Some(None), Some(None)) if *d > *d + *d => return None, (Some(None), Some(Some((_, dv)))) if d < dv => return None, (Some(Some((_, du))), Some(None)) if *du < -*d => return None, (Some(Some((_, du))), Some(Some((_, dv)))) if *du + *d < *dv => return None, (_, _) => {} } } } Some(ans) } #[cfg(test)] mod tests { use super::{bellman_ford, Graph}; use std::collections::BTreeMap; fn 
add_edge<V: Ord + Copy, E: Ord>(graph: &mut Graph<V, E>, v1: V, v2: V, c: E) { graph.entry(v1).or_default().insert(v2, c); graph.entry(v2).or_default(); } #[test] fn single_vertex() { let mut graph: Graph<isize, isize> = BTreeMap::new(); graph.insert(0, BTreeMap::new()); let mut dists = BTreeMap::new(); dists.insert(0, None); assert_eq!(bellman_ford(&graph, &0), Some(dists)); } #[test] fn single_edge() { let mut graph = BTreeMap::new(); add_edge(&mut graph, 0, 1, 2); let mut dists_0 = BTreeMap::new(); dists_0.insert(0, None); dists_0.insert(1, Some((0, 2))); assert_eq!(bellman_ford(&graph, &0), Some(dists_0)); let mut dists_1 = BTreeMap::new(); dists_1.insert(1, None); assert_eq!(bellman_ford(&graph, &1), Some(dists_1)); } #[test] fn tree_1() { let mut graph = BTreeMap::new(); let mut dists = BTreeMap::new(); dists.insert(1, None); for i in 1..100 { add_edge(&mut graph, i, i * 2, i * 2); add_edge(&mut graph, i, i * 2 + 1, i * 2 + 1); match dists[&i] { Some((_, d)) => { dists.insert(i * 2, Some((i, d + i * 2))); dists.insert(i * 2 + 1, Some((i, d + i * 2 + 1))); } None => { dists.insert(i * 2, Some((i, i * 2))); dists.insert(i * 2 + 1, Some((i, i * 2 + 1))); } } } assert_eq!(bellman_ford(&graph, &1), Some(dists)); } #[test] fn graph_1() { let mut graph = BTreeMap::new(); add_edge(&mut graph, 'a', 'c', 12); add_edge(&mut graph, 'a', 'd', 60); add_edge(&mut graph, 'b', 'a', 10); add_edge(&mut graph, 'c', 'b', 20); add_edge(&mut graph, 'c', 'd', 32); add_edge(&mut graph, 'e', 'a', 7); let mut dists_a = BTreeMap::new(); dists_a.insert('a', None); dists_a.insert('c', Some(('a', 12))); dists_a.insert('d', Some(('c', 44))); dists_a.insert('b', Some(('c', 32))); assert_eq!(bellman_ford(&graph, &'a'), Some(dists_a)); let mut dists_b = BTreeMap::new(); dists_b.insert('b', None); dists_b.insert('a', Some(('b', 10))); dists_b.insert('c', Some(('a', 22))); dists_b.insert('d', Some(('c', 54))); assert_eq!(bellman_ford(&graph, &'b'), Some(dists_b)); let mut dists_c = 
BTreeMap::new(); dists_c.insert('c', None); dists_c.insert('b', Some(('c', 20))); dists_c.insert('d', Some(('c', 32))); dists_c.insert('a', Some(('b', 30))); assert_eq!(bellman_ford(&graph, &'c'), Some(dists_c)); let mut dists_d = BTreeMap::new(); dists_d.insert('d', None); assert_eq!(bellman_ford(&graph, &'d'), Some(dists_d)); let mut dists_e = BTreeMap::new(); dists_e.insert('e', None); dists_e.insert('a', Some(('e', 7))); dists_e.insert('c', Some(('a', 19))); dists_e.insert('d', Some(('c', 51))); dists_e.insert('b', Some(('c', 39))); assert_eq!(bellman_ford(&graph, &'e'), Some(dists_e)); } #[test] fn graph_2() { let mut graph = BTreeMap::new(); add_edge(&mut graph, 0, 1, 6); add_edge(&mut graph, 0, 3, 7); add_edge(&mut graph, 1, 2, 5); add_edge(&mut graph, 1, 3, 8); add_edge(&mut graph, 1, 4, -4); add_edge(&mut graph, 2, 1, -2); add_edge(&mut graph, 3, 2, -3); add_edge(&mut graph, 3, 4, 9); add_edge(&mut graph, 4, 0, 3); add_edge(&mut graph, 4, 2, 7); let mut dists_0 = BTreeMap::new(); dists_0.insert(0, None); dists_0.insert(1, Some((2, 2))); dists_0.insert(2, Some((3, 4))); dists_0.insert(3, Some((0, 7))); dists_0.insert(4, Some((1, -2))); assert_eq!(bellman_ford(&graph, &0), Some(dists_0)); let mut dists_1 = BTreeMap::new(); dists_1.insert(0, Some((4, -1))); dists_1.insert(1, None); dists_1.insert(2, Some((4, 3))); dists_1.insert(3, Some((0, 6))); dists_1.insert(4, Some((1, -4))); assert_eq!(bellman_ford(&graph, &1), Some(dists_1)); let mut dists_2 = BTreeMap::new(); dists_2.insert(0, Some((4, -3))); dists_2.insert(1, Some((2, -2))); dists_2.insert(2, None); dists_2.insert(3, Some((0, 4))); dists_2.insert(4, Some((1, -6))); assert_eq!(bellman_ford(&graph, &2), Some(dists_2)); let mut dists_3 = BTreeMap::new(); dists_3.insert(0, Some((4, -6))); dists_3.insert(1, Some((2, -5))); dists_3.insert(2, Some((3, -3))); dists_3.insert(3, None); dists_3.insert(4, Some((1, -9))); assert_eq!(bellman_ford(&graph, &3), Some(dists_3)); let mut dists_4 = BTreeMap::new(); 
dists_4.insert(0, Some((4, 3))); dists_4.insert(1, Some((2, 5))); dists_4.insert(2, Some((4, 7))); dists_4.insert(3, Some((0, 10))); dists_4.insert(4, None); assert_eq!(bellman_ford(&graph, &4), Some(dists_4)); } #[test] fn graph_with_negative_loop() { let mut graph = BTreeMap::new(); add_edge(&mut graph, 0, 1, 6); add_edge(&mut graph, 0, 3, 7); add_edge(&mut graph, 1, 2, 5); add_edge(&mut graph, 1, 3, 8); add_edge(&mut graph, 1, 4, -4); add_edge(&mut graph, 2, 1, -4); add_edge(&mut graph, 3, 2, -3); add_edge(&mut graph, 3, 4, 9); add_edge(&mut graph, 4, 0, 3); add_edge(&mut graph, 4, 2, 7); assert_eq!(bellman_ford(&graph, &0), None); assert_eq!(bellman_ford(&graph, &1), None); assert_eq!(bellman_ford(&graph, &2), None); assert_eq!(bellman_ford(&graph, &3), None); assert_eq!(bellman_ford(&graph, &4), None); } }
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/graph/prufer_code.rs
src/graph/prufer_code.rs
use std::collections::{BTreeMap, BTreeSet, BinaryHeap}; type Graph<V> = BTreeMap<V, Vec<V>>; pub fn prufer_encode<V: Ord + Copy>(tree: &Graph<V>) -> Vec<V> { if tree.len() <= 2 { return vec![]; } let mut result: Vec<V> = Vec::with_capacity(tree.len() - 2); let mut queue = BinaryHeap::new(); let mut in_tree = BTreeSet::new(); let mut degree = BTreeMap::new(); for (vertex, adj) in tree { in_tree.insert(*vertex); degree.insert(*vertex, adj.len()); if adj.len() == 1 { queue.push(*vertex); } } for _ in 2..tree.len() { let v = queue.pop().unwrap(); in_tree.remove(&v); let u = tree[&v].iter().find(|u| in_tree.contains(u)).unwrap(); result.push(*u); *degree.get_mut(u).unwrap() -= 1; if degree[u] == 1 { queue.push(*u); } } result } #[inline] fn add_directed_edge<V: Ord + Copy>(tree: &mut Graph<V>, a: V, b: V) { tree.entry(a).or_default().push(b); } #[inline] fn add_edge<V: Ord + Copy>(tree: &mut Graph<V>, a: V, b: V) { add_directed_edge(tree, a, b); add_directed_edge(tree, b, a); } pub fn prufer_decode<V: Ord + Copy>(code: &[V], vertex_list: &[V]) -> Graph<V> { // For many cases, this function won't fail even if given unsuitable code // array. As such, returning really unlikely errors doesn't make much sense. 
let mut result = BTreeMap::new(); let mut list_count: BTreeMap<V, usize> = BTreeMap::new(); for vertex in code { *list_count.entry(*vertex).or_insert(0) += 1; } let mut queue = BinaryHeap::from( vertex_list .iter() .filter(|v| !list_count.contains_key(v)) .cloned() .collect::<Vec<V>>(), ); for vertex in code { let child = queue.pop().unwrap(); add_edge(&mut result, child, *vertex); let cnt = list_count.get_mut(vertex).unwrap(); *cnt -= 1; if *cnt == 0 { queue.push(*vertex); } } let u = queue.pop().unwrap(); let v = queue.pop().unwrap(); add_edge(&mut result, u, v); result } #[cfg(test)] mod tests { use super::{add_edge, prufer_decode, prufer_encode, Graph}; fn equal_graphs<V: Ord + Copy>(g1: &mut Graph<V>, g2: &mut Graph<V>) -> bool { for adj in g1.values_mut() { adj.sort(); } for adj in g2.values_mut() { adj.sort(); } g1 == g2 } #[test] fn small_trees() { let mut g: Graph<u32> = Graph::new(); // Binary tree with 7 vertices let edges = vec![(1, 2), (1, 3), (2, 4), (2, 5), (3, 6), (3, 7)]; for (u, v) in edges { add_edge(&mut g, u, v); } let code = prufer_encode(&g); let vertices = g.keys().cloned().collect::<Vec<u32>>(); let mut decoded = prufer_decode(&code, &vertices); assert_eq!(code, vec![3, 3, 2, 2, 1]); assert!(equal_graphs(&mut g, &mut decoded)); g.clear(); // A path of length 10 for v in 2..=9 { g.insert(v, vec![v - 1, v + 1]); } g.insert(1, vec![2]); g.insert(10, vec![9]); let code = prufer_encode(&g); let vertices = g.keys().cloned().collect::<Vec<u32>>(); let mut decoded = prufer_decode(&code, &vertices); assert_eq!(code, vec![9, 8, 7, 6, 5, 4, 3, 2]); assert!(equal_graphs(&mut g, &mut decoded)); g.clear(); // 7-5-3-1-2-4-6 let edges = vec![(1, 2), (2, 4), (4, 6), (1, 3), (3, 5), (5, 7)]; for (u, v) in edges { add_edge(&mut g, u, v); } let code = prufer_encode(&g); let vertices = g.keys().cloned().collect::<Vec<u32>>(); let mut decoded = prufer_decode(&code, &vertices); assert_eq!(code, vec![5, 4, 3, 2, 1]); assert!(equal_graphs(&mut g, &mut decoded)); } }
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/graph/ford_fulkerson.rs
src/graph/ford_fulkerson.rs
//! The Ford-Fulkerson algorithm is a widely used algorithm to solve the maximum flow problem in a flow network. //! //! The maximum flow problem involves determining the maximum amount of flow that can be sent from a source vertex to a sink vertex //! in a directed weighted graph, subject to capacity constraints on the edges. use std::collections::VecDeque; /// Enum representing the possible errors that can occur when running the Ford-Fulkerson algorithm. #[derive(Debug, PartialEq)] pub enum FordFulkersonError { EmptyGraph, ImproperGraph, SourceOutOfBounds, SinkOutOfBounds, } /// Performs a Breadth-First Search (BFS) on the residual graph to find an augmenting path /// from the source vertex `source` to the sink vertex `sink`. /// /// # Arguments /// /// * `graph` - A reference to the residual graph represented as an adjacency matrix. /// * `source` - The source vertex. /// * `sink` - The sink vertex. /// * `parent` - A mutable reference to the parent array used to store the augmenting path. /// /// # Returns /// /// Returns `true` if an augmenting path is found from `source` to `sink`, `false` otherwise. fn bfs(graph: &[Vec<usize>], source: usize, sink: usize, parent: &mut [usize]) -> bool { let mut visited = vec![false; graph.len()]; visited[source] = true; parent[source] = usize::MAX; let mut queue = VecDeque::new(); queue.push_back(source); while let Some(current_vertex) = queue.pop_front() { for (previous_vertex, &capacity) in graph[current_vertex].iter().enumerate() { if !visited[previous_vertex] && capacity > 0 { visited[previous_vertex] = true; parent[previous_vertex] = current_vertex; if previous_vertex == sink { return true; } queue.push_back(previous_vertex); } } } false } /// Validates the input parameters for the Ford-Fulkerson algorithm. /// /// This function checks if the provided graph, source vertex, and sink vertex /// meet the requirements for the Ford-Fulkerson algorithm. 
It ensures the graph /// is non-empty, square (each row has the same length as the number of rows), and /// that the source and sink vertices are within the valid range of vertex indices. /// /// # Arguments /// /// * `graph` - A reference to the flow network represented as an adjacency matrix. /// * `source` - The source vertex. /// * `sink` - The sink vertex. /// /// # Returns /// /// Returns `Ok(())` if the input parameters are valid, otherwise returns an appropriate /// `FordFulkersonError`. fn validate_ford_fulkerson_input( graph: &[Vec<usize>], source: usize, sink: usize, ) -> Result<(), FordFulkersonError> { if graph.is_empty() { return Err(FordFulkersonError::EmptyGraph); } if graph.iter().any(|row| row.len() != graph.len()) { return Err(FordFulkersonError::ImproperGraph); } if source >= graph.len() { return Err(FordFulkersonError::SourceOutOfBounds); } if sink >= graph.len() { return Err(FordFulkersonError::SinkOutOfBounds); } Ok(()) } /// Applies the Ford-Fulkerson algorithm to find the maximum flow in a flow network /// represented by a weighted directed graph. /// /// # Arguments /// /// * `graph` - A mutable reference to the flow network represented as an adjacency matrix. /// * `source` - The source vertex. /// * `sink` - The sink vertex. 
/// /// # Returns /// /// Returns the maximum flow and the residual graph pub fn ford_fulkerson( graph: &[Vec<usize>], source: usize, sink: usize, ) -> Result<usize, FordFulkersonError> { validate_ford_fulkerson_input(graph, source, sink)?; let mut residual_graph = graph.to_owned(); let mut parent = vec![usize::MAX; graph.len()]; let mut max_flow = 0; while bfs(&residual_graph, source, sink, &mut parent) { let mut path_flow = usize::MAX; let mut previous_vertex = sink; while previous_vertex != source { let current_vertex = parent[previous_vertex]; path_flow = path_flow.min(residual_graph[current_vertex][previous_vertex]); previous_vertex = current_vertex; } previous_vertex = sink; while previous_vertex != source { let current_vertex = parent[previous_vertex]; residual_graph[current_vertex][previous_vertex] -= path_flow; residual_graph[previous_vertex][current_vertex] += path_flow; previous_vertex = current_vertex; } max_flow += path_flow; } Ok(max_flow) } #[cfg(test)] mod tests { use super::*; macro_rules! test_max_flow { ($($name:ident: $tc:expr,)* ) => { $( #[test] fn $name() { let (graph, source, sink, expected_result) = $tc; assert_eq!(ford_fulkerson(&graph, source, sink), expected_result); } )* }; } test_max_flow! 
{ test_empty_graph: ( vec![], 0, 0, Err(FordFulkersonError::EmptyGraph), ), test_source_out_of_bound: ( vec![ vec![0, 8, 0, 0, 3, 0], vec![0, 0, 9, 0, 0, 0], vec![0, 0, 0, 0, 7, 2], vec![0, 0, 0, 0, 0, 5], vec![0, 0, 7, 4, 0, 0], vec![0, 0, 0, 0, 0, 0], ], 6, 5, Err(FordFulkersonError::SourceOutOfBounds), ), test_sink_out_of_bound: ( vec![ vec![0, 8, 0, 0, 3, 0], vec![0, 0, 9, 0, 0, 0], vec![0, 0, 0, 0, 7, 2], vec![0, 0, 0, 0, 0, 5], vec![0, 0, 7, 4, 0, 0], vec![0, 0, 0, 0, 0, 0], ], 0, 6, Err(FordFulkersonError::SinkOutOfBounds), ), test_improper_graph: ( vec![ vec![0, 8], vec![0], ], 0, 1, Err(FordFulkersonError::ImproperGraph), ), test_graph_with_small_flow: ( vec![ vec![0, 8, 0, 0, 3, 0], vec![0, 0, 9, 0, 0, 0], vec![0, 0, 0, 0, 7, 2], vec![0, 0, 0, 0, 0, 5], vec![0, 0, 7, 4, 0, 0], vec![0, 0, 0, 0, 0, 0], ], 0, 5, Ok(6), ), test_graph_with_medium_flow: ( vec![ vec![0, 10, 0, 10, 0, 0], vec![0, 0, 4, 2, 8, 0], vec![0, 0, 0, 0, 0, 10], vec![0, 0, 0, 0, 9, 0], vec![0, 0, 6, 0, 0, 10], vec![0, 0, 0, 0, 0, 0], ], 0, 5, Ok(19), ), test_graph_with_large_flow: ( vec![ vec![0, 12, 0, 13, 0, 0], vec![0, 0, 10, 0, 0, 0], vec![0, 0, 0, 13, 3, 15], vec![0, 0, 7, 0, 15, 0], vec![0, 0, 6, 0, 0, 17], vec![0, 0, 0, 0, 0, 0], ], 0, 5, Ok(23), ), test_complex_graph: ( vec![ vec![0, 16, 13, 0, 0, 0], vec![0, 0, 10, 12, 0, 0], vec![0, 4, 0, 0, 14, 0], vec![0, 0, 9, 0, 0, 20], vec![0, 0, 0, 7, 0, 4], vec![0, 0, 0, 0, 0, 0], ], 0, 5, Ok(23), ), test_disconnected_graph: ( vec![ vec![0, 0, 0, 0], vec![0, 0, 0, 1], vec![0, 0, 0, 1], vec![0, 0, 0, 0], ], 0, 3, Ok(0), ), test_unconnected_sink: ( vec![ vec![0, 4, 0, 3, 0, 0], vec![0, 0, 4, 0, 8, 0], vec![0, 0, 0, 3, 0, 2], vec![0, 0, 0, 0, 6, 0], vec![0, 0, 6, 0, 0, 6], vec![0, 0, 0, 0, 0, 0], ], 0, 5, Ok(7), ), test_no_edges: ( vec![ vec![0, 0, 0], vec![0, 0, 0], vec![0, 0, 0], ], 0, 2, Ok(0), ), test_single_vertex: ( vec![ vec![0], ], 0, 0, Ok(0), ), test_self_loop: ( vec![ vec![10, 0], vec![0, 0], ], 0, 1, Ok(0), ), 
test_same_source_sink: ( vec![ vec![0, 10, 10], vec![0, 0, 10], vec![0, 0, 0], ], 0, 0, Ok(0), ), } }
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/graph/astar.rs
src/graph/astar.rs
use std::{ collections::{BTreeMap, BinaryHeap}, ops::Add, }; use num_traits::Zero; type Graph<V, E> = BTreeMap<V, BTreeMap<V, E>>; #[derive(Clone, Debug, Eq, PartialEq)] struct Candidate<V, E> { estimated_weight: E, real_weight: E, state: V, } impl<V: Ord + Copy, E: Ord + Copy> PartialOrd for Candidate<V, E> { fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> { // Note the inverted order; we want nodes with lesser weight to have // higher priority Some(self.cmp(other)) } } impl<V: Ord + Copy, E: Ord + Copy> Ord for Candidate<V, E> { fn cmp(&self, other: &Self) -> std::cmp::Ordering { // Note the inverted order; we want nodes with lesser weight to have // higher priority other.estimated_weight.cmp(&self.estimated_weight) } } pub fn astar<V: Ord + Copy, E: Ord + Copy + Add<Output = E> + Zero>( graph: &Graph<V, E>, start: V, target: V, heuristic: impl Fn(V) -> E, ) -> Option<(E, Vec<V>)> { // traversal front let mut queue = BinaryHeap::new(); // maps each node to its predecessor in the final path let mut previous = BTreeMap::new(); // weights[v] is the accumulated weight from start to v let mut weights = BTreeMap::new(); // initialize traversal weights.insert(start, E::zero()); queue.push(Candidate { estimated_weight: heuristic(start), real_weight: E::zero(), state: start, }); while let Some(Candidate { real_weight, state: current, .. 
}) = queue.pop() { if current == target { break; } for (&next, &weight) in &graph[&current] { let real_weight = real_weight + weight; if weights .get(&next) .is_none_or(|&weight| real_weight < weight) { // current allows us to reach next with lower weight (or at all) // add next to the front let estimated_weight = real_weight + heuristic(next); weights.insert(next, real_weight); queue.push(Candidate { estimated_weight, real_weight, state: next, }); previous.insert(next, current); } } } let weight = if let Some(&weight) = weights.get(&target) { weight } else { // we did not reach target from start return None; }; // build path in reverse let mut current = target; let mut path = vec![current]; while current != start { let prev = previous .get(&current) .copied() .expect("We reached the target, but are unable to reconsistute the path"); current = prev; path.push(current); } path.reverse(); Some((weight, path)) } #[cfg(test)] mod tests { use super::{astar, Graph}; use num_traits::Zero; use std::collections::BTreeMap; // the null heuristic make A* equivalent to Dijkstra fn null_heuristic<V, E: Zero>(_v: V) -> E { E::zero() } fn add_edge<V: Ord + Copy, E: Ord>(graph: &mut Graph<V, E>, v1: V, v2: V, c: E) { graph.entry(v1).or_default().insert(v2, c); graph.entry(v2).or_default(); } #[test] fn single_vertex() { let mut graph: Graph<usize, usize> = BTreeMap::new(); graph.insert(0, BTreeMap::new()); assert_eq!(astar(&graph, 0, 0, null_heuristic), Some((0, vec![0]))); assert_eq!(astar(&graph, 0, 1, null_heuristic), None); } #[test] fn single_edge() { let mut graph = BTreeMap::new(); add_edge(&mut graph, 0, 1, 2); assert_eq!(astar(&graph, 0, 1, null_heuristic), Some((2, vec![0, 1]))); assert_eq!(astar(&graph, 1, 0, null_heuristic), None); } #[test] fn graph_1() { let mut graph = BTreeMap::new(); add_edge(&mut graph, 'a', 'c', 12); add_edge(&mut graph, 'a', 'd', 60); add_edge(&mut graph, 'b', 'a', 10); add_edge(&mut graph, 'c', 'b', 20); add_edge(&mut graph, 'c', 'd', 32); 
add_edge(&mut graph, 'e', 'a', 7); // from a assert_eq!( astar(&graph, 'a', 'a', null_heuristic), Some((0, vec!['a'])) ); assert_eq!( astar(&graph, 'a', 'b', null_heuristic), Some((32, vec!['a', 'c', 'b'])) ); assert_eq!( astar(&graph, 'a', 'c', null_heuristic), Some((12, vec!['a', 'c'])) ); assert_eq!( astar(&graph, 'a', 'd', null_heuristic), Some((12 + 32, vec!['a', 'c', 'd'])) ); assert_eq!(astar(&graph, 'a', 'e', null_heuristic), None); // from b assert_eq!( astar(&graph, 'b', 'a', null_heuristic), Some((10, vec!['b', 'a'])) ); assert_eq!( astar(&graph, 'b', 'b', null_heuristic), Some((0, vec!['b'])) ); assert_eq!( astar(&graph, 'b', 'c', null_heuristic), Some((10 + 12, vec!['b', 'a', 'c'])) ); assert_eq!( astar(&graph, 'b', 'd', null_heuristic), Some((10 + 12 + 32, vec!['b', 'a', 'c', 'd'])) ); assert_eq!(astar(&graph, 'b', 'e', null_heuristic), None); // from c assert_eq!( astar(&graph, 'c', 'a', null_heuristic), Some((20 + 10, vec!['c', 'b', 'a'])) ); assert_eq!( astar(&graph, 'c', 'b', null_heuristic), Some((20, vec!['c', 'b'])) ); assert_eq!( astar(&graph, 'c', 'c', null_heuristic), Some((0, vec!['c'])) ); assert_eq!( astar(&graph, 'c', 'd', null_heuristic), Some((32, vec!['c', 'd'])) ); assert_eq!(astar(&graph, 'c', 'e', null_heuristic), None); // from d assert_eq!(astar(&graph, 'd', 'a', null_heuristic), None); assert_eq!(astar(&graph, 'd', 'b', null_heuristic), None); assert_eq!(astar(&graph, 'd', 'c', null_heuristic), None); assert_eq!( astar(&graph, 'd', 'd', null_heuristic), Some((0, vec!['d'])) ); assert_eq!(astar(&graph, 'd', 'e', null_heuristic), None); // from e assert_eq!( astar(&graph, 'e', 'a', null_heuristic), Some((7, vec!['e', 'a'])) ); assert_eq!( astar(&graph, 'e', 'b', null_heuristic), Some((7 + 12 + 20, vec!['e', 'a', 'c', 'b'])) ); assert_eq!( astar(&graph, 'e', 'c', null_heuristic), Some((7 + 12, vec!['e', 'a', 'c'])) ); assert_eq!( astar(&graph, 'e', 'd', null_heuristic), Some((7 + 12 + 32, vec!['e', 'a', 'c', 'd'])) ); assert_eq!( 
astar(&graph, 'e', 'e', null_heuristic), Some((0, vec!['e'])) ); } #[test] fn test_heuristic() { // make a grid let mut graph = BTreeMap::new(); let rows = 100; let cols = 100; for row in 0..rows { for col in 0..cols { add_edge(&mut graph, (row, col), (row + 1, col), 1); add_edge(&mut graph, (row, col), (row, col + 1), 1); add_edge(&mut graph, (row, col), (row + 1, col + 1), 1); add_edge(&mut graph, (row + 1, col), (row, col), 1); add_edge(&mut graph, (row + 1, col + 1), (row, col), 1); } } // Dijkstra would explore most of the 101 × 101 nodes // the heuristic should allow exploring only about 200 nodes let now = std::time::Instant::now(); let res = astar(&graph, (0, 0), (100, 90), |(i, j)| 100 - i + 90 - j); assert!(now.elapsed() < std::time::Duration::from_millis(10)); let (weight, path) = res.unwrap(); assert_eq!(weight, 100); assert_eq!(path.len(), 101); } }
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/graph/detect_cycle.rs
src/graph/detect_cycle.rs
use std::collections::{HashMap, HashSet, VecDeque}; use crate::data_structures::{graph::Graph, DirectedGraph, UndirectedGraph}; pub trait DetectCycle { fn detect_cycle_dfs(&self) -> bool; fn detect_cycle_bfs(&self) -> bool; } // Helper function to detect cycle in an undirected graph using DFS graph traversal fn undirected_graph_detect_cycle_dfs<'a>( graph: &'a UndirectedGraph, visited_node: &mut HashSet<&'a String>, parent: Option<&'a String>, u: &'a String, ) -> bool { visited_node.insert(u); for (v, _) in graph.adjacency_table().get(u).unwrap() { if matches!(parent, Some(parent) if v == parent) { continue; } if visited_node.contains(v) || undirected_graph_detect_cycle_dfs(graph, visited_node, Some(u), v) { return true; } } false } // Helper function to detect cycle in an undirected graph using BFS graph traversal fn undirected_graph_detect_cycle_bfs<'a>( graph: &'a UndirectedGraph, visited_node: &mut HashSet<&'a String>, u: &'a String, ) -> bool { visited_node.insert(u); // Initialize the queue for BFS, storing (current node, parent node) tuples let mut queue = VecDeque::<(&String, Option<&String>)>::new(); queue.push_back((u, None)); while let Some((u, parent)) = queue.pop_front() { for (v, _) in graph.adjacency_table().get(u).unwrap() { if matches!(parent, Some(parent) if v == parent) { continue; } if visited_node.contains(v) { return true; } visited_node.insert(v); queue.push_back((v, Some(u))); } } false } impl DetectCycle for UndirectedGraph { fn detect_cycle_dfs(&self) -> bool { let mut visited_node = HashSet::<&String>::new(); let adj = self.adjacency_table(); for u in adj.keys() { if !visited_node.contains(u) && undirected_graph_detect_cycle_dfs(self, &mut visited_node, None, u) { return true; } } false } fn detect_cycle_bfs(&self) -> bool { let mut visited_node = HashSet::<&String>::new(); let adj = self.adjacency_table(); for u in adj.keys() { if !visited_node.contains(u) && undirected_graph_detect_cycle_bfs(self, &mut visited_node, u) { return true; } 
} false } } // Helper function to detect cycle in a directed graph using DFS graph traversal fn directed_graph_detect_cycle_dfs<'a>( graph: &'a DirectedGraph, visited_node: &mut HashSet<&'a String>, in_stack_visited_node: &mut HashSet<&'a String>, u: &'a String, ) -> bool { visited_node.insert(u); in_stack_visited_node.insert(u); for (v, _) in graph.adjacency_table().get(u).unwrap() { if visited_node.contains(v) && in_stack_visited_node.contains(v) { return true; } if !visited_node.contains(v) && directed_graph_detect_cycle_dfs(graph, visited_node, in_stack_visited_node, v) { return true; } } in_stack_visited_node.remove(u); false } impl DetectCycle for DirectedGraph { fn detect_cycle_dfs(&self) -> bool { let mut visited_node = HashSet::<&String>::new(); let mut in_stack_visited_node = HashSet::<&String>::new(); let adj = self.adjacency_table(); for u in adj.keys() { if !visited_node.contains(u) && directed_graph_detect_cycle_dfs( self, &mut visited_node, &mut in_stack_visited_node, u, ) { return true; } } false } // detect cycle in a the graph using Kahn's algorithm // https://www.geeksforgeeks.org/detect-cycle-in-a-directed-graph-using-bfs/ fn detect_cycle_bfs(&self) -> bool { // Set 0 in-degree for each vertex let mut in_degree: HashMap<&String, usize> = self.adjacency_table().keys().map(|k| (k, 0)).collect(); // Calculate in-degree for each vertex for u in self.adjacency_table().keys() { for (v, _) in self.adjacency_table().get(u).unwrap() { *in_degree.get_mut(v).unwrap() += 1; } } // Initialize queue with vertex having 0 in-degree let mut queue: VecDeque<&String> = in_degree .iter() .filter(|(_, &degree)| degree == 0) .map(|(&k, _)| k) .collect(); let mut count = 0; while let Some(u) = queue.pop_front() { count += 1; for (v, _) in self.adjacency_table().get(u).unwrap() { in_degree.entry(v).and_modify(|d| { *d -= 1; if *d == 0 { queue.push_back(v); } }); } } // If count of processed vertices is not equal to the number of vertices, // the graph has a cycle count 
!= self.adjacency_table().len() } } #[cfg(test)] mod test { use super::DetectCycle; use crate::data_structures::{graph::Graph, DirectedGraph, UndirectedGraph}; fn get_undirected_single_node_with_loop() -> UndirectedGraph { let mut res = UndirectedGraph::new(); res.add_edge(("a", "a", 1)); res } fn get_directed_single_node_with_loop() -> DirectedGraph { let mut res = DirectedGraph::new(); res.add_edge(("a", "a", 1)); res } fn get_undirected_two_nodes_connected() -> UndirectedGraph { let mut res = UndirectedGraph::new(); res.add_edge(("a", "b", 1)); res } fn get_directed_two_nodes_connected() -> DirectedGraph { let mut res = DirectedGraph::new(); res.add_edge(("a", "b", 1)); res.add_edge(("b", "a", 1)); res } fn get_directed_two_nodes() -> DirectedGraph { let mut res = DirectedGraph::new(); res.add_edge(("a", "b", 1)); res } fn get_undirected_triangle() -> UndirectedGraph { let mut res = UndirectedGraph::new(); res.add_edge(("a", "b", 1)); res.add_edge(("b", "c", 1)); res.add_edge(("c", "a", 1)); res } fn get_directed_triangle() -> DirectedGraph { let mut res = DirectedGraph::new(); res.add_edge(("a", "b", 1)); res.add_edge(("b", "c", 1)); res.add_edge(("c", "a", 1)); res } fn get_undirected_triangle_with_tail() -> UndirectedGraph { let mut res = get_undirected_triangle(); res.add_edge(("c", "d", 1)); res.add_edge(("d", "e", 1)); res.add_edge(("e", "f", 1)); res.add_edge(("g", "h", 1)); res } fn get_directed_triangle_with_tail() -> DirectedGraph { let mut res = get_directed_triangle(); res.add_edge(("c", "d", 1)); res.add_edge(("d", "e", 1)); res.add_edge(("e", "f", 1)); res.add_edge(("g", "h", 1)); res } fn get_undirected_graph_with_cycle() -> UndirectedGraph { let mut res = UndirectedGraph::new(); res.add_edge(("a", "b", 1)); res.add_edge(("a", "c", 1)); res.add_edge(("b", "c", 1)); res.add_edge(("b", "d", 1)); res.add_edge(("c", "d", 1)); res } fn get_undirected_graph_without_cycle() -> UndirectedGraph { let mut res = UndirectedGraph::new(); res.add_edge(("a", 
"b", 1)); res.add_edge(("a", "c", 1)); res.add_edge(("b", "d", 1)); res.add_edge(("c", "e", 1)); res } fn get_directed_graph_with_cycle() -> DirectedGraph { let mut res = DirectedGraph::new(); res.add_edge(("b", "a", 1)); res.add_edge(("c", "a", 1)); res.add_edge(("b", "c", 1)); res.add_edge(("c", "d", 1)); res.add_edge(("d", "b", 1)); res } fn get_directed_graph_without_cycle() -> DirectedGraph { let mut res = DirectedGraph::new(); res.add_edge(("b", "a", 1)); res.add_edge(("c", "a", 1)); res.add_edge(("b", "c", 1)); res.add_edge(("c", "d", 1)); res.add_edge(("b", "d", 1)); res } macro_rules! test_detect_cycle { ($($name:ident: $test_case:expr,)*) => { $( #[test] fn $name() { let (graph, has_cycle) = $test_case; println!("detect_cycle_dfs: {}", graph.detect_cycle_dfs()); println!("detect_cycle_bfs: {}", graph.detect_cycle_bfs()); assert_eq!(graph.detect_cycle_dfs(), has_cycle); assert_eq!(graph.detect_cycle_bfs(), has_cycle); } )* }; } test_detect_cycle! { undirected_empty: (UndirectedGraph::new(), false), directed_empty: (DirectedGraph::new(), false), undirected_single_node_with_loop: (get_undirected_single_node_with_loop(), true), directed_single_node_with_loop: (get_directed_single_node_with_loop(), true), undirected_two_nodes_connected: (get_undirected_two_nodes_connected(), false), directed_two_nodes_connected: (get_directed_two_nodes_connected(), true), directed_two_nodes: (get_directed_two_nodes(), false), undirected_triangle: (get_undirected_triangle(), true), undirected_triangle_with_tail: (get_undirected_triangle_with_tail(), true), directed_triangle: (get_directed_triangle(), true), directed_triangle_with_tail: (get_directed_triangle_with_tail(), true), undirected_graph_with_cycle: (get_undirected_graph_with_cycle(), true), undirected_graph_without_cycle: (get_undirected_graph_without_cycle(), false), directed_graph_with_cycle: (get_directed_graph_with_cycle(), true), directed_graph_without_cycle: (get_directed_graph_without_cycle(), false), } }
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/graph/graph_enumeration.rs
src/graph/graph_enumeration.rs
use std::collections::BTreeMap;

type Graph<Vertex> = BTreeMap<Vertex, Vec<Vertex>>;

/// Renumbers the vertices of `adj` as `1..=n` (in the sorted key order of
/// the `BTreeMap`) and returns the graph as `Vec<Vec<usize>>` adjacency
/// lists, which is easier and faster to work with in other algorithms.
///
/// Index 0 of the result is an unused sentinel and stays empty.
///
/// Every vertex — including isolated ones — must appear as a key of `adj`
/// (possibly with an empty neighbor list); an edge endpoint that is not a
/// key will panic inside the `binary_search(..).unwrap()`.
pub fn enumerate_graph<V: Ord + Clone>(adj: &Graph<V>) -> Vec<Vec<usize>> {
    // BTreeMap keys iterate in sorted order, so this vector is sorted and
    // binary search gives each vertex its 1-based number.
    let sorted_vertices: Vec<V> = adj.keys().cloned().collect();

    let mut numbered: Vec<Vec<usize>> = Vec::with_capacity(adj.len() + 1);
    numbered.push(Vec::new()); // slot 0 is intentionally empty

    for neighbours in adj.values() {
        numbered.push(
            neighbours
                .iter()
                .map(|v| sorted_vertices.binary_search(v).unwrap() + 1)
                .collect(),
        );
    }
    numbered
}

#[cfg(test)]
mod tests {
    use super::*;

    // Insert the undirected edge a-b.
    fn add_edge<V: Ord + Clone>(graph: &mut Graph<V>, a: V, b: V) {
        graph.entry(a.clone()).or_default().push(b.clone());
        graph.entry(b).or_default().push(a);
    }

    #[test]
    fn string_vertices() {
        let mut graph = Graph::new();
        add_edge(&mut graph, "a", "b");
        add_edge(&mut graph, "b", "c");
        add_edge(&mut graph, "c", "a");
        add_edge(&mut graph, "b", "d");
        let mut result = enumerate_graph(&graph);
        let expected = vec![vec![], vec![2, 3], vec![1, 3, 4], vec![1, 2], vec![2]];
        for row in result.iter_mut() {
            row.sort_unstable();
        }
        assert_eq!(result, expected);
    }

    #[test]
    fn integer_vertices() {
        let mut graph = Graph::new();
        add_edge(&mut graph, 1001, 1002);
        add_edge(&mut graph, 1002, 1003);
        add_edge(&mut graph, 1003, 1001);
        add_edge(&mut graph, 1004, 1002);
        let mut result = enumerate_graph(&graph);
        let expected = vec![vec![], vec![2, 3], vec![1, 3, 4], vec![1, 2], vec![2]];
        for row in result.iter_mut() {
            row.sort_unstable();
        }
        assert_eq!(result, expected);
    }
}
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/graph/tarjans_ssc.rs
src/graph/tarjans_ssc.rs
/// A directed graph on vertices `0..n`, stored as adjacency lists.
pub struct Graph {
    n: usize,
    adj_list: Vec<Vec<usize>>,
}

impl Graph {
    /// Creates a graph with `n` vertices and no edges.
    pub fn new(n: usize) -> Self {
        Self {
            n,
            adj_list: vec![vec![]; n],
        }
    }

    /// Adds the directed edge `u -> v`.
    pub fn add_edge(&mut self, u: usize, v: usize) {
        self.adj_list[u].push(v);
    }
}

/// Computes the strongly connected components of `graph` with Tarjan's
/// algorithm: a single recursive DFS pass in O(V + E).
///
/// A component is emitted when the DFS finishes its root vertex, with the
/// vertices in the order they come off the internal stack.
pub fn tarjan_scc(graph: &Graph) -> Vec<Vec<usize>> {
    // Bookkeeping shared by every recursive call.
    struct TarjanState {
        // Next DFS discovery number to hand out.
        counter: i32,
        stack: Vec<usize>,
        on_stack: Vec<bool>,
        // Discovery number per vertex; -1 means "not visited yet".
        discovery: Vec<i32>,
        // Smallest discovery number reachable from the vertex's subtree.
        low_link: Vec<i32>,
        components: Vec<Vec<usize>>,
    }

    let mut state = TarjanState {
        counter: 0,
        stack: Vec::new(),
        on_stack: vec![false; graph.n],
        discovery: vec![-1; graph.n],
        low_link: vec![-1; graph.n],
        components: Vec::new(),
    };

    fn strong_connect(node: usize, graph: &Graph, state: &mut TarjanState) {
        state.discovery[node] = state.counter;
        state.low_link[node] = state.counter;
        state.counter += 1;
        state.stack.push(node);
        state.on_stack[node] = true;

        for &next in &graph.adj_list[node] {
            if state.discovery[next] == -1 {
                // Tree edge: recurse, then inherit the child's low-link.
                strong_connect(next, graph, state);
                state.low_link[node] = state.low_link[node].min(state.low_link[next]);
            } else if state.on_stack[next] {
                // Back/cross edge into the DFS stack.
                state.low_link[node] = state.low_link[node].min(state.discovery[next]);
            }
        }

        // `node` is the root of a component: pop the whole SCC off the stack.
        if state.low_link[node] == state.discovery[node] {
            let mut scc: Vec<usize> = Vec::new();
            while let Some(popped) = state.stack.pop() {
                state.on_stack[popped] = false;
                scc.push(popped);
                if popped == node {
                    break;
                }
            }
            state.components.push(scc);
        }
    }

    for vertex in 0..graph.n {
        if state.discovery[vertex] == -1 {
            strong_connect(vertex, graph, &mut state);
        }
    }

    state.components
}

#[cfg(test)]
mod tests {
    use super::*;

    // Build a graph with `n` vertices from an edge list.
    fn build(n: usize, edges: &[(usize, usize)]) -> Graph {
        let mut graph = Graph::new(n);
        for &(u, v) in edges {
            graph.add_edge(u, v);
        }
        graph
    }

    #[test]
    fn test_tarjan_scc() {
        // Test 1: a graph with multiple strongly connected components.
        let graph = build(
            11,
            &[
                (0, 1),
                (0, 3),
                (1, 2),
                (1, 4),
                (2, 0),
                (2, 6),
                (3, 2),
                (4, 5),
                (4, 6),
                (5, 6),
                (5, 7),
                (5, 8),
                (5, 9),
                (6, 4),
                (7, 9),
                (8, 9),
                (9, 8),
            ],
        );
        assert_eq!(
            tarjan_scc(&graph),
            vec![
                vec![8, 9],
                vec![7],
                vec![5, 4, 6],
                vec![3, 2, 1, 0],
                vec![10],
            ]
        );

        // Test 2: no edges — every vertex is its own component.
        let graph = build(5, &[]);
        assert_eq!(
            tarjan_scc(&graph),
            vec![vec![0], vec![1], vec![2], vec![3], vec![4]]
        );

        // Test 3: a single strongly connected component.
        let graph = build(5, &[(0, 1), (1, 2), (2, 3), (2, 4), (3, 0), (4, 2)]);
        assert_eq!(tarjan_scc(&graph), vec![vec![4, 3, 2, 1, 0]]);

        // Test 4: a mix of singleton and non-trivial components.
        let graph = build(
            7,
            &[
                (0, 1),
                (1, 2),
                (2, 0),
                (1, 3),
                (1, 4),
                (1, 6),
                (3, 5),
                (4, 5),
            ],
        );
        assert_eq!(
            tarjan_scc(&graph),
            vec![vec![5], vec![3], vec![4], vec![6], vec![2, 1, 0],]
        );
    }
}
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/graph/dinic_maxflow.rs
src/graph/dinic_maxflow.rs
use std::collections::VecDeque;
use std::ops::{Add, AddAssign, Neg, Sub, SubAssign};

// We assume that graph vertices are numbered from 1 to n.

/// Adjacency lists: `adj[v]` holds indices into the edge list.
type Graph = Vec<Vec<usize>>;

/// A directed edge of the residual network.
/// We assume that `T::default()` gives "zero" flow and that `T` supports
/// negative values.
pub struct FlowEdge<T> {
    pub sink: usize,
    pub capacity: T,
    pub flow: T,
}

/// One edge of the final flow, as reported to the caller.
pub struct FlowResultEdge<T> {
    pub source: usize,
    pub sink: usize,
    pub flow: T,
}

impl<T: Clone + Copy + Add + AddAssign + Sub<Output = T> + SubAssign + Ord + Neg + Default>
    FlowEdge<T>
{
    /// An edge toward `sink` carrying no flow yet.
    pub fn new(sink: usize, capacity: T) -> Self {
        FlowEdge {
            sink,
            capacity,
            flow: T::default(),
        }
    }
}

/// Dinic's maximum-flow algorithm over a residual network.
pub struct DinicMaxFlow<T> {
    /// BFS level of each vertex; levels start from 1.
    level: Vec<usize>,
    /// The index of the last visited edge connected to each vertex.
    pub last_edge: Vec<usize>,
    /// Whether the maximum flow has already been computed.
    network_solved: bool,
    pub source: usize,
    pub sink: usize,
    /// Number of edges added to the residual network.
    pub num_edges: usize,
    pub num_vertices: usize,
    pub adj: Graph,
    /// The list of flow edges; edge `e ^ 1` is the reverse of edge `e`.
    pub edges: Vec<FlowEdge<T>>,
}

impl<T: Clone + Copy + Add + AddAssign + Sub<Output = T> + SubAssign + Neg + Ord + Default>
    DinicMaxFlow<T>
{
    /// An empty network on vertices `1..=num_vertices`.
    pub fn new(source: usize, sink: usize, num_vertices: usize) -> Self {
        DinicMaxFlow {
            level: vec![0; num_vertices + 1],
            last_edge: vec![0; num_vertices + 1],
            network_solved: false,
            source,
            sink,
            num_edges: 0,
            num_vertices,
            adj: vec![vec![]; num_vertices + 1],
            edges: vec![],
        }
    }

    /// Adds a directed edge plus its zero-capacity reverse companion, so
    /// that they sit at indices `2k` and `2k + 1` (paired via `^ 1`).
    #[inline]
    pub fn add_edge(&mut self, source: usize, sink: usize, capacity: T) {
        self.edges.push(FlowEdge::new(sink, capacity));
        self.edges.push(FlowEdge::new(source, T::default()));
        self.adj[source].push(self.num_edges);
        self.adj[sink].push(self.num_edges + 1);
        self.num_edges += 2;
    }

    /// Builds the level graph of the current residual network; returns
    /// whether the sink is still reachable. Expects `level[source]` to be
    /// preset (see `find_maxflow`).
    fn bfs(&mut self) -> bool {
        let mut frontier: VecDeque<usize> = VecDeque::new();
        frontier.push_back(self.source);
        while let Some(vert) = frontier.pop_front() {
            for &edge_id in self.adj[vert].iter() {
                // Saturated edges are not part of the residual network.
                if self.edges[edge_id].capacity <= self.edges[edge_id].flow {
                    continue;
                }
                let to = self.edges[edge_id].sink;
                if self.level[to] != 0 {
                    continue;
                }
                self.level[to] = self.level[vert] + 1;
                frontier.push_back(to);
            }
        }
        self.level[self.sink] != 0
    }

    /// Pushes at most `pushed` units of flow from `vert` toward the sink
    /// along level-increasing edges; returns the amount actually pushed
    /// (zero when `vert` is a dead end).
    fn dfs(&mut self, vert: usize, pushed: T) -> T {
        if vert == self.sink {
            return pushed;
        }
        // Resume scanning where the previous call stopped (current-arc
        // optimization via `last_edge`).
        for cursor in self.last_edge[vert]..self.adj[vert].len() {
            let edge_id = self.adj[vert][cursor];
            let to = self.edges[edge_id].sink;
            if (self.level[vert] + 1) != self.level[to]
                || self.edges[edge_id].capacity <= self.edges[edge_id].flow
            {
                continue;
            }
            let residual = self.edges[edge_id].capacity - self.edges[edge_id].flow;
            let flow_down = self.dfs(to, std::cmp::min(pushed, residual));
            if flow_down == T::default() {
                continue;
            }
            self.last_edge[vert] = cursor;
            self.edges[edge_id].flow += flow_down;
            // The paired reverse edge gains the same amount of residual.
            self.edges[edge_id ^ 1].flow -= flow_down;
            return flow_down;
        }
        self.last_edge[vert] = self.adj[vert].len();
        T::default()
    }

    /// Runs Dinic's algorithm and returns the value of the maximum flow.
    /// `infinite_flow` must exceed any achievable flow (e.g. `i32::MAX`).
    pub fn find_maxflow(&mut self, infinite_flow: T) -> T {
        self.network_solved = true;
        let mut total_flow: T = T::default();
        loop {
            self.level.fill(0);
            self.level[self.source] = 1;
            // No augmenting path remains in the residual network.
            if !self.bfs() {
                break;
            }
            self.last_edge.fill(0);
            loop {
                let pushed = self.dfs(self.source, infinite_flow);
                if pushed == T::default() {
                    break;
                }
                total_flow += pushed;
            }
        }
        total_flow
    }

    /// Returns every edge that carries positive flow in the solution,
    /// computing the maximum flow first if needed.
    pub fn get_flow_edges(&mut self, infinite_flow: T) -> Vec<FlowResultEdge<T>> {
        if !self.network_solved {
            self.find_maxflow(infinite_flow);
        }
        let mut result = Vec::new();
        for source in 1..self.adj.len() {
            for &edge_id in self.adj[source].iter() {
                let edge = &self.edges[edge_id];
                // Positive flow only: this filters out the residual
                // reverse edges, whose flow is negative or zero.
                if edge.flow > T::default() {
                    result.push(FlowResultEdge {
                        source,
                        sink: edge.sink,
                        flow: edge.flow,
                    });
                }
            }
        }
        result
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn small_graph() {
        let mut flow: DinicMaxFlow<i32> = DinicMaxFlow::new(1, 6, 6);
        for &(u, v, c) in &[
            (1, 2, 16),
            (1, 4, 13),
            (2, 3, 12),
            (3, 4, 9),
            (3, 6, 20),
            (4, 2, 4),
            (4, 5, 14),
            (5, 3, 7),
            (5, 6, 4),
        ] {
            flow.add_edge(u, v, c);
        }

        let max_flow = flow.find_maxflow(i32::MAX);
        assert_eq!(max_flow, 23);

        let mut sent = [0; 7];
        let mut received = [0; 7];
        for e in flow.get_flow_edges(i32::MAX) {
            sent[e.source] += e.flow;
            received[e.sink] += e.flow;
        }
        // Conservation at internal vertices, and the full flow leaving the
        // source / entering the sink.
        for i in 2..=5 {
            assert_eq!(received[i], sent[i]);
        }
        assert_eq!(received[1], 0);
        assert_eq!(sent[1], max_flow);
        assert_eq!(received[6], max_flow);
        assert_eq!(sent[6], 0);
    }
}
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/graph/centroid_decomposition.rs
src/graph/centroid_decomposition.rs
type Adj = [Vec<usize>];

const IN_DECOMPOSITION: u64 = 1 << 63;

/// Centroid Decomposition for a tree.
///
/// Given a tree, it can be recursively decomposed into centroids. Then the
/// parent of a centroid `c` is the previous centroid that splitted its
/// connected component into two or more components. It can be shown that in
/// such a decomposition, for each path `p` with starting and ending vertices
/// `u`, `v`, the lowest common ancestor of `u` and `v` in the centroid tree
/// is a vertex of `p`.
///
/// The input tree should have its vertices numbered from 1 to n, and
/// `graph_enumeration.rs` may help to convert other representations.
pub struct CentroidDecomposition {
    /// The root of the centroid tree, should _not_ be set by the user
    pub root: usize,
    /// The result. `decomposition[v]` is the parent of `v` in centroid tree.
    /// `decomposition[root]` is 0
    pub decomposition: Vec<usize>,
    /// Used internally to save the big_child of a vertex, and whether it has
    /// been added to the centroid tree (top bit = `IN_DECOMPOSITION`).
    vert_state: Vec<u64>,
    /// Used internally to save the subtree size of a vertex
    vert_size: Vec<usize>,
}

impl CentroidDecomposition {
    /// Allocates state for a tree on vertices `1..=num_vertices`
    /// (slot 0 is a sentinel).
    pub fn new(mut num_vertices: usize) -> Self {
        num_vertices += 1;
        CentroidDecomposition {
            root: 0,
            decomposition: vec![0; num_vertices],
            vert_state: vec![0; num_vertices],
            vert_size: vec![0; num_vertices],
        }
    }

    #[inline]
    fn put_in_decomposition(&mut self, v: usize, parent: usize) {
        self.decomposition[v] = parent;
        self.vert_state[v] |= IN_DECOMPOSITION;
    }

    #[inline]
    fn is_in_decomposition(&self, v: usize) -> bool {
        (self.vert_state[v] & IN_DECOMPOSITION) != 0
    }

    /// Computes subtree sizes rooted at `v`, treating vertices already in the
    /// centroid tree as removed, and records each vertex's biggest child in
    /// `vert_state`. Returns the size of `v`'s subtree.
    fn dfs_size(&mut self, v: usize, parent: usize, adj: &Adj) -> usize {
        self.vert_size[v] = 1;
        let mut big_child = 0_usize;
        let mut bc_size = 0_usize; // big child size
        for &u in adj[v].iter() {
            if u == parent || self.is_in_decomposition(u) {
                continue;
            }
            let u_size = self.dfs_size(u, v, adj);
            self.vert_size[v] += u_size;
            if u_size > bc_size {
                big_child = u;
                bc_size = u_size;
            }
        }
        // Overwrites (and clears) any previous state word for `v`; vertices
        // already in the decomposition are never reached here.
        self.vert_state[v] = big_child as u64;
        self.vert_size[v]
    }

    /// Walks down the big-child chain until the big child's size drops to
    /// `size_thr` or less; that vertex is the centroid of the component.
    fn dfs_centroid(&self, v: usize, size_thr: usize) -> usize {
        match self.vert_state[v] as usize {
            u if self.vert_size[u] <= size_thr => v,
            u => self.dfs_centroid(u, size_thr),
        }
    }

    /// Decomposes the component containing `v`, records its centroid with
    /// `centroid_parent` as parent, and recurses into the split parts.
    /// Returns the centroid of this component.
    fn decompose_subtree(
        &mut self,
        v: usize,
        centroid_parent: usize,
        calculate_vert_size: bool,
        adj: &Adj,
    ) -> usize {
        // `calculate_vert_size` determines if it is necessary to recalculate
        // `self.vert_size` (the sizes from the previous pass are still valid
        // for the big-child side of the split).
        if calculate_vert_size {
            self.dfs_size(v, centroid_parent, adj);
        }
        let v_size = self.vert_size[v];
        let centroid = self.dfs_centroid(v, v_size >> 1);
        self.put_in_decomposition(centroid, centroid_parent);
        for &u in adj[centroid].iter() {
            if self.is_in_decomposition(u) {
                continue;
            }
            self.decompose_subtree(
                u,
                centroid,
                self.vert_size[u] > self.vert_size[centroid],
                adj,
            );
        }
        centroid
    }

    /// Decomposes the whole tree (rooting the search at vertex 1) and stores
    /// the centroid-tree root in `self.root`.
    pub fn decompose_tree(&mut self, adj: &Adj) {
        // BUGFIX: the returned root centroid was previously discarded, so the
        // documented `root` field stayed 0 forever.
        self.root = self.decompose_subtree(1, 0, true, adj);
    }
}

#[cfg(test)]
mod tests {
    use super::CentroidDecomposition;
    use crate::{
        graph::{enumerate_graph, prufer_code},
        math::PCG32,
    };

    fn calculate_height(v: usize, heights: &mut [usize], parents: &mut [usize]) -> usize {
        if heights[v] == 0 {
            heights[v] = calculate_height(parents[v], heights, parents) + 1;
        }
        heights[v]
    }

    #[test]
    fn single_path() {
        let len = 16;
        let mut adj: Vec<Vec<usize>> = vec![vec![]; len];
        adj[1].push(2);
        adj[15].push(14);
        #[allow(clippy::needless_range_loop)]
        for i in 2..15 {
            adj[i].push(i + 1);
            adj[i].push(i - 1);
        }
        let mut cd = CentroidDecomposition::new(len - 1);
        cd.decompose_tree(&adj);
        // We should get a complete binary tree
        assert_eq!(
            cd.decomposition,
            vec![0, 2, 4, 2, 8, 6, 4, 6, 0, 10, 12, 10, 8, 14, 12, 14]
        );
    }

    #[test]
    #[ignore]
    fn random_tree_height() {
        // Do not run this test in debug mode! It takes > 30s to run without
        // optimizations!
        let n = 1e6 as usize;
        let max_height = 1 + 20;
        let len = n + 1;
        let mut rng = PCG32::new_default(314159);
        let mut tree_prufer_code: Vec<u32> = vec![0; n - 2];
        tree_prufer_code.fill_with(|| (rng.get_u32() % (n as u32)) + 1);
        let vertex_list: Vec<u32> = (1..=(n as u32)).collect();
        let adj = enumerate_graph(&prufer_code::prufer_decode(&tree_prufer_code, &vertex_list));
        let mut cd = CentroidDecomposition::new(n);
        cd.decompose_tree(&adj);
        let mut heights: Vec<usize> = vec![0; len];
        heights[0] = 1;
        for i in 1..=n {
            let h = calculate_height(i, &mut heights, &mut cd.decomposition);
            assert!(h <= max_height);
        }
    }
}
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/graph/two_satisfiability.rs
src/graph/two_satisfiability.rs
use super::strongly_connected_components::StronglyConnectedComponents as SCCs; pub type Condition = (i64, i64); type Graph = Vec<Vec<usize>>; #[inline] fn variable(var: i64) -> usize { if var < 0 { (((-var) << 1) + 1) as usize } else { (var << 1) as usize } } /// Returns an assignment that satisfies all the constraints, or a variable that makes such an assignment impossible.\ /// Variables should be numbered from 1 to `n`, and a negative number `-m` corresponds to the negated variable `m`.\ /// For more information about this problem, please visit: <https://en.wikipedia.org/wiki/2-satisfiability> pub fn solve_two_satisfiability( expression: &[Condition], num_variables: usize, ) -> Result<Vec<bool>, i64> { let num_verts = (num_variables + 1) << 1; let mut result = Vec::new(); let mut sccs = SCCs::new(num_verts); let mut adj = Graph::new(); adj.resize(num_verts, vec![]); for cond in expression.iter() { let v1 = variable(cond.0); let v2 = variable(cond.1); adj[v1 ^ 1].push(v2); adj[v2 ^ 1].push(v1); } sccs.find_components(&adj); result.resize(num_variables + 1, false); for var in (2..num_verts).step_by(2) { if sccs.component[var] == sccs.component[var ^ 1] { return Err((var >> 1) as i64); } // if a variable isn't if sccs.component[var] < sccs.component[var ^ 1] { result[var >> 1] = true; } } Ok(result) } #[cfg(test)] mod tests { use std::thread; use super::*; fn check_answer(expression: &[Condition], answers: &[bool]) -> bool { let mut ok = true; for &(c1, c2) in expression { let mut cv = false; if c1 < 0 { cv |= !answers[-c1 as usize]; } else { cv |= answers[c1 as usize]; } if c2 < 0 { cv |= !answers[-c2 as usize]; } else { cv |= answers[c2 as usize]; } ok &= cv; } ok } #[test] fn basic_test() { let conds = vec![(1, 1), (2, 2)]; let res = solve_two_satisfiability(&conds, 2); assert!(res.is_ok()); assert!(check_answer(&conds, &res.unwrap())); let conds = vec![(1, 2), (-2, -2)]; let res = solve_two_satisfiability(&conds, 2); assert!(res.is_ok()); 
assert!(check_answer(&conds, &res.unwrap())); let conds = vec![]; let res = solve_two_satisfiability(&conds, 2); assert!(res.is_ok()); assert!(check_answer(&conds, &res.unwrap())); let conds = vec![(-1, -1), (-2, -2), (1, 2)]; let res = solve_two_satisfiability(&conds, 2); assert!(res.is_err()); } #[test] #[ignore] fn big_test() { // We should spawn a new thread and set its stack size to something // big (256MB in this case), because doing DFS (for finding SCCs) is // a stack-intensive operation. 256MB should be enough for 3e5 // variables though. let builder = thread::Builder::new().stack_size(256 * 1024 * 1024); let handler = builder .spawn(|| { let num_conds = 3e5 as i64; let mut conds = vec![]; for i in 1..num_conds { conds.push((i, -(i + 1))); } conds.push((num_conds, num_conds)); let res = solve_two_satisfiability(&conds, num_conds as usize); assert!(res.is_ok()); assert!(check_answer(&conds, &res.unwrap())); }) .unwrap(); handler.join().unwrap(); } }
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/graph/floyd_warshall.rs
src/graph/floyd_warshall.rs
use num_traits::Zero; use std::collections::BTreeMap; use std::ops::Add; type Graph<V, E> = BTreeMap<V, BTreeMap<V, E>>; /// Performs the Floyd-Warshall algorithm on the input graph.\ /// The graph is a weighted, directed graph with no negative cycles. /// /// Returns a map storing the distance from each node to all the others.\ /// i.e. For each vertex `u`, `map[u][v] == Some(distance)` means /// distance is the sum of the weights of the edges on the shortest path /// from `u` to `v`. /// /// For a key `v`, if `map[v].len() == 0`, then `v` cannot reach any other vertex, but is in the graph /// (island node, or sink in the case of a directed graph) pub fn floyd_warshall<V: Ord + Copy, E: Ord + Copy + Add<Output = E> + num_traits::Zero>( graph: &Graph<V, E>, ) -> BTreeMap<V, BTreeMap<V, E>> { let mut map: BTreeMap<V, BTreeMap<V, E>> = BTreeMap::new(); for (u, edges) in graph.iter() { if !map.contains_key(u) { map.insert(*u, BTreeMap::new()); } map.entry(*u).or_default().insert(*u, Zero::zero()); for (v, weight) in edges.iter() { if !map.contains_key(v) { map.insert(*v, BTreeMap::new()); } map.entry(*v).or_default().insert(*v, Zero::zero()); map.entry(*u).and_modify(|mp| { mp.insert(*v, *weight); }); } } let keys = map.keys().copied().collect::<Vec<_>>(); for &k in &keys { for &i in &keys { if !map[&i].contains_key(&k) { continue; } for &j in &keys { if i == j { continue; } if !map[&k].contains_key(&j) { continue; } let entry_i_j = map[&i].get(&j); let entry_i_k = map[&i][&k]; let entry_k_j = map[&k][&j]; match entry_i_j { Some(&e) => { if e > entry_i_k + entry_k_j { map.entry(i).or_default().insert(j, entry_i_k + entry_k_j); } } None => { map.entry(i).or_default().insert(j, entry_i_k + entry_k_j); } }; } } } map } #[cfg(test)] mod tests { use super::{floyd_warshall, Graph}; use std::collections::BTreeMap; fn add_edge<V: Ord + Copy, E: Ord + Copy>(graph: &mut Graph<V, E>, v1: V, v2: V, c: E) { graph.entry(v1).or_default().insert(v2, c); } fn bi_add_edge<V: Ord + 
Copy, E: Ord + Copy>(graph: &mut Graph<V, E>, v1: V, v2: V, c: E) { add_edge(graph, v1, v2, c); add_edge(graph, v2, v1, c); } #[test] fn single_vertex() { let mut graph: Graph<usize, usize> = BTreeMap::new(); graph.insert(0, BTreeMap::new()); let mut dists = BTreeMap::new(); dists.insert(0, BTreeMap::new()); dists.get_mut(&0).unwrap().insert(0, 0); assert_eq!(floyd_warshall(&graph), dists); } #[test] fn single_edge() { let mut graph = BTreeMap::new(); bi_add_edge(&mut graph, 0, 1, 2); bi_add_edge(&mut graph, 1, 2, 3); let mut dists_0 = BTreeMap::new(); dists_0.insert(0, BTreeMap::new()); dists_0.insert(1, BTreeMap::new()); dists_0.insert(2, BTreeMap::new()); dists_0.get_mut(&0).unwrap().insert(0, 0); dists_0.get_mut(&1).unwrap().insert(1, 0); dists_0.get_mut(&2).unwrap().insert(2, 0); dists_0.get_mut(&1).unwrap().insert(0, 2); dists_0.get_mut(&0).unwrap().insert(1, 2); dists_0.get_mut(&1).unwrap().insert(2, 3); dists_0.get_mut(&2).unwrap().insert(1, 3); dists_0.get_mut(&2).unwrap().insert(0, 5); dists_0.get_mut(&0).unwrap().insert(2, 5); assert_eq!(floyd_warshall(&graph), dists_0); } #[test] fn graph_1() { let mut graph = BTreeMap::new(); add_edge(&mut graph, 'a', 'c', 12); add_edge(&mut graph, 'a', 'd', 60); add_edge(&mut graph, 'b', 'a', 10); add_edge(&mut graph, 'c', 'b', 20); add_edge(&mut graph, 'c', 'd', 32); add_edge(&mut graph, 'e', 'a', 7); let mut dists_a = BTreeMap::new(); dists_a.insert('d', BTreeMap::new()); dists_a.entry('a').or_insert(BTreeMap::new()).insert('a', 0); dists_a.entry('b').or_insert(BTreeMap::new()).insert('b', 0); dists_a.entry('c').or_insert(BTreeMap::new()).insert('c', 0); dists_a.entry('d').or_insert(BTreeMap::new()).insert('d', 0); dists_a.entry('e').or_insert(BTreeMap::new()).insert('e', 0); dists_a .entry('a') .or_insert(BTreeMap::new()) .insert('c', 12); dists_a .entry('c') .or_insert(BTreeMap::new()) .insert('a', 30); dists_a .entry('c') .or_insert(BTreeMap::new()) .insert('b', 20); dists_a .entry('c') 
.or_insert(BTreeMap::new()) .insert('d', 32); dists_a.entry('e').or_insert(BTreeMap::new()).insert('a', 7); dists_a .entry('b') .or_insert(BTreeMap::new()) .insert('a', 10); dists_a .entry('a') .or_insert(BTreeMap::new()) .insert('d', 44); dists_a .entry('a') .or_insert(BTreeMap::new()) .insert('b', 32); dists_a .entry('a') .or_insert(BTreeMap::new()) .insert('b', 32); dists_a .entry('b') .or_insert(BTreeMap::new()) .insert('c', 22); dists_a .entry('b') .or_insert(BTreeMap::new()) .insert('d', 54); dists_a .entry('e') .or_insert(BTreeMap::new()) .insert('c', 19); dists_a .entry('e') .or_insert(BTreeMap::new()) .insert('d', 51); dists_a .entry('e') .or_insert(BTreeMap::new()) .insert('b', 39); assert_eq!(floyd_warshall(&graph), dists_a); } }
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/graph/prim.rs
src/graph/prim.rs
use std::cmp::Reverse;
use std::collections::{BTreeMap, BinaryHeap};
use std::ops::Add;

type Graph<V, E> = BTreeMap<V, BTreeMap<V, E>>;

// Insert the undirected edge v1 <-> v2 with weight c.
fn add_edge<V: Ord + Copy, E: Ord + Add + Copy>(graph: &mut Graph<V, E>, v1: V, v2: V, c: E) {
    for (a, b) in [(v1, v2), (v2, v1)] {
        graph.entry(a).or_default().insert(b, c);
    }
}

/// Runs Prim's algorithm from an arbitrary start vertex (the smallest key);
/// an empty graph yields an empty MST.
pub fn prim<V: Ord + Copy + std::fmt::Debug, E: Ord + Add + Copy + std::fmt::Debug>(
    graph: &Graph<V, E>,
) -> Graph<V, E> {
    match graph.keys().next() {
        Some(&start) => prim_with_start(graph, start),
        None => BTreeMap::new(),
    }
}

/// Prim's minimum spanning tree, grown from `start`.
///
/// Only works for a connected graph; if the given graph is not connected it
/// returns the MST of `start`'s connected component.
pub fn prim_with_start<V: Ord + Copy, E: Ord + Add + Copy>(
    graph: &Graph<V, E>,
    start: V,
) -> Graph<V, E> {
    let mut mst: Graph<V, E> = Graph::new();
    // Min-heap (via `Reverse`) of candidate edges: (cost, destination, source).
    let mut heap = BinaryHeap::new();

    mst.insert(start, BTreeMap::new());
    for (&next, &cost) in &graph[&start] {
        heap.push(Reverse((cost, next, start)));
    }

    while let Some(Reverse((cost, vertex, parent))) = heap.pop() {
        // Already spanned — this candidate edge would form a cycle.
        if mst.contains_key(&vertex) {
            continue;
        }
        // Cheapest edge reaching a new vertex: take it into the MST.
        add_edge(&mut mst, parent, vertex, cost);
        for (&next, &c) in &graph[&vertex] {
            if !mst.contains_key(&next) {
                heap.push(Reverse((c, next, vertex)));
            }
        }
    }
    mst
}

#[cfg(test)]
mod tests {
    use super::{add_edge, prim, Graph};
    use std::collections::BTreeMap;

    #[test]
    fn empty() {
        assert_eq!(prim::<usize, usize>(&BTreeMap::new()), BTreeMap::new());
    }

    #[test]
    fn single_vertex() {
        let mut graph: Graph<usize, usize> = BTreeMap::new();
        graph.insert(42, BTreeMap::new());
        assert_eq!(prim(&graph), graph);
    }

    #[test]
    fn single_edge() {
        let mut graph = BTreeMap::new();
        add_edge(&mut graph, 42, 666, 12);
        assert_eq!(prim(&graph), graph);
    }

    #[test]
    fn tree_1() {
        let mut graph = BTreeMap::new();
        for &(a, b, w) in &[
            (0, 1, 10),
            (0, 2, 11),
            (2, 3, 12),
            (2, 4, 13),
            (1, 5, 14),
            (1, 6, 15),
            (3, 7, 16),
        ] {
            add_edge(&mut graph, a, b, w);
        }
        // A tree is its own MST.
        assert_eq!(prim(&graph), graph);
    }

    #[test]
    fn tree_2() {
        let mut graph = BTreeMap::new();
        for &(a, b, w) in &[
            (1, 2, 11),
            (2, 3, 12),
            (2, 4, 13),
            (4, 5, 14),
            (4, 6, 15),
            (6, 7, 16),
        ] {
            add_edge(&mut graph, a, b, w);
        }
        assert_eq!(prim(&graph), graph);
    }

    #[test]
    fn tree_3() {
        let mut graph = BTreeMap::new();
        for i in 1..100 {
            add_edge(&mut graph, i, 2 * i, i);
            add_edge(&mut graph, i, 2 * i + 1, -i);
        }
        assert_eq!(prim(&graph), graph);
    }

    #[test]
    fn graph_1() {
        let mut graph = BTreeMap::new();
        for &(a, b, w) in &[
            ('a', 'b', 6),
            ('a', 'c', 7),
            ('a', 'e', 2),
            ('a', 'f', 3),
            ('b', 'c', 5),
            ('c', 'e', 5),
            ('d', 'e', 4),
            ('d', 'f', 1),
            ('e', 'f', 2),
        ] {
            add_edge(&mut graph, a, b, w);
        }
        let mut ans = BTreeMap::new();
        for &(a, b, w) in &[
            ('d', 'f', 1),
            ('e', 'f', 2),
            ('a', 'e', 2),
            ('b', 'c', 5),
            ('c', 'e', 5),
        ] {
            add_edge(&mut ans, a, b, w);
        }
        assert_eq!(prim(&graph), ans);
    }

    #[test]
    fn graph_2() {
        let mut graph = BTreeMap::new();
        for &(a, b, w) in &[
            (1, 2, 6),
            (1, 3, 1),
            (1, 4, 5),
            (2, 3, 5),
            (2, 5, 3),
            (3, 4, 5),
            (3, 5, 6),
            (3, 6, 4),
            (4, 6, 2),
            (5, 6, 6),
        ] {
            add_edge(&mut graph, a, b, w);
        }
        let mut ans = BTreeMap::new();
        for &(a, b, w) in &[(1, 3, 1), (4, 6, 2), (2, 5, 3), (2, 3, 5), (3, 6, 4)] {
            add_edge(&mut ans, a, b, w);
        }
        assert_eq!(prim(&graph), ans);
    }

    #[test]
    fn graph_3() {
        let mut graph = BTreeMap::new();
        for &(a, b, w) in &[
            ("v1", "v2", 1),
            ("v1", "v3", 3),
            ("v1", "v5", 6),
            ("v2", "v3", 2),
            ("v2", "v4", 3),
            ("v2", "v5", 5),
            ("v3", "v4", 5),
            ("v3", "v6", 2),
            ("v4", "v5", 2),
            ("v4", "v6", 4),
            ("v5", "v6", 1),
        ] {
            add_edge(&mut graph, a, b, w);
        }
        let mut ans = BTreeMap::new();
        for &(a, b, w) in &[
            ("v1", "v2", 1),
            ("v5", "v6", 1),
            ("v2", "v3", 2),
            ("v3", "v6", 2),
            ("v4", "v5", 2),
        ] {
            add_edge(&mut ans, a, b, w);
        }
        assert_eq!(prim(&graph), ans);
    }
}
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/graph/depth_first_search.rs
src/graph/depth_first_search.rs
use std::collections::HashSet;
use std::collections::VecDeque;

/// Performs a depth-first search from `root` looking for `objective`.
///
/// Returns `Some(history)` — the value of every vertex expanded, in visit
/// order, ending with the objective — when the objective is reachable, and
/// `None` when it is not in the graph (or unreachable from `root`).
pub fn depth_first_search(graph: &Graph, root: Vertex, objective: Vertex) -> Option<Vec<u32>> {
    let mut visited: HashSet<Vertex> = HashSet::new();
    let mut history: Vec<u32> = Vec::new();
    // Used as a stack (push_front / pop_front), which yields the
    // depth-first expansion order.
    let mut queue = VecDeque::new();
    // BUGFIX: mark the root as visited up front; otherwise a cycle leading
    // back to the root re-expands it and duplicates it in the history.
    visited.insert(root);
    queue.push_back(root);

    // Pop the top of the stack until it is exhausted.
    while let Some(current_vertex) = queue.pop_front() {
        // Record the vertex as expanded.
        history.push(current_vertex.value());

        // Found the objective: report the expansion history.
        if current_vertex == objective {
            return Some(history);
        }

        // Reversed so that the first-listed neighbor ends up on top of the
        // stack and is therefore explored first.
        for neighbor in current_vertex.neighbors(graph).into_iter().rev() {
            // Only schedule vertices that were never seen before.
            if visited.insert(neighbor) {
                queue.push_front(neighbor);
            }
        }
    }

    // Every reachable vertex was expanded without meeting the objective.
    None
}

// Data Structures

#[derive(Copy, Clone, PartialEq, Eq, Hash)]
pub struct Vertex(u32);
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
pub struct Edge(u32, u32);

/// A directed graph described by an explicit vertex and edge list.
#[derive(Clone)]
pub struct Graph {
    #[allow(dead_code)]
    vertices: Vec<Vertex>,
    edges: Vec<Edge>,
}

impl Graph {
    pub fn new(vertices: Vec<Vertex>, edges: Vec<Edge>) -> Self {
        Graph { vertices, edges }
    }
}

impl From<u32> for Vertex {
    fn from(item: u32) -> Self {
        Vertex(item)
    }
}

impl Vertex {
    /// The numeric label of this vertex.
    pub fn value(&self) -> u32 {
        self.0
    }

    /// Successors of this vertex, in edge-list order.
    pub fn neighbors(&self, graph: &Graph) -> VecDeque<Vertex> {
        graph
            .edges
            .iter()
            .filter(|e| e.0 == self.0)
            .map(|e| e.1.into())
            .collect()
    }
}

impl From<(u32, u32)> for Edge {
    fn from(item: (u32, u32)) -> Self {
        Edge(item.0, item.1)
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn find_1_fail() {
        let vertices = vec![1, 2, 3, 4, 5, 6, 7];
        let edges = vec![(1, 2), (1, 3), (2, 4), (2, 5), (3, 6), (3, 7)];

        let root = 1;
        let objective = 99;

        let graph = Graph::new(
            vertices.into_iter().map(|v| v.into()).collect(),
            edges.into_iter().map(|e| e.into()).collect(),
        );

        assert_eq!(
            depth_first_search(&graph, root.into(), objective.into()),
            None
        );
    }

    #[test]
    fn find_1_success() {
        let vertices = vec![1, 2, 3, 4, 5, 6, 7];
        let edges = vec![(1, 2), (1, 3), (2, 4), (2, 5), (3, 6), (3, 7)];

        let root = 1;
        let objective = 7;
        let correct_path = vec![1, 2, 4, 5, 3, 6, 7];

        let graph = Graph::new(
            vertices.into_iter().map(|v| v.into()).collect(),
            edges.into_iter().map(|e| e.into()).collect(),
        );

        assert_eq!(
            depth_first_search(&graph, root.into(), objective.into()),
            Some(correct_path)
        );
    }

    #[test]
    fn find_2_success() {
        let vertices = vec![0, 1, 2, 3, 4, 5, 6, 7];
        let edges = vec![
            (0, 1),
            (1, 3),
            (3, 2),
            (2, 1),
            (3, 4),
            (4, 5),
            (5, 7),
            (7, 6),
            (6, 4),
        ];

        let root = 0;
        let objective = 6;
        let correct_path = vec![0, 1, 3, 2, 4, 5, 7, 6];

        let graph = Graph::new(
            vertices.into_iter().map(|v| v.into()).collect(),
            edges.into_iter().map(|e| e.into()).collect(),
        );

        assert_eq!(
            depth_first_search(&graph, root.into(), objective.into()),
            Some(correct_path)
        );
    }

    #[test]
    fn find_3_success() {
        let vertices = vec![0, 1, 2, 3, 4, 5, 6, 7];
        let edges = vec![
            (0, 1),
            (1, 3),
            (3, 2),
            (2, 1),
            (3, 4),
            (4, 5),
            (5, 7),
            (7, 6),
            (6, 4),
        ];

        let root = 0;
        let objective = 4;
        let correct_path = vec![0, 1, 3, 2, 4];

        let graph = Graph::new(
            vertices.into_iter().map(|v| v.into()).collect(),
            edges.into_iter().map(|e| e.into()).collect(),
        );

        assert_eq!(
            depth_first_search(&graph, root.into(), objective.into()),
            Some(correct_path)
        );
    }

    #[test]
    fn cycle_back_to_root() {
        // Without marking the root visited, this graph produced the history
        // [1, 2, 1, 3] with a duplicated root.
        let graph = Graph::new(
            vec![1.into(), 2.into(), 3.into()],
            vec![(1, 2).into(), (2, 1).into(), (2, 3).into()],
        );
        assert_eq!(
            depth_first_search(&graph, 1.into(), 3.into()),
            Some(vec![1, 2, 3])
        );
    }
}
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/graph/dijkstra.rs
src/graph/dijkstra.rs
use std::collections::{BTreeMap, BTreeSet}; use std::ops::Add; type Graph<V, E> = BTreeMap<V, BTreeMap<V, E>>; // performs Dijsktra's algorithm on the given graph from the given start // the graph is a positively-weighted directed graph // // returns a map that for each reachable vertex associates the distance and the predecessor // since the start has no predecessor but is reachable, map[start] will be None // // Time: O(E * logV). For each vertex, we traverse each edge, resulting in O(E). For each edge, we // insert a new shortest path for a vertex into the tree, resulting in O(E * logV). // Space: O(V). The tree holds up to V vertices. pub fn dijkstra<V: Ord + Copy, E: Ord + Copy + Add<Output = E>>( graph: &Graph<V, E>, start: V, ) -> BTreeMap<V, Option<(V, E)>> { let mut ans = BTreeMap::new(); let mut prio = BTreeSet::new(); // start is the special case that doesn't have a predecessor ans.insert(start, None); for (new, weight) in &graph[&start] { ans.insert(*new, Some((start, *weight))); prio.insert((*weight, *new)); } while let Some((path_weight, vertex)) = prio.pop_first() { for (next, weight) in &graph[&vertex] { let new_weight = path_weight + *weight; match ans.get(next) { // if ans[next] is a lower dist than the alternative one, we do nothing Some(Some((_, dist_next))) if new_weight >= *dist_next => {} // if ans[next] is None then next is start and so the distance won't be changed, it won't be added again in prio Some(None) => {} // the new path is shorter, either new was not in ans or it was farther _ => { if let Some(Some((_, prev_weight))) = ans.insert(*next, Some((vertex, new_weight))) { prio.remove(&(prev_weight, *next)); } prio.insert((new_weight, *next)); } } } } ans } #[cfg(test)] mod tests { use super::{dijkstra, Graph}; use std::collections::BTreeMap; fn add_edge<V: Ord + Copy, E: Ord>(graph: &mut Graph<V, E>, v1: V, v2: V, c: E) { graph.entry(v1).or_default().insert(v2, c); graph.entry(v2).or_default(); } #[test] fn single_vertex() { let mut 
graph: Graph<usize, usize> = BTreeMap::new(); graph.insert(0, BTreeMap::new()); let mut dists = BTreeMap::new(); dists.insert(0, None); assert_eq!(dijkstra(&graph, 0), dists); } #[test] fn single_edge() { let mut graph = BTreeMap::new(); add_edge(&mut graph, 0, 1, 2); let mut dists_0 = BTreeMap::new(); dists_0.insert(0, None); dists_0.insert(1, Some((0, 2))); assert_eq!(dijkstra(&graph, 0), dists_0); let mut dists_1 = BTreeMap::new(); dists_1.insert(1, None); assert_eq!(dijkstra(&graph, 1), dists_1); } #[test] fn tree_1() { let mut graph = BTreeMap::new(); let mut dists = BTreeMap::new(); dists.insert(1, None); for i in 1..100 { add_edge(&mut graph, i, i * 2, i * 2); add_edge(&mut graph, i, i * 2 + 1, i * 2 + 1); match dists[&i] { Some((_, d)) => { dists.insert(i * 2, Some((i, d + i * 2))); dists.insert(i * 2 + 1, Some((i, d + i * 2 + 1))); } None => { dists.insert(i * 2, Some((i, i * 2))); dists.insert(i * 2 + 1, Some((i, i * 2 + 1))); } } } assert_eq!(dijkstra(&graph, 1), dists); } #[test] fn graph_1() { let mut graph = BTreeMap::new(); add_edge(&mut graph, 'a', 'c', 12); add_edge(&mut graph, 'a', 'd', 60); add_edge(&mut graph, 'b', 'a', 10); add_edge(&mut graph, 'c', 'b', 20); add_edge(&mut graph, 'c', 'd', 32); add_edge(&mut graph, 'e', 'a', 7); let mut dists_a = BTreeMap::new(); dists_a.insert('a', None); dists_a.insert('c', Some(('a', 12))); dists_a.insert('d', Some(('c', 44))); dists_a.insert('b', Some(('c', 32))); assert_eq!(dijkstra(&graph, 'a'), dists_a); let mut dists_b = BTreeMap::new(); dists_b.insert('b', None); dists_b.insert('a', Some(('b', 10))); dists_b.insert('c', Some(('a', 22))); dists_b.insert('d', Some(('c', 54))); assert_eq!(dijkstra(&graph, 'b'), dists_b); let mut dists_c = BTreeMap::new(); dists_c.insert('c', None); dists_c.insert('b', Some(('c', 20))); dists_c.insert('d', Some(('c', 32))); dists_c.insert('a', Some(('b', 30))); assert_eq!(dijkstra(&graph, 'c'), dists_c); let mut dists_d = BTreeMap::new(); dists_d.insert('d', None); 
assert_eq!(dijkstra(&graph, 'd'), dists_d); let mut dists_e = BTreeMap::new(); dists_e.insert('e', None); dists_e.insert('a', Some(('e', 7))); dists_e.insert('c', Some(('a', 19))); dists_e.insert('d', Some(('c', 51))); dists_e.insert('b', Some(('c', 39))); assert_eq!(dijkstra(&graph, 'e'), dists_e); } }
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/graph/mod.rs
src/graph/mod.rs
mod astar; mod bellman_ford; mod bipartite_matching; mod breadth_first_search; mod centroid_decomposition; mod decremental_connectivity; mod depth_first_search; mod depth_first_search_tic_tac_toe; mod detect_cycle; mod dijkstra; mod dinic_maxflow; mod disjoint_set_union; mod eulerian_path; mod floyd_warshall; mod ford_fulkerson; mod graph_enumeration; mod heavy_light_decomposition; mod kosaraju; mod lee_breadth_first_search; mod lowest_common_ancestor; mod minimum_spanning_tree; mod prim; mod prufer_code; mod strongly_connected_components; mod tarjans_ssc; mod topological_sort; mod two_satisfiability; pub use self::astar::astar; pub use self::bellman_ford::bellman_ford; pub use self::bipartite_matching::BipartiteMatching; pub use self::breadth_first_search::breadth_first_search; pub use self::centroid_decomposition::CentroidDecomposition; pub use self::decremental_connectivity::DecrementalConnectivity; pub use self::depth_first_search::depth_first_search; pub use self::depth_first_search_tic_tac_toe::minimax; pub use self::detect_cycle::DetectCycle; pub use self::dijkstra::dijkstra; pub use self::dinic_maxflow::DinicMaxFlow; pub use self::disjoint_set_union::DisjointSetUnion; pub use self::eulerian_path::find_eulerian_path; pub use self::floyd_warshall::floyd_warshall; pub use self::ford_fulkerson::ford_fulkerson; pub use self::graph_enumeration::enumerate_graph; pub use self::heavy_light_decomposition::HeavyLightDecomposition; pub use self::kosaraju::kosaraju; pub use self::lee_breadth_first_search::lee; pub use self::lowest_common_ancestor::{LowestCommonAncestorOffline, LowestCommonAncestorOnline}; pub use self::minimum_spanning_tree::kruskal; pub use self::prim::{prim, prim_with_start}; pub use self::prufer_code::{prufer_decode, prufer_encode}; pub use self::strongly_connected_components::StronglyConnectedComponents; pub use self::tarjans_ssc::tarjan_scc; pub use self::topological_sort::topological_sort; pub use self::two_satisfiability::solve_two_satisfiability;
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/graph/breadth_first_search.rs
src/graph/breadth_first_search.rs
use std::collections::HashSet; use std::collections::VecDeque; /// Perform a breadth-first search on Graph `graph`. /// /// # Parameters /// /// - `graph`: The graph to search. /// - `root`: The starting node of the graph from which to begin searching. /// - `target`: The target node for the search. /// /// # Returns /// /// If the target is found, an Optional vector is returned with the history /// of nodes visited as its contents. /// /// If the target is not found or there is no path from the root, /// `None` is returned. /// pub fn breadth_first_search(graph: &Graph, root: Node, target: Node) -> Option<Vec<u32>> { let mut visited: HashSet<Node> = HashSet::new(); let mut history: Vec<u32> = Vec::new(); let mut queue = VecDeque::new(); visited.insert(root); queue.push_back(root); while let Some(currentnode) = queue.pop_front() { history.push(currentnode.value()); // If we reach the goal, return our travel history. if currentnode == target { return Some(history); } // Check the neighboring nodes for any that we've not visited yet. for neighbor in currentnode.neighbors(graph) { if visited.insert(neighbor) { queue.push_back(neighbor); } } } // All nodes were visited, yet the target was not found. 
None } // Data Structures #[derive(Copy, Clone, PartialEq, Eq, Hash)] pub struct Node(u32); #[derive(Copy, Clone, PartialEq, Eq, Hash)] pub struct Edge(u32, u32); #[derive(Clone)] pub struct Graph { #[allow(dead_code)] nodes: Vec<Node>, edges: Vec<Edge>, } impl Graph { pub fn new(nodes: Vec<Node>, edges: Vec<Edge>) -> Self { Graph { nodes, edges } } } impl From<u32> for Node { fn from(item: u32) -> Self { Node(item) } } impl Node { pub fn value(&self) -> u32 { self.0 } pub fn neighbors(&self, graph: &Graph) -> Vec<Node> { graph .edges .iter() .filter(|e| e.0 == self.0) .map(|e| e.1.into()) .collect() } } impl From<(u32, u32)> for Edge { fn from(item: (u32, u32)) -> Self { Edge(item.0, item.1) } } #[cfg(test)] mod tests { use super::*; /* Example graph #1: * * (1) <--- Root * / \ * (2) (3) * / | | \ * (4) (5) (6) (7) * | * (8) */ fn graph1() -> Graph { let nodes = vec![1, 2, 3, 4, 5, 6, 7]; let edges = vec![(1, 2), (1, 3), (2, 4), (2, 5), (3, 6), (3, 7), (5, 8)]; Graph::new( nodes.into_iter().map(|v| v.into()).collect(), edges.into_iter().map(|e| e.into()).collect(), ) } #[test] fn breadth_first_search_graph1_when_node_not_found_returns_none() { let graph = graph1(); let root = 1; let target = 10; assert_eq!( breadth_first_search(&graph, root.into(), target.into()), None ); } #[test] fn breadth_first_search_graph1_when_target_8_should_evaluate_all_nodes_first() { let graph = graph1(); let root = 1; let target = 8; let expected_path = vec![1, 2, 3, 4, 5, 6, 7, 8]; assert_eq!( breadth_first_search(&graph, root.into(), target.into()), Some(expected_path) ); } /* Example graph #2: * * (1) --- (2) (3) --- (4) * / | / / * / | / / * / | / / * (5) (6) --- (7) (8) */ fn graph2() -> Graph { let nodes = vec![1, 2, 3, 4, 5, 6, 7, 8]; let undirected_edges = vec![ (1, 2), (2, 1), (2, 5), (5, 2), (2, 6), (6, 2), (3, 4), (4, 3), (3, 6), (6, 3), (4, 7), (7, 4), (6, 7), (7, 6), ]; Graph::new( nodes.into_iter().map(|v| v.into()).collect(), undirected_edges.into_iter().map(|e| 
e.into()).collect(), ) } #[test] fn breadth_first_search_graph2_when_no_path_to_node_returns_none() { let graph = graph2(); let root = 8; let target = 4; assert_eq!( breadth_first_search(&graph, root.into(), target.into()), None ); } #[test] fn breadth_first_search_graph2_should_find_path_from_4_to_1() { let graph = graph2(); let root = 4; let target = 1; let expected_path = vec![4, 3, 7, 6, 2, 1]; assert_eq!( breadth_first_search(&graph, root.into(), target.into()), Some(expected_path) ); } }
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/graph/kosaraju.rs
src/graph/kosaraju.rs
// Kosaraju algorithm, a linear-time algorithm to find the strongly connected components (SCCs) of a directed graph, in Rust. pub struct Graph { vertices: usize, adj_list: Vec<Vec<usize>>, transpose_adj_list: Vec<Vec<usize>>, } impl Graph { pub fn new(vertices: usize) -> Self { Graph { vertices, adj_list: vec![vec![]; vertices], transpose_adj_list: vec![vec![]; vertices], } } pub fn add_edge(&mut self, u: usize, v: usize) { self.adj_list[u].push(v); self.transpose_adj_list[v].push(u); } pub fn dfs(&self, node: usize, visited: &mut Vec<bool>, stack: &mut Vec<usize>) { visited[node] = true; for &neighbor in &self.adj_list[node] { if !visited[neighbor] { self.dfs(neighbor, visited, stack); } } stack.push(node); } pub fn dfs_scc(&self, node: usize, visited: &mut Vec<bool>, scc: &mut Vec<usize>) { visited[node] = true; scc.push(node); for &neighbor in &self.transpose_adj_list[node] { if !visited[neighbor] { self.dfs_scc(neighbor, visited, scc); } } } } pub fn kosaraju(graph: &Graph) -> Vec<Vec<usize>> { let mut visited = vec![false; graph.vertices]; let mut stack = Vec::new(); for i in 0..graph.vertices { if !visited[i] { graph.dfs(i, &mut visited, &mut stack); } } let mut sccs = Vec::new(); visited = vec![false; graph.vertices]; while let Some(node) = stack.pop() { if !visited[node] { let mut scc = Vec::new(); graph.dfs_scc(node, &mut visited, &mut scc); sccs.push(scc); } } sccs } #[cfg(test)] mod tests { use super::*; #[test] fn test_kosaraju_single_sccs() { let vertices = 5; let mut graph = Graph::new(vertices); graph.add_edge(0, 1); graph.add_edge(1, 2); graph.add_edge(2, 3); graph.add_edge(2, 4); graph.add_edge(3, 0); graph.add_edge(4, 2); let sccs = kosaraju(&graph); assert_eq!(sccs.len(), 1); assert!(sccs.contains(&vec![0, 3, 2, 1, 4])); } #[test] fn test_kosaraju_multiple_sccs() { let vertices = 8; let mut graph = Graph::new(vertices); graph.add_edge(1, 0); graph.add_edge(0, 1); graph.add_edge(1, 2); graph.add_edge(2, 0); graph.add_edge(2, 3); graph.add_edge(3, 
4); graph.add_edge(4, 5); graph.add_edge(5, 6); graph.add_edge(6, 7); graph.add_edge(4, 7); graph.add_edge(6, 4); let sccs = kosaraju(&graph); assert_eq!(sccs.len(), 4); assert!(sccs.contains(&vec![0, 1, 2])); assert!(sccs.contains(&vec![3])); assert!(sccs.contains(&vec![4, 6, 5])); assert!(sccs.contains(&vec![7])); } #[test] fn test_kosaraju_multiple_sccs1() { let vertices = 8; let mut graph = Graph::new(vertices); graph.add_edge(0, 2); graph.add_edge(1, 0); graph.add_edge(2, 3); graph.add_edge(3, 4); graph.add_edge(4, 7); graph.add_edge(5, 2); graph.add_edge(5, 6); graph.add_edge(6, 5); graph.add_edge(7, 6); let sccs = kosaraju(&graph); assert_eq!(sccs.len(), 3); assert!(sccs.contains(&vec![0])); assert!(sccs.contains(&vec![1])); assert!(sccs.contains(&vec![2, 5, 6, 7, 4, 3])); } #[test] fn test_kosaraju_no_scc() { let vertices = 4; let mut graph = Graph::new(vertices); graph.add_edge(0, 1); graph.add_edge(1, 2); graph.add_edge(2, 3); let sccs = kosaraju(&graph); assert_eq!(sccs.len(), 4); for (i, _) in sccs.iter().enumerate().take(vertices) { assert_eq!(sccs[i], vec![i]); } } #[test] fn test_kosaraju_empty_graph() { let vertices = 0; let graph = Graph::new(vertices); let sccs = kosaraju(&graph); assert_eq!(sccs.len(), 0); } }
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/graph/minimum_spanning_tree.rs
src/graph/minimum_spanning_tree.rs
//! This module implements Kruskal's algorithm to find the Minimum Spanning Tree (MST) //! of an undirected, weighted graph using a Disjoint Set Union (DSU) for cycle detection. use crate::graph::DisjointSetUnion; /// Represents an edge in the graph with a source, destination, and associated cost. #[derive(Debug, PartialEq, Eq)] pub struct Edge { /// The starting vertex of the edge. source: usize, /// The ending vertex of the edge. destination: usize, /// The cost associated with the edge. cost: usize, } impl Edge { /// Creates a new edge with the specified source, destination, and cost. pub fn new(source: usize, destination: usize, cost: usize) -> Self { Self { source, destination, cost, } } } /// Executes Kruskal's algorithm to compute the Minimum Spanning Tree (MST) of a graph. /// /// # Parameters /// /// - `edges`: A vector of `Edge` instances representing all edges in the graph. /// - `num_vertices`: The total number of vertices in the graph. /// /// # Returns /// /// An `Option` containing a tuple with: /// /// - The total cost of the MST (usize). /// - A vector of edges that are included in the MST. /// /// Returns `None` if the graph is disconnected. /// /// # Complexity /// /// The time complexity is O(E log E), where E is the number of edges. 
pub fn kruskal(mut edges: Vec<Edge>, num_vertices: usize) -> Option<(usize, Vec<Edge>)> { let mut dsu = DisjointSetUnion::new(num_vertices); let mut mst_cost: usize = 0; let mut mst_edges: Vec<Edge> = Vec::with_capacity(num_vertices - 1); // Sort edges by cost in ascending order edges.sort_unstable_by_key(|edge| edge.cost); for edge in edges { if mst_edges.len() == num_vertices - 1 { break; } // Attempt to merge the sets containing the edge’s vertices if dsu.merge(edge.source, edge.destination) != usize::MAX { mst_cost += edge.cost; mst_edges.push(edge); } } // Return MST if it includes exactly num_vertices - 1 edges, otherwise None for disconnected graphs (mst_edges.len() == num_vertices - 1).then_some((mst_cost, mst_edges)) } #[cfg(test)] mod tests { use super::*; macro_rules! test_cases { ($($name:ident: $test_case:expr,)*) => { $( #[test] fn $name() { let (edges, num_vertices, expected_result) = $test_case; let actual_result = kruskal(edges, num_vertices); assert_eq!(actual_result, expected_result); } )* }; } test_cases! 
{ test_seven_vertices_eleven_edges: ( vec![ Edge::new(0, 1, 7), Edge::new(0, 3, 5), Edge::new(1, 2, 8), Edge::new(1, 3, 9), Edge::new(1, 4, 7), Edge::new(2, 4, 5), Edge::new(3, 4, 15), Edge::new(3, 5, 6), Edge::new(4, 5, 8), Edge::new(4, 6, 9), Edge::new(5, 6, 11), ], 7, Some((39, vec![ Edge::new(0, 3, 5), Edge::new(2, 4, 5), Edge::new(3, 5, 6), Edge::new(0, 1, 7), Edge::new(1, 4, 7), Edge::new(4, 6, 9), ])) ), test_ten_vertices_twenty_edges: ( vec![ Edge::new(0, 1, 3), Edge::new(0, 3, 6), Edge::new(0, 4, 9), Edge::new(1, 2, 2), Edge::new(1, 3, 4), Edge::new(1, 4, 9), Edge::new(2, 3, 2), Edge::new(2, 5, 8), Edge::new(2, 6, 9), Edge::new(3, 6, 9), Edge::new(4, 5, 8), Edge::new(4, 9, 18), Edge::new(5, 6, 7), Edge::new(5, 8, 9), Edge::new(5, 9, 10), Edge::new(6, 7, 4), Edge::new(6, 8, 5), Edge::new(7, 8, 1), Edge::new(7, 9, 4), Edge::new(8, 9, 3), ], 10, Some((38, vec![ Edge::new(7, 8, 1), Edge::new(1, 2, 2), Edge::new(2, 3, 2), Edge::new(0, 1, 3), Edge::new(8, 9, 3), Edge::new(6, 7, 4), Edge::new(5, 6, 7), Edge::new(2, 5, 8), Edge::new(4, 5, 8), ])) ), test_disconnected_graph: ( vec![ Edge::new(0, 1, 4), Edge::new(0, 2, 6), Edge::new(3, 4, 2), ], 5, None ), } }
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/graph/lee_breadth_first_search.rs
src/graph/lee_breadth_first_search.rs
use std::collections::VecDeque; // All four potential movements from a cell are listed here. fn validate(matrix: &[Vec<i32>], visited: &[Vec<bool>], row: isize, col: isize) -> bool { // Check if it is possible to move to the position (row, col) from the current cell. let (row, col) = (row as usize, col as usize); row < matrix.len() && col < matrix[0].len() && matrix[row][col] == 1 && !visited[row][col] } pub fn lee(matrix: Vec<Vec<i32>>, source: (usize, usize), destination: (usize, usize)) -> isize { const ROW: [isize; 4] = [-1, 0, 0, 1]; const COL: [isize; 4] = [0, -1, 1, 0]; let (i, j) = source; let (x, y) = destination; // Base case: invalid input if matrix.is_empty() || matrix[i][j] == 0 || matrix[x][y] == 0 { return -1; } let (m, n) = (matrix.len(), matrix[0].len()); let mut visited = vec![vec![false; n]; m]; let mut q = VecDeque::new(); visited[i][j] = true; q.push_back((i, j, 0)); let mut min_dist = isize::MAX; // Loop until the queue is empty while let Some((i, j, dist)) = q.pop_front() { if i == x && j == y { // If the destination is found, update `min_dist` and stop min_dist = dist; break; } // Check for all four possible movements from the current cell for k in 0..ROW.len() { let row = i as isize + ROW[k]; let col = j as isize + COL[k]; if validate(&matrix, &visited, row, col) { // Mark the next cell as visited and enqueue it let (row, col) = (row as usize, col as usize); visited[row][col] = true; q.push_back((row, col, dist + 1)); } } } if min_dist == isize::MAX { -1 } else { min_dist } } #[cfg(test)] mod tests { use super::*; #[test] fn test_lee_exists() { let mat: Vec<Vec<i32>> = vec![ vec![1, 0, 1, 1, 1], vec![1, 0, 1, 0, 1], vec![1, 1, 1, 0, 1], vec![0, 0, 0, 0, 1], vec![1, 1, 1, 0, 1], ]; let source = (0, 0); let dest = (2, 1); assert_eq!(lee(mat, source, dest), 3); } #[test] fn test_lee_does_not_exist() { let mat: Vec<Vec<i32>> = vec![ vec![1, 0, 1, 1, 1], vec![1, 0, 0, 0, 1], vec![1, 1, 1, 0, 1], vec![0, 0, 0, 0, 1], vec![1, 1, 1, 0, 1], ]; let 
source = (0, 0); let dest = (3, 4); assert_eq!(lee(mat, source, dest), -1); } #[test] fn test_source_equals_destination() { let mat: Vec<Vec<i32>> = vec![ vec![1, 0, 1, 1, 1], vec![1, 0, 1, 0, 1], vec![1, 1, 1, 0, 1], vec![0, 0, 0, 0, 1], vec![1, 1, 1, 0, 1], ]; let source = (2, 1); let dest = (2, 1); assert_eq!(lee(mat, source, dest), 0); } #[test] fn test_lee_exists_2() { let mat: Vec<Vec<i32>> = vec![ vec![1, 1, 1, 1, 1, 0, 0], vec![1, 1, 1, 1, 1, 1, 0], vec![1, 0, 1, 0, 1, 1, 1], vec![1, 1, 1, 1, 1, 0, 1], vec![0, 0, 0, 1, 0, 0, 0], vec![1, 0, 1, 1, 1, 0, 0], vec![0, 0, 0, 0, 1, 0, 0], ]; let source = (0, 0); let dest = (3, 2); assert_eq!(lee(mat, source, dest), 5); } }
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/graph/decremental_connectivity.rs
src/graph/decremental_connectivity.rs
use std::collections::HashSet; /// A data-structure that, given a forest, allows dynamic-connectivity queries. /// Meaning deletion of an edge (u,v) and checking whether two vertecies are still connected. /// /// # Complexity /// The preprocessing phase runs in O(n) time, where n is the number of vertecies in the forest. /// Deletion runs in O(log n) and checking for connectivity runs in O(1) time. /// /// # Sources /// used Wikipedia as reference: <https://en.wikipedia.org/wiki/Dynamic_connectivity> pub struct DecrementalConnectivity { adjacent: Vec<HashSet<usize>>, component: Vec<usize>, count: usize, visited: Vec<usize>, dfs_id: usize, } impl DecrementalConnectivity { //expects the parent of a root to be itself pub fn new(adjacent: Vec<HashSet<usize>>) -> Result<Self, String> { let n = adjacent.len(); if !is_forest(&adjacent) { return Err("input graph is not a forest!".to_string()); } let mut tmp = DecrementalConnectivity { adjacent, component: vec![0; n], count: 0, visited: vec![0; n], dfs_id: 1, }; tmp.component = tmp.calc_component(); Ok(tmp) } pub fn connected(&self, u: usize, v: usize) -> Option<bool> { match (self.component.get(u), self.component.get(v)) { (Some(a), Some(b)) => Some(a == b), _ => None, } } pub fn delete(&mut self, u: usize, v: usize) { if !self.adjacent[u].contains(&v) || self.component[u] != self.component[v] { panic!("delete called on the edge ({u}, {v}) which doesn't exist"); } self.adjacent[u].remove(&v); self.adjacent[v].remove(&u); let mut queue: Vec<usize> = Vec::new(); if self.is_smaller(u, v) { queue.push(u); self.dfs_id += 1; self.visited[v] = self.dfs_id; } else { queue.push(v); self.dfs_id += 1; self.visited[u] = self.dfs_id; } while !queue.is_empty() { let &current = queue.last().unwrap(); self.dfs_step(&mut queue, self.dfs_id); self.component[current] = self.count; } self.count += 1; } fn calc_component(&mut self) -> Vec<usize> { let mut visited: Vec<bool> = vec![false; self.adjacent.len()]; let mut comp: Vec<usize> = vec![0; 
self.adjacent.len()]; for i in 0..self.adjacent.len() { if visited[i] { continue; } let mut queue: Vec<usize> = vec![i]; while let Some(current) = queue.pop() { if !visited[current] { for &neighbour in self.adjacent[current].iter() { queue.push(neighbour); } } visited[current] = true; comp[current] = self.count; } self.count += 1; } comp } fn is_smaller(&mut self, u: usize, v: usize) -> bool { let mut u_queue: Vec<usize> = vec![u]; let u_id = self.dfs_id; self.visited[v] = u_id; self.dfs_id += 1; let mut v_queue: Vec<usize> = vec![v]; let v_id = self.dfs_id; self.visited[u] = v_id; self.dfs_id += 1; // parallel depth first search while !u_queue.is_empty() && !v_queue.is_empty() { self.dfs_step(&mut u_queue, u_id); self.dfs_step(&mut v_queue, v_id); } u_queue.is_empty() } fn dfs_step(&mut self, queue: &mut Vec<usize>, dfs_id: usize) { let u = queue.pop().unwrap(); self.visited[u] = dfs_id; for &v in self.adjacent[u].iter() { if self.visited[v] == dfs_id { continue; } queue.push(v); } } } // checks whether the given graph is a forest // also checks for all adjacent vertices a,b if adjacent[a].contains(b) && adjacent[b].contains(a) fn is_forest(adjacent: &Vec<HashSet<usize>>) -> bool { let mut visited = vec![false; adjacent.len()]; for node in 0..adjacent.len() { if visited[node] { continue; } if has_cycle(adjacent, &mut visited, node, node) { return false; } } true } fn has_cycle( adjacent: &Vec<HashSet<usize>>, visited: &mut Vec<bool>, node: usize, parent: usize, ) -> bool { visited[node] = true; for &neighbour in adjacent[node].iter() { if !adjacent[neighbour].contains(&node) { panic!("the given graph does not strictly contain bidirectional edges\n {node} -> {neighbour} exists, but the other direction does not"); } if !visited[neighbour] { if has_cycle(adjacent, visited, neighbour, node) { return true; } } else if neighbour != parent { return true; } } false } #[cfg(test)] mod tests { use std::collections::HashSet; // test forest (remember the assumptoin that roots 
are adjacent to themselves) // _ _ // \ / \ / // 0 7 // / | \ | // 1 2 3 8 // / / \ // 4 5 6 #[test] fn construction_test() { let mut adjacent = vec![ HashSet::from([0, 1, 2, 3]), HashSet::from([0, 4]), HashSet::from([0, 5, 6]), HashSet::from([0]), HashSet::from([1]), HashSet::from([2]), HashSet::from([2]), HashSet::from([7, 8]), HashSet::from([7]), ]; let dec_con = super::DecrementalConnectivity::new(adjacent.clone()).unwrap(); assert_eq!(dec_con.component, vec![0, 0, 0, 0, 0, 0, 0, 1, 1]); // add a cycle to the tree adjacent[2].insert(4); adjacent[4].insert(2); assert!(super::DecrementalConnectivity::new(adjacent.clone()).is_err()); } #[test] #[should_panic(expected = "2 -> 4 exists")] fn non_bidirectional_test() { let adjacent = vec![ HashSet::from([0, 1, 2, 3]), HashSet::from([0, 4]), HashSet::from([0, 5, 6, 4]), HashSet::from([0]), HashSet::from([1]), HashSet::from([2]), HashSet::from([2]), HashSet::from([7, 8]), HashSet::from([7]), ]; // should panic now since our graph is not bidirectional super::DecrementalConnectivity::new(adjacent).unwrap(); } #[test] #[should_panic(expected = "delete called on the edge (2, 4)")] fn delete_panic_test() { let adjacent = vec![ HashSet::from([0, 1, 2, 3]), HashSet::from([0, 4]), HashSet::from([0, 5, 6]), HashSet::from([0]), HashSet::from([1]), HashSet::from([2]), HashSet::from([2]), HashSet::from([7, 8]), HashSet::from([7]), ]; let mut dec_con = super::DecrementalConnectivity::new(adjacent).unwrap(); dec_con.delete(2, 4); } #[test] fn query_test() { let adjacent = vec![ HashSet::from([0, 1, 2, 3]), HashSet::from([0, 4]), HashSet::from([0, 5, 6]), HashSet::from([0]), HashSet::from([1]), HashSet::from([2]), HashSet::from([2]), HashSet::from([7, 8]), HashSet::from([7]), ]; let mut dec_con1 = super::DecrementalConnectivity::new(adjacent.clone()).unwrap(); assert!(dec_con1.connected(3, 4).unwrap()); assert!(dec_con1.connected(5, 0).unwrap()); assert!(!dec_con1.connected(2, 7).unwrap()); assert!(dec_con1.connected(0, 
9).is_none()); dec_con1.delete(0, 2); assert!(dec_con1.connected(3, 4).unwrap()); assert!(!dec_con1.connected(5, 0).unwrap()); assert!(dec_con1.connected(5, 6).unwrap()); assert!(dec_con1.connected(8, 7).unwrap()); dec_con1.delete(7, 8); assert!(!dec_con1.connected(8, 7).unwrap()); dec_con1.delete(1, 4); assert!(!dec_con1.connected(1, 4).unwrap()); let mut dec_con2 = super::DecrementalConnectivity::new(adjacent.clone()).unwrap(); dec_con2.delete(4, 1); assert!(!dec_con2.connected(1, 4).unwrap()); let mut dec_con3 = super::DecrementalConnectivity::new(adjacent).unwrap(); dec_con3.delete(1, 4); assert!(!dec_con3.connected(4, 1).unwrap()); } }
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/graph/strongly_connected_components.rs
src/graph/strongly_connected_components.rs
/* Tarjan's algorithm to find Strongly Connected Components (SCCs): It runs in O(n + m) (so it is optimal) and as a by-product, it returns the components in some (reverse) topologically sorted order. We assume that graph is represented using (compressed) adjacency matrix and its vertices are numbered from 1 to n. If this is not the case, one can use `src/graph/graph_enumeration.rs` to convert their graph. */ pub struct StronglyConnectedComponents { // The number of the SCC the vertex is in, starting from 1 pub component: Vec<usize>, // The discover time of the vertex with minimum discover time reachable // from this vertex. The MSB of the numbers are used to save whether the // vertex has been visited (but the MSBs are cleared after // the algorithm is done) pub state: Vec<u64>, // The total number of SCCs pub num_components: usize, // The stack of vertices that DFS has seen (used internally) stack: Vec<usize>, // Used internally during DFS to know the current discover time current_time: usize, } // Some functions to help with DRY and code readability const NOT_DONE: u64 = 1 << 63; #[inline] fn set_done(vertex_state: &mut u64) { *vertex_state ^= NOT_DONE; } #[inline] fn is_in_stack(vertex_state: u64) -> bool { vertex_state != 0 && (vertex_state & NOT_DONE) != 0 } #[inline] fn is_unvisited(vertex_state: u64) -> bool { vertex_state == NOT_DONE } #[inline] fn get_discover_time(vertex_state: u64) -> u64 { vertex_state ^ NOT_DONE } impl StronglyConnectedComponents { pub fn new(mut num_vertices: usize) -> Self { num_vertices += 1; // Vertices are numbered from 1, not 0 StronglyConnectedComponents { component: vec![0; num_vertices], state: vec![NOT_DONE; num_vertices], num_components: 0, stack: vec![], current_time: 1, } } fn dfs(&mut self, v: usize, adj: &[Vec<usize>]) -> u64 { let mut min_disc = self.current_time as u64; // self.state[v] = NOT_DONE + min_disc self.state[v] ^= min_disc; self.current_time += 1; self.stack.push(v); for &u in adj[v].iter() { if 
is_unvisited(self.state[u]) { min_disc = std::cmp::min(self.dfs(u, adj), min_disc); } else if is_in_stack(self.state[u]) { min_disc = std::cmp::min(get_discover_time(self.state[u]), min_disc); } } // No vertex with a lower discovery time is reachable from this one // So it should be "the head" of a new SCC. if min_disc == get_discover_time(self.state[v]) { self.num_components += 1; loop { let u = self.stack.pop().unwrap(); self.component[u] = self.num_components; set_done(&mut self.state[u]); if u == v { break; } } } min_disc } pub fn find_components(&mut self, adj: &[Vec<usize>]) { self.state[0] = 0; for v in 1..adj.len() { if is_unvisited(self.state[v]) { self.dfs(v, adj); } } } } #[cfg(test)] mod tests { use super::*; #[test] fn acyclic() { let mut sccs = StronglyConnectedComponents::new(5); let adj = vec![vec![], vec![2, 4], vec![3, 4], vec![5], vec![5], vec![]]; sccs.find_components(&adj); assert_eq!(sccs.component, vec![0, 5, 4, 2, 3, 1]); assert_eq!(sccs.state, vec![0, 1, 2, 3, 5, 4]); assert_eq!(sccs.num_components, 5); } #[test] fn cycle() { let mut sccs = StronglyConnectedComponents::new(4); let adj = vec![vec![], vec![2], vec![3], vec![4], vec![1]]; sccs.find_components(&adj); assert_eq!(sccs.component, vec![0, 1, 1, 1, 1]); assert_eq!(sccs.state, vec![0, 1, 2, 3, 4]); assert_eq!(sccs.num_components, 1); } #[test] fn dumbbell() { let mut sccs = StronglyConnectedComponents::new(6); let adj = vec![ vec![], vec![2], vec![3, 4], vec![1], vec![5], vec![6], vec![4], ]; sccs.find_components(&adj); assert_eq!(sccs.component, vec![0, 2, 2, 2, 1, 1, 1]); assert_eq!(sccs.state, vec![0, 1, 2, 3, 4, 5, 6]); assert_eq!(sccs.num_components, 2); } #[test] fn connected_dumbbell() { let mut sccs = StronglyConnectedComponents::new(6); let adj = vec![ vec![], vec![2], vec![3, 4], vec![1], vec![5, 1], vec![6], vec![4], ]; sccs.find_components(&adj); assert_eq!(sccs.component, vec![0, 1, 1, 1, 1, 1, 1]); assert_eq!(sccs.state, vec![0, 1, 2, 3, 4, 5, 6]); 
assert_eq!(sccs.num_components, 1); } }
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/graph/eulerian_path.rs
src/graph/eulerian_path.rs
//! This module provides functionality to find an Eulerian path in a directed graph. //! An Eulerian path visits every edge exactly once. The algorithm checks if an Eulerian //! path exists and, if so, constructs and returns the path. use std::collections::LinkedList; /// Finds an Eulerian path in a directed graph. /// /// # Arguments /// /// * `node_count` - The number of nodes in the graph. /// * `edge_list` - A vector of tuples representing directed edges, where each tuple is of the form `(start, end)`. /// /// # Returns /// /// An `Option<Vec<usize>>` containing the Eulerian path if it exists; otherwise, `None`. pub fn find_eulerian_path(node_count: usize, edge_list: Vec<(usize, usize)>) -> Option<Vec<usize>> { let mut adjacency_list = vec![Vec::new(); node_count]; for (start, end) in edge_list { adjacency_list[start].push(end); } let mut eulerian_solver = EulerianPathSolver::new(adjacency_list); eulerian_solver.find_path() } /// Struct to represent the solver for finding an Eulerian path in a directed graph. pub struct EulerianPathSolver { node_count: usize, edge_count: usize, in_degrees: Vec<usize>, out_degrees: Vec<usize>, eulerian_path: LinkedList<usize>, adjacency_list: Vec<Vec<usize>>, } impl EulerianPathSolver { /// Creates a new instance of `EulerianPathSolver`. /// /// # Arguments /// /// * `adjacency_list` - The graph represented as an adjacency list. /// /// # Returns /// /// A new instance of `EulerianPathSolver`. pub fn new(adjacency_list: Vec<Vec<usize>>) -> Self { Self { node_count: adjacency_list.len(), edge_count: 0, in_degrees: vec![0; adjacency_list.len()], out_degrees: vec![0; adjacency_list.len()], eulerian_path: LinkedList::new(), adjacency_list, } } /// Find the Eulerian path if it exists. /// /// # Returns /// /// An `Option<Vec<usize>>` containing the Eulerian path if found; otherwise, `None`. /// /// If multiple Eulerian paths exist, the one found will be returned, but it may not be unique. 
fn find_path(&mut self) -> Option<Vec<usize>> { self.initialize_degrees(); if !self.has_eulerian_path() { return None; } let start_node = self.get_start_node(); self.depth_first_search(start_node); if self.eulerian_path.len() != self.edge_count + 1 { return None; } let mut path = Vec::with_capacity(self.edge_count + 1); while let Some(node) = self.eulerian_path.pop_front() { path.push(node); } Some(path) } /// Initializes in-degrees and out-degrees for each node and counts total edges. fn initialize_degrees(&mut self) { for (start_node, neighbors) in self.adjacency_list.iter().enumerate() { for &end_node in neighbors { self.in_degrees[end_node] += 1; self.out_degrees[start_node] += 1; self.edge_count += 1; } } } /// Checks if an Eulerian path exists in the graph. /// /// # Returns /// /// `true` if an Eulerian path exists; otherwise, `false`. fn has_eulerian_path(&self) -> bool { if self.edge_count == 0 { return false; } let (mut start_nodes, mut end_nodes) = (0, 0); for i in 0..self.node_count { let (in_degree, out_degree) = (self.in_degrees[i] as isize, self.out_degrees[i] as isize); match out_degree - in_degree { 1 => start_nodes += 1, -1 => end_nodes += 1, degree_diff if degree_diff.abs() > 1 => return false, _ => (), } } (start_nodes == 0 && end_nodes == 0) || (start_nodes == 1 && end_nodes == 1) } /// Finds the starting node for the Eulerian path. /// /// # Returns /// /// The index of the starting node. fn get_start_node(&self) -> usize { for i in 0..self.node_count { if self.out_degrees[i] > self.in_degrees[i] { return i; } } (0..self.node_count) .find(|&i| self.out_degrees[i] > 0) .unwrap_or(0) } /// Performs depth-first search to construct the Eulerian path. /// /// # Arguments /// /// * `curr_node` - The current node being visited in the DFS traversal. 
fn depth_first_search(&mut self, curr_node: usize) { while self.out_degrees[curr_node] > 0 { let next_node = self.adjacency_list[curr_node][self.out_degrees[curr_node] - 1]; self.out_degrees[curr_node] -= 1; self.depth_first_search(next_node); } self.eulerian_path.push_front(curr_node); } } #[cfg(test)] mod tests { use super::*; macro_rules! test_cases { ($($name:ident: $test_case:expr,)*) => { $( #[test] fn $name() { let (n, edges, expected) = $test_case; assert_eq!(find_eulerian_path(n, edges), expected); } )* } } test_cases! { test_eulerian_cycle: ( 7, vec![ (1, 2), (1, 3), (2, 2), (2, 4), (2, 4), (3, 1), (3, 2), (3, 5), (4, 3), (4, 6), (5, 6), (6, 3) ], Some(vec![1, 3, 5, 6, 3, 2, 4, 3, 1, 2, 2, 4, 6]) ), test_simple_path: ( 5, vec![ (0, 1), (1, 2), (1, 4), (1, 3), (2, 1), (4, 1) ], Some(vec![0, 1, 4, 1, 2, 1, 3]) ), test_disconnected_graph: ( 4, vec![ (0, 1), (2, 3) ], None::<Vec<usize>> ), test_single_cycle: ( 4, vec![ (0, 1), (1, 2), (2, 3), (3, 0) ], Some(vec![0, 1, 2, 3, 0]) ), test_empty_graph: ( 3, vec![], None::<Vec<usize>> ), test_unbalanced_path: ( 3, vec![ (0, 1), (1, 2), (2, 0), (0, 2) ], Some(vec![0, 2, 0, 1, 2]) ), test_no_eulerian_path: ( 3, vec![ (0, 1), (0, 2) ], None::<Vec<usize>> ), test_complex_eulerian_path: ( 6, vec![ (0, 1), (1, 2), (2, 3), (3, 4), (4, 0), (0, 5), (5, 0), (2, 0) ], Some(vec![2, 0, 5, 0, 1, 2, 3, 4, 0]) ), test_single_node_self_loop: ( 1, vec![(0, 0)], Some(vec![0, 0]) ), test_complete_graph: ( 3, vec![ (0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1) ], Some(vec![0, 2, 1, 2, 0, 1, 0]) ), test_multiple_disconnected_components: ( 6, vec![ (0, 1), (2, 3), (4, 5) ], None::<Vec<usize>> ), test_unbalanced_graph_with_path: ( 4, vec![ (0, 1), (1, 2), (2, 3), (3, 1) ], Some(vec![0, 1, 2, 3, 1]) ), test_node_with_no_edges: ( 4, vec![ (0, 1), (1, 2) ], Some(vec![0, 1, 2]) ), test_multiple_edges_between_same_nodes: ( 3, vec![ (0, 1), (1, 2), (1, 2), (2, 0) ], Some(vec![1, 2, 0, 1, 2]) ), test_larger_graph_with_eulerian_path: ( 10, 
vec![ (0, 1), (1, 2), (2, 3), (3, 4), (4, 5), (5, 6), (6, 7), (7, 8), (8, 9), (9, 0), (1, 6), (6, 3), (3, 8) ], Some(vec![1, 6, 3, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8]) ), test_no_edges_multiple_nodes: ( 5, vec![], None::<Vec<usize>> ), test_multiple_start_and_end_nodes: ( 4, vec![ (0, 1), (1, 2), (2, 0), (0, 2), (1, 3) ], None::<Vec<usize>> ), test_single_edge: ( 2, vec![(0, 1)], Some(vec![0, 1]) ), test_multiple_eulerian_paths: ( 4, vec![ (0, 1), (1, 2), (2, 0), (0, 3), (3, 0) ], Some(vec![0, 3, 0, 1, 2, 0]) ), test_dag_path: ( 4, vec![ (0, 1), (1, 2), (2, 3) ], Some(vec![0, 1, 2, 3]) ), test_parallel_edges_case: ( 2, vec![ (0, 1), (0, 1), (1, 0) ], Some(vec![0, 1, 0, 1]) ), } }
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/graph/bipartite_matching.rs
src/graph/bipartite_matching.rs
// Adjacency List use std::collections::VecDeque; type Graph = Vec<Vec<usize>>; pub struct BipartiteMatching { pub adj: Graph, pub num_vertices_grp1: usize, pub num_vertices_grp2: usize, // mt1[i] = v is the matching of i in grp1 to v in grp2 pub mt1: Vec<i32>, pub mt2: Vec<i32>, pub used: Vec<bool>, } impl BipartiteMatching { pub fn new(num_vertices_grp1: usize, num_vertices_grp2: usize) -> Self { BipartiteMatching { adj: vec![vec![]; num_vertices_grp1 + 1], num_vertices_grp1, num_vertices_grp2, mt2: vec![-1; num_vertices_grp2 + 1], mt1: vec![-1; num_vertices_grp1 + 1], used: vec![false; num_vertices_grp1 + 1], } } #[inline] // Add an directed edge u->v in the graph pub fn add_edge(&mut self, u: usize, v: usize) { self.adj[u].push(v); } fn try_kuhn(&mut self, cur: usize) -> bool { if self.used[cur] { return false; } self.used[cur] = true; for i in 0..self.adj[cur].len() { let to = self.adj[cur][i]; if self.mt2[to] == -1 || self.try_kuhn(self.mt2[to] as usize) { self.mt2[to] = cur as i32; return true; } } false } // Note: It does not modify self.mt1, it only works on self.mt2 pub fn kuhn(&mut self) { self.mt2 = vec![-1; self.num_vertices_grp2 + 1]; for v in 1..=self.num_vertices_grp1 { self.used = vec![false; self.num_vertices_grp1 + 1]; self.try_kuhn(v); } } pub fn print_matching(&self) { for i in 1..=self.num_vertices_grp2 { if self.mt2[i] == -1 { continue; } println!("Vertex {} in grp1 matched with {} grp2", self.mt2[i], i) } } fn bfs(&self, dist: &mut [i32]) -> bool { let mut q = VecDeque::new(); for (u, d_i) in dist .iter_mut() .enumerate() .skip(1) .take(self.num_vertices_grp1) { if self.mt1[u] == 0 { // u is not matched *d_i = 0; q.push_back(u); } else { // else set the vertex distance as infinite because it is matched // this will be considered the next time *d_i = i32::MAX; } } dist[0] = i32::MAX; while !q.is_empty() { let u = *q.front().unwrap(); q.pop_front(); if dist[u] < dist[0] { for i in 0..self.adj[u].len() { let v = self.adj[u][i]; if 
dist[self.mt2[v] as usize] == i32::MAX { dist[self.mt2[v] as usize] = dist[u] + 1; q.push_back(self.mt2[v] as usize); } } } } dist[0] != i32::MAX } fn dfs(&mut self, u: i32, dist: &mut Vec<i32>) -> bool { if u == 0 { return true; } for i in 0..self.adj[u as usize].len() { let v = self.adj[u as usize][i]; if dist[self.mt2[v] as usize] == dist[u as usize] + 1 && self.dfs(self.mt2[v], dist) { self.mt2[v] = u; self.mt1[u as usize] = v as i32; return true; } } dist[u as usize] = i32::MAX; false } pub fn hopcroft_karp(&mut self) -> i32 { // NOTE: how to use: https://cses.fi/paste/7558dba8d00436a847eab8/ self.mt2 = vec![0; self.num_vertices_grp2 + 1]; self.mt1 = vec![0; self.num_vertices_grp1 + 1]; let mut dist = vec![i32::MAX; self.num_vertices_grp1 + 1]; let mut res = 0; while self.bfs(&mut dist) { for u in 1..=self.num_vertices_grp1 { if self.mt1[u] == 0 && self.dfs(u as i32, &mut dist) { res += 1; } } } // for x in self.mt2 change x to -1 if it is 0 for x in self.mt2.iter_mut() { if *x == 0 { *x = -1; } } for x in self.mt1.iter_mut() { if *x == 0 { *x = -1; } } res } } #[cfg(test)] mod tests { use super::*; #[test] fn small_graph_kuhn() { let n1 = 6; let n2 = 6; let mut g = BipartiteMatching::new(n1, n2); // vertex 1 in grp1 to vertex 1 in grp 2 // denote the ith grp2 vertex as n1+i g.add_edge(1, 2); g.add_edge(1, 3); // 2 is not connected to any vertex g.add_edge(3, 4); g.add_edge(3, 1); g.add_edge(4, 3); g.add_edge(5, 3); g.add_edge(5, 4); g.add_edge(6, 6); g.kuhn(); g.print_matching(); let answer: Vec<i32> = vec![-1, 2, -1, 1, 3, 4, 6]; for i in 1..g.mt2.len() { if g.mt2[i] == -1 { // 5 in group2 has no pair assert_eq!(i, 5); continue; } // 2 in group1 has no pair assert!(g.mt2[i] != 2); assert_eq!(i as i32, answer[g.mt2[i] as usize]); } } #[test] fn small_graph_hopcroft() { let n1 = 6; let n2 = 6; let mut g = BipartiteMatching::new(n1, n2); // vertex 1 in grp1 to vertex 1 in grp 2 // denote the ith grp2 vertex as n1+i g.add_edge(1, 2); g.add_edge(1, 3); // 2 is 
not connected to any vertex g.add_edge(3, 4); g.add_edge(3, 1); g.add_edge(4, 3); g.add_edge(5, 3); g.add_edge(5, 4); g.add_edge(6, 6); let x = g.hopcroft_karp(); assert_eq!(x, 5); g.print_matching(); let answer: Vec<i32> = vec![-1, 2, -1, 1, 3, 4, 6]; for i in 1..g.mt2.len() { if g.mt2[i] == -1 { // 5 in group2 has no pair assert_eq!(i, 5); continue; } // 2 in group1 has no pair assert!(g.mt2[i] != 2); assert_eq!(i as i32, answer[g.mt2[i] as usize]); } } #[test] fn super_small_graph_kuhn() { let n1 = 1; let n2 = 1; let mut g = BipartiteMatching::new(n1, n2); g.add_edge(1, 1); g.kuhn(); g.print_matching(); assert_eq!(g.mt2[1], 1); } #[test] fn super_small_graph_hopcroft() { let n1 = 1; let n2 = 1; let mut g = BipartiteMatching::new(n1, n2); g.add_edge(1, 1); let x = g.hopcroft_karp(); assert_eq!(x, 1); g.print_matching(); assert_eq!(g.mt2[1], 1); assert_eq!(g.mt1[1], 1); } #[test] fn only_one_vertex_graph_kuhn() { let n1 = 10; let n2 = 10; let mut g = BipartiteMatching::new(n1, n2); g.add_edge(1, 1); g.add_edge(2, 1); g.add_edge(3, 1); g.add_edge(4, 1); g.add_edge(5, 1); g.add_edge(6, 1); g.add_edge(7, 1); g.add_edge(8, 1); g.add_edge(9, 1); g.add_edge(10, 1); g.kuhn(); g.print_matching(); assert_eq!(g.mt2[1], 1); for i in 2..g.mt2.len() { assert!(g.mt2[i] == -1); } } #[test] fn only_one_vertex_graph_hopcroft() { let n1 = 10; let n2 = 10; let mut g = BipartiteMatching::new(n1, n2); g.add_edge(1, 1); g.add_edge(2, 1); g.add_edge(3, 1); g.add_edge(4, 1); g.add_edge(5, 1); g.add_edge(6, 1); g.add_edge(7, 1); g.add_edge(8, 1); g.add_edge(9, 1); g.add_edge(10, 1); let x = g.hopcroft_karp(); assert_eq!(x, 1); g.print_matching(); assert_eq!(g.mt2[1], 1); for i in 2..g.mt2.len() { assert!(g.mt2[i] == -1); } } }
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/graph/topological_sort.rs
src/graph/topological_sort.rs
use std::collections::HashMap; use std::collections::VecDeque; use std::hash::Hash; #[derive(Debug, Eq, PartialEq)] pub enum TopoligicalSortError { CycleDetected, } type TopologicalSortResult<Node> = Result<Vec<Node>, TopoligicalSortError>; /// Given a directed graph, modeled as a list of edges from source to destination /// Uses Kahn's algorithm to either: /// return the topological sort of the graph /// or detect if there's any cycle pub fn topological_sort<Node: Hash + Eq + Copy>( edges: &Vec<(Node, Node)>, ) -> TopologicalSortResult<Node> { // Preparation: // Build a map of edges, organised from source to destinations // Also, count the number of incoming edges by node let mut edges_by_source: HashMap<Node, Vec<Node>> = HashMap::default(); let mut incoming_edges_count: HashMap<Node, usize> = HashMap::default(); for (source, destination) in edges { incoming_edges_count.entry(*source).or_insert(0); // if we haven't seen this node yet, mark it as having 0 incoming nodes edges_by_source // add destination to the list of outgoing edges from source .entry(*source) .or_default() .push(*destination); // then make destination have one more incoming edge *incoming_edges_count.entry(*destination).or_insert(0) += 1; } // Now Kahn's algorithm: // Add nodes that have no incoming edges to a queue let mut no_incoming_edges_q = VecDeque::default(); for (node, count) in &incoming_edges_count { if *count == 0 { no_incoming_edges_q.push_back(*node); } } // For each node in this "O-incoming-edge-queue" let mut sorted = Vec::default(); while let Some(no_incoming_edges) = no_incoming_edges_q.pop_back() { sorted.push(no_incoming_edges); // since the node has no dependency, it can be safely pushed to the sorted result incoming_edges_count.remove(&no_incoming_edges); // For each node having this one as dependency for neighbour in edges_by_source.get(&no_incoming_edges).unwrap_or(&vec![]) { if let Some(count) = incoming_edges_count.get_mut(neighbour) { *count -= 1; // decrement the count 
of incoming edges for the dependent node if *count == 0 { // `node` was the last node `neighbour` was dependent on incoming_edges_count.remove(neighbour); // let's remove it from the map, so that we can know if we covered the whole graph no_incoming_edges_q.push_front(*neighbour); // it has no incoming edges anymore => push it to the queue } } } } if incoming_edges_count.is_empty() { // we have visited every node Ok(sorted) } else { // some nodes haven't been visited, meaning there's a cycle in the graph Err(TopoligicalSortError::CycleDetected) } } #[cfg(test)] mod tests { use super::topological_sort; use crate::graph::topological_sort::TopoligicalSortError; fn is_valid_sort<Node: Eq>(sorted: &[Node], graph: &[(Node, Node)]) -> bool { for (source, dest) in graph { let source_pos = sorted.iter().position(|node| node == source); let dest_pos = sorted.iter().position(|node| node == dest); match (source_pos, dest_pos) { (Some(src), Some(dst)) if src < dst => {} _ => { return false; } }; } true } #[test] fn it_works() { let graph = vec![(1, 2), (1, 3), (2, 3), (3, 4), (4, 5), (5, 6), (6, 7)]; let sort = topological_sort(&graph); assert!(sort.is_ok()); let sort = sort.unwrap(); assert!(is_valid_sort(&sort, &graph)); assert_eq!(sort, vec![1, 2, 3, 4, 5, 6, 7]); } #[test] fn test_wikipedia_example() { let graph = vec![ (5, 11), (7, 11), (7, 8), (3, 8), (3, 10), (11, 2), (11, 9), (11, 10), (8, 9), ]; let sort = topological_sort(&graph); assert!(sort.is_ok()); let sort = sort.unwrap(); assert!(is_valid_sort(&sort, &graph)); } #[test] fn test_cyclic_graph() { let graph = vec![(1, 2), (2, 3), (3, 4), (4, 5), (4, 2)]; let sort = topological_sort(&graph); assert!(sort.is_err()); assert_eq!(sort.err().unwrap(), TopoligicalSortError::CycleDetected); } }
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/graph/heavy_light_decomposition.rs
src/graph/heavy_light_decomposition.rs
/* Heavy Light Decomposition: It partitions a tree into disjoint paths such that: 1. Each path is a part of some leaf's path to root 2. The number of paths from any vertex to the root is of O(lg(n)) Such a decomposition can be used to answer many types of queries about vertices or edges on a particular path. It is often used with some sort of binary tree to handle different operations on the paths, for example segment tree or fenwick tree. Many members of this struct are made public, because they can either be supplied by the developer, or can be useful for other parts of the code. The implementation assumes that the tree vertices are numbered from 1 to n and it is represented using (compressed) adjacency matrix. If this is not true, maybe `graph_enumeration.rs` can help. */ type Adj = [Vec<usize>]; pub struct HeavyLightDecomposition { // Each vertex is assigned a number from 1 to n. For `v` and `u` such that // u is parent of v, and both are in path `p`, it is true that: // position[u] = position[v] - 1 pub position: Vec<usize>, // The first (closest to root) vertex of the path containing each vertex pub head: Vec<usize>, // The "heaviest" child of each vertex, its subtree is at least as big as // the other ones. 
If `v` is a leaf, big_child[v] = 0 pub big_child: Vec<usize>, // Used internally to fill `position` Vec current_position: usize, } impl HeavyLightDecomposition { pub fn new(mut num_vertices: usize) -> Self { num_vertices += 1; HeavyLightDecomposition { position: vec![0; num_vertices], head: vec![0; num_vertices], big_child: vec![0; num_vertices], current_position: 1, } } fn dfs(&mut self, v: usize, parent: usize, adj: &Adj) -> usize { let mut big_child = 0usize; let mut bc_size = 0usize; // big child size let mut subtree_size = 1usize; // size of this subtree for &u in adj[v].iter() { if u == parent { continue; } let u_size = self.dfs(u, v, adj); subtree_size += u_size; if u_size > bc_size { big_child = u; bc_size = u_size; } } self.big_child[v] = big_child; subtree_size } pub fn decompose(&mut self, root: usize, adj: &Adj) { self.current_position = 1; self.dfs(root, 0, adj); self.decompose_path(root, 0, root, adj); } fn decompose_path(&mut self, v: usize, parent: usize, head: usize, adj: &Adj) { self.head[v] = head; self.position[v] = self.current_position; self.current_position += 1; let bc = self.big_child[v]; if bc != 0 { // Continue this path self.decompose_path(bc, v, head, adj); } for &u in adj[v].iter() { if u == parent || u == bc { continue; } // Start a new path self.decompose_path(u, v, u, adj); } } } #[cfg(test)] mod tests { use super::*; struct LinearCongruenceGenerator { // modulus as 2 ^ 32 multiplier: u32, increment: u32, state: u32, } impl LinearCongruenceGenerator { fn new(multiplier: u32, increment: u32, state: u32) -> Self { Self { multiplier, increment, state, } } fn next(&mut self) -> u32 { self.state = (self.multiplier as u64 * self.state as u64 + self.increment as u64) as u32; self.state } } fn get_num_paths( hld: &HeavyLightDecomposition, mut v: usize, parent: &[usize], ) -> (usize, usize) { // Return height and number of paths let mut ans = 0usize; let mut height = 0usize; let mut prev_head = 0usize; loop { height += 1; let head = 
hld.head[v]; if head != prev_head { ans += 1; prev_head = head; } v = parent[v]; if v == 0 { break; } } (ans, height) } #[test] fn single_path() { let mut adj = vec![vec![], vec![2], vec![3], vec![4], vec![5], vec![6], vec![]]; let mut hld = HeavyLightDecomposition::new(6); hld.decompose(1, &adj); assert_eq!(hld.head, vec![0, 1, 1, 1, 1, 1, 1]); assert_eq!(hld.position, vec![0, 1, 2, 3, 4, 5, 6]); assert_eq!(hld.big_child, vec![0, 2, 3, 4, 5, 6, 0]); adj[3].push(2); adj[2].push(1); hld.decompose(3, &adj); assert_eq!(hld.head, vec![0, 2, 2, 3, 3, 3, 3]); assert_eq!(hld.position, vec![0, 6, 5, 1, 2, 3, 4]); assert_eq!(hld.big_child, vec![0, 0, 1, 4, 5, 6, 0]); } #[test] fn random_tree() { // Let it have 1e4 vertices. It should finish under 100ms even with // 1e5 vertices let n = 1e4 as usize; let threshold = 14; // 2 ^ 14 = 16384 > n let mut adj: Vec<Vec<usize>> = vec![vec![]; n + 1]; let mut parent: Vec<usize> = vec![0; n + 1]; let mut hld = HeavyLightDecomposition::new(n); let mut lcg = LinearCongruenceGenerator::new(1103515245, 12345, 314); parent[2] = 1; adj[1].push(2); #[allow(clippy::needless_range_loop)] for i in 3..=n { // randomly determine the parent of each vertex. // There will be modulus bias, but it isn't important let par_max = i - 1; let par_min = (10 * par_max + 1) / 11; // Bring par_min closer to par_max to increase expected tree height let par = (lcg.next() as usize % (par_max - par_min + 1)) + par_min; adj[par].push(i); parent[i] = par; } // let's get a few leaves let leaves: Vec<usize> = (1..=n) .rev() .filter(|&v| adj[v].is_empty()) .take(100) .collect(); hld.decompose(1, &adj); for l in leaves { let (p, _h) = get_num_paths(&hld, l, &parent); assert!(p <= threshold); } } }
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/graph/depth_first_search_tic_tac_toe.rs
src/graph/depth_first_search_tic_tac_toe.rs
/* Tic-Tac-Toe Depth First Search Rust Demo Copyright 2021 David V. Makray Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ #[allow(unused_imports)] use std::io; //Interactive Tic-Tac-Toe play needs the "rand = "0.8.3" crate. //#[cfg(not(test))] //extern crate rand; //#[cfg(not(test))] //use rand::Rng; #[derive(Copy, Clone, PartialEq, Eq, Debug)] struct Position { x: u8, y: u8, } #[derive(Copy, Clone, PartialEq, Eq, Debug)] pub enum Players { Blank, PlayerX, PlayerO, } #[derive(Copy, Clone, PartialEq, Eq, Debug)] struct SinglePlayAction { position: Position, side: Players, } #[derive(Clone, PartialEq, Eq, Debug)] pub struct PlayActions { positions: Vec<Position>, side: Players, } #[allow(dead_code)] #[cfg(not(test))] fn main() { let mut board = vec![vec![Players::Blank; 3]; 3]; while !available_positions(&board).is_empty() && !win_check(Players::PlayerX, &board) && !win_check(Players::PlayerO, &board) { display_board(&board); println!("Type in coordinate for X mark to be played. ie. 
a1 etc."); let mut input = String::new(); io::stdin() .read_line(&mut input) .expect("Failed to read line"); let mut move_position: Option<Position> = None; input.make_ascii_lowercase(); let bytes = input.trim().trim_start().as_bytes(); if bytes.len() as u32 == 2 && (bytes[0] as char).is_alphabetic() && (bytes[1] as char).is_numeric() { let column: u8 = bytes[0] - b'a'; let row: u8 = bytes[1] - b'1'; if column <= 2 && row <= 2 { move_position = Some(Position { x: column, y: row }); } } //Take the validated user input coordinate and use it. if let Some(move_pos) = move_position { let open_positions = available_positions(&board); let mut search = open_positions.iter(); let result = search.find(|&&x| x == move_pos); if result.is_none() { println!("Not a valid empty coordinate."); continue; } board[move_pos.y as usize][move_pos.x as usize] = Players::PlayerX; if win_check(Players::PlayerX, &board) { display_board(&board); println!("Player X Wins!"); return; } //Find the best game plays from the current board state let recusion_result = minimax(Players::PlayerO, &board); match recusion_result { Some(x) => { //Interactive Tic-Tac-Toe play needs the "rand = "0.8.3" crate. 
//#[cfg(not(test))] //let random_selection = rand::rng().gen_range(0..x.positions.len()); let random_selection = 0; let response_pos = x.positions[random_selection]; board[response_pos.y as usize][response_pos.x as usize] = Players::PlayerO; if win_check(Players::PlayerO, &board) { display_board(&board); println!("Player O Wins!"); return; } } None => { display_board(&board); println!("Draw game."); return; } } } } } #[allow(dead_code)] fn display_board(board: &[Vec<Players>]) { println!(); for (y, board_row) in board.iter().enumerate() { print!("{} ", (y + 1)); for board_cell in board_row { match board_cell { Players::PlayerX => print!("X "), Players::PlayerO => print!("O "), Players::Blank => print!("_ "), } } println!(); } println!(" a b c"); } fn available_positions(board: &[Vec<Players>]) -> Vec<Position> { let mut available: Vec<Position> = Vec::new(); for (y, board_row) in board.iter().enumerate() { for (x, board_cell) in board_row.iter().enumerate() { if *board_cell == Players::Blank { available.push(Position { x: x as u8, y: y as u8, }); } } } available } fn win_check(player: Players, board: &[Vec<Players>]) -> bool { if player == Players::Blank { return false; } //Check for a win on the diagonals. if (board[0][0] == board[1][1]) && (board[1][1] == board[2][2]) && (board[2][2] == player) || (board[2][0] == board[1][1]) && (board[1][1] == board[0][2]) && (board[0][2] == player) { return true; } for i in 0..3 { //Check for a win on the horizontals. if (board[i][0] == board[i][1]) && (board[i][1] == board[i][2]) && (board[i][2] == player) { return true; } //Check for a win on the verticals. if (board[0][i] == board[1][i]) && (board[1][i] == board[2][i]) && (board[2][i] == player) { return true; } } false } //Minimize the actions of the opponent while maximizing the game state of the current player. pub fn minimax(side: Players, board: &[Vec<Players>]) -> Option<PlayActions> { //Check that board is in a valid state. 
if win_check(Players::PlayerX, board) || win_check(Players::PlayerO, board) { return None; } let opposite = match side { Players::PlayerX => Players::PlayerO, Players::PlayerO => Players::PlayerX, Players::Blank => panic!("Minimax can't operate when a player isn't specified."), }; let positions = available_positions(board); if positions.is_empty() { return None; } //Play position let mut best_move: Option<PlayActions> = None; for pos in positions { let mut board_next = board.to_owned(); board_next[pos.y as usize][pos.x as usize] = side; //Check for a win condition before recursion to determine if this node is terminal. if win_check(Players::PlayerX, &board_next) { append_playaction( side, &mut best_move, SinglePlayAction { position: pos, side: Players::PlayerX, }, ); continue; } if win_check(Players::PlayerO, &board_next) { append_playaction( side, &mut best_move, SinglePlayAction { position: pos, side: Players::PlayerO, }, ); continue; } let result = minimax(opposite, &board_next); let current_score = match result { Some(x) => x.side, _ => Players::Blank, }; append_playaction( side, &mut best_move, SinglePlayAction { position: pos, side: current_score, }, ) } best_move } //Promote only better or collate equally scored game plays fn append_playaction( current_side: Players, opt_play_actions: &mut Option<PlayActions>, appendee: SinglePlayAction, ) { if opt_play_actions.is_none() { *opt_play_actions = Some(PlayActions { positions: vec![appendee.position], side: appendee.side, }); return; } let play_actions = opt_play_actions.as_mut().unwrap(); //New game action is scored from the current side and the current saved best score against the new game action. 
match (current_side, play_actions.side, appendee.side) { (Players::Blank, _, _) => panic!("Unreachable state."), //Winning scores (Players::PlayerX, Players::PlayerX, Players::PlayerX) | (Players::PlayerO, Players::PlayerO, Players::PlayerO) => { play_actions.positions.push(appendee.position); } //Non-winning to Winning scores (Players::PlayerX, _, Players::PlayerX) => { play_actions.side = Players::PlayerX; play_actions.positions.clear(); play_actions.positions.push(appendee.position); } (Players::PlayerO, _, Players::PlayerO) => { play_actions.side = Players::PlayerO; play_actions.positions.clear(); play_actions.positions.push(appendee.position); } //Losing to Neutral scores (Players::PlayerX, Players::PlayerO, Players::Blank) | (Players::PlayerO, Players::PlayerX, Players::Blank) => { play_actions.side = Players::Blank; play_actions.positions.clear(); play_actions.positions.push(appendee.position); } //Ignoring lower scored plays (Players::PlayerX, Players::PlayerX, _) | (Players::PlayerO, Players::PlayerO, _) | (Players::PlayerX, Players::Blank, Players::PlayerO) | (Players::PlayerO, Players::Blank, Players::PlayerX) => {} //No change hence append only (_, _, _) => { assert!(play_actions.side == appendee.side); play_actions.positions.push(appendee.position); } } } #[cfg(test)] mod test { use super::*; #[test] fn win_state_check() { let mut board = vec![vec![Players::Blank; 3]; 3]; board[0][0] = Players::PlayerX; board[0][1] = Players::PlayerX; board[0][2] = Players::PlayerX; let responses = minimax(Players::PlayerO, &board); assert_eq!(responses, None); } #[test] fn win_state_check2() { let mut board = vec![vec![Players::Blank; 3]; 3]; board[0][0] = Players::PlayerX; board[0][1] = Players::PlayerO; board[1][0] = Players::PlayerX; board[1][1] = Players::PlayerO; board[2][1] = Players::PlayerO; let responses = minimax(Players::PlayerO, &board); assert_eq!(responses, None); } #[test] fn block_win_move() { let mut board = vec![vec![Players::Blank; 3]; 3]; 
board[0][0] = Players::PlayerX; board[0][1] = Players::PlayerX; board[1][2] = Players::PlayerO; board[2][2] = Players::PlayerO; let responses = minimax(Players::PlayerX, &board); assert_eq!( responses, Some(PlayActions { positions: vec![Position { x: 2, y: 0 }], side: Players::PlayerX }) ); } #[test] fn block_move() { let mut board = vec![vec![Players::Blank; 3]; 3]; board[0][1] = Players::PlayerX; board[0][2] = Players::PlayerO; board[2][0] = Players::PlayerO; let responses = minimax(Players::PlayerX, &board); assert_eq!( responses, Some(PlayActions { positions: vec![Position { x: 1, y: 1 }], side: Players::Blank }) ); } #[test] fn expected_loss() { let mut board = vec![vec![Players::Blank; 3]; 3]; board[0][0] = Players::PlayerX; board[0][2] = Players::PlayerO; board[1][0] = Players::PlayerX; board[2][0] = Players::PlayerO; board[2][2] = Players::PlayerO; let responses = minimax(Players::PlayerX, &board); assert_eq!( responses, Some(PlayActions { positions: vec![ Position { x: 1, y: 0 }, Position { x: 1, y: 1 }, Position { x: 2, y: 1 }, Position { x: 1, y: 2 } ], side: Players::PlayerO }) ); } }
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/graph/disjoint_set_union.rs
src/graph/disjoint_set_union.rs
//! Disjoint Set Union (DSU), also known as Union-Find: a data structure that
//! maintains a collection of non-overlapping sets and supports near-constant
//! time merging of two sets and lookup of a set's representative.

/// A single Union-Find node holding the parent link and the size of the
/// subtree rooted here (the size is only meaningful for root nodes).
pub struct DSUNode {
    /// Index of the parent node; a root points to itself.
    parent: usize,
    /// Number of elements in the set rooted at this node (union by size).
    size: usize,
}

/// Union-Find structure over the elements `0..=n`, useful for dynamic
/// connectivity problems such as testing whether two elements share a set
/// or merging two sets.
pub struct DisjointSetUnion {
    /// Backing storage for all nodes.
    nodes: Vec<DSUNode>,
}

impl DisjointSetUnion {
    /// Creates `num_elements + 1` singleton sets, one for each element in
    /// `0..=num_elements`; initially every element is its own parent.
    pub fn new(num_elements: usize) -> DisjointSetUnion {
        let nodes = (0..=num_elements)
            .map(|idx| DSUNode {
                parent: idx,
                size: 1,
            })
            .collect();
        Self { nodes }
    }

    /// Returns the representative (root) of the set containing `element`,
    /// compressing the path so future lookups are faster.
    pub fn find_set(&mut self, element: usize) -> usize {
        let parent = self.nodes[element].parent;
        if parent == element {
            return element;
        }
        let root = self.find_set(parent);
        // Path compression: link directly to the root.
        self.nodes[element].parent = root;
        root
    }

    /// Unites the sets containing `first_elem` and `sec_elem`, attaching the
    /// smaller tree below the root of the larger one (union by size).
    ///
    /// Returns the root of the merged set, or `usize::MAX` when both elements
    /// were already in the same set and nothing had to be done.
    pub fn merge(&mut self, first_elem: usize, sec_elem: usize) -> usize {
        let mut big = self.find_set(first_elem);
        let mut small = self.find_set(sec_elem);
        if big == small {
            // Already connected; signal "no merge" with the sentinel.
            return usize::MAX;
        }
        // Keep `big` pointing at the larger (or equally sized) tree.
        if self.nodes[big].size < self.nodes[small].size {
            std::mem::swap(&mut big, &mut small);
        }
        self.nodes[small].parent = big;
        self.nodes[big].size += self.nodes[small].size;
        big
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_disjoint_set_union() {
        let mut dsu = DisjointSetUnion::new(10);
        for &(a, b) in &[(1, 2), (2, 3), (1, 9), (4, 5), (7, 8), (4, 8), (6, 9)] {
            dsu.merge(a, b);
        }

        for elem in [2, 3, 6, 9] {
            assert_eq!(dsu.find_set(1), dsu.find_set(elem));
        }
        for elem in [5, 7, 8] {
            assert_eq!(dsu.find_set(4), dsu.find_set(elem));
        }
        assert_ne!(dsu.find_set(1), dsu.find_set(10));
        assert_ne!(dsu.find_set(4), dsu.find_set(10));

        dsu.merge(3, 4);

        for elem in [2, 3, 4, 5, 6, 7, 8, 9] {
            assert_eq!(dsu.find_set(1), dsu.find_set(elem));
        }
        assert_ne!(dsu.find_set(1), dsu.find_set(10));

        dsu.merge(10, 1);

        for elem in 1..=9 {
            assert_eq!(dsu.find_set(10), dsu.find_set(elem));
        }
    }
}
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/math/relu.rs
src/math/relu.rs
// Rust implementation of the ReLU (rectified linear unit) activation function:
// ReLU(x) = x if x > 0, otherwise 0.
// More information: https://en.wikipedia.org/wiki/Rectifier_(neural_networks)

/// Applies ReLU in place to every element of `array` and returns the same
/// vector so calls can be chained.
///
/// Values `<= 0.0` (including `-0.0`) are clamped to `0.0`; positive values
/// pass through unchanged. NaNs fail the `<= 0.0` comparison and are left
/// untouched, matching the previous behaviour of this function.
pub fn relu(array: &mut Vec<f32>) -> &mut Vec<f32> {
    for value in array.iter_mut() {
        if *value <= 0.0 {
            *value = 0.0;
        }
    }
    array
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_relu() {
        let mut test: Vec<f32> = Vec::from([1.0, 0.5, -1.0, 0.0, 0.3]);
        assert_eq!(
            relu(&mut test),
            &mut Vec::<f32>::from([1.0, 0.5, 0.0, 0.0, 0.3])
        );
    }
}
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/math/exponential_linear_unit.rs
src/math/exponential_linear_unit.rs
//! # Exponential Linear Unit (ELU) Function
//!
//! Computes the ELU activation for every element of an `f64` vector, given a
//! slope parameter `alpha`, and returns the transformed values as a new
//! vector. The input vector is not modified.
//!
//! ELU is a smooth alternative to (Leaky) ReLU: it keeps a small negative
//! output for negative inputs, which helps mitigate the vanishing-gradient
//! problem.
//!
//! ## Formula
//!
//! For each component `x_i` of the input vector:
//!
//! `y_i = x_i` if `x_i >= 0`, and `y_i = alpha * (e^(x_i) - 1)` if `x_i < 0`.
//!
//! (The previous implementation computed `x_i * alpha * (e^(x_i) - 1)` for
//! negative inputs — an extra factor of `x_i` that contradicted the formula
//! above and produced *positive* outputs for negative inputs. Fixed.)

/// Applies the ELU function element-wise.
///
/// * `vector` - input values; left untouched.
/// * `alpha`  - scale of the negative saturation branch.
///
/// Returns a new vector with the ELU transformation applied.
pub fn exponential_linear_unit(vector: &Vec<f64>, alpha: f64) -> Vec<f64> {
    vector
        .iter()
        .map(|&x| if x >= 0. { x } else { alpha * (x.exp() - 1.) })
        .collect()
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_exponential_linear_unit() {
        let alpha = 0.01;
        // Non-negative inputs pass through unchanged.
        assert_eq!(
            exponential_linear_unit(&vec![2.0, 4.0, 10.0, 0.05, 0.0], alpha),
            vec![2.0, 4.0, 10.0, 0.05, 0.0]
        );
        // Negative inputs map to alpha * (e^x - 1), which is always negative
        // and bounded below by -alpha.
        for x in [-10.0f64, -3.0, -5.0, -0.5] {
            let y = exponential_linear_unit(&vec![x], alpha)[0];
            assert_eq!(y, alpha * (x.exp() - 1.0));
            assert!(y < 0.0 && y > -alpha);
        }
    }
}
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/math/average.rs
src/math/average.rs
#[doc = "# Average Mean, Median, and Mode, in mathematics, the three principal ways of designating the average value of a list of numbers. The arithmetic mean is found by adding the numbers and dividing the sum by the number of numbers in the list. This is what is most often meant by an average. The median is the middle value in a list ordered from smallest to largest. The mode is the most frequently occurring value on the list. Reference: https://www.britannica.com/science/mean-median-and-mode This program approximates the mean, median and mode of a finite sequence. Note: Floats sequences are not allowed for `mode` function. "] use std::collections::HashMap; use std::collections::HashSet; use num_traits::Num; fn sum<T: Num + Copy>(sequence: Vec<T>) -> T { sequence.iter().fold(T::zero(), |acc, x| acc + *x) } /// # Argument /// /// * `sequence` - A vector of numbers. /// Returns mean of `sequence`. pub fn mean<T: Num + Copy + num_traits::FromPrimitive>(sequence: Vec<T>) -> Option<T> { let len = sequence.len(); if len == 0 { return None; } Some(sum(sequence) / (T::from_usize(len).unwrap())) } fn mean_of_two<T: Num + Copy>(a: T, b: T) -> T { (a + b) / (T::one() + T::one()) } /// # Argument /// /// * `sequence` - A vector of numbers. /// Returns median of `sequence`. pub fn median<T: Num + Copy + PartialOrd>(mut sequence: Vec<T>) -> Option<T> { if sequence.is_empty() { return None; } sequence.sort_by(|a, b| a.partial_cmp(b).unwrap()); if sequence.len() % 2 == 1 { let k = (sequence.len() + 1) / 2; Some(sequence[k - 1]) } else { let j = (sequence.len()) / 2; Some(mean_of_two(sequence[j - 1], sequence[j])) } } fn histogram<T: Eq + std::hash::Hash>(sequence: Vec<T>) -> HashMap<T, usize> { sequence.into_iter().fold(HashMap::new(), |mut res, val| { *res.entry(val).or_insert(0) += 1; res }) } /// # Argument /// /// * `sequence` - The input vector. /// Returns mode of `sequence`. 
pub fn mode<T: Eq + std::hash::Hash>(sequence: Vec<T>) -> Option<HashSet<T>> { if sequence.is_empty() { return None; } let hist = histogram(sequence); let max_count = *hist.values().max().unwrap(); Some( hist.into_iter() .filter(|(_, count)| *count == max_count) .map(|(value, _)| value) .collect(), ) } #[cfg(test)] mod test { use super::*; #[test] fn median_test() { assert_eq!(median(vec![4, 53, 2, 1, 9, 0, 2, 3, 6]).unwrap(), 3); assert_eq!(median(vec![-9, -8, 0, 1, 2, 2, 3, 4, 6, 9, 53]).unwrap(), 2); assert_eq!(median(vec![2, 3]).unwrap(), 2); assert_eq!(median(vec![3.0, 2.0]).unwrap(), 2.5); assert_eq!(median(vec![1.0, 700.0, 5.0]).unwrap(), 5.0); assert!(median(Vec::<i32>::new()).is_none()); assert!(median(Vec::<f64>::new()).is_none()); } #[test] fn mode_test() { assert_eq!( mode(vec![4, 53, 2, 1, 9, 0, 2, 3, 6]).unwrap(), HashSet::from([2]) ); assert_eq!( mode(vec![-9, -8, 0, 1, 2, 2, 3, -1, -1, 9, -1, -9]).unwrap(), HashSet::from([-1]) ); assert_eq!(mode(vec!["a", "b", "a"]).unwrap(), HashSet::from(["a"])); assert_eq!(mode(vec![1, 2, 2, 1]).unwrap(), HashSet::from([1, 2])); assert_eq!(mode(vec![1, 2, 2, 1, 3]).unwrap(), HashSet::from([1, 2])); assert_eq!(mode(vec![1]).unwrap(), HashSet::from([1])); assert!(mode(Vec::<i32>::new()).is_none()); } #[test] fn mean_test() { assert_eq!(mean(vec![2023.1112]).unwrap(), 2023.1112); assert_eq!(mean(vec![0.0, 1.0, 2.0, 3.0, 4.0]).unwrap(), 2.0); assert_eq!( mean(vec![-7.0, 4.0, 53.0, 2.0, 1.0, -9.0, 0.0, 2.0, 3.0, -6.0]).unwrap(), 4.3 ); assert_eq!(mean(vec![1, 2]).unwrap(), 1); assert!(mean(Vec::<f64>::new()).is_none()); assert!(mean(Vec::<i32>::new()).is_none()); } }
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/math/interquartile_range.rs
src/math/interquartile_range.rs
// Author : cyrixninja
// Interquartile Range (IQR): a measure of statistical dispersion — how spread
// out the data is — computed as the gap between the upper and lower quartiles.
// Wikipedia Reference : https://en.wikipedia.org/wiki/Interquartile_range

use std::cmp::Ordering;

/// Returns the median of `numbers`; the input slice is left untouched
/// (a sorted copy is made internally).
pub fn find_median(numbers: &[f64]) -> f64 {
    let mut sorted = numbers.to_vec();
    sorted.sort_by(|a, b| a.partial_cmp(b).unwrap_or(Ordering::Equal));

    let mid = sorted.len() / 2;
    match sorted.len() % 2 {
        // Even length: mean of the two middle values.
        0 => f64::midpoint(sorted[mid - 1], sorted[mid]),
        // Odd length: the single middle value.
        _ => sorted[mid],
    }
}

/// Returns the interquartile range `Q3 - Q1` of `numbers`.
///
/// # Panics
///
/// Panics when `numbers` is empty.
pub fn interquartile_range(numbers: &[f64]) -> f64 {
    if numbers.is_empty() {
        panic!("Error: The list is empty. Please provide a non-empty list.");
    }

    let mut sorted = numbers.to_vec();
    sorted.sort_by(|a, b| a.partial_cmp(b).unwrap_or(Ordering::Equal));

    let len = sorted.len();
    let mid = len / 2;
    // For an odd length the middle element belongs to neither half.
    let lower = &sorted[..mid];
    let upper = if len % 2 == 0 {
        &sorted[mid..]
    } else {
        &sorted[mid + 1..]
    };
    find_median(upper) - find_median(lower)
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_find_median() {
        let numbers1 = vec![1.0, 2.0, 2.0, 3.0, 4.0];
        assert_eq!(find_median(&numbers1), 2.0);

        let numbers2 = vec![1.0, 2.0, 2.0, 3.0, 4.0, 4.0];
        assert_eq!(find_median(&numbers2), 2.5);

        let numbers3 = vec![-1.0, 2.0, 0.0, 3.0, 4.0, -4.0];
        assert_eq!(find_median(&numbers3), 1.0);

        let numbers4 = vec![1.1, 2.2, 2.0, 3.3, 4.4, 4.0];
        assert_eq!(find_median(&numbers4), 2.75);
    }

    #[test]
    fn test_interquartile_range() {
        let numbers1 = vec![4.0, 1.0, 2.0, 3.0, 2.0];
        assert_eq!(interquartile_range(&numbers1), 2.0);

        let numbers2 = vec![-2.0, -7.0, -10.0, 9.0, 8.0, 4.0, -67.0, 45.0];
        assert_eq!(interquartile_range(&numbers2), 17.0);

        let numbers3 = vec![-2.1, -7.1, -10.1, 9.1, 8.1, 4.1, -67.1, 45.1];
        assert_eq!(interquartile_range(&numbers3), 17.2);

        let numbers4 = vec![0.0, 0.0, 0.0, 0.0, 0.0];
        assert_eq!(interquartile_range(&numbers4), 0.0);
    }

    #[test]
    #[should_panic(expected = "Error: The list is empty. Please provide a non-empty list.")]
    fn test_interquartile_range_empty_list() {
        let numbers: Vec<f64> = vec![];
        interquartile_range(&numbers);
    }
}
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/math/quadratic_residue.rs
src/math/quadratic_residue.rs
/// Cipolla algorithm
///
/// Solving quadratic residue problem:
///     x^2 = a (mod p) , p is an odd prime
/// with O(M*log(n)) time complexity, M depends on the complexity of complex numbers multiplication.
///
/// Wikipedia reference: https://en.wikipedia.org/wiki/Cipolla%27s_algorithm
/// When a is the primitive root modulo n, the answer is unique.
/// Otherwise it will return the smallest positive solution
use std::rc::Rc;
use std::time::{SystemTime, UNIX_EPOCH};

use rand::Rng;

use super::{fast_power, PCG32};

/// Parameters of the quadratic extension F_p[i] used by Cipolla's algorithm:
/// arithmetic is done modulo `modulus`, and the "imaginary unit" satisfies
/// i^2 = `i_square` (mod `modulus`).
#[derive(Debug)]
struct CustomFiniteField {
    modulus: u64,
    i_square: u64,
}

impl CustomFiniteField {
    pub fn new(modulus: u64, i_square: u64) -> Self {
        Self { modulus, i_square }
    }
}

/// An element `real + imag * i` of the extension field described by `f`.
#[derive(Clone, Debug)]
struct CustomComplexNumber {
    real: u64,
    imag: u64,
    f: Rc<CustomFiniteField>,
}

impl CustomComplexNumber {
    pub fn new(real: u64, imag: u64, f: Rc<CustomFiniteField>) -> Self {
        Self { real, imag, f }
    }

    /// In-place multiplication `self *= rhs`, using i^2 = `i_square`:
    /// (a + b·i)(c + d·i) = (ac + bd·i_square) + (bc + ad)·i  (all mod p).
    pub fn mult_other(&mut self, rhs: &Self) {
        let tmp = (self.imag * rhs.real + self.real * rhs.imag) % self.f.modulus;
        self.real = (self.real * rhs.real
            + ((self.imag * rhs.imag) % self.f.modulus) * self.f.i_square)
            % self.f.modulus;
        self.imag = tmp;
    }

    /// In-place squaring `self *= self` (same formula as `mult_other`).
    pub fn mult_self(&mut self) {
        let tmp = (self.imag * self.real + self.real * self.imag) % self.f.modulus;
        self.real = (self.real * self.real
            + ((self.imag * self.imag) % self.f.modulus) * self.f.i_square)
            % self.f.modulus;
        self.imag = tmp;
    }

    /// Binary exponentiation (square-and-multiply) in the extension field.
    pub fn fast_power(mut base: Self, mut power: u64) -> Self {
        let mut result = CustomComplexNumber::new(1, 0, base.f.clone());
        while power != 0 {
            if (power & 1) != 0 {
                result.mult_other(&base); // result *= base;
            }
            base.mult_self(); // base *= base;
            power >>= 1;
        }
        result
    }
}

/// Euler's criterion: a nonzero `x` is a quadratic residue mod `modulus`
/// iff x^((p-1)/2) = 1 (mod p).
fn is_residue(x: u64, modulus: u64) -> bool {
    let power = (modulus - 1) >> 1;
    x != 0 && fast_power(x as usize, power as usize, modulus as usize) == 1
}

/// The Legendre symbol `(a | p)`
///
/// Returns 0 if a = 0 mod p, 1 if a is a square mod p, -1 if it is not a square mod p.
///
/// <https://en.wikipedia.org/wiki/Legendre_symbol>
pub fn legendre_symbol(a: u64, odd_prime: u64) -> i64 {
    debug_assert!(!odd_prime.is_multiple_of(2), "odd_prime must be odd");
    if a == 0 {
        0
    } else if is_residue(a, odd_prime) {
        1
    } else {
        -1
    }
}

// return two solutions (x1, x2) for Quadratic Residue problem x^2 = a (mod p), where p is an odd prime
// if a is Quadratic Nonresidues, return None
pub fn cipolla(a: u32, p: u32, seed: Option<u64>) -> Option<(u32, u32)> {
    // The params should be kept in u32 range for multiplication overflow issue
    // But inside we use u64 for convenience
    let a = a as u64;
    let p = p as u64;
    if a == 0 {
        return Some((0, 0));
    }
    if !is_residue(a, p) {
        // `a` has no square root modulo p.
        return None;
    }
    // Seed the PRNG from the clock unless the caller supplied one.
    let seed = match seed {
        Some(seed) => seed,
        None => SystemTime::now()
            .duration_since(UNIX_EPOCH)
            .unwrap()
            .as_secs(),
    };
    let mut rng = PCG32::new_default(seed);
    // Accept r = 0, or any r for which r^2 - a is a quadratic non-residue;
    // then x^2 = r^2 - a has no root in F_p and i^2 = r^2 - a defines the
    // quadratic extension used below. (`p + r*r - a` keeps the value
    // non-negative before reduction.)
    let r = loop {
        let r = rng.get_u64() % p;
        if r == 0 || !is_residue((p + r * r - a) % p, p) {
            break r;
        }
    };
    let filed = Rc::new(CustomFiniteField::new(p, (p + r * r - a) % p));
    let comp = CustomComplexNumber::new(r, 1, filed);
    let power = (p + 1) >> 1;
    // (r + i)^((p+1)/2) lies in F_p and its real part squares to `a`.
    let x0 = CustomComplexNumber::fast_power(comp, power).real as u32;
    let x1 = p as u32 - x0;
    // Return the two roots in ascending order.
    if x0 < x1 {
        Some((x0, x1))
    } else {
        Some((x1, x0))
    }
}

/// Returns one of the two possible solutions of _x² = a mod p_, if any.
///
/// The other solution is _-x mod p_. If there is no solution, returns `None`.
///
/// Reference: H. Cohen, _A course in computational algebraic number theory_, Algorithm 1.4.3
///
/// ## Implementation details
///
/// To avoid multiplication overflows, internally the algorithm uses the `128`-bit arithmetic.
///
/// Also see [`cipolla`].
pub fn tonelli_shanks(a: i64, odd_prime: u64) -> Option<u64> {
    let p: u128 = odd_prime as u128;
    let e = (p - 1).trailing_zeros();
    let q = (p - 1) >> e; // p = 2^e * q, with q odd

    // Normalise a negative `a` into [0, p).
    let a = if a < 0 {
        a.rem_euclid(p as i64) as u128
    } else {
        a as u128
    };

    let power_mod_p = |b, e| fast_power(b as usize, e as usize, p as usize) as u128;

    // find generator: choose a random non-residue n mod p
    let mut rng = rand::rng();
    let n = loop {
        let n = rng.random_range(0..p);
        if legendre_symbol(n as u64, p as u64) == -1 {
            break n;
        }
    };
    let z = power_mod_p(n, q);

    // init
    let mut y = z;
    let mut r = e;
    let mut x = power_mod_p(a, (q - 1) / 2) % p;
    let mut b = (a * x * x) % p;
    x = (a * x) % p;

    // Loop invariant (Cohen, Alg. 1.4.3): b = a * x^2 / a^2 has order
    // dividing 2^(r-1); the loop terminates when b = 1, at which point
    // x^2 = a (mod p).
    while b % p != 1 {
        // find exponent: smallest m in (0, r) with b^(2^m) = 1
        let m = (1..r)
            .scan(b, |prev, m| {
                *prev = (*prev * *prev) % p;
                Some((m, *prev == 1))
            })
            .find_map(|(m, cond)| cond.then_some(m));
        let Some(m) = m else {
            return None; // non-residue
        };

        // reduce exponent
        let t = power_mod_p(y as u128, 2_u128.pow(r - m - 1));
        y = (t * t) % p;
        r = m;

        x = (x * t) % p;
        b = (b * y) % p;
    }
    Some(x as u64)
}

#[cfg(test)]
mod tests {
    use super::*;

    // Wraps `tonelli_shanks` so its two roots come back in ascending order,
    // matching the output convention of `cipolla`.
    fn tonelli_shanks_residues(x: u64, odd_prime: u64) -> Option<(u64, u64)> {
        let x = tonelli_shanks(x as i64, odd_prime)?;
        let x2 = (-(x as i64)).rem_euclid(odd_prime as i64) as u64;
        Some(if x < x2 { (x, x2) } else { (x2, x) })
    }

    #[test]
    fn cipolla_small_numbers() {
        assert_eq!(cipolla(1, 43, None), Some((1, 42)));
        assert_eq!(cipolla(2, 23, None), Some((5, 18)));
        assert_eq!(cipolla(17, 83, Some(42)), Some((10, 73)));
    }

    #[test]
    fn tonelli_shanks_small_numbers() {
        assert_eq!(tonelli_shanks_residues(1, 43).unwrap(), (1, 42));
        assert_eq!(tonelli_shanks_residues(2, 23).unwrap(), (5, 18));
        assert_eq!(tonelli_shanks_residues(17, 83).unwrap(), (10, 73));
    }

    #[test]
    fn cipolla_random_numbers() {
        assert_eq!(cipolla(392203, 852167, None), Some((413252, 438915)));
        assert_eq!(
            cipolla(379606557, 425172197, None),
            Some((143417827, 281754370))
        );
        assert_eq!(
            cipolla(585251669, 892950901, None),
            Some((192354555, 700596346))
        );
        assert_eq!(
            cipolla(404690348, 430183399, Some(19260817)),
            Some((57227138, 372956261))
        );
        assert_eq!(
            cipolla(210205747, 625380647, Some(998244353)),
            Some((76810367, 548570280))
        );
    }

    #[test]
    fn tonelli_shanks_random_numbers() {
        assert_eq!(
            tonelli_shanks_residues(392203, 852167),
            Some((413252, 438915))
        );
        assert_eq!(
            tonelli_shanks_residues(379606557, 425172197),
            Some((143417827, 281754370))
        );
        assert_eq!(
            tonelli_shanks_residues(585251669, 892950901),
            Some((192354555, 700596346))
        );
        assert_eq!(
            tonelli_shanks_residues(404690348, 430183399),
            Some((57227138, 372956261))
        );
        assert_eq!(
            tonelli_shanks_residues(210205747, 625380647),
            Some((76810367, 548570280))
        );
    }

    #[test]
    fn no_answer() {
        assert_eq!(cipolla(650927, 852167, None), None);
        assert_eq!(tonelli_shanks(650927, 852167), None);
    }
}
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/math/decimal_to_fraction.rs
src/math/decimal_to_fraction.rs
/// Converts a decimal number into a reduced fraction `(numerator, denominator)`.
///
/// Whole numbers map to `(n, 1)`. Otherwise the number of digits after the
/// decimal point (as printed by `f64`'s `Display`) determines the initial
/// power-of-ten denominator, and the fraction is then reduced by the greatest
/// common divisor of numerator and denominator.
pub fn decimal_to_fraction(decimal: f64) -> (i64, i64) {
    // Fractional part of the input; zero means the value is an integer.
    let fractional_part = decimal - decimal.floor();

    if fractional_part == 0.0 {
        return (decimal as i64, 1);
    }

    // Count the digits after the '.' in the printed representation.
    let digits = decimal.to_string().split('.').nth(1).unwrap_or("").len();

    // Scale into integers: the value equals numerator / 10^digits.
    let numerator = (decimal * 10f64.powi(digits as i32)) as i64;
    let denominator = 10i64.pow(digits as u32);

    // Euclid's algorithm for the greatest common divisor.
    let mut a = numerator;
    let mut b = denominator;
    while b != 0 {
        (a, b) = (b, a % b);
    }
    let gcd = a.abs();

    // Reduce the fraction by the GCD.
    (numerator / gcd, denominator / gcd)
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_decimal_to_fraction_1() {
        assert_eq!(decimal_to_fraction(2.0), (2, 1));
    }

    #[test]
    fn test_decimal_to_fraction_2() {
        assert_eq!(decimal_to_fraction(89.45), (1789, 20));
    }

    #[test]
    fn test_decimal_to_fraction_3() {
        assert_eq!(decimal_to_fraction(67.), (67, 1));
    }

    #[test]
    fn test_decimal_to_fraction_4() {
        assert_eq!(decimal_to_fraction(45.2), (226, 5));
    }

    #[test]
    fn test_decimal_to_fraction_5() {
        assert_eq!(decimal_to_fraction(1.5), (3, 2));
    }

    #[test]
    fn test_decimal_to_fraction_6() {
        assert_eq!(decimal_to_fraction(6.25), (25, 4));
    }
}
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/math/perfect_numbers.rs
src/math/perfect_numbers.rs
/// Checks whether `num` equals the sum of its proper divisors
/// (e.g. 6 = 1 + 2 + 3).
///
/// `0` and `1` are not perfect numbers. In particular, `num == 0` no longer
/// underflows: the previous loop bound `1..num - 1` panicked in debug builds
/// (and wrapped in release) for `num == 0`.
pub fn is_perfect_number(num: usize) -> bool {
    if num < 2 {
        return false;
    }
    // A proper divisor of `num` can be at most `num / 2`.
    let mut sum = 0;
    for i in 1..=num / 2 {
        if num % i == 0 {
            sum += i;
        }
    }
    num == sum
}

/// Returns all perfect numbers up to and including `max`.
pub fn perfect_numbers(max: usize) -> Vec<usize> {
    // It is not known if there are any odd perfect numbers, so we check every candidate.
    (1..=max).filter(|&n| is_perfect_number(n)).collect()
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn basic() {
        assert!(!is_perfect_number(0));
        assert!(!is_perfect_number(1));
        assert!(is_perfect_number(6));
        assert!(is_perfect_number(28));
        assert!(is_perfect_number(496));
        assert!(is_perfect_number(8128));

        assert!(!is_perfect_number(5));
        assert!(!is_perfect_number(86));
        assert!(!is_perfect_number(497));
        assert!(!is_perfect_number(8120));

        assert_eq!(perfect_numbers(10), vec![6]);
        assert_eq!(perfect_numbers(100), vec![6, 28]);
        assert_eq!(perfect_numbers(496), vec![6, 28, 496]);
        assert_eq!(perfect_numbers(1000), vec![6, 28, 496]);
    }
}
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/math/field.rs
src/math/field.rs
//! A generic `Field` trait and a prime-field implementation `PrimeField<P>`
//! whose elements are represented by (possibly unreduced) `i64` values.

use core::fmt;
use std::hash::{Hash, Hasher};
use std::ops::{Add, Div, Mul, Neg, Sub};

/// A field
///
/// <https://en.wikipedia.org/wiki/Field_(mathematics)>
pub trait Field:
    Neg<Output = Self>
    + Add<Output = Self>
    + Sub<Output = Self>
    + Mul<Output = Self>
    + Div<Output = Self>
    + Eq
    + Copy
    + fmt::Debug
{
    /// Characteristic of the field (0 is conventionally used for
    /// characteristic-zero fields).
    const CHARACTERISTIC: u64;
    /// Additive identity.
    const ZERO: Self;
    /// Multiplicative identity.
    const ONE: Self;

    /// Multiplicative inverse
    fn inverse(self) -> Self;

    /// Z-mod structure
    fn integer_mul(self, a: i64) -> Self;

    /// Canonical image of the integer `a` in this field.
    fn from_integer(a: i64) -> Self {
        Self::ONE.integer_mul(a)
    }

    /// Iterate over all elements in this field
    ///
    /// The iterator finishes only for finite fields.
    type ElementsIter: Iterator<Item = Self>;

    fn elements() -> Self::ElementsIter;
}

/// Prime field of order `P`, that is, finite field `GF(P) = ℤ/Pℤ`
///
/// Only primes `P` <= 2^63 - 25 are supported, because the field elements are represented by `i64`.
// TODO: Extend field implementation for any prime `P` by e.g. using u32 blocks.
#[derive(Clone, Copy)]
pub struct PrimeField<const P: u64> {
    // Internal representative of the residue class; it is NOT kept reduced —
    // any `i64` congruent to the element mod P is a valid representation.
    a: i64,
}

impl<const P: u64> PrimeField<P> {
    /// Reduces the representation into the range [0, p)
    fn reduce(self) -> Self {
        let Self { a } = self;
        let p: i64 = P.try_into().expect("module not fitting into signed 64 bit");
        // `rem_euclid` yields the canonical non-negative remainder.
        let a = a.rem_euclid(p);
        assert!(a >= 0);
        Self { a }
    }

    /// Returns the positive integer in the range [0, p) representing this element
    pub fn to_integer(&self) -> u64 {
        self.reduce().a as u64
    }
}

impl<const P: u64> From<i64> for PrimeField<P> {
    fn from(a: i64) -> Self {
        Self { a }
    }
}

impl<const P: u64> PartialEq for PrimeField<P> {
    // Equality compares canonical (reduced) representatives, so different
    // raw `a` values of the same residue class compare equal.
    fn eq(&self, other: &Self) -> bool {
        self.reduce().a == other.reduce().a
    }
}

impl<const P: u64> Eq for PrimeField<P> {}

impl<const P: u64> Neg for PrimeField<P> {
    type Output = Self;

    // Negates the raw representative; reduction happens lazily elsewhere.
    fn neg(self) -> Self::Output {
        Self { a: -self.a }
    }
}

impl<const P: u64> Add for PrimeField<P> {
    type Output = Self;

    fn add(self, rhs: Self) -> Self::Output {
        Self {
            // Fast path on raw representatives; if that overflows, reduce
            // both into [0, P) first — then the sum is at most 2(P-1).
            a: self.a.checked_add(rhs.a).unwrap_or_else(|| {
                let x = self.reduce();
                let y = rhs.reduce();
                x.a + y.a
            }),
        }
    }
}

impl<const P: u64> Sub for PrimeField<P> {
    type Output = Self;

    fn sub(self, rhs: Self) -> Self::Output {
        Self {
            // Same overflow-fallback strategy as `Add`; after reduction the
            // difference lies in (-P, P).
            a: self.a.checked_sub(rhs.a).unwrap_or_else(|| {
                let x = self.reduce();
                let y = rhs.reduce();
                x.a - y.a
            }),
        }
    }
}

impl<const P: u64> Mul for PrimeField<P> {
    type Output = Self;

    fn mul(self, rhs: Self) -> Self::Output {
        Self {
            // NOTE(review): unlike `Add`/`Sub`, reducing first does not
            // guarantee the product of two values in [0, P) fits in `i64`
            // for large `P` — the fallback can still overflow. TODO confirm
            // intended usage bounds (cf. the `large_prime_field` test).
            a: self.a.checked_mul(rhs.a).unwrap_or_else(|| {
                let x = self.reduce();
                let y = rhs.reduce();
                x.a * y.a
            }),
        }
    }
}

impl<const P: u64> Div for PrimeField<P> {
    type Output = Self;

    // Division is multiplication by the inverse; panics (via `inverse`'s
    // assert) when dividing by zero.
    #[allow(clippy::suspicious_arithmetic_impl)]
    fn div(self, rhs: Self) -> Self::Output {
        self * rhs.inverse()
    }
}

impl<const P: u64> fmt::Debug for PrimeField<P> {
    // Always prints the canonical representative in [0, P).
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let x = self.reduce();
        write!(f, "{}", x.reduce().a)
    }
}

impl<const P: u64> Field for PrimeField<P> {
    const CHARACTERISTIC: u64 = P;
    const ZERO: Self = Self { a: 0 };
    const ONE: Self = Self { a: 1 };

    fn inverse(self) -> Self {
        // NOTE(review): this checks the raw representative only; an
        // unreduced `a` that is a nonzero multiple of P would pass the
        // assert yet represent zero — callers appear to use small values.
        assert_ne!(self.a, 0);
        Self {
            a: mod_inverse(
                self.a,
                P.try_into().expect("module not fitting into signed 64 bit"),
            ),
        }
    }

    // Double-and-add: computes `self * n` using O(log n) field additions.
    fn integer_mul(self, mut n: i64) -> Self {
        if n == 0 {
            return Self::ZERO;
        }
        let mut x = self;
        if n < 0 {
            x = -x;
            n = -n;
        }
        let mut y = Self::ZERO;
        while n > 1 {
            if n % 2 == 1 {
                y = y + x;
                n -= 1;
            }
            x = x + x;
            n /= 2;
        }
        x + y
    }

    type ElementsIter = PrimeFieldElementsIter<P>;

    fn elements() -> Self::ElementsIter {
        PrimeFieldElementsIter::default()
    }
}

/// Iterator yielding the field elements 0, 1, ..., P-1 in order.
#[derive(Default)]
pub struct PrimeFieldElementsIter<const P: u64> {
    x: i64,
}

impl<const P: u64> Iterator for PrimeFieldElementsIter<P> {
    type Item = PrimeField<P>;

    fn next(&mut self) -> Option<Self::Item> {
        if self.x as u64 == P {
            None
        } else {
            let res = PrimeField::from_integer(self.x);
            self.x += 1;
            Some(res)
        }
    }
}

impl<const P: u64> Hash for PrimeField<P> {
    // Hash the canonical representative so equal elements (as defined by
    // `PartialEq`) hash identically.
    fn hash<H: Hasher>(&self, state: &mut H) {
        let Self { a } = self.reduce();
        state.write_i64(a);
    }
}

// Extended Euclidean algorithm: returns `s` with `a * s ≡ gcd(a, b)` where
// the final remainder is ±1 (the assert below requires a and b coprime);
// multiplying by that ±1 fixes the sign so `a * result ≡ 1 (mod b)`.
// The result may be negative; callers reduce as needed.
// TODO: should we use extended_euclidean_algorithm adjusted to i64?
fn mod_inverse(mut a: i64, mut b: i64) -> i64 {
    let mut s = 1;
    let mut t = 0;
    let step = |x, y, q| (y, x - q * y);
    while b != 0 {
        let q = a / b;
        (a, b) = step(a, b, q);
        (s, t) = step(s, t, q);
    }
    assert!(a == 1 || a == -1);
    a * s
}

#[cfg(test)]
mod tests {
    use std::collections::HashSet;

    use super::*;

    #[test]
    fn test_field_elements() {
        fn test<const P: u64>() {
            let expected: HashSet<PrimeField<P>> = (0..P as i64).map(Into::into).collect();
            for gen in 1..P - 1 {
                // every field element != 0 generates the whole field additively
                let gen = PrimeField::from(gen as i64);
                let mut generated: HashSet<PrimeField<P>> = std::iter::once(gen).collect();
                let mut x = gen;
                for _ in 0..P {
                    x = x + gen;
                    generated.insert(x);
                }
                assert_eq!(generated, expected);
            }
        }
        test::<5>();
        test::<7>();
        test::<11>();
        test::<13>();
        test::<17>();
        test::<19>();
        test::<23>();
        test::<71>();
        test::<101>();
    }

    #[test]
    fn large_prime_field() {
        const P: u64 = 2_u64.pow(63) - 25; // largest prime fitting into i64
        type F = PrimeField<P>;
        let x = F::from(P as i64 - 1);
        let y = x.inverse();
        assert_eq!(x * y, F::ONE);
    }

    #[test]
    fn inverse() {
        fn test<const P: u64>() {
            for x in -7..7 {
                let x = PrimeField::<P>::from(x);
                if x != PrimeField::ZERO {
                    // multiplicative
                    assert_eq!(x.inverse() * x, PrimeField::ONE);
                    assert_eq!(x * x.inverse(), PrimeField::ONE);
                    assert_eq!((x.inverse().a * x.a).rem_euclid(P as i64), 1);
                    assert_eq!(x / x, PrimeField::ONE);
                }
                // additive
                assert_eq!(x + (-x), PrimeField::ZERO);
                assert_eq!((-x) + x, PrimeField::ZERO);
                assert_eq!(x - x, PrimeField::ZERO);
            }
        }
        test::<5>();
        test::<7>();
        test::<11>();
        test::<13>();
        test::<17>();
        test::<19>();
        test::<23>();
        test::<71>();
        test::<101>();
    }

    #[test]
    fn test_mod_inverse() {
        assert_eq!(mod_inverse(-6, 7), 1);
        assert_eq!(mod_inverse(-5, 7), -3);
        assert_eq!(mod_inverse(-4, 7), -2);
        assert_eq!(mod_inverse(-3, 7), 2);
        assert_eq!(mod_inverse(-2, 7), 3);
        assert_eq!(mod_inverse(-1, 7), -1);
        assert_eq!(mod_inverse(1, 7), 1);
        assert_eq!(mod_inverse(2, 7), -3);
        assert_eq!(mod_inverse(3, 7), -2);
        assert_eq!(mod_inverse(4, 7), 2);
        assert_eq!(mod_inverse(5, 7), 3);
        assert_eq!(mod_inverse(6, 7), -1);
    }

    #[test]
    fn integer_mul() {
        type F = PrimeField<23>;
        for x in 0..23 {
            let x = F { a: x };
            for n in -7..7 {
                assert_eq!(x.integer_mul(n), F { a: n * x.a });
            }
        }
    }

    #[test]
    fn from_integer() {
        type F = PrimeField<23>;
        for x in -100..100 {
            assert_eq!(F::from_integer(x), F { a: x });
        }
        assert_eq!(F::from(0), F::ZERO);
        assert_eq!(F::from(1), F::ONE);
    }
}
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/math/doomsday.rs
src/math/doomsday.rs
// Sakamoto's method for computing the day of the week. `T` holds the
// per-month offsets; January and February are handled by treating them as
// part of the previous year (see the `y - 1` adjustment below), which
// accounts for leap days.
const T: [i32; 12] = [0, 3, 2, 5, 0, 3, 5, 1, 4, 6, 2, 4];

/// Computes the day of the week for the Gregorian date `y-m-d`:
/// 0 = Sunday, 1 = Monday, ..., 6 = Saturday.
pub fn doomsday(y: i32, m: i32, d: i32) -> i32 {
    // Dates in January/February count against the previous year so the
    // leap-day correction terms below come out right.
    let year = if m < 3 { y - 1 } else { y };
    (year + year / 4 - year / 100 + year / 400 + T[(m - 1) as usize] + d) % 7
}

/// Returns the English weekday name for the date `y-m-d`.
pub fn get_week_day(y: i32, m: i32, d: i32) -> String {
    match doomsday(y, m, d) {
        0 => "Sunday",
        1 => "Monday",
        2 => "Tuesday",
        3 => "Wednesday",
        4 => "Thursday",
        5 => "Friday",
        6 => "Saturday",
        // Defensive arm: `%` can yield a negative value for negative years.
        _ => "Unknown",
    }
    .to_string()
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn doomsday_test() {
        assert_eq!(get_week_day(1990, 3, 21), "Wednesday");
        assert_eq!(get_week_day(2000, 8, 24), "Thursday");
        assert_eq!(get_week_day(2000, 10, 13), "Friday");
        assert_eq!(get_week_day(2001, 4, 18), "Wednesday");
        assert_eq!(get_week_day(2002, 3, 19), "Tuesday");
    }
}
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/math/square_pyramidal_numbers.rs
src/math/square_pyramidal_numbers.rs
// https://en.wikipedia.org/wiki/Square_pyramidal_number // 1² + 2² + ... = ... (total) pub fn square_pyramidal_number(n: u64) -> u64 { n * (n + 1) * (2 * n + 1) / 6 } #[cfg(test)] mod tests { use super::*; #[test] fn test0() { assert_eq!(0, square_pyramidal_number(0)); assert_eq!(1, square_pyramidal_number(1)); assert_eq!(5, square_pyramidal_number(2)); assert_eq!(14, square_pyramidal_number(3)); } }
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/math/gaussian_elimination.rs
src/math/gaussian_elimination.rs
// Gaussian Elimination of Quadratic Matrices // Takes an augmented matrix as input, returns vector of results // Wikipedia reference: augmented matrix: https://en.wikipedia.org/wiki/Augmented_matrix // Wikipedia reference: algorithm: https://en.wikipedia.org/wiki/Gaussian_elimination pub fn gaussian_elimination(matrix: &mut [Vec<f32>]) -> Vec<f32> { let size = matrix.len(); assert_eq!(size, matrix[0].len() - 1); for i in 0..size - 1 { for j in i..size - 1 { echelon(matrix, i, j); } } for i in (1..size).rev() { eliminate(matrix, i); } // Disable cargo clippy warnings about needless range loops. // Checking the diagonal like this is simpler than any alternative. #[allow(clippy::needless_range_loop)] for i in 0..size { if matrix[i][i] == 0f32 { println!("Infinitely many solutions"); } } let mut result: Vec<f32> = vec![0f32; size]; for i in 0..size { result[i] = matrix[i][size] / matrix[i][i]; } result } fn echelon(matrix: &mut [Vec<f32>], i: usize, j: usize) { let size = matrix.len(); if matrix[i][i] == 0f32 { } else { let factor = matrix[j + 1][i] / matrix[i][i]; (i..=size).for_each(|k| { matrix[j + 1][k] -= factor * matrix[i][k]; }); } } fn eliminate(matrix: &mut [Vec<f32>], i: usize) { let size = matrix.len(); if matrix[i][i] == 0f32 { } else { for j in (1..=i).rev() { let factor = matrix[j - 1][i] / matrix[i][i]; for k in (0..=size).rev() { matrix[j - 1][k] -= factor * matrix[i][k]; } } } } #[cfg(test)] mod tests { use super::gaussian_elimination; #[test] fn test_gauss() { let mut matrix: Vec<Vec<f32>> = vec![ vec![1.5, 2.0, 1.0, -1.0, -2.0, 1.0, 1.0], vec![3.0, 3.0, -1.0, 16.0, 18.0, 1.0, 1.0], vec![1.0, 1.0, 3.0, -2.0, -6.0, 1.0, 1.0], vec![1.0, 1.0, 99.0, 19.0, 2.0, 1.0, 1.0], vec![1.0, -2.0, 16.0, 1.0, 9.0, 10.0, 1.0], vec![1.0, 3.0, 1.0, -5.0, 1.0, 1.0, 95.0], ]; let result = vec![ -264.05893, 159.63196, -6.156921, 35.310387, -18.806696, 81.67839, ]; assert_eq!(gaussian_elimination(&mut matrix), result); } }
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/math/random.rs
src/math/random.rs
/*
Permuted Congruential Generator

https://en.wikipedia.org/wiki/Permuted_congruential_generator

Note that this is _NOT_ intended for serious applications. Use this
generator at your own risk and only use your own values instead of the
default ones if you really know what you are doing.
*/

/// 32-bit-output PCG built on a 64-bit linear congruential state
/// (XSH-RR output function).
pub struct PCG32 {
    state: u64,
    multiplier: u64,
    increment: u64,
}

pub const PCG32_MULTIPLIER: u64 = 6364136223846793005_u64;
pub const PCG32_INCREMENT: u64 = 1442695040888963407_u64;

/// Iterator over successive `u32` outputs of a mutably borrowed [`PCG32`].
pub struct IterMut<'a> {
    pcg: &'a mut PCG32,
}

impl PCG32 {
    /// `stream` should be less than 1 << 63
    pub fn new(seed: u64, multiplier: u64, stream: u64) -> Self {
        // We should make sure that increment is odd
        let increment = (stream << 1) | 1;
        let mut pcg = PCG32 {
            state: seed.wrapping_add(increment),
            multiplier,
            increment,
        };
        pcg.next();
        pcg
    }

    /// Construct a generator using the reference multiplier and increment.
    pub fn new_default(seed: u64) -> Self {
        // PCG32_INCREMENT is odd and below 1 << 63, so passing it
        // right-shifted as the stream makes `new` reconstruct exactly
        // PCG32_INCREMENT; this delegates instead of duplicating the
        // initialization sequence.
        Self::new(seed, PCG32_MULTIPLIER, PCG32_INCREMENT >> 1)
    }

    /// Advance the underlying LCG state by one step.
    #[inline]
    pub fn next(&mut self) {
        self.state = self
            .state
            .wrapping_mul(self.multiplier)
            .wrapping_add(self.increment);
    }

    #[inline]
    /// Advance the PCG by `delta` steps in O(lg(`delta`)) time. By passing
    /// a negative i64 as u64, it can go back too.
    pub fn advance(&mut self, mut delta: u64) {
        let mut acc_mult = 1u64;
        let mut acc_incr = 0u64;
        let mut curr_mlt = self.multiplier;
        let mut curr_inc = self.increment;
        // Square-and-multiply on the affine map (mult, inc).
        while delta > 0 {
            if delta & 1 != 0 {
                acc_mult = acc_mult.wrapping_mul(curr_mlt);
                acc_incr = acc_incr.wrapping_mul(curr_mlt).wrapping_add(curr_inc);
            }
            curr_inc = curr_mlt.wrapping_add(1).wrapping_mul(curr_inc);
            curr_mlt = curr_mlt.wrapping_mul(curr_mlt);
            delta >>= 1;
        }
        self.state = acc_mult.wrapping_mul(self.state).wrapping_add(acc_incr);
    }

    /// Next `u32`: xorshift-high then random rotate of the pre-advance state.
    #[inline]
    pub fn get_u32(&mut self) -> u32 {
        let mut x = self.state;
        let count = (x >> 59) as u32;
        self.next();
        x ^= x >> 18;
        ((x >> 27) as u32).rotate_right(count)
    }

    /// Combine two successive `u32` outputs into a `u64`.
    #[inline]
    pub fn get_u64(&mut self) -> u64 {
        self.get_u32() as u64 ^ ((self.get_u32() as u64) << 32)
    }

    /// Split one `u32` output into two `u16`s (low half first).
    #[inline]
    pub fn get_u16(&mut self) -> (u16, u16) {
        let res = self.get_u32();
        (res as u16, (res >> 16) as u16)
    }

    /// Split one `u32` output into four `u8`s (least significant first).
    #[inline]
    pub fn get_u8(&mut self) -> (u8, u8, u8, u8) {
        let res = self.get_u32();
        (
            res as u8,
            (res >> 8) as u8,
            (res >> 16) as u8,
            (res >> 24) as u8,
        )
    }

    /// Raw internal LCG state.
    #[inline]
    pub fn get_state(&self) -> u64 {
        self.state
    }

    pub fn iter_mut(&mut self) -> IterMut<'_> {
        IterMut { pcg: self }
    }
}

impl Iterator for IterMut<'_> {
    type Item = u32;

    fn next(&mut self) -> Option<Self::Item> {
        Some(self.pcg.get_u32())
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn no_birthday() {
        // If the distribution is not almost uniform, the probability of
        // birthday paradox increases. For n=2^32 and k=1e5, the probability
        // of not having a collision is about (1 - (k+1)/n) ^ (k/2) which is
        // 0.3121 for this (n, k).
        // So this test is a (dumb) test for distribution, and for speed. This
        // is only basic sanity checking, as the actual algorithm was
        // rigorously tested by others before.
        let numbers = 1e5 as usize;
        let mut pcg = PCG32::new_default(314159);
        let mut pcg2 = PCG32::new_default(314159);
        assert_eq!(pcg.get_u32(), pcg2.get_u32());
        let mut randoms: Vec<u32> = pcg.iter_mut().take(numbers).collect::<Vec<u32>>();
        pcg2.advance(1000);
        assert_eq!(pcg2.get_u32(), randoms[1000]);
        pcg2.advance((-1001_i64) as u64);
        assert_eq!(pcg2.get_u32(), randoms[0]);
        randoms.sort_unstable();
        randoms.dedup();
        assert_eq!(randoms.len(), numbers);
    }
}
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/math/bell_numbers.rs
src/math/bell_numbers.rs
use num_bigint::BigUint; use num_traits::{One, Zero}; use std::sync::RwLock; /// Returns the number of ways you can select r items given n options fn n_choose_r(n: u32, r: u32) -> BigUint { if r == n || r == 0 { return One::one(); } if r > n { return Zero::zero(); } // Any combination will only need to be computed once, thus giving no need to // memoize this function let product: BigUint = (0..r).fold(BigUint::one(), |acc, x| { (acc * BigUint::from(n - x)) / BigUint::from(x + 1) }); product } /// A memoization table for storing previous results struct MemTable { buffer: Vec<BigUint>, } impl MemTable { const fn new() -> Self { MemTable { buffer: Vec::new() } } fn get(&self, n: usize) -> Option<BigUint> { if n == 0 || n == 1 { Some(BigUint::one()) } else if let Some(entry) = self.buffer.get(n) { if *entry == BigUint::zero() { None } else { Some(entry.clone()) } } else { None } } fn set(&mut self, n: usize, b: BigUint) { self.buffer[n] = b; } #[inline] fn capacity(&self) -> usize { self.buffer.capacity() } #[inline] fn resize(&mut self, new_size: usize) { if new_size > self.buffer.len() { self.buffer.resize(new_size, Zero::zero()); } } } // Implemented with RwLock so it is accessible across threads static LOOKUP_TABLE_LOCK: RwLock<MemTable> = RwLock::new(MemTable::new()); pub fn bell_number(n: u32) -> BigUint { let needs_resize; // Check if number is already in lookup table { let lookup_table = LOOKUP_TABLE_LOCK.read().unwrap(); if let Some(entry) = lookup_table.get(n as usize) { return entry; } needs_resize = (n + 1) as usize > lookup_table.capacity(); } // Resize table before recursion so that if more values need to be added during recursion the table isn't // reallocated every single time if needs_resize { let mut lookup_table = LOOKUP_TABLE_LOCK.write().unwrap(); lookup_table.resize((n + 1) as usize); } let new_bell_number: BigUint = (0..n).map(|x| bell_number(x) * n_choose_r(n - 1, x)).sum(); // Add new number to lookup table { let mut lookup_table = 
LOOKUP_TABLE_LOCK.write().unwrap(); lookup_table.set(n as usize, new_bell_number.clone()); } new_bell_number } #[cfg(test)] pub mod tests { use super::*; use std::str::FromStr; #[test] fn test_choose_zero() { for i in 1..100 { assert_eq!(n_choose_r(i, 0), One::one()); } } #[test] fn test_combination() { let five_choose_1 = BigUint::from(5u32); assert_eq!(n_choose_r(5, 1), five_choose_1); assert_eq!(n_choose_r(5, 4), five_choose_1); let ten_choose_3 = BigUint::from(120u32); assert_eq!(n_choose_r(10, 3), ten_choose_3); assert_eq!(n_choose_r(10, 7), ten_choose_3); let fourty_two_choose_thirty = BigUint::from_str("11058116888").unwrap(); assert_eq!(n_choose_r(42, 30), fourty_two_choose_thirty); assert_eq!(n_choose_r(42, 12), fourty_two_choose_thirty); } #[test] fn test_bell_numbers() { let bell_one = BigUint::from(1u32); assert_eq!(bell_number(1), bell_one); let bell_three = BigUint::from(5u32); assert_eq!(bell_number(3), bell_three); let bell_eight = BigUint::from(4140u32); assert_eq!(bell_number(8), bell_eight); let bell_six = BigUint::from(203u32); assert_eq!(bell_number(6), bell_six); let bell_twenty_six = BigUint::from_str("49631246523618756274").unwrap(); assert_eq!(bell_number(26), bell_twenty_six); } }
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false
TheAlgorithms/Rust
https://github.com/TheAlgorithms/Rust/blob/38024b01c29eb05f733d480f88f19f0c06922a85/src/math/elliptic_curve.rs
src/math/elliptic_curve.rs
use std::collections::HashSet; use std::fmt; use std::hash::{Hash, Hasher}; use std::ops::{Add, Neg, Sub}; use crate::math::field::{Field, PrimeField}; use crate::math::quadratic_residue::legendre_symbol; /// Elliptic curve defined by `y^2 = x^3 + Ax + B` over a prime field `F` of /// characteristic != 2, 3 /// /// The coefficients of the elliptic curve are the constant parameters `A` and `B`. /// /// Points form an abelian group with the neutral element [`EllipticCurve::infinity`]. The points /// are represented via affine coordinates ([`EllipticCurve::new`]) except for the points /// at infinity ([`EllipticCurve::infinity`]). /// /// # Example /// /// ``` /// use the_algorithms_rust::math::{EllipticCurve, PrimeField}; /// type E = EllipticCurve<PrimeField<7>, 1, 0>; /// let P = E::new(0, 0).expect("not on curve E"); /// assert_eq!(P + P, E::infinity()); /// ``` #[derive(Clone, Copy)] pub struct EllipticCurve<F, const A: i64, const B: i64> { infinity: bool, x: F, y: F, } impl<F: Field, const A: i64, const B: i64> EllipticCurve<F, A, B> { /// Point at infinity also the neutral element of the group pub fn infinity() -> Self { Self::check_invariants(); Self { infinity: true, x: F::ZERO, y: F::ZERO, } } /// Affine point /// /// /// Return `None` if the coordinates are not on the curve pub fn new(x: impl Into<F>, y: impl Into<F>) -> Option<Self> { Self::check_invariants(); let x = x.into(); let y = y.into(); if Self::contains(x, y) { Some(Self { infinity: false, x, y, }) } else { None } } /// Return `true` if this is the point at infinity pub fn is_infinity(&self) -> bool { self.infinity } /// The affine x-coordinate of the point pub fn x(&self) -> &F { &self.x } /// The affine y-coordinate of the point pub fn y(&self) -> &F { &self.y } /// The discrimant of the elliptic curve pub const fn discriminant() -> i64 { // Note: we can't return an element of F here, because it is not // possible to declare a trait function as const (cf. 
// <https://doc.rust-lang.org/error_codes/E0379.html>) (-16 * (4 * A * A * A + 27 * B * B)) % (F::CHARACTERISTIC as i64) } fn contains(x: F, y: F) -> bool { y * y == x * x * x + x.integer_mul(A) + F::ONE.integer_mul(B) } const fn check_invariants() { assert!(F::CHARACTERISTIC != 2); assert!(F::CHARACTERISTIC != 3); assert!(Self::discriminant() != 0); } } /// Elliptic curve methods over a prime field impl<const P: u64, const A: i64, const B: i64> EllipticCurve<PrimeField<P>, A, B> { /// Naive calculation of points via enumeration // TODO: Implement via generators pub fn points() -> impl Iterator<Item = Self> { std::iter::once(Self::infinity()).chain( PrimeField::elements() .flat_map(|x| PrimeField::elements().filter_map(move |y| Self::new(x, y))), ) } /// Number of points on the elliptic curve over `F`, that is, `#E(F)` pub fn cardinality() -> usize { // TODO: implement counting for big P Self::cardinality_counted_legendre() } /// Number of points on the elliptic curve over `F`, that is, `#E(F)` /// /// We simply count the number of points for each x coordinate and sum them up. /// For that, we first precompute the table of all squares in `F`. /// /// Time complexity: O(P) <br> /// Space complexity: O(P) /// /// Only fast for small fields. pub fn cardinality_counted_table() -> usize { let squares: HashSet<_> = PrimeField::<P>::elements().map(|x| x * x).collect(); 1 + PrimeField::elements() .map(|x| { let y_square = x * x * x + x.integer_mul(A) + PrimeField::from_integer(B); if y_square == PrimeField::ZERO { 1 } else if squares.contains(&y_square) { 2 } else { 0 } }) .sum::<usize>() } /// Number of points on the elliptic curve over `F`, that is, `#E(F)` /// /// We count the number of points for each x coordinate by using the [Legendre symbol] _(X | /// P)_: /// /// _1 + (x^3 + Ax + B | P),_ /// /// The total number of points is then: /// /// _#E(F) = 1 + P + Σ_x (x^3 + Ax + B | P)_ for _x_ in _F_. 
/// /// Time complexity: O(P) <br> /// Space complexity: O(1) /// /// Only fast for small fields. /// /// [Legendre symbol]: https://en.wikipedia.org/wiki/Legendre_symbol pub fn cardinality_counted_legendre() -> usize { let cardinality: i64 = 1 + P as i64 + PrimeField::<P>::elements() .map(|x| { let y_square = x * x * x + x.integer_mul(A) + PrimeField::from_integer(B); let y_square_int = y_square.to_integer(); legendre_symbol(y_square_int, P) }) .sum::<i64>(); cardinality .try_into() .expect("invalid legendre cardinality") } } /// Group law impl<F: Field, const A: i64, const B: i64> Add for EllipticCurve<F, A, B> { type Output = Self; fn add(self, p: Self) -> Self::Output { if self.infinity { p } else if p.infinity { self } else if self.x == p.x && self.y == -p.y { // mirrored Self::infinity() } else { let slope = if self.x == p.x { ((self.x * self.x).integer_mul(3) + F::from_integer(A)) / self.y.integer_mul(2) } else { (self.y - p.y) / (self.x - p.x) }; let x = slope * slope - self.x - p.x; let y = -self.y + slope * (self.x - x); Self::new(x, y).expect("elliptic curve group law failed") } } } /// Inverse impl<F: Field, const A: i64, const B: i64> Neg for EllipticCurve<F, A, B> { type Output = Self; fn neg(self) -> Self::Output { if self.infinity { self } else { Self::new(self.x, -self.y).expect("elliptic curves are x-axis symmetric") } } } /// Difference impl<F: Field, const A: i64, const B: i64> Sub for EllipticCurve<F, A, B> { type Output = Self; fn sub(self, p: Self) -> Self::Output { self + (-p) } } /// Debug representation via projective coordinates impl<F: fmt::Debug, const A: i64, const B: i64> fmt::Debug for EllipticCurve<F, A, B> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { if self.infinity { f.write_str("(0:0:1)") } else { write!(f, "({:?}:{:?}:1)", self.x, self.y) } } } /// Equality of the elliptic curve points (short-circuit at infinity) impl<F: Field, const A: i64, const B: i64> PartialEq for EllipticCurve<F, A, B> { fn eq(&self, 
other: &Self) -> bool { (self.infinity && other.infinity) || (self.infinity == other.infinity && self.x == other.x && self.y == other.y) } } impl<F: Field, const A: i64, const B: i64> Eq for EllipticCurve<F, A, B> {} impl<F: Field + Hash, const A: i64, const B: i64> Hash for EllipticCurve<F, A, B> { fn hash<H: Hasher>(&self, state: &mut H) { if self.infinity { state.write_u8(1); F::ZERO.hash(state); F::ZERO.hash(state); } else { state.write_u8(0); self.x.hash(state); self.y.hash(state); } } } #[cfg(test)] mod tests { use std::collections::HashSet; use std::time::Instant; use super::*; #[test] #[should_panic] fn test_char_2_panic() { EllipticCurve::<PrimeField<2>, -1, 1>::infinity(); } #[test] #[should_panic] fn test_char_3_panic() { EllipticCurve::<PrimeField<2>, -1, 1>::infinity(); } #[test] #[should_panic] fn test_singular_panic() { EllipticCurve::<PrimeField<5>, 0, 0>::infinity(); } #[test] fn e_5_1_0_group_table() { type F = PrimeField<5>; type E = EllipticCurve<F, 1, 0>; assert_eq!(E::points().count(), 4); let [a, b, c, d] = [ E::new(0, 0).unwrap(), E::infinity(), E::new(2, 0).unwrap(), E::new(3, 0).unwrap(), ]; assert_eq!(a + a, b); assert_eq!(a + b, a); assert_eq!(a + c, d); assert_eq!(a + d, c); assert_eq!(b + a, a); assert_eq!(b + b, b); assert_eq!(b + c, c); assert_eq!(b + d, d); assert_eq!(c + a, d); assert_eq!(c + b, c); assert_eq!(c + c, b); assert_eq!(c + d, a); assert_eq!(d + a, c); assert_eq!(d + b, d); assert_eq!(d + c, a); assert_eq!(d + d, b); } #[test] fn group_law() { fn test<const P: u64>() { type E<const P: u64> = EllipticCurve<PrimeField<P>, 1, 0>; let o = E::<P>::infinity(); assert_eq!(-o, o); let points: Vec<_> = E::points().collect(); for &p in &points { assert_eq!(p + (-p), o); // inverse assert_eq!((-p) + p, o); // inverse assert_eq!(p - p, o); //inverse assert_eq!(p + o, p); // neutral assert_eq!(o + p, p); //neutral for &q in &points { assert_eq!(p + q, q + p); // commutativity // associativity for &s in &points { assert_eq!((p + q) + 
s, p + (q + s)); } } } } test::<5>(); test::<7>(); test::<11>(); test::<13>(); test::<17>(); test::<19>(); test::<23>(); } #[test] fn cardinality() { fn test<const P: u64>(expected: usize) { type E<const P: u64> = EllipticCurve<PrimeField<P>, 1, 0>; assert_eq!(E::<P>::cardinality(), expected); assert_eq!(E::<P>::cardinality_counted_table(), expected); assert_eq!(E::<P>::cardinality_counted_legendre(), expected); } test::<5>(4); test::<7>(8); test::<11>(12); test::<13>(20); test::<17>(16); test::<19>(20); test::<23>(24); } #[test] #[ignore = "slow test for measuring time"] fn cardinality_perf() { const P: u64 = 1000003; type E = EllipticCurve<PrimeField<P>, 1, 0>; const EXPECTED: usize = 1000004; let now = Instant::now(); assert_eq!(E::cardinality_counted_table(), EXPECTED); println!("cardinality_counted_table : {:?}", now.elapsed()); let now = Instant::now(); assert_eq!(E::cardinality_counted_legendre(), EXPECTED); println!("cardinality_counted_legendre : {:?}", now.elapsed()); } #[test] #[ignore = "slow test showing that cadinality is not yet feasible to compute for a large prime"] fn cardinality_large_prime() { const P: u64 = 2_u64.pow(63) - 25; // largest prime fitting into i64 type E = EllipticCurve<PrimeField<P>, 1, 0>; const EXPECTED: usize = 9223372041295506260; let now = Instant::now(); assert_eq!(E::cardinality(), EXPECTED); println!("cardinality: {:?}", now.elapsed()); } #[test] fn test_points() { type F = PrimeField<5>; type E = EllipticCurve<F, 1, 0>; let points: HashSet<_> = E::points().collect(); let expected: HashSet<_> = [ E::infinity(), E::new(0, 0).unwrap(), E::new(2, 0).unwrap(), E::new(3, 0).unwrap(), ] .into_iter() .collect(); assert_eq!(points, expected); } }
rust
MIT
38024b01c29eb05f733d480f88f19f0c06922a85
2026-01-04T15:37:39.002409Z
false